Mirror of https://github.com/jupyter/notebook.git, synced 2025-01-30 12:11:32 +08:00
use our own maybe_future
Instead of the monkeypatch we did to keep the backport patch small. Requiring tornado 5 simplifies things a ton, because tornado.concurrent.Future is asyncio.Future.
This commit is contained in:
parent 88aae11172
commit 729183b148
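The reasoning behind the commit message: once tornado 5 is the minimum, tornado's Future and asyncio's Future are the same class, so a small asyncio-only helper can stand in for both gen.maybe_future and the earlier monkeypatch. A rough illustrative sketch, not code from the repository, assuming Python 3.7+ and tornado 5+:

import asyncio
import concurrent.futures

import tornado.concurrent

# With tornado 5+ on Python 3, tornado.concurrent.Future is literally
# asyncio.Future, so asyncio primitives can wrap everything tornado used to
# special-case.
assert tornado.concurrent.Future is asyncio.Future


async def demo():
    # A value already wrapped in an asyncio.Future awaits as-is.
    ready = asyncio.Future()
    ready.set_result(42)

    # A concurrent.futures.Future can be bridged with asyncio.wrap_future.
    pool_result = concurrent.futures.Future()
    pool_result.set_result('done')

    print(await ready)                             # 42
    print(await asyncio.wrap_future(pool_result))  # done


asyncio.run(demo())
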
@@ -4,16 +4,10 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.

import os
import json
import struct
import warnings
import sys

try:
    from urllib.parse import urlparse # Py 3
except ImportError:
    from urlparse import urlparse # Py 2
from urllib.parse import urlparse

import tornado
from tornado import gen, ioloop, web

@@ -23,8 +17,10 @@ from jupyter_client.session import Session
from jupyter_client.jsonutil import date_default, extract_dates
from ipython_genutils.py3compat import cast_unicode

from notebook.utils import maybe_future
from .handlers import IPythonHandler


def serialize_binary_message(msg):
    """serialize a message as a binary blob

@@ -251,17 +247,17 @@ class ZMQStreamHandler(WebSocketMixin, WebSocketHandler):


class AuthenticatedZMQStreamHandler(ZMQStreamHandler, IPythonHandler):

    def set_default_headers(self):
        """Undo the set_default_headers in IPythonHandler

        which doesn't make sense for websockets
        """
        pass

    def pre_get(self):
        """Run before finishing the GET request

        Extend this method to add logic that should fire before
        the websocket finishes completing.
        """

@@ -269,21 +265,21 @@ class AuthenticatedZMQStreamHandler(ZMQStreamHandler, IPythonHandler):
        if self.get_current_user() is None:
            self.log.warning("Couldn't authenticate WebSocket connection")
            raise web.HTTPError(403)

        if self.get_argument('session_id', False):
            self.session.session = cast_unicode(self.get_argument('session_id'))
        else:
            self.log.warning("No session ID specified")

    @gen.coroutine
    def get(self, *args, **kwargs):
        # pre_get can be a coroutine in subclasses
        # assign and yield in two step to avoid tornado 3 issues
        res = self.pre_get()
        yield gen.maybe_future(res)
        yield maybe_future(res)
        res = super(AuthenticatedZMQStreamHandler, self).get(*args, **kwargs)
        yield gen.maybe_future(res)
        yield maybe_future(res)

    def initialize(self):
        self.log.debug("Initializing websocket connection %s", self.request.path)
        self.session = Session(config=self.config)

@@ -2,13 +2,16 @@

# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from . import tools
from notebook.utils import url2path
from notebook.base.handlers import IPythonHandler
from notebook.services.config import ConfigManager

from ipython_genutils.importstring import import_item
from tornado import web, gen

from notebook.utils import maybe_future, url2path
from notebook.base.handlers import IPythonHandler
from notebook.services.config import ConfigManager

from . import tools


class BundlerHandler(IPythonHandler):
    def initialize(self):

@@ -74,7 +77,7 @@ class BundlerHandler(IPythonHandler):

        # Let the bundler respond in any way it sees fit and assume it will
        # finish the request
        yield gen.maybe_future(bundler_mod.bundle(self, model))
        yield maybe_future(bundler_mod.bundle(self, model))

_bundler_id_regex = r'(?P<bundler_id>[A-Za-z0-9_]+)'

@@ -5,16 +5,12 @@

import mimetypes
import json
from base64 import decodebytes

try: #PY3
    from base64 import decodebytes
except ImportError: #PY2
    from base64 import decodestring as decodebytes


from tornado import gen, web
from tornado import web

from notebook.base.handlers import IPythonHandler
from notebook.utils import maybe_future


class FilesHandler(IPythonHandler):

@@ -51,7 +47,7 @@ class FilesHandler(IPythonHandler):
        else:
            name = path

        model = yield gen.maybe_future(cm.get(path, type='file', content=include_body))
        model = yield maybe_future(cm.get(path, type='file', content=include_body))

        if self.get_argument("download", False):
            self.set_attachment_header(name)

@@ -3,15 +3,15 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.

from itertools import chain
import json
import os

from tornado import gen, web

from ...base.handlers import IPythonHandler, APIHandler
from notebook._tz import utcfromtimestamp, isoformat
from notebook.utils import maybe_future

import os

class APISpecHandler(web.StaticFileHandler, IPythonHandler):

@@ -22,10 +22,11 @@ class APISpecHandler(web.StaticFileHandler, IPythonHandler):
    def get(self):
        self.log.warning("Serving api spec (experimental, incomplete)")
        return web.StaticFileHandler.get(self, 'api.yaml')

    def get_content_type(self):
        return 'text/x-yaml'


class APIStatusHandler(APIHandler):

    _track_activity = False

@@ -37,7 +38,7 @@ class APIStatusHandler(APIHandler):
        started = self.settings.get('started', utcfromtimestamp(0))
        started = isoformat(started)

        kernels = yield gen.maybe_future(self.kernel_manager.list_kernels())
        kernels = yield maybe_future(self.kernel_manager.list_kernels())
        total_connections = sum(k['connections'] for k in kernels)
        last_activity = isoformat(self.application.last_activity())
        model = {

@@ -48,6 +49,7 @@ class APIStatusHandler(APIHandler):
        }
        self.finish(json.dumps(model, sort_keys=True))


default_handlers = [
    (r"/api/spec.yaml", APISpecHandler),
    (r"/api/status", APIStatusHandler),

@@ -10,7 +10,7 @@ import json

from tornado import gen, web

from notebook.utils import url_path_join, url_escape
from notebook.utils import maybe_future, url_path_join, url_escape
from jupyter_client.jsonutil import date_default

from notebook.base.handlers import (

@@ -108,7 +108,7 @@ class ContentsHandler(APIHandler):
            raise web.HTTPError(400, u'Content %r is invalid' % content)
        content = int(content)

        model = yield gen.maybe_future(self.contents_manager.get(
        model = yield maybe_future(self.contents_manager.get(
            path=path, type=type, format=format, content=content,
        ))
        validate_model(model, expect_content=content)

@@ -122,7 +122,7 @@ class ContentsHandler(APIHandler):
        model = self.get_json_body()
        if model is None:
            raise web.HTTPError(400, u'JSON body missing')
        model = yield gen.maybe_future(cm.update(model, path))
        model = yield maybe_future(cm.update(model, path))
        validate_model(model, expect_content=False)
        self._finish_model(model)

@@ -133,7 +133,7 @@ class ContentsHandler(APIHandler):
            copy_from=copy_from,
            copy_to=copy_to or '',
        ))
        model = yield gen.maybe_future(self.contents_manager.copy(copy_from, copy_to))
        model = yield maybe_future(self.contents_manager.copy(copy_from, copy_to))
        self.set_status(201)
        validate_model(model, expect_content=False)
        self._finish_model(model)

@@ -142,7 +142,7 @@ class ContentsHandler(APIHandler):
    def _upload(self, model, path):
        """Handle upload of a new file to path"""
        self.log.info(u"Uploading file to %s", path)
        model = yield gen.maybe_future(self.contents_manager.new(model, path))
        model = yield maybe_future(self.contents_manager.new(model, path))
        self.set_status(201)
        validate_model(model, expect_content=False)
        self._finish_model(model)

@@ -151,7 +151,7 @@ class ContentsHandler(APIHandler):
    def _new_untitled(self, path, type='', ext=''):
        """Create a new, empty untitled entity"""
        self.log.info(u"Creating new %s in %s", type or 'file', path)
        model = yield gen.maybe_future(self.contents_manager.new_untitled(path=path, type=type, ext=ext))
        model = yield maybe_future(self.contents_manager.new_untitled(path=path, type=type, ext=ext))
        self.set_status(201)
        validate_model(model, expect_content=False)
        self._finish_model(model)

@@ -162,7 +162,7 @@ class ContentsHandler(APIHandler):
        chunk = model.get("chunk", None)
        if not chunk or chunk == -1: # Avoid tedious log information
            self.log.info(u"Saving file at %s", path)
        model = yield gen.maybe_future(self.contents_manager.save(model, path))
        model = yield maybe_future(self.contents_manager.save(model, path))
        validate_model(model, expect_content=False)
        self._finish_model(model)

@@ -182,11 +182,11 @@ class ContentsHandler(APIHandler):

        cm = self.contents_manager

        file_exists = yield gen.maybe_future(cm.file_exists(path))
        file_exists = yield maybe_future(cm.file_exists(path))
        if file_exists:
            raise web.HTTPError(400, "Cannot POST to files, use PUT instead.")

        dir_exists = yield gen.maybe_future(cm.dir_exists(path))
        dir_exists = yield maybe_future(cm.dir_exists(path))
        if not dir_exists:
            raise web.HTTPError(404, "No such directory: %s" % path)

@@ -220,13 +220,13 @@ class ContentsHandler(APIHandler):
        if model:
            if model.get('copy_from'):
                raise web.HTTPError(400, "Cannot copy with PUT, only POST")
            exists = yield gen.maybe_future(self.contents_manager.file_exists(path))
            exists = yield maybe_future(self.contents_manager.file_exists(path))
            if exists:
                yield gen.maybe_future(self._save(model, path))
                yield maybe_future(self._save(model, path))
            else:
                yield gen.maybe_future(self._upload(model, path))
                yield maybe_future(self._upload(model, path))
        else:
            yield gen.maybe_future(self._new_untitled(path))
            yield maybe_future(self._new_untitled(path))

    @web.authenticated
    @gen.coroutine

@@ -234,7 +234,7 @@ class ContentsHandler(APIHandler):
        """delete a file in the given path"""
        cm = self.contents_manager
        self.log.warning('delete %s', path)
        yield gen.maybe_future(cm.delete(path))
        yield maybe_future(cm.delete(path))
        self.set_status(204)
        self.finish()

@@ -246,7 +246,7 @@ class CheckpointsHandler(APIHandler):
    def get(self, path=''):
        """get lists checkpoints for a file"""
        cm = self.contents_manager
        checkpoints = yield gen.maybe_future(cm.list_checkpoints(path))
        checkpoints = yield maybe_future(cm.list_checkpoints(path))
        data = json.dumps(checkpoints, default=date_default)
        self.finish(data)

@@ -255,7 +255,7 @@ class CheckpointsHandler(APIHandler):
    def post(self, path=''):
        """post creates a new checkpoint"""
        cm = self.contents_manager
        checkpoint = yield gen.maybe_future(cm.create_checkpoint(path))
        checkpoint = yield maybe_future(cm.create_checkpoint(path))
        data = json.dumps(checkpoint, default=date_default)
        location = url_path_join(self.base_url, 'api/contents',
            url_escape(path), 'checkpoints', url_escape(checkpoint['id']))

@@ -271,7 +271,7 @@ class ModifyCheckpointsHandler(APIHandler):
    def post(self, path, checkpoint_id):
        """post restores a file from a checkpoint"""
        cm = self.contents_manager
        yield gen.maybe_future(cm.restore_checkpoint(checkpoint_id, path))
        yield maybe_future(cm.restore_checkpoint(checkpoint_id, path))
        self.set_status(204)
        self.finish()

@@ -280,7 +280,7 @@ class ModifyCheckpointsHandler(APIHandler):
    def delete(self, path, checkpoint_id):
        """delete clears a checkpoint for a given file"""
        cm = self.contents_manager
        yield gen.maybe_future(cm.delete_checkpoint(checkpoint_id, path))
        yield maybe_future(cm.delete_checkpoint(checkpoint_id, path))
        self.set_status(204)
        self.finish()

@@ -307,7 +307,7 @@ class TrustNotebooksHandler(IPythonHandler):
    @gen.coroutine
    def post(self,path=''):
        cm = self.contents_manager
        yield gen.maybe_future(cm.trust_notebook(path))
        yield maybe_future(cm.trust_notebook(path))
        self.set_status(201)
        self.finish()
#-----------------------------------------------------------------------------

@@ -14,14 +14,14 @@ from tornado import gen, web
from tornado.concurrent import Future
from tornado.ioloop import IOLoop

from jupyter_client import protocol_version as client_protocol_version
from jupyter_client.jsonutil import date_default
from ipython_genutils.py3compat import cast_unicode
from notebook.utils import url_path_join, url_escape
from notebook.utils import maybe_future, url_path_join, url_escape

from ...base.handlers import APIHandler
from ...base.zmqhandlers import AuthenticatedZMQStreamHandler, deserialize_binary_message

from jupyter_client import protocol_version as client_protocol_version

class MainKernelHandler(APIHandler):

@@ -29,7 +29,7 @@ class MainKernelHandler(APIHandler):
    @gen.coroutine
    def get(self):
        km = self.kernel_manager
        kernels = yield gen.maybe_future(km.list_kernels())
        kernels = yield maybe_future(km.list_kernels())
        self.finish(json.dumps(kernels, default=date_default))

    @web.authenticated

@@ -44,8 +44,8 @@ class MainKernelHandler(APIHandler):
        else:
            model.setdefault('name', km.default_kernel_name)

        kernel_id = yield gen.maybe_future(km.start_kernel(kernel_name=model['name']))
        model = yield gen.maybe_future(km.kernel_model(kernel_id))
        kernel_id = yield maybe_future(km.start_kernel(kernel_name=model['name']))
        model = yield maybe_future(km.kernel_model(kernel_id))
        location = url_path_join(self.base_url, 'api', 'kernels', url_escape(kernel_id))
        self.set_header('Location', location)
        self.set_status(201)

@@ -64,7 +64,7 @@ class KernelHandler(APIHandler):
    @gen.coroutine
    def delete(self, kernel_id):
        km = self.kernel_manager
        yield gen.maybe_future(km.shutdown_kernel(kernel_id))
        yield maybe_future(km.shutdown_kernel(kernel_id))
        self.set_status(204)
        self.finish()

@@ -81,12 +81,12 @@ class KernelActionHandler(APIHandler):
        if action == 'restart':

            try:
                yield gen.maybe_future(km.restart_kernel(kernel_id))
                yield maybe_future(km.restart_kernel(kernel_id))
            except Exception as e:
                self.log.error("Exception restarting kernel", exc_info=True)
                self.set_status(500)
            else:
                model = yield gen.maybe_future(km.kernel_model(kernel_id))
                model = yield maybe_future(km.kernel_model(kernel_id))
                self.write(json.dumps(model, default=date_default))
        self.finish()

@@ -22,7 +22,7 @@ from traitlets import (Any, Bool, Dict, List, Unicode, TraitError, Integer,
    Float, Instance, default, validate
)

from notebook.utils import to_os_path, exists
from notebook.utils import maybe_future, to_os_path, exists
from notebook._tz import utcnow, isoformat
from ipython_genutils.py3compat import getcwd

@@ -164,7 +164,7 @@ class MappingKernelManager(MultiKernelManager):
        if kernel_id is None:
            if path is not None:
                kwargs['cwd'] = self.cwd_for_path(path)
            kernel_id = yield gen.maybe_future(
            kernel_id = yield maybe_future(
                super(MappingKernelManager, self).start_kernel(**kwargs)
            )
            self._kernel_connections[kernel_id] = 0

@@ -306,7 +306,7 @@ class MappingKernelManager(MultiKernelManager):
    def restart_kernel(self, kernel_id):
        """Restart a kernel by kernel_id"""
        self._check_kernel_id(kernel_id)
        yield gen.maybe_future(super(MappingKernelManager, self).restart_kernel(kernel_id))
        yield maybe_future(super(MappingKernelManager, self).restart_kernel(kernel_id))
        kernel = self.get_kernel(kernel_id)
        # return a Future that will resolve when the kernel has successfully restarted
        channel = kernel.connect_shell()

@@ -14,7 +14,8 @@ pjoin = os.path.join
from tornado import web, gen

from ...base.handlers import APIHandler
from ...utils import url_path_join, url_unescape
from ...utils import maybe_future, url_path_join, url_unescape


def kernelspec_model(handler, name, spec_dict, resource_dir):

@@ -62,7 +63,7 @@ class MainKernelSpecHandler(APIHandler):
        model = {}
        model['default'] = km.default_kernel_name
        model['kernelspecs'] = specs = {}
        kspecs = yield gen.maybe_future(ksm.get_all_specs())
        kspecs = yield maybe_future(ksm.get_all_specs())
        for kernel_name, kernel_info in kspecs.items():
            try:
                if is_kernelspec_model(kernel_info):

@@ -85,7 +86,7 @@ class KernelSpecHandler(APIHandler):
        ksm = self.kernel_spec_manager
        kernel_name = url_unescape(kernel_name)
        try:
            spec = yield gen.maybe_future(ksm.get_kernel_spec(kernel_name))
            spec = yield maybe_future(ksm.get_kernel_spec(kernel_name))
        except KeyError:
            raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name)
        if is_kernelspec_model(spec):

@@ -7,13 +7,12 @@ Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-16%3A-
# Distributed under the terms of the Modified BSD License.

import json
import os

from tornado import gen, web

from ...base.handlers import APIHandler
from jupyter_client.jsonutil import date_default
from notebook.utils import url_path_join
from notebook.utils import maybe_future, url_path_join
from jupyter_client.kernelspec import NoSuchKernel

@@ -24,7 +23,7 @@ class SessionRootHandler(APIHandler):
    def get(self):
        # Return a list of running sessions
        sm = self.session_manager
        sessions = yield gen.maybe_future(sm.list_sessions())
        sessions = yield maybe_future(sm.list_sessions())
        self.finish(json.dumps(sessions, default=date_default))

    @web.authenticated

@@ -62,12 +61,12 @@ class SessionRootHandler(APIHandler):
            self.log.debug("No kernel specified, using default kernel")
            kernel_name = None

        exists = yield gen.maybe_future(sm.session_exists(path=path))
        exists = yield maybe_future(sm.session_exists(path=path))
        if exists:
            model = yield gen.maybe_future(sm.get_session(path=path))
            model = yield maybe_future(sm.get_session(path=path))
        else:
            try:
                model = yield gen.maybe_future(
                model = yield maybe_future(
                    sm.create_session(path=path, kernel_name=kernel_name,
                                      kernel_id=kernel_id, name=name,
                                      type=mtype))

@@ -93,7 +92,7 @@ class SessionHandler(APIHandler):
    def get(self, session_id):
        # Returns the JSON model for a single session
        sm = self.session_manager
        model = yield gen.maybe_future(sm.get_session(session_id=session_id))
        model = yield maybe_future(sm.get_session(session_id=session_id))
        self.finish(json.dumps(model, default=date_default))

    @web.authenticated

@@ -111,7 +110,7 @@ class SessionHandler(APIHandler):
            raise web.HTTPError(400, "No JSON data provided")

        # get the previous session model
        before = yield gen.maybe_future(sm.get_session(session_id=session_id))
        before = yield maybe_future(sm.get_session(session_id=session_id))

        changes = {}
        if 'notebook' in model and 'path' in model['notebook']:

@@ -138,13 +137,13 @@ class SessionHandler(APIHandler):
                path=before['path'], type=before['type'])
            changes['kernel_id'] = kernel_id

        yield gen.maybe_future(sm.update_session(session_id, **changes))
        model = yield gen.maybe_future(sm.get_session(session_id=session_id))
        yield maybe_future(sm.update_session(session_id, **changes))
        model = yield maybe_future(sm.get_session(session_id=session_id))

        if model['kernel']['id'] != before['kernel']['id']:
            # kernel_id changed because we got a new kernel
            # shutdown the old one
            yield gen.maybe_future(
            yield maybe_future(
                km.shutdown_kernel(before['kernel']['id'])
            )
        self.finish(json.dumps(model, default=date_default))

@@ -155,7 +154,7 @@ class SessionHandler(APIHandler):
        # Deletes the session with given session_id
        sm = self.session_manager
        try:
            yield gen.maybe_future(sm.delete_session(session_id))
            yield maybe_future(sm.delete_session(session_id))
        except KeyError:
            # the kernel was deleted but the session wasn't!
            raise web.HTTPError(410, "Kernel deleted before session")

@@ -17,6 +17,8 @@ from traitlets.config.configurable import LoggingConfigurable
from ipython_genutils.py3compat import unicode_type
from traitlets import Instance

from notebook.utils import maybe_future


class SessionManager(LoggingConfigurable):

@@ -67,7 +69,7 @@ class SessionManager(LoggingConfigurable):
            # row, thereby terminating the session. This can be done via a call to
            # row_to_model that tolerates that condition. If row_to_model returns None,
            # we'll return false, since, at that point, the session doesn't exist anyway.
            model = yield gen.maybe_future(self.row_to_model(row, tolerate_culled=True))
            model = yield maybe_future(self.row_to_model(row, tolerate_culled=True))
            if model is not None:
                exists = True
        raise gen.Return(exists)

@@ -84,7 +86,7 @@ class SessionManager(LoggingConfigurable):
            pass
        else:
            kernel_id = yield self.start_kernel_for_session(session_id, path, name, type, kernel_name)
        result = yield gen.maybe_future(
        result = yield maybe_future(
            self.save_session(session_id, path=path, name=name, type=type, kernel_id=kernel_id)
        )
        # py2-compat

@@ -95,7 +97,7 @@ class SessionManager(LoggingConfigurable):
        """Start a new kernel for a given session."""
        # allow contents manager to specify kernels cwd
        kernel_path = self.contents_manager.get_kernel_path(path=path)
        kernel_id = yield gen.maybe_future(
        kernel_id = yield maybe_future(
            self.kernel_manager.start_kernel(path=kernel_path, kernel_name=kernel_name)
        )
        # py2-compat

@@ -130,7 +132,7 @@ class SessionManager(LoggingConfigurable):
        self.cursor.execute("INSERT INTO session VALUES (?,?,?,?,?)",
            (session_id, path, name, type, kernel_id)
        )
        result = yield gen.maybe_future(self.get_session(session_id=session_id))
        result = yield maybe_future(self.get_session(session_id=session_id))
        raise gen.Return(result)

    @gen.coroutine

@@ -177,7 +179,7 @@ class SessionManager(LoggingConfigurable):

            raise web.HTTPError(404, u'Session not found: %s' % (', '.join(q)))

        model = yield gen.maybe_future(self.row_to_model(row))
        model = yield maybe_future(self.row_to_model(row))
        raise gen.Return(model)

    @gen.coroutine

@@ -196,7 +198,7 @@ class SessionManager(LoggingConfigurable):
        and the value replaces the current value in the session
        with session_id.
        """
        yield gen.maybe_future(self.get_session(session_id=session_id))
        yield maybe_future(self.get_session(session_id=session_id))

        if not kwargs:
            # no changes

@@ -217,7 +219,7 @@ class SessionManager(LoggingConfigurable):
    @gen.coroutine
    def row_to_model(self, row, tolerate_culled=False):
        """Takes sqlite database session row and turns it into a dictionary"""
        kernel_culled = yield gen.maybe_future(self.kernel_culled(row['kernel_id']))
        kernel_culled = yield maybe_future(self.kernel_culled(row['kernel_id']))
        if kernel_culled:
            # The kernel was culled or died without deleting the session.
            # We can't use delete_session here because that tries to find

@@ -236,7 +238,7 @@ class SessionManager(LoggingConfigurable):
                raise gen.Return(None)
            raise KeyError(msg)

        kernel_model = yield gen.maybe_future(self.kernel_manager.kernel_model(row['kernel_id']))
        kernel_model = yield maybe_future(self.kernel_manager.kernel_model(row['kernel_id']))
        model = {
            'id': row['session_id'],
            'path': row['path'],

@@ -259,7 +261,7 @@ class SessionManager(LoggingConfigurable):
        # which messes up the cursor if we're iterating over rows.
        for row in c.fetchall():
            try:
                model = yield gen.maybe_future(self.row_to_model(row))
                model = yield maybe_future(self.row_to_model(row))
                result.append(model)
            except KeyError:
                pass

@@ -268,6 +270,6 @@ class SessionManager(LoggingConfigurable):
    @gen.coroutine
    def delete_session(self, session_id):
        """Deletes the row in the session database with given session_id"""
        session = yield gen.maybe_future(self.get_session(session_id=session_id))
        yield gen.maybe_future(self.kernel_manager.shutdown_kernel(session['kernel']['id']))
        session = yield maybe_future(self.get_session(session_id=session_id))
        yield maybe_future(self.kernel_manager.shutdown_kernel(session['kernel']['id']))
        self.cursor.execute("DELETE FROM session WHERE session_id=?", (session_id,))

@@ -3,23 +3,16 @@ import os
import json
import uuid
from datetime import datetime
from tornado import gen
from tornado.httpclient import HTTPRequest, HTTPResponse, HTTPError
from traitlets.config import Config
from .launchnotebook import NotebookTestBase
from notebook.gateway.managers import GatewayClient

try:
    from unittest.mock import patch, Mock
except ImportError:
    from mock import patch, Mock # py2

try:
    from io import StringIO
except ImportError:
    import StringIO
from io import StringIO
from unittest.mock import patch

import nose.tools as nt
from tornado import gen
from tornado.httpclient import HTTPRequest, HTTPResponse, HTTPError

from notebook.gateway.managers import GatewayClient
from notebook.utils import maybe_future
from .launchnotebook import NotebookTestBase


def generate_kernelspec(name):

@@ -58,7 +51,7 @@ def mock_gateway_request(url, **kwargs):
    # Fetch all kernelspecs
    if endpoint.endswith('/api/kernelspecs') and method == 'GET':
        response_buf = StringIO(json.dumps(kernelspecs))
        response = yield gen.maybe_future(HTTPResponse(request, 200, buffer=response_buf))
        response = yield maybe_future(HTTPResponse(request, 200, buffer=response_buf))
        raise gen.Return(response)

    # Fetch named kernelspec

@@ -67,7 +60,7 @@ def mock_gateway_request(url, **kwargs):
        kspecs = kernelspecs.get('kernelspecs')
        if requested_kernelspec in kspecs:
            response_buf = StringIO(json.dumps(kspecs.get(requested_kernelspec)))
            response = yield gen.maybe_future(HTTPResponse(request, 200, buffer=response_buf))
            response = yield maybe_future(HTTPResponse(request, 200, buffer=response_buf))
            raise gen.Return(response)
        else:
            raise HTTPError(404, message='Kernelspec does not exist: %s' % requested_kernelspec)

@@ -82,7 +75,7 @@ def mock_gateway_request(url, **kwargs):
        model = generate_model(name)
        running_kernels[model.get('id')] = model # Register model as a running kernel
        response_buf = StringIO(json.dumps(model))
        response = yield gen.maybe_future(HTTPResponse(request, 201, buffer=response_buf))
        response = yield maybe_future(HTTPResponse(request, 201, buffer=response_buf))
        raise gen.Return(response)

    # Fetch list of running kernels

@@ -92,7 +85,7 @@ def mock_gateway_request(url, **kwargs):
            model = running_kernels.get(kernel_id)
            kernels.append(model)
        response_buf = StringIO(json.dumps(kernels))
        response = yield gen.maybe_future(HTTPResponse(request, 200, buffer=response_buf))
        response = yield maybe_future(HTTPResponse(request, 200, buffer=response_buf))
        raise gen.Return(response)

    # Interrupt or restart existing kernel

@@ -101,14 +94,14 @@ def mock_gateway_request(url, **kwargs):

        if action == 'interrupt':
            if requested_kernel_id in running_kernels:
                response = yield gen.maybe_future(HTTPResponse(request, 204))
                response = yield maybe_future(HTTPResponse(request, 204))
                raise gen.Return(response)
            else:
                raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id)
        elif action == 'restart':
            if requested_kernel_id in running_kernels:
                response_buf = StringIO(json.dumps(running_kernels.get(requested_kernel_id)))
                response = yield gen.maybe_future(HTTPResponse(request, 204, buffer=response_buf))
                response = yield maybe_future(HTTPResponse(request, 204, buffer=response_buf))
                raise gen.Return(response)
            else:
                raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id)

@@ -119,7 +112,7 @@ def mock_gateway_request(url, **kwargs):
    if endpoint.rfind('/api/kernels/') >= 0 and method == 'DELETE':
        requested_kernel_id = endpoint.rpartition('/')[2]
        running_kernels.pop(requested_kernel_id) # Simulate shutdown by removing kernel from running set
        response = yield gen.maybe_future(HTTPResponse(request, 204))
        response = yield maybe_future(HTTPResponse(request, 204))
        raise gen.Return(response)

    # Fetch existing kernel

@@ -127,7 +120,7 @@ def mock_gateway_request(url, **kwargs):
        requested_kernel_id = endpoint.rpartition('/')[2]
        if requested_kernel_id in running_kernels:
            response_buf = StringIO(json.dumps(running_kernels.get(requested_kernel_id)))
            response = yield gen.maybe_future(HTTPResponse(request, 200, buffer=response_buf))
            response = yield maybe_future(HTTPResponse(request, 200, buffer=response_buf))
            raise gen.Return(response)
        else:
            raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id)

@@ -5,26 +5,16 @@

from __future__ import print_function

import asyncio
import concurrent.futures
import ctypes
import errno
import inspect
import os
import stat
import sys
from distutils.version import LooseVersion

try:
    from inspect import isawaitable
except ImportError:
    def isawaitable(f):
        """If isawaitable is undefined, nothing is awaitable"""
        return False

try:
    from concurrent.futures import Future as ConcurrentFuture
except ImportError:
    class ConcurrentFuture:
        """If concurrent.futures isn't importable, nothing will be a c.f.Future"""
        pass

try:
    from urllib.parse import quote, unquote, urlparse, urljoin

@@ -327,31 +317,18 @@ else:


def maybe_future(obj):
    """Like tornado's gen.maybe_future
    """Like tornado's deprecated gen.maybe_future

    but more compatible with asyncio for recent versions
    of tornado
    """
    if isinstance(obj, TornadoFuture):
        return obj
    elif isawaitable(obj):
    if inspect.isawaitable(obj):
        return asyncio.ensure_future(obj)
    elif isinstance(obj, ConcurrentFuture):
    elif isinstance(obj, concurrent.futures.Future):
        return asyncio.wrap_future(obj)
    else:
        # not awaitable, wrap scalar in future
        f = TornadoFuture()
        f = asyncio.Future()
        f.set_result(obj)
        return f

# monkeypatch tornado gen.maybe_future
# on Python 3
# TODO: remove monkeypatch after backporting smaller fix to 5.x
try:
    import asyncio
except ImportError:
    pass
else:
    import tornado.gen
    tornado.gen.maybe_future = maybe_future

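The call-site pattern the diff applies throughout is the same everywhere: wrap whatever the manager returned with maybe_future and yield it, so synchronous and asynchronous manager implementations both work. A hedged usage sketch, assuming notebook.utils.maybe_future as defined in this commit; DummyManager and fetch_kernels are hypothetical names, not code from the repository:

from tornado import gen
from tornado.ioloop import IOLoop

from notebook.utils import maybe_future


class DummyManager(object):
    """Stand-in manager (hypothetical); a real manager may return a plain
    value, a coroutine, or a Future depending on its implementation."""

    def list_kernels(self):
        return []  # synchronous implementation returning a plain value


@gen.coroutine
def fetch_kernels(manager):
    # maybe_future() makes the result yieldable regardless of which kind of
    # object the manager returned, which is all the call sites above rely on.
    kernels = yield maybe_future(manager.list_kernels())
    raise gen.Return(kernels)


if __name__ == "__main__":
    print(IOLoop.current().run_sync(lambda: fetch_kernels(DummyManager())))
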