Mirror of https://github.com/jupyterhub/jupyterhub.git (synced 2025-10-18 07:23:00 +00:00)

Commit: Re-sync with master

.flake8 (3 lines changed)
@@ -4,9 +4,10 @@
# W: style warnings
# C: complexity
# F401: module imported but unused
# F403: import *
# F811: redefinition of unused `name` from line `N`
# F841: local variable assigned but never used
ignore = E, C, W, F401, F811, F841
ignore = E, C, W, F401, F403, F811, F841

exclude =
.cache,
@@ -1,5 +1,5 @@
# http://travis-ci.org/#!/jupyter/jupyterhub
language: python
sudo: false
python:
- nightly
- 3.6
@@ -12,6 +12,7 @@ before_install:
- npm install
- npm install -g configurable-http-proxy
install:
- pip install -U pip
- pip install --pre -r dev-requirements.txt .

# running tests
@@ -117,10 +117,11 @@ To start the Hub on a specific url and port ``10.0.1.2:443`` with **https**:
### Authenticators

| Authenticator | Description |
| -------------------------------------------------------------------- | ------------------------------------------------- |
| --------------------------------------------------------------------------- | ------------------------------------------------- |
| PAMAuthenticator | Default, built-in authenticator |
| [OAuthenticator](https://github.com/jupyterhub/oauthenticator) | OAuth + JupyterHub Authenticator = OAuthenticator |
| [ldapauthenticator](https://github.com/jupyterhub/ldapauthenticator) | Simple LDAP Authenticator Plugin for JupyterHub |
| [kdcAuthenticator](https://github.com/bloomberg/jupyterhub-kdcauthenticator)| Kerberos Authenticator Plugin for JupyterHub |

### Spawners
@@ -10,7 +10,9 @@ dependencies:
- sqlalchemy>=1
- tornado>=4.1
- traitlets>=4.1
- sphinx>=1.3.6,!=1.5.4
- sphinx>=1.4, !=1.5.4
- sphinx_rtd_theme
- pip:
- jupyter_alabaster_theme
- python-oauth2
- recommonmark==0.4.0
@@ -1,3 +1,3 @@
-r ../requirements.txt
sphinx>=1.3.6
sphinx>=1.4
recommonmark==0.4.0
@@ -8,7 +8,7 @@ import shlex
import recommonmark.parser

# Set paths
#sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('.'))

# -- General configuration ------------------------------------------------

@@ -21,6 +21,7 @@ extensions = [
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon',
'autodoc_traits',
'jupyter_alabaster_theme',
]

templates_path = ['_templates']
@@ -66,7 +67,7 @@ source_suffix = ['.rst', '.md']
# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages.
html_theme = 'sphinx_rtd_theme'
html_theme = 'jupyter_alabaster_theme'

#html_theme_options = {}
#html_theme_path = []
@@ -163,17 +164,15 @@ epub_exclude_files = ['search.html']

# -- Intersphinx ----------------------------------------------------------

intersphinx_mapping = {'https://docs.python.org/': None}
intersphinx_mapping = {'https://docs.python.org/3/': None}

# -- Read The Docs --------------------------------------------------------

on_rtd = os.environ.get('READTHEDOCS', None) == 'True'

if not on_rtd:
# only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
import jupyter_alabaster_theme
html_theme = 'jupyter_alabaster_theme'
html_theme_path = [jupyter_alabaster_theme.get_path()]
else:
# readthedocs.org uses their theme by default, so no need to specify it
# build rest-api, since RTD doesn't run make
@@ -118,8 +118,8 @@ server {

server_name HUB.DOMAIN.TLD;

ssl_certificate /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem
ssl_certificate_key /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem
ssl_certificate /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem;

ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
ssl_prefer_server_ciphers on;
docs/source/configuration-guide.rst (new file, 12 lines)
@@ -0,0 +1,12 @@
Configuration Guide
===================

.. toctree::
:maxdepth: 2

authenticators
spawners
services
config-examples
upgrading
troubleshooting
@@ -29,7 +29,7 @@ JupyterHub's basic flow of operations includes:
- The Hub configures the proxy to forward URL prefixes to the single-user notebook servers

For convenient administration of the Hub, its users, and :doc:`services`
(added in version 7.0), JupyterHub also provides a
(added in version 0.7), JupyterHub also provides a
`REST API <http://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyterhub/jupyterhub/master/docs/rest-api.yml#!/default>`__.

Contents
@@ -43,16 +43,6 @@ Contents
* :doc:`websecurity`
* :doc:`rest`

.. toctree::
:maxdepth: 2
:hidden:
:caption: User Guide

quickstart
getting-started
howitworks
websecurity
rest

**Configuration Guide**

@@ -60,35 +50,14 @@ Contents
* :doc:`spawners`
* :doc:`services`
* :doc:`config-examples`
* :doc:`jupyterhub-deployment-aws`
* :doc:`upgrading`
* :doc:`troubleshooting`

.. toctree::
:maxdepth: 2
:hidden:
:caption: Configuration Guide

authenticators
spawners
services
config-examples
jupyterhub-deployment-aws
upgrading
troubleshooting


**API Reference**

* :doc:`api/index`

.. toctree::
:maxdepth: 2
:hidden:
:caption: API Reference

api/index


**About JupyterHub**

@@ -96,15 +65,6 @@ Contents
* :doc:`contributor-list`
* :doc:`gallery-jhub-deployments`

.. toctree::
:maxdepth: 2
:hidden:
:caption: About JupyterHub

changelog
contributor-list
gallery-jhub-deployments


Indices and tables
------------------
@@ -118,3 +78,18 @@ Questions? Suggestions?

- `Jupyter mailing list <https://groups.google.com/forum/#!forum/jupyter>`_
- `Jupyter website <https://jupyter.org>`_

.. _contents:

Full Table of Contents
----------------------

.. toctree::
:maxdepth: 2

user-guide
configuration-guide
api/index
changelog
contributor-list
gallery-jhub-deployments
@@ -1,4 +1,5 @@
# JupyterHub Deployment on AWS

Documentation on deploying JupyterHub on an AWS EC2 Instance using NGINX Plus.

>CAUTION: Document is a work-in-progress. Information found on this page is partially incomplete and may require additional research.
@@ -42,4 +43,3 @@ Refer to the [Amazon EC2 Security Groups for Linux Instances Page](http://docs.a
- [x] Setting Up Amazon EC2 Instance
- [ ] Setting Up JupyterHub & Web Server on EC2 VM
- [ ] Setting Up Docker Spawner
docs/source/user-guide.rst (new file, 11 lines)
@@ -0,0 +1,11 @@
JupyterHub User Guide
=====================

.. toctree::
:maxdepth: 3

quickstart
getting-started
howitworks
websecurity
rest
@@ -18,6 +18,8 @@ class TokenAPIHandler(APIHandler):
@token_authenticated
def get(self, token):
orm_token = orm.APIToken.find(self.db, token)
if orm_token is None:
orm_token = orm.OAuthAccessToken.find(self.db, token)
if orm_token is None:
raise web.HTTPError(404)
if orm_token.user:
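With this fallback in place, the token-info endpoint served by `TokenAPIHandler` resolves OAuth access tokens as well as API tokens. A minimal sketch of calling it, assuming the default Hub API address at `127.0.0.1:8081` and a placeholder token value:

```python
# Sketch only: the Hub address and token value below are assumptions, not part of this diff.
import requests

hub_api = 'http://127.0.0.1:8081/hub/api'  # assumed default Hub API location
token = '0123456789abcdef'                 # an API token or OAuth access token

r = requests.get('{}/authorizations/token/{}'.format(hub_api, token))
r.raise_for_status()
print(r.json())  # model of the token's owner; a 404 means the Hub does not know the token
```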
@@ -32,7 +32,7 @@ class APIHandler(BaseHandler):
self.log.warning("Blocking API request with no referer")
return False

host_path = url_path_join(host, self.hub.server.base_url)
host_path = url_path_join(host, self.hub.base_url)
referer_path = referer.split('://', 1)[-1]
if not (referer_path + '/').startswith(host_path):
self.log.warning("Blocking Cross Origin API request. Referer: %s, Host: %s",
@@ -4,6 +4,7 @@
# Distributed under the terms of the Modified BSD License.

import json
from urllib.parse import urlparse

from tornado import gen, web

@@ -21,7 +22,7 @@ class ProxyAPIHandler(APIHandler):
This is the same as fetching the routing table directly from the proxy,
but without clients needing to maintain separate
"""
routes = yield self.proxy.get_routes()
routes = yield self.proxy.get_all_routes()
self.write(json.dumps(routes))

@admin_only
@@ -48,17 +49,11 @@ class ProxyAPIHandler(APIHandler):
if not isinstance(model, dict):
raise web.HTTPError(400, "Request body must be JSON dict")

server = self.proxy.api_server
if 'ip' in model:
server.ip = model['ip']
if 'port' in model:
server.port = model['port']
if 'protocol' in model:
server.proto = model['protocol']
if 'api_url' in model:
self.proxy.api_url = model['api_url']
if 'auth_token' in model:
self.proxy.auth_token = model['auth_token']
self.db.commit()
self.log.info("Updated proxy at %s", server.bind_url)
self.log.info("Updated proxy at %s", self.proxy)
yield self.proxy.check_routes(self.users, self.services)
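The PATCH handler above now only honors `api_url` and `auth_token` keys, updating the proxy object directly instead of mutating an ORM `api_server` record. A hedged sketch of an admin client driving it; the Hub address and token values are placeholders:

```python
# Sketch only: addresses and tokens are placeholders, not values from this commit.
import json
import requests

ADMIN_TOKEN = 'replace-with-an-admin-api-token'

r = requests.patch(
    'http://127.0.0.1:8081/hub/api/proxy',
    headers={'Authorization': 'token %s' % ADMIN_TOKEN},
    data=json.dumps({
        'api_url': 'http://127.0.0.1:8001',    # where the proxy's API now lives
        'auth_token': 'new-proxy-auth-token',  # matching CONFIGPROXY_AUTH_TOKEN
    }),
)
r.raise_for_status()
```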
@@ -20,6 +20,11 @@ class SelfAPIHandler(APIHandler):
@web.authenticated
def get(self):
user = self.get_current_user()
if user is None:
# whoami can be accessed via oauth token
user = self.get_current_user_oauth_token()
if user is None:
raise web.HTTPError(403)
self.write(json.dumps(self.user_model(user)))


@@ -219,7 +224,7 @@ class UserCreateNamedServerAPIHandler(APIHandler):
def post(self, name):
user = self.find_user(name)
if user is None:
raise HTTPError(404, "No such user %r" % name)
raise web.HTTPError(404, "No such user %r" % name)
#if user.running:
# # include notify, so that a server that died is noticed immediately
# state = yield user.spawner.poll_and_notify()
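Because `SelfAPIHandler` now falls back to `get_current_user_oauth_token()`, the "whoami" request can be made with either an API token or an OAuth access token in the `Authorization` header. A minimal sketch; the Hub URL and token are assumptions:

```python
# Sketch only: Hub address and token are placeholders.
import requests

token = 'an-api-token-or-oauth-access-token'
r = requests.get(
    'http://127.0.0.1:8081/hub/api/user',
    headers={'Authorization': 'token %s' % token},
)
if r.status_code == 403:
    print('token not recognized by the Hub')
else:
    print(r.json()['name'])  # name of the authenticated user
```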
@@ -30,7 +30,6 @@ from sqlalchemy.orm import scoped_session
|
||||
|
||||
import tornado.httpserver
|
||||
import tornado.options
|
||||
from tornado.httpclient import HTTPError
|
||||
from tornado.ioloop import IOLoop, PeriodicCallback
|
||||
from tornado.log import app_log, access_log, gen_log
|
||||
from tornado import gen, web
|
||||
@@ -54,6 +53,7 @@ from .user import User, UserDict
|
||||
from .oauth.store import make_provider
|
||||
from ._data import DATA_FILES_PATH
|
||||
from .log import CoroutineLogFormatter, log_request
|
||||
from .proxy import Proxy, ConfigurableHTTPProxy
|
||||
from .traitlets import URLPrefix, Command
|
||||
from .utils import (
|
||||
url_path_join,
|
||||
@@ -62,6 +62,7 @@ from .utils import (
|
||||
# classes for config
|
||||
from .auth import Authenticator, PAMAuthenticator
|
||||
from .spawner import Spawner, LocalProcessSpawner
|
||||
from .objects import Hub
|
||||
|
||||
# For faking stats
|
||||
from .emptyclass import EmptyClass
|
||||
@@ -140,7 +141,6 @@ class NewToken(Application):
|
||||
hub = JupyterHub(parent=self)
|
||||
hub.load_config_file(hub.config_file)
|
||||
hub.init_db()
|
||||
hub.hub = hub.db.query(orm.Hub).first()
|
||||
hub.init_users()
|
||||
user = orm.User.find(hub.db, self.name)
|
||||
if user is None:
|
||||
@@ -217,6 +217,8 @@ class JupyterHub(Application):
|
||||
aliases = Dict(aliases)
|
||||
flags = Dict(flags)
|
||||
|
||||
raise_config_file_errors = True
|
||||
|
||||
subcommands = {
|
||||
'token': (NewToken, "Generate an API token for a user"),
|
||||
'upgrade-db': (UpgradeDB, "Upgrade your JupyterHub state database to the current version."),
|
||||
@@ -349,52 +351,67 @@ class JupyterHub(Application):
|
||||
help="Supply extra arguments that will be passed to Jinja environment."
|
||||
).tag(config=True)
|
||||
|
||||
proxy_cmd = Command('configurable-http-proxy',
|
||||
help="""The command to start the http proxy.
|
||||
|
||||
Only override if configurable-http-proxy is not on your PATH
|
||||
"""
|
||||
proxy_class = Type(ConfigurableHTTPProxy, Proxy,
|
||||
help="""Select the Proxy API implementation."""
|
||||
).tag(config=True)
|
||||
|
||||
proxy_cmd = Command([], config=True,
|
||||
help="DEPRECATED. Use ConfigurableHTTPProxy.command",
|
||||
).tag(config=True)
|
||||
|
||||
debug_proxy = Bool(False,
|
||||
help="show debug output in configurable-http-proxy"
|
||||
help="DEPRECATED: Use ConfigurableHTTPProxy.debug",
|
||||
).tag(config=True)
|
||||
proxy_auth_token = Unicode(
|
||||
help="""The Proxy Auth token.
|
||||
|
||||
Loaded from the CONFIGPROXY_AUTH_TOKEN env variable by default.
|
||||
"""
|
||||
help="DEPRECATED: Use ConfigurableHTTPProxy.auth_token"
|
||||
).tag(config=True)
|
||||
|
||||
@default('proxy_auth_token')
|
||||
def _proxy_auth_token_default(self):
|
||||
token = os.environ.get('CONFIGPROXY_AUTH_TOKEN', None)
|
||||
if not token:
|
||||
self.log.warning('\n'.join([
|
||||
"",
|
||||
"Generating CONFIGPROXY_AUTH_TOKEN. Restarting the Hub will require restarting the proxy.",
|
||||
"Set CONFIGPROXY_AUTH_TOKEN env or JupyterHub.proxy_auth_token config to avoid this message.",
|
||||
"",
|
||||
]))
|
||||
token = orm.new_token()
|
||||
return token
|
||||
_proxy_config_map = {
|
||||
'proxy_cmd': 'command',
|
||||
'debug_proxy': 'debug',
|
||||
'proxy_auth_token': 'auth_token',
|
||||
}
|
||||
@observe(*_proxy_config_map)
|
||||
def _deprecated_proxy_config(self, change):
|
||||
dest = self._proxy_config_map[change.name]
|
||||
self.log.warning("JupyterHub.%s is deprecated in JupyterHub 0.8, use ConfigurableHTTPProxy.%s", change.name, dest)
|
||||
self.config.ConfigurableHTTPProxy[dest] = change.new
|
||||
|
||||
proxy_api_ip = Unicode('127.0.0.1',
help="The ip for the proxy API handlers"
proxy_api_ip = Unicode(
help="DEPRECATED: Use ConfigurableHTTPProxy.api_url"
).tag(config=True)
proxy_api_port = Integer(
help="The port for the proxy API handlers"
help="DEPRECATED: Use ConfigurableHTTPProxy.api_url"
).tag(config=True)

@default('proxy_api_port')
def _proxy_api_port_default(self):
return self.port + 1
@observe('proxy_api_port', 'proxy_api_ip')
def _deprecated_proxy_api(self, change):
self.log.warning("JupyterHub.%s is deprecated in JupyterHub 0.8, use ConfigurableHTTPProxy.api_url", change.name)
self.config.ConfigurableHTTPProxy.api_url = 'http://{}:{}'.format(
self.proxy_api_ip or '127.0.0.1',
self.proxy_api_port or self.port + 1,
)

hub_port = Integer(8081,
help="The port for this process"
help="The port for the Hub process"
).tag(config=True)
hub_ip = Unicode('127.0.0.1',
help="The ip for this process"
help="""The ip address for the Hub process to *bind* to.

See `hub_connect_ip` for cases where the bind and connect address should differ.
"""
).tag(config=True)
hub_connect_ip = Unicode('',
help="""The ip or hostname for proxies and spawners to use
for connecting to the Hub.

Use when the bind address (`hub_ip`) is 0.0.0.0 or otherwise different
from the connect address.

Default: when `hub_ip` is 0.0.0.0, use `socket.gethostname()`, otherwise use `hub_ip`.

.. versionadded:: 0.8
"""
)
hub_prefix = URLPrefix('/hub/',
help="The prefix for the hub server. Always /base_url/hub/"
)
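Taken together, the deprecation shims above forward the old `JupyterHub.proxy_*` options to `ConfigurableHTTPProxy`, and `hub_ip`/`hub_connect_ip` split the Hub's bind address from its connect address. A hedged `jupyterhub_config.py` sketch of the 0.8-style spelling; the hostnames and token values are placeholders, not part of this commit:

```python
# jupyterhub_config.py -- sketch of the non-deprecated spellings; values are placeholders.

# instead of c.JupyterHub.proxy_cmd / debug_proxy / proxy_auth_token:
c.ConfigurableHTTPProxy.command = ['configurable-http-proxy']
c.ConfigurableHTTPProxy.debug = True
c.ConfigurableHTTPProxy.auth_token = 'replace-with-CONFIGPROXY_AUTH_TOKEN'

# instead of c.JupyterHub.proxy_api_ip / proxy_api_port:
c.ConfigurableHTTPProxy.api_url = 'http://127.0.0.1:8001'

# bind the Hub on all interfaces, but have proxies and spawners connect by hostname:
c.JupyterHub.hub_ip = '0.0.0.0'
c.JupyterHub.hub_connect_ip = 'hub.internal.example.org'  # placeholder hostname
```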
@@ -682,10 +699,6 @@ class JupyterHub(Application):
|
||||
def init_ports(self):
|
||||
if self.hub_port == self.port:
|
||||
raise TraitError("The hub and proxy cannot both listen on port %i" % self.port)
|
||||
if self.hub_port == self.proxy_api_port:
|
||||
raise TraitError("The hub and proxy API cannot both listen on port %i" % self.hub_port)
|
||||
if self.proxy_api_port == self.port:
|
||||
raise TraitError("The proxy's public and API ports cannot both be %i" % self.port)
|
||||
|
||||
@staticmethod
|
||||
def add_url_prefix(prefix, handlers):
|
||||
@@ -805,36 +818,6 @@ class JupyterHub(Application):
|
||||
self._local.db = scoped_session(self.session_factory)()
|
||||
return self._local.db
|
||||
|
||||
@property
|
||||
def hub(self):
|
||||
if not getattr(self._local, 'hub', None):
|
||||
q = self.db.query(orm.Hub)
|
||||
assert q.count() <= 1
|
||||
self._local.hub = q.first()
|
||||
if self.subdomain_host and self._local.hub:
|
||||
self._local.hub.host = self.subdomain_host
|
||||
return self._local.hub
|
||||
|
||||
@hub.setter
|
||||
def hub(self, hub):
|
||||
self._local.hub = hub
|
||||
if hub and self.subdomain_host:
|
||||
hub.host = self.subdomain_host
|
||||
|
||||
@property
|
||||
def proxy(self):
|
||||
if not getattr(self._local, 'proxy', None):
|
||||
q = self.db.query(orm.Proxy)
|
||||
assert q.count() <= 1
|
||||
p = self._local.proxy = q.first()
|
||||
if p:
|
||||
p.auth_token = self.proxy_auth_token
|
||||
return self._local.proxy
|
||||
|
||||
@proxy.setter
|
||||
def proxy(self, proxy):
|
||||
self._local.proxy = proxy
|
||||
|
||||
def init_db(self):
|
||||
"""Create the database connection"""
|
||||
self.log.debug("Connecting to db: %s", self.db_url)
|
||||
@@ -861,28 +844,15 @@ class JupyterHub(Application):
|
||||
|
||||
def init_hub(self):
|
||||
"""Load the Hub config into the database"""
|
||||
self.hub = self.db.query(orm.Hub).first()
|
||||
if self.hub is None:
|
||||
self.hub = orm.Hub(
|
||||
server=orm.Server(
|
||||
self.hub = Hub(
|
||||
ip=self.hub_ip,
|
||||
port=self.hub_port,
|
||||
base_url=self.hub_prefix,
|
||||
cookie_name='jupyter-hub-token',
|
||||
public_host=self.subdomain_host,
|
||||
)
|
||||
)
|
||||
self.db.add(self.hub)
|
||||
else:
|
||||
server = self.hub.server
|
||||
server.ip = self.hub_ip
|
||||
server.port = self.hub_port
|
||||
server.base_url = self.hub_prefix
|
||||
if self.subdomain_host:
|
||||
if not self.subdomain_host:
|
||||
raise ValueError("Must specify subdomain_host when using subdomains."
|
||||
" This should be the public domain[:port] of the Hub.")
|
||||
|
||||
self.db.commit()
|
||||
if self.hub_connect_ip:
|
||||
self.hub.connect_ip = self.hub_connect_ip
|
||||
|
||||
@gen.coroutine
|
||||
def init_users(self):
|
||||
@@ -954,11 +924,20 @@ class JupyterHub(Application):
|
||||
try:
|
||||
yield gen.maybe_future(self.authenticator.add_user(user))
|
||||
except Exception:
|
||||
# TODO: Review approach to synchronize whitelist with db
|
||||
# known cause of the exception is a user who has already been removed from the system
|
||||
# but the user still exists in the hub's user db
|
||||
self.log.exception("Error adding user %r already in db", user.name)
|
||||
db.commit() # can add_user touch the db?
|
||||
if self.authenticator.delete_invalid_users:
|
||||
self.log.warning("Deleting invalid user %r from the Hub database", user.name)
|
||||
db.delete(user)
|
||||
else:
|
||||
self.log.warning(dedent("""
|
||||
You can set
|
||||
c.Authenticator.delete_invalid_users = True
|
||||
to automatically delete users from the Hub database that no longer pass
|
||||
Authenticator validation,
|
||||
such as when user accounts are deleted from the external system
|
||||
without notifying JupyterHub.
|
||||
"""))
|
||||
db.commit()
|
||||
|
||||
# The whitelist set and the users in the db are now the same.
|
||||
# From this point on, any user changes should be done simultaneously
|
||||
@@ -1058,7 +1037,6 @@ class JupyterHub(Application):
|
||||
base_url=self.base_url,
|
||||
db=self.db, orm=orm_service,
|
||||
domain=domain, host=host,
|
||||
hub_api_url=self.hub.api_url,
|
||||
hub=self.hub,
|
||||
)
|
||||
|
||||
@@ -1151,7 +1129,14 @@ class JupyterHub(Application):
|
||||
self.users[orm_user.id] = user = User(orm_user, self.tornado_settings)
|
||||
self.log.debug("Loading state for %s from db", user.name)
|
||||
spawner = user.spawner
|
||||
status = 0
|
||||
if user.server:
|
||||
try:
|
||||
status = yield spawner.poll()
|
||||
except Exception:
|
||||
self.log.exception("Failed to poll spawner for %s, assuming the spawner is not running.", user.name)
|
||||
status = -1
|
||||
|
||||
if status is None:
|
||||
self.log.info("%s still running", user.name)
|
||||
spawner.add_poll_callback(user_stopped, user)
|
||||
@@ -1173,131 +1158,37 @@ class JupyterHub(Application):
|
||||
db.commit()
|
||||
|
||||
def init_oauth(self):
|
||||
base_url = self.hub.server.base_url
|
||||
self.oauth_provider = make_provider(
|
||||
self.session_factory,
|
||||
url_prefix=url_path_join(self.hub.server.base_url, 'api/oauth2'),
|
||||
login_url=self.authenticator.login_url(self.hub.server.base_url),
|
||||
url_prefix=url_path_join(base_url, 'api/oauth2'),
|
||||
login_url=url_path_join(base_url, 'login')
|
||||
,
|
||||
)
|
||||
|
||||
def init_proxy(self):
|
||||
"""Load the Proxy config into the database"""
|
||||
self.proxy = self.db.query(orm.Proxy).first()
|
||||
if self.proxy is None:
|
||||
self.proxy = orm.Proxy(
|
||||
public_server=orm.Server(),
|
||||
api_server=orm.Server(),
|
||||
"""Load the Proxy config"""
|
||||
# FIXME: handle deprecated config here
|
||||
public_url = 'http{s}://{ip}:{port}{base_url}'.format(
|
||||
s='s' if self.ssl_cert else '',
|
||||
ip=self.ip,
|
||||
port=self.port,
|
||||
base_url=self.base_url,
|
||||
)
|
||||
self.db.add(self.proxy)
|
||||
self.db.commit()
|
||||
self.proxy.auth_token = self.proxy_auth_token # not persisted
|
||||
self.proxy.log = self.log
|
||||
self.proxy.public_server.ip = self.ip
|
||||
self.proxy.public_server.port = self.port
|
||||
self.proxy.public_server.base_url = self.base_url
|
||||
self.proxy.api_server.ip = self.proxy_api_ip
|
||||
self.proxy.api_server.port = self.proxy_api_port
|
||||
self.proxy.api_server.base_url = '/api/routes/'
|
||||
self.db.commit()
|
||||
|
||||
@gen.coroutine
|
||||
def start_proxy(self):
|
||||
"""Actually start the configurable-http-proxy"""
|
||||
# check for proxy
|
||||
if self.proxy.public_server.is_up() or self.proxy.api_server.is_up():
|
||||
# check for *authenticated* access to the proxy (auth token can change)
|
||||
try:
|
||||
routes = yield self.proxy.get_routes()
|
||||
except (HTTPError, OSError, socket.error) as e:
|
||||
if isinstance(e, HTTPError) and e.code == 403:
|
||||
msg = "Did CONFIGPROXY_AUTH_TOKEN change?"
|
||||
else:
|
||||
msg = "Is something else using %s?" % self.proxy.public_server.bind_url
|
||||
self.log.error("Proxy appears to be running at %s, but I can't access it (%s)\n%s",
|
||||
self.proxy.public_server.bind_url, e, msg)
|
||||
self.exit(1)
|
||||
return
|
||||
else:
|
||||
self.log.info("Proxy already running at: %s", self.proxy.public_server.bind_url)
|
||||
yield self.proxy.check_routes(self.users, self._service_map, routes)
|
||||
self.proxy_process = None
|
||||
return
|
||||
|
||||
env = os.environ.copy()
|
||||
env['CONFIGPROXY_AUTH_TOKEN'] = self.proxy.auth_token
|
||||
cmd = self.proxy_cmd + [
|
||||
'--ip', self.proxy.public_server.ip,
|
||||
'--port', str(self.proxy.public_server.port),
|
||||
'--api-ip', self.proxy.api_server.ip,
|
||||
'--api-port', str(self.proxy.api_server.port),
|
||||
'--default-target', self.hub.server.host,
|
||||
'--error-target', url_path_join(self.hub.server.url, 'error'),
|
||||
]
|
||||
if self.subdomain_host:
|
||||
cmd.append('--host-routing')
|
||||
if self.debug_proxy:
|
||||
cmd.extend(['--log-level', 'debug'])
|
||||
if self.ssl_key:
|
||||
cmd.extend(['--ssl-key', self.ssl_key])
|
||||
if self.ssl_cert:
|
||||
cmd.extend(['--ssl-cert', self.ssl_cert])
|
||||
if self.statsd_host:
|
||||
cmd.extend([
|
||||
'--statsd-host', self.statsd_host,
|
||||
'--statsd-port', str(self.statsd_port),
|
||||
'--statsd-prefix', self.statsd_prefix + '.chp'
|
||||
])
|
||||
# Warn if SSL is not used
|
||||
if ' --ssl' not in ' '.join(cmd):
|
||||
self.log.warning("Running JupyterHub without SSL."
|
||||
" I hope there is SSL termination happening somewhere else...")
|
||||
self.log.info("Starting proxy @ %s", self.proxy.public_server.bind_url)
|
||||
self.log.debug("Proxy cmd: %s", cmd)
|
||||
try:
|
||||
self.proxy_process = Popen(cmd, env=env, start_new_session=True)
|
||||
except FileNotFoundError as e:
|
||||
self.log.error(
|
||||
"Failed to find proxy %r\n"
|
||||
"The proxy can be installed with `npm install -g configurable-http-proxy`"
|
||||
% self.proxy_cmd
|
||||
self.proxy = self.proxy_class(
|
||||
db=self.db,
|
||||
public_url=public_url,
|
||||
parent=self,
|
||||
app=self,
|
||||
log=self.log,
|
||||
hub=self.hub,
|
||||
ssl_cert=self.ssl_cert,
|
||||
ssl_key=self.ssl_key,
|
||||
)
|
||||
self.exit(1)
|
||||
|
||||
def _check():
|
||||
status = self.proxy_process.poll()
|
||||
if status is not None:
|
||||
e = RuntimeError("Proxy failed to start with exit code %i" % status)
|
||||
# py2-compatible `raise e from None`
|
||||
e.__cause__ = None
|
||||
raise e
|
||||
|
||||
for server in (self.proxy.public_server, self.proxy.api_server):
|
||||
for i in range(10):
|
||||
_check()
|
||||
try:
|
||||
yield server.wait_up(1)
|
||||
except TimeoutError:
|
||||
continue
|
||||
else:
|
||||
break
|
||||
yield server.wait_up(1)
|
||||
self.log.debug("Proxy started and appears to be up")
|
||||
|
||||
@gen.coroutine
|
||||
def check_proxy(self):
|
||||
if self.proxy_process.poll() is None:
|
||||
return
|
||||
self.log.error("Proxy stopped with exit code %r",
|
||||
'unknown' if self.proxy_process is None else self.proxy_process.poll()
|
||||
)
|
||||
yield self.start_proxy()
|
||||
self.log.info("Setting up routes on new proxy")
|
||||
yield self.proxy.add_all_users(self.users)
|
||||
yield self.proxy.add_all_services(self.services)
|
||||
self.log.info("New proxy back up, and good to go")
|
||||
|
||||
def init_tornado_settings(self):
|
||||
"""Set up the tornado settings dict."""
|
||||
base_url = self.hub.server.base_url
|
||||
base_url = self.hub.base_url
|
||||
jinja_options = dict(
|
||||
autoescape=True,
|
||||
)
|
||||
@@ -1307,7 +1198,7 @@ class JupyterHub(Application):
|
||||
**jinja_options
|
||||
)
|
||||
|
||||
login_url = self.authenticator.login_url(base_url)
|
||||
login_url = url_path_join(base_url, 'login')
|
||||
logout_url = self.authenticator.logout_url(base_url)
|
||||
|
||||
# if running from git, disable caching of require.js
|
||||
@@ -1335,7 +1226,7 @@ class JupyterHub(Application):
|
||||
login_url=login_url,
|
||||
logout_url=logout_url,
|
||||
static_path=os.path.join(self.data_files_path, 'static'),
|
||||
static_url_prefix=url_path_join(self.hub.server.base_url, 'static/'),
|
||||
static_url_prefix=url_path_join(self.hub.base_url, 'static/'),
|
||||
static_handler_class=CacheControlStaticFilesHandler,
|
||||
template_path=self.template_paths,
|
||||
jinja2_env=jinja_env,
|
||||
@@ -1420,13 +1311,8 @@ class JupyterHub(Application):
|
||||
|
||||
# clean up proxy while single-user servers are shutting down
|
||||
if self.cleanup_proxy:
|
||||
if self.proxy_process:
|
||||
self.log.info("Cleaning up proxy[%i]...", self.proxy_process.pid)
|
||||
if self.proxy_process.poll() is None:
|
||||
try:
|
||||
self.proxy_process.terminate()
|
||||
except Exception as e:
|
||||
self.log.error("Failed to terminate proxy process: %s", e)
|
||||
if self.proxy.should_start:
|
||||
yield gen.maybe_future(self.proxy.stop())
|
||||
else:
|
||||
self.log.info("I didn't start the proxy, I can't clean it up")
|
||||
else:
|
||||
@@ -1482,26 +1368,30 @@ class JupyterHub(Application):
|
||||
@gen.coroutine
|
||||
def update_last_activity(self):
|
||||
"""Update User.last_activity timestamps from the proxy"""
|
||||
routes = yield self.proxy.get_routes()
|
||||
routes = yield self.proxy.get_all_routes()
|
||||
users_count = 0
|
||||
active_users_count = 0
|
||||
for prefix, route in routes.items():
|
||||
if 'user' not in route:
|
||||
route_data = route['data']
|
||||
if 'user' not in route_data:
|
||||
# not a user route, ignore it
|
||||
continue
|
||||
user = orm.User.find(self.db, route['user'])
|
||||
users_count += 1
|
||||
if 'last_activity' not in route_data:
|
||||
# no last activity data (possibly proxy other than CHP)
|
||||
continue
|
||||
user = orm.User.find(self.db, route_data['user'])
|
||||
if user is None:
|
||||
self.log.warning("Found no user for route: %s", route)
|
||||
continue
|
||||
try:
|
||||
dt = datetime.strptime(route['last_activity'], ISO8601_ms)
|
||||
dt = datetime.strptime(route_data['last_activity'], ISO8601_ms)
|
||||
except Exception:
|
||||
dt = datetime.strptime(route['last_activity'], ISO8601_s)
|
||||
dt = datetime.strptime(route_data['last_activity'], ISO8601_s)
|
||||
user.last_activity = max(user.last_activity, dt)
|
||||
# FIXME: Make this configurable duration. 30 minutes for now!
|
||||
if (datetime.now() - user.last_activity).total_seconds() < 30 * 60:
|
||||
active_users_count += 1
|
||||
users_count += 1
|
||||
self.statsd.gauge('users.running', users_count)
|
||||
self.statsd.gauge('users.active', active_users_count)
|
||||
|
||||
@@ -1528,17 +1418,20 @@ class JupyterHub(Application):
|
||||
try:
|
||||
self.http_server.listen(self.hub_port, address=self.hub_ip)
|
||||
except Exception:
|
||||
self.log.error("Failed to bind hub to %s", self.hub.server.bind_url)
|
||||
self.log.error("Failed to bind hub to %s", self.hub.bind_url)
|
||||
raise
|
||||
else:
|
||||
self.log.info("Hub API listening on %s", self.hub.server.bind_url)
|
||||
self.log.info("Hub API listening on %s", self.hub.bind_url)
|
||||
|
||||
# start the proxy
|
||||
if self.proxy.should_start:
|
||||
try:
|
||||
yield self.start_proxy()
|
||||
yield self.proxy.start()
|
||||
except Exception as e:
|
||||
self.log.critical("Failed to start proxy", exc_info=True)
|
||||
self.exit(1)
|
||||
else:
|
||||
self.log.info("Not starting proxy")
|
||||
|
||||
# start the service(s)
|
||||
for service_name, service in self._service_map.items():
|
||||
@@ -1572,12 +1465,6 @@ class JupyterHub(Application):
|
||||
loop.add_callback(self.proxy.add_all_users, self.users)
|
||||
loop.add_callback(self.proxy.add_all_services, self._service_map)
|
||||
|
||||
if self.proxy_process:
|
||||
# only check / restart the proxy if we started it in the first place.
|
||||
# this means a restarted Hub cannot restart a Proxy that its
|
||||
# predecessor started.
|
||||
pc = PeriodicCallback(self.check_proxy, 1e3 * self.proxy_check_interval)
|
||||
pc.start()
|
||||
|
||||
if self.service_check_interval and any(s.url for s in self._service_map.values()):
|
||||
pc = PeriodicCallback(self.check_services_health, 1e3 * self.service_check_interval)
|
||||
@@ -1587,7 +1474,7 @@ class JupyterHub(Application):
|
||||
pc = PeriodicCallback(self.update_last_activity, 1e3 * self.last_activity_interval)
|
||||
pc.start()
|
||||
|
||||
self.log.info("JupyterHub is now running at %s", self.proxy.public_server.url)
|
||||
self.log.info("JupyterHub is now running at %s", self.proxy.public_url)
|
||||
# register cleanup on both TERM and INT
|
||||
atexit.register(self.atexit)
|
||||
self.init_signal()
|
||||
|
@@ -31,7 +31,7 @@ class Authenticator(LoggingConfigurable):
help="""
Set of users that will have admin rights on this JupyterHub.

Admin users have extra privilages:
Admin users have extra privileges:
- Use the admin panel to see list of users logged in
- Add / remove users in some authenticators
- Restart / halt the hub
@@ -125,6 +125,23 @@ class Authenticator(LoggingConfigurable):
"""
).tag(config=True)

delete_invalid_users = Bool(False,
help="""Delete any users from the database that do not pass validation

When JupyterHub starts, `.add_user` will be called
on each user in the database to verify that all users are still valid.

If `delete_invalid_users` is True,
any users that do not pass validation will be deleted from the database.
Use this if users might be deleted from an external system,
such as local user accounts.

If False (default), invalid users remain in the Hub's database
and a warning will be issued.
This is the default to avoid data loss due to config changes.
"""
)

def normalize_username(self, username):
"""Normalize the given username and return it
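The `init_users` warning added in app.py earlier in this commit points at this switch; enabling it is a one-line `jupyterhub_config.py` entry:

```python
# jupyterhub_config.py
# Remove Hub-database users that no longer pass Authenticator validation,
# e.g. system accounts that were deleted outside of JupyterHub.
c.Authenticator.delete_invalid_users = True
```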
@@ -250,10 +267,23 @@
"""
self.whitelist.discard(user.name)

auto_login = Bool(False, config=True,
help="""Automatically begin the login process

rather than starting with a "Login with..." link at `/hub/login`

To work, `.login_url()` must give a URL other than the default `/hub/login`,
such as an oauth handler or another automatic login handler,
registered with `.get_handlers()`.

.. versionadded:: 0.8
"""
)

def login_url(self, base_url):
"""Override this when registering a custom login handler

Generally used by authenticators that do not use simple form based authentication.
Generally used by authenticators that do not use simple form-based authentication.

The subclass overriding this is responsible for making sure there is a handler
available to handle the URL returned from this method, using the `get_handlers`
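`auto_login` only takes effect when `login_url()` returns something other than the default `/hub/login`. A hypothetical sketch of an authenticator wired up that way; the handler class, the `/autologin` path, and the trivial redirect are illustrative only, not part of this commit:

```python
# Hypothetical sketch: the names and URLs here are illustrative, not JupyterHub API additions.
from jupyterhub.auth import Authenticator
from jupyterhub.handlers import BaseHandler
from jupyterhub.utils import url_path_join


class AutoLoginHandler(BaseHandler):
    def get(self):
        # a real handler would establish the user here (OAuth callback, trusted header, ...)
        self.redirect(url_path_join(self.hub.base_url, 'home'))


class AutoLoginAuthenticator(Authenticator):
    # enable with: c.Authenticator.auto_login = True

    def login_url(self, base_url):
        # send "Sign in" straight to our handler instead of the default /hub/login form
        return url_path_join(base_url, 'autologin')

    def get_handlers(self, app):
        return [('/autologin', AutoLoginHandler)]
```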
@@ -89,5 +89,4 @@ def _alembic(*args):
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
_alembic(*sys.argv[1:])
|
||||
|
@@ -17,8 +17,9 @@ from tornado.web import RequestHandler
|
||||
from tornado import gen, web
|
||||
|
||||
from .. import orm
|
||||
from ..user import User
|
||||
from ..objects import Server
|
||||
from ..spawner import LocalProcessSpawner
|
||||
from ..user import User
|
||||
from ..utils import url_path_join
|
||||
|
||||
# pattern for the authentication token header
|
||||
@@ -103,7 +104,7 @@ class BaseHandler(RequestHandler):
|
||||
@property
|
||||
def csp_report_uri(self):
|
||||
return self.settings.get('csp_report_uri',
|
||||
url_path_join(self.hub.server.base_url, 'security/csp-report')
|
||||
url_path_join(self.hub.base_url, 'security/csp-report')
|
||||
)
|
||||
|
||||
@property
|
||||
@@ -141,13 +142,35 @@ class BaseHandler(RequestHandler):
def cookie_max_age_days(self):
return self.settings.get('cookie_max_age_days', None)

def get_current_user_token(self):
"""get_current_user from Authorization header token"""
def get_auth_token(self):
"""Get the authorization token from Authorization header"""
auth_header = self.request.headers.get('Authorization', '')
match = auth_header_pat.match(auth_header)
if not match:
return None
token = match.group(1)
return match.group(1)

def get_current_user_oauth_token(self):
"""Get the current user identified by OAuth access token

Separate from API token because OAuth access tokens
can only be used for identifying users,
not using the API.
"""
token = self.get_auth_token()
if token is None:
return None
orm_token = orm.OAuthAccessToken.find(self.db, token)
if orm_token is None:
return None
else:
return self._user_from_orm(orm_token.user)

def get_current_user_token(self):
"""get_current_user from Authorization header token"""
token = self.get_auth_token()
if token is None:
return None
orm_token = orm.APIToken.find(self.db, token)
if orm_token is None:
return None
@@ -162,7 +185,7 @@ class BaseHandler(RequestHandler):
|
||||
max_age_days=self.cookie_max_age_days,
|
||||
)
|
||||
def clear():
|
||||
self.clear_cookie(cookie_name, path=self.hub.server.base_url)
|
||||
self.clear_cookie(cookie_name, path=self.hub.base_url)
|
||||
|
||||
if cookie_id is None:
|
||||
if self.get_cookie(cookie_name):
|
||||
@@ -186,7 +209,7 @@ class BaseHandler(RequestHandler):
|
||||
|
||||
def get_current_user_cookie(self):
|
||||
"""get_current_user from a cookie token"""
|
||||
return self._user_for_cookie(self.hub.server.cookie_name)
|
||||
return self._user_for_cookie(self.hub.cookie_name)
|
||||
|
||||
def get_current_user(self):
|
||||
"""get current username"""
|
||||
@@ -223,9 +246,7 @@ class BaseHandler(RequestHandler):
|
||||
kwargs = {}
|
||||
if self.subdomain_host:
|
||||
kwargs['domain'] = self.domain
|
||||
if user and user.server:
|
||||
self.clear_cookie(user.server.cookie_name, path=user.server.base_url, **kwargs)
|
||||
self.clear_cookie(self.hub.server.cookie_name, path=self.hub.server.base_url, **kwargs)
|
||||
self.clear_cookie(self.hub.cookie_name, path=self.hub.base_url, **kwargs)
|
||||
self.clear_cookie('jupyterhub-services', path=url_path_join(self.base_url, 'services'))
|
||||
|
||||
def _set_user_cookie(self, user, server):
|
||||
@@ -414,7 +435,7 @@ class BaseHandler(RequestHandler):
|
||||
def template_namespace(self):
|
||||
user = self.get_current_user()
|
||||
return dict(
|
||||
base_url=self.hub.server.base_url,
|
||||
base_url=self.hub.base_url,
|
||||
prefix=self.base_url,
|
||||
user=user,
|
||||
login_url=self.settings['login_url'],
|
||||
@@ -480,7 +501,7 @@ class PrefixRedirectHandler(BaseHandler):
|
||||
else:
|
||||
path = self.request.path
|
||||
self.redirect(url_path_join(
|
||||
self.hub.server.base_url, path,
|
||||
self.hub.base_url, path,
|
||||
), permanent=False)
|
||||
|
||||
|
||||
@@ -506,12 +527,12 @@ class UserSpawnHandler(BaseHandler):
|
||||
port = host_info.port
|
||||
if not port:
|
||||
port = 443 if host_info.scheme == 'https' else 80
|
||||
if port != self.proxy.public_server.port and port == self.hub.server.port:
|
||||
if port != Server.from_url(self.proxy.public_url).port and port == self.hub.port:
|
||||
self.log.warning("""
|
||||
Detected possible direct connection to Hub's private ip: %s, bypassing proxy.
|
||||
This will result in a redirect loop.
|
||||
Make sure to connect to the proxied public URL %s
|
||||
""", self.request.full_url(), self.proxy.public_server.url)
|
||||
""", self.request.full_url(), self.proxy.public_url)
|
||||
|
||||
# logged in as correct user, spawn the server
|
||||
if current_user.spawner:
|
||||
@@ -526,14 +547,14 @@ class UserSpawnHandler(BaseHandler):
|
||||
status = yield current_user.spawner.poll()
|
||||
if status is not None:
|
||||
if current_user.spawner.options_form:
|
||||
self.redirect(url_concat(url_path_join(self.hub.server.base_url, 'spawn'),
|
||||
self.redirect(url_concat(url_path_join(self.hub.base_url, 'spawn'),
|
||||
{'next': self.request.uri}))
|
||||
return
|
||||
else:
|
||||
yield self.spawn_single_user(current_user)
|
||||
# set login cookie anew
|
||||
self.set_login_cookie(current_user)
|
||||
without_prefix = self.request.uri[len(self.hub.server.base_url):]
|
||||
without_prefix = self.request.uri[len(self.hub.base_url):]
|
||||
target = url_path_join(self.base_url, without_prefix)
|
||||
if self.subdomain_host:
|
||||
target = current_user.host + target
|
||||
|
@@ -7,8 +7,8 @@ from urllib.parse import urlparse
|
||||
|
||||
from tornado.escape import url_escape
|
||||
from tornado import gen
|
||||
from tornado.httputil import url_concat
|
||||
|
||||
from ..utils import url_path_join
|
||||
from .base import BaseHandler
|
||||
|
||||
|
||||
@@ -19,12 +19,11 @@ class LogoutHandler(BaseHandler):
|
||||
if user:
|
||||
self.log.info("User logged out: %s", user.name)
|
||||
self.clear_login_cookie()
|
||||
for name in user.other_user_cookies:
|
||||
self.clear_login_cookie(name)
|
||||
user.other_user_cookies = set([])
|
||||
self.statsd.incr('logout')
|
||||
|
||||
self.redirect(url_path_join(self.hub.server.base_url, 'login'), permanent=False)
|
||||
if self.authenticator.auto_login:
|
||||
self.render('logout.html')
|
||||
else:
|
||||
self.redirect(self.settings['login_url'], permanent=False)
|
||||
|
||||
|
||||
class LoginHandler(BaseHandler):
|
||||
@@ -37,6 +36,10 @@ class LoginHandler(BaseHandler):
|
||||
login_error=login_error,
|
||||
custom_html=self.authenticator.custom_html,
|
||||
login_url=self.settings['login_url'],
|
||||
authenticator_login_url=url_concat(
|
||||
self.authenticator.login_url(self.hub.server.base_url),
|
||||
{'next': self.get_argument('next', '')},
|
||||
),
|
||||
)
|
||||
|
||||
def get(self):
|
||||
@@ -54,12 +57,22 @@ class LoginHandler(BaseHandler):
|
||||
if user.running:
|
||||
next_url = user.url
|
||||
else:
|
||||
next_url = self.hub.server.base_url
|
||||
next_url = self.hub.base_url
|
||||
# set new login cookie
|
||||
# because single-user cookie may have been cleared or incorrect
|
||||
self.set_login_cookie(self.get_current_user())
|
||||
self.redirect(next_url, permanent=False)
|
||||
else:
|
||||
if self.authenticator.auto_login:
|
||||
auto_login_url = self.authenticator.login_url(self.hub.server.base_url)
|
||||
if auto_login_url == self.settings['login_url']:
|
||||
self.authenticator.auto_login = False
|
||||
self.log.warning("Authenticator.auto_login cannot be used without a custom login_url")
|
||||
else:
|
||||
if next_url:
|
||||
auto_login_url = url_concat(auto_login_url, {'next': next_url})
|
||||
self.redirect(auto_login_url)
|
||||
return
|
||||
username = self.get_argument('username', default='')
|
||||
self.finish(self._render(username=username))
|
||||
|
||||
@@ -68,7 +81,7 @@ class LoginHandler(BaseHandler):
|
||||
# parse the arguments dict
|
||||
data = {}
|
||||
for arg in self.request.arguments:
|
||||
data[arg] = self.get_argument(arg)
|
||||
data[arg] = self.get_argument(arg, strip=False)
|
||||
|
||||
auth_timer = self.statsd.timer('login.authenticate').start()
|
||||
username = yield self.authenticate(data)
|
||||
@@ -88,7 +101,7 @@ class LoginHandler(BaseHandler):
|
||||
next_url = self.get_argument('next', default='')
|
||||
if not next_url.startswith('/'):
|
||||
next_url = ''
|
||||
next_url = next_url or self.hub.server.base_url
|
||||
next_url = next_url or self.hub.base_url
|
||||
self.redirect(next_url)
|
||||
self.log.info("User logged in: %s", username)
|
||||
else:
|
||||
|
@@ -37,7 +37,7 @@ class RootHandler(BaseHandler):
|
||||
# The next request will be handled by UserSpawnHandler,
|
||||
# ultimately redirecting to the logged-in user's server.
|
||||
without_prefix = next_url[len(self.base_url):]
|
||||
next_url = url_path_join(self.hub.server.base_url, without_prefix)
|
||||
next_url = url_path_join(self.hub.base_url, without_prefix)
|
||||
self.log.warning("Redirecting %s to %s. For sharing public links, use /user-redirect/",
|
||||
self.request.uri, next_url,
|
||||
)
|
||||
@@ -50,10 +50,10 @@ class RootHandler(BaseHandler):
|
||||
self.log.debug("User is running: %s", url)
|
||||
self.set_login_cookie(user) # set cookie
|
||||
else:
|
||||
url = url_path_join(self.hub.server.base_url, 'home')
|
||||
url = url_path_join(self.hub.base_url, 'home')
|
||||
self.log.debug("User is not running: %s", url)
|
||||
else:
|
||||
url = url_path_join(self.hub.server.base_url, 'login')
|
||||
url = self.settings['login_url']
|
||||
self.redirect(url)
|
||||
|
||||
|
||||
@@ -67,13 +67,9 @@ class HomeHandler(BaseHandler):
|
||||
if user.running:
|
||||
# trigger poll_and_notify event in case of a server that died
|
||||
yield user.spawner.poll_and_notify()
|
||||
url = user.url
|
||||
else:
|
||||
url = url_concat(url_path_join(self.base_url, 'spawn'),
|
||||
{'next': self.request.uri})
|
||||
html = self.render_template('home.html',
|
||||
user=user,
|
||||
url=url,
|
||||
url=user.url,
|
||||
)
|
||||
self.finish(html)
|
||||
|
||||
@@ -215,7 +211,7 @@ class ProxyErrorHandler(BaseHandler):
|
||||
status_message = responses.get(status_code, 'Unknown HTTP Error')
|
||||
# build template namespace
|
||||
|
||||
hub_home = url_path_join(self.hub.server.base_url, 'home')
|
||||
hub_home = url_path_join(self.hub.base_url, 'home')
|
||||
message_html = ''
|
||||
if status_code == 503:
|
||||
message_html = ' '.join([
|
||||
|
@@ -6,7 +6,7 @@ import json
|
||||
import traceback
|
||||
|
||||
from tornado.log import LogFormatter, access_log
|
||||
from tornado.web import StaticFileHandler
|
||||
from tornado.web import StaticFileHandler, HTTPError
|
||||
|
||||
|
||||
def coroutine_traceback(typ, value, tb):
|
||||
@@ -85,16 +85,38 @@ def log_request(handler):
|
||||
headers = _scrub_headers(request.headers)
|
||||
|
||||
request_time = 1000.0 * handler.request.request_time()
|
||||
|
||||
try:
|
||||
user = handler.get_current_user()
|
||||
except HTTPError:
|
||||
username = ''
|
||||
else:
|
||||
if user is None:
|
||||
username = ''
|
||||
elif isinstance(user, str):
|
||||
username = user
|
||||
elif isinstance(user, dict):
|
||||
username = user['name']
|
||||
else:
|
||||
username = user.name
|
||||
|
||||
ns = dict(
|
||||
status=status,
|
||||
method=request.method,
|
||||
ip=request.remote_ip,
|
||||
uri=uri,
|
||||
request_time=request_time,
|
||||
user=user.name if user else ''
|
||||
user=username,
|
||||
location='',
|
||||
)
|
||||
msg = "{status} {method} {uri} ({user}@{ip}) {request_time:.2f}ms"
|
||||
msg = "{status} {method} {uri}{location} ({user}@{ip}) {request_time:.2f}ms"
|
||||
if status >= 500 and status != 502:
|
||||
log_method(json.dumps(headers, indent=2))
|
||||
elif status in {301, 302}:
|
||||
# log redirect targets
|
||||
# FIXME: _headers is private, but there doesn't appear to be a public way
|
||||
# to get headers from tornado
|
||||
location = handler._headers.get('Location')
|
||||
if location:
|
||||
ns['location'] = ' → {}'.format(location)
|
||||
log_method(msg.format(**ns))
|
||||
|
@@ -5,8 +5,8 @@ implements https://python-oauth2.readthedocs.io/en/latest/store.html
|
||||
|
||||
import threading
|
||||
|
||||
from oauth2.datatype import Client, AccessToken, AuthorizationCode
|
||||
from oauth2.error import AccessTokenNotFound, AuthCodeNotFound, ClientNotFoundError, UserNotAuthenticated
|
||||
from oauth2.datatype import Client, AuthorizationCode
|
||||
from oauth2.error import AuthCodeNotFound, ClientNotFoundError, UserNotAuthenticated
|
||||
from oauth2.grant import AuthorizationCodeGrant
|
||||
from oauth2.web import AuthorizationCodeGrantSiteAdapter
|
||||
import oauth2.store
|
||||
@@ -17,7 +17,6 @@ from sqlalchemy.orm import scoped_session
|
||||
from tornado.escape import url_escape
|
||||
|
||||
from .. import orm
|
||||
from jupyterhub.orm import APIToken
|
||||
from ..utils import url_path_join, hash_token, compare_token
|
||||
|
||||
|
||||
@@ -66,17 +65,6 @@ class HubDBMixin(object):
|
||||
class AccessTokenStore(HubDBMixin, oauth2.store.AccessTokenStore):
|
||||
"""OAuth2 AccessTokenStore, storing data in the Hub database"""
|
||||
|
||||
def _access_token_from_orm(self, orm_token):
|
||||
"""Transform an ORM AccessToken record into an oauth2 AccessToken instance"""
|
||||
return AccessToken(
|
||||
client_id=orm_token.client_id,
|
||||
grant_type=orm_token.grant_type,
|
||||
expires_at=orm_token.expires_at,
|
||||
refresh_token=orm_token.refresh_token,
|
||||
refresh_expires_at=orm_token.refresh_expires_at,
|
||||
user_id=orm_token.user_id,
|
||||
)
|
||||
|
||||
def save_token(self, access_token):
|
||||
"""
|
||||
Stores an access token in the database.
|
||||
@@ -86,17 +74,14 @@ class AccessTokenStore(HubDBMixin, oauth2.store.AccessTokenStore):
|
||||
"""
|
||||
|
||||
user = self.db.query(orm.User).filter(orm.User.id == access_token.user_id).first()
|
||||
token = user.new_api_token(access_token.token)
|
||||
orm_api_token = APIToken.find(self.db, token, kind='user')
|
||||
|
||||
orm_access_token = orm.OAuthAccessToken(
|
||||
client_id=access_token.client_id,
|
||||
grant_type=access_token.grant_type,
|
||||
expires_at=access_token.expires_at,
|
||||
refresh_token=access_token.refresh_token,
|
||||
refresh_expires_at=access_token.refresh_expires_at,
|
||||
token=access_token.token,
|
||||
user=user,
|
||||
api_token=orm_api_token,
|
||||
)
|
||||
self.db.add(orm_access_token)
|
||||
self.db.commit()
|
||||
|
149
jupyterhub/objects.py
Normal file
149
jupyterhub/objects.py
Normal file
@@ -0,0 +1,149 @@
|
||||
"""Some general objects for use in JupyterHub"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import socket
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from tornado import gen
|
||||
|
||||
from traitlets import (
|
||||
HasTraits, Instance, Integer, Unicode,
|
||||
default, observe,
|
||||
)
|
||||
from . import orm
|
||||
from .utils import (
|
||||
url_path_join, can_connect, wait_for_server,
|
||||
wait_for_http_server, random_port,
|
||||
)
|
||||
|
||||
class Server(HasTraits):
|
||||
"""An object representing an HTTP endpoint.
|
||||
|
||||
*Some* of these reside in the database (user servers),
|
||||
but others (Hub, proxy) are in-memory only.
|
||||
"""
|
||||
orm_server = Instance(orm.Server, allow_none=True)
|
||||
|
||||
ip = Unicode()
|
||||
connect_ip = Unicode()
|
||||
proto = Unicode('http')
|
||||
port = Integer()
|
||||
base_url = Unicode('/')
|
||||
cookie_name = Unicode('')
|
||||
|
||||
@property
|
||||
def _connect_ip(self):
|
||||
"""The address to use when connecting to this server
|
||||
|
||||
When `ip` is set to a real ip address, the same value is used.
|
||||
When `ip` refers to 'all interfaces' (e.g. '0.0.0.0'),
|
||||
clients connect via hostname by default.
|
||||
Setting `connect_ip` explicitly overrides any default behavior.
|
||||
"""
|
||||
if self.connect_ip:
|
||||
return self.connect_ip
|
||||
elif self.ip in {'', '0.0.0.0'}:
|
||||
# if listening on all interfaces, default to hostname for connect
|
||||
return socket.gethostname()
|
||||
else:
|
||||
return self.ip
|
||||
|
||||
@classmethod
|
||||
def from_url(cls, url):
|
||||
"""Create a Server from a given URL"""
|
||||
urlinfo = urlparse(url)
|
||||
proto = urlinfo.scheme
|
||||
ip = urlinfo.hostname or ''
|
||||
port = urlinfo.port
|
||||
if not port:
|
||||
if proto == 'https':
|
||||
port = 443
|
||||
else:
|
||||
port = 80
|
||||
return cls(proto=proto, ip=ip, port=port, base_url=urlinfo.path)
|
||||
|
||||
@default('port')
|
||||
def _default_port(self):
|
||||
return random_port()
|
||||
|
||||
@observe('orm_server')
|
||||
def _orm_server_changed(self, change):
|
||||
"""When we get an orm_server, get attributes from there."""
|
||||
obj = change.new
|
||||
self.proto = obj.proto
|
||||
self.ip = obj.ip
|
||||
self.port = obj.port
|
||||
self.base_url = obj.base_url
|
||||
self.cookie_name = obj.cookie_name
|
||||
|
||||
# setter to pass through to the database
|
||||
@observe('ip', 'proto', 'port', 'base_url', 'cookie_name')
|
||||
def _change(self, change):
|
||||
if self.orm_server:
|
||||
setattr(self.orm_server, change.name, change.new)
|
||||
|
||||
@property
|
||||
def host(self):
|
||||
return "{proto}://{ip}:{port}".format(
|
||||
proto=self.proto,
|
||||
ip=self._connect_ip,
|
||||
port=self.port,
|
||||
)
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
return "{host}{uri}".format(
|
||||
host=self.host,
|
||||
uri=self.base_url,
|
||||
)
|
||||
|
||||
@property
|
||||
def bind_url(self):
|
||||
"""representation of URL used for binding
|
||||
|
||||
Never used in APIs, only logging,
|
||||
since it can be non-connectable value, such as '', meaning all interfaces.
|
||||
"""
|
||||
if self.ip in {'', '0.0.0.0'}:
|
||||
return self.url.replace(self._connect_ip, self.ip or '*', 1)
|
||||
return self.url
|
||||
|
||||
@gen.coroutine
|
||||
def wait_up(self, timeout=10, http=False):
|
||||
"""Wait for this server to come up"""
|
||||
if http:
|
||||
yield wait_for_http_server(self.url, timeout=timeout)
|
||||
else:
|
||||
yield wait_for_server(self._connect_ip, self.port, timeout=timeout)
|
||||
|
||||
def is_up(self):
|
||||
"""Is the server accepting connections?"""
|
||||
return can_connect(self.ip or '127.0.0.1', self.port)
|
||||
|
||||
|
||||
class Hub(Server):
|
||||
"""Bring it all together at the hub.
|
||||
|
||||
The Hub is a server, plus its API path suffix
|
||||
|
||||
the api_url is the full URL plus the api_path suffix on the end
|
||||
of the server base_url.
|
||||
"""
|
||||
|
||||
@property
|
||||
def server(self):
|
||||
"""backward-compat"""
|
||||
return self
|
||||
public_host = Unicode()
|
||||
|
||||
@property
|
||||
def api_url(self):
|
||||
"""return the full API url (with proto://host...)"""
|
||||
return url_path_join(self.url, 'api')
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s %s:%s>" % (
|
||||
self.__class__.__name__, self.server.ip, self.server.port,
|
||||
)
|
@@ -79,251 +79,6 @@ class Server(Base):
|
||||
def __repr__(self):
|
||||
return "<Server(%s:%s)>" % (self.ip, self.port)
|
||||
|
||||
@property
|
||||
def host(self):
|
||||
ip = self.ip
|
||||
if ip in {'', '0.0.0.0'}:
|
||||
# when listening on all interfaces, connect to localhost
|
||||
ip = '127.0.0.1'
|
||||
return "{proto}://{ip}:{port}".format(
|
||||
proto=self.proto,
|
||||
ip=ip,
|
||||
port=self.port,
|
||||
)
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
return "{host}{uri}".format(
|
||||
host=self.host,
|
||||
uri=self.base_url,
|
||||
)
|
||||
|
||||
@property
|
||||
def bind_url(self):
|
||||
"""representation of URL used for binding
|
||||
|
||||
Never used in APIs, only logging,
|
||||
since it can be non-connectable value, such as '', meaning all interfaces.
|
||||
"""
|
||||
if self.ip in {'', '0.0.0.0'}:
|
||||
return self.url.replace('127.0.0.1', self.ip or '*', 1)
|
||||
return self.url
|
||||
|
||||
@gen.coroutine
|
||||
def wait_up(self, timeout=10, http=False):
|
||||
"""Wait for this server to come up"""
|
||||
if http:
|
||||
yield wait_for_http_server(self.url, timeout=timeout)
|
||||
else:
|
||||
yield wait_for_server(self.ip or '127.0.0.1', self.port, timeout=timeout)
|
||||
|
||||
def is_up(self):
|
||||
"""Is the server accepting connections?"""
|
||||
return can_connect(self.ip or '127.0.0.1', self.port)
|
||||
|
||||
|
||||
class Proxy(Base):
|
||||
"""A configurable-http-proxy instance.
|
||||
|
||||
A proxy consists of the API server info and the public-facing server info,
|
||||
plus an auth token for configuring the proxy table.
|
||||
"""
|
||||
__tablename__ = 'proxies'
|
||||
id = Column(Integer, primary_key=True)
|
||||
auth_token = None
|
||||
_public_server_id = Column(Integer, ForeignKey('servers.id'))
|
||||
public_server = relationship(Server, primaryjoin=_public_server_id == Server.id)
|
||||
_api_server_id = Column(Integer, ForeignKey('servers.id'))
|
||||
api_server = relationship(Server, primaryjoin=_api_server_id == Server.id)
|
||||
|
||||
def __repr__(self):
|
||||
if self.public_server:
|
||||
return "<%s %s:%s>" % (
|
||||
self.__class__.__name__, self.public_server.ip, self.public_server.port,
|
||||
)
|
||||
else:
|
||||
return "<%s [unconfigured]>" % self.__class__.__name__
|
||||
|
||||
def api_request(self, path, method='GET', body=None, client=None):
|
||||
"""Make an authenticated API request of the proxy"""
|
||||
client = client or AsyncHTTPClient()
|
||||
url = url_path_join(self.api_server.url, path)
|
||||
|
||||
if isinstance(body, dict):
|
||||
body = json.dumps(body)
|
||||
self.log.debug("Fetching %s %s", method, url)
|
||||
req = HTTPRequest(url,
|
||||
method=method,
|
||||
headers={'Authorization': 'token {}'.format(self.auth_token)},
|
||||
body=body,
|
||||
)
|
||||
|
||||
return client.fetch(req)
|
||||
|
||||
@gen.coroutine
|
||||
def add_service(self, service, client=None):
|
||||
"""Add a service's server to the proxy table."""
|
||||
if not service.server:
|
||||
raise RuntimeError(
|
||||
"Service %s does not have an http endpoint to add to the proxy.", service.name)
|
||||
|
||||
self.log.info("Adding service %s to proxy %s => %s",
|
||||
service.name, service.proxy_path, service.server.host,
|
||||
)
|
||||
|
||||
yield self.api_request(service.proxy_path,
|
||||
method='POST',
|
||||
body=dict(
|
||||
target=service.server.host,
|
||||
service=service.name,
|
||||
),
|
||||
client=client,
|
||||
)
|
||||
|
||||
@gen.coroutine
|
||||
def delete_service(self, service, client=None):
|
||||
"""Remove a service's server from the proxy table."""
|
||||
self.log.info("Removing service %s from proxy", service.name)
|
||||
yield self.api_request(service.proxy_path,
|
||||
method='DELETE',
|
||||
client=client,
|
||||
)
|
||||
|
||||
# FIX-ME
|
||||
# we need to add a reference to a specific server
|
||||
@gen.coroutine
|
||||
def add_user(self, user, client=None):
|
||||
"""Add a user's server to the proxy table."""
|
||||
self.log.info("Adding user %s to proxy %s => %s",
|
||||
user.name, user.proxy_path, user.server.host,
|
||||
)
|
||||
|
||||
if user.spawn_pending:
|
||||
raise RuntimeError(
|
||||
"User %s's spawn is pending, shouldn't be added to the proxy yet!", user.name)
|
||||
|
||||
yield self.api_request(user.proxy_path,
|
||||
method='POST',
|
||||
body=dict(
|
||||
target=user.server.host,
|
||||
user=user.name,
|
||||
),
|
||||
client=client,
|
||||
)
|
||||
|
||||
@gen.coroutine
|
||||
def delete_user(self, user, client=None):
|
||||
"""Remove a user's server from the proxy table."""
|
||||
self.log.info("Removing user %s from proxy", user.name)
|
||||
yield self.api_request(user.proxy_path,
|
||||
method='DELETE',
|
||||
client=client,
|
||||
)
|
||||
|
||||
@gen.coroutine
|
||||
def add_all_services(self, service_dict):
|
||||
"""Update the proxy table from the database.
|
||||
|
||||
Used when loading up a new proxy.
|
||||
"""
|
||||
db = inspect(self).session
|
||||
futures = []
|
||||
for orm_service in db.query(Service):
|
||||
service = service_dict[orm_service.name]
|
||||
if service.server:
|
||||
futures.append(self.add_service(service))
|
||||
# wait after submitting them all
|
||||
for f in futures:
|
||||
yield f
|
||||
|
||||
@gen.coroutine
|
||||
def add_all_users(self, user_dict):
|
||||
"""Update the proxy table from the database.
|
||||
|
||||
Used when loading up a new proxy.
|
||||
"""
|
||||
db = inspect(self).session
|
||||
futures = []
|
||||
for orm_user in db.query(User):
|
||||
user = user_dict[orm_user]
|
||||
if user.running:
|
||||
futures.append(self.add_user(user))
|
||||
# wait after submitting them all
|
||||
for f in futures:
|
||||
yield f
|
||||
|
||||
@gen.coroutine
|
||||
def get_routes(self, client=None):
|
||||
"""Fetch the proxy's routes"""
|
||||
resp = yield self.api_request('', client=client)
|
||||
return json.loads(resp.body.decode('utf8', 'replace'))
|
||||
|
||||
# FIX-ME
|
||||
# we need to add a reference to a specific server
|
||||
@gen.coroutine
|
||||
def check_routes(self, user_dict, service_dict, routes=None):
|
||||
"""Check that all users are properly routed on the proxy"""
|
||||
if not routes:
|
||||
routes = yield self.get_routes()
|
||||
|
||||
user_routes = { r['user'] for r in routes.values() if 'user' in r }
|
||||
futures = []
|
||||
db = inspect(self).session
|
||||
for orm_user in db.query(User):
|
||||
user = user_dict[orm_user]
|
||||
if user.running:
|
||||
if user.name not in user_routes:
|
||||
self.log.warning("Adding missing route for %s (%s)", user.name, user.server)
|
||||
futures.append(self.add_user(user))
|
||||
else:
|
||||
# User not running, make sure it's not in the table
|
||||
if user.name in user_routes:
|
||||
self.log.warning("Removing route for not running %s", user.name)
|
||||
futures.append(self.delete_user(user))
|
||||
|
||||
# check service routes
|
||||
service_routes = { r['service'] for r in routes.values() if 'service' in r }
|
||||
for orm_service in db.query(Service).filter(Service.server != None):
|
||||
service = service_dict[orm_service.name]
|
||||
if service.server is None:
|
||||
# This should never be True, but seems to be on rare occasion.
|
||||
# catch filter bug, either in sqlalchemy or my understanding of its behavior
|
||||
self.log.error("Service %s has no server, but wasn't filtered out.", service)
|
||||
continue
|
||||
if service.name not in service_routes:
|
||||
self.log.warning("Adding missing route for %s (%s)", service.name, service.server)
|
||||
futures.append(self.add_service(service))
|
||||
for f in futures:
|
||||
yield f
|
||||
|
||||
|
||||
class Hub(Base):
|
||||
"""Bring it all together at the hub.
|
||||
|
||||
The Hub is a server, plus its API path suffix
|
||||
|
||||
the api_url is the full URL plus the api_path suffix on the end
|
||||
of the server base_url.
|
||||
"""
|
||||
__tablename__ = 'hubs'
|
||||
id = Column(Integer, primary_key=True)
|
||||
_server_id = Column(Integer, ForeignKey('servers.id'))
|
||||
server = relationship(Server, primaryjoin=_server_id == Server.id)
|
||||
host = ''
|
||||
|
||||
@property
|
||||
def api_url(self):
|
||||
"""return the full API url (with proto://host...)"""
|
||||
return url_path_join(self.server.url, 'api')
|
||||
|
||||
def __repr__(self):
|
||||
if self.server:
|
||||
return "<%s %s:%s>" % (
|
||||
self.__class__.__name__, self.server.ip, self.server.port,
|
||||
)
|
||||
else:
|
||||
return "<%s [unconfigured]>" % self.__class__.__name__
|
||||
|
||||
|
||||
# user:group many:many mapping table
|
||||
user_group_map = Table('user_group_map', Base.metadata,
|
||||
@@ -393,25 +148,14 @@ class User(Base):
|
||||
# group mapping
|
||||
groups = relationship('Group', secondary='user_group_map', back_populates='users')
|
||||
|
||||
other_user_cookies = set([])
|
||||
|
||||
@property
|
||||
def server(self):
|
||||
"""Returns the first element of servers.
|
||||
Returns None if the list is empty.
|
||||
"""
|
||||
if len(self.servers) == 0:
|
||||
return None
|
||||
else:
|
||||
return self.servers[0]
|
||||
|
||||
def __repr__(self):
|
||||
if self.server:
|
||||
if self.servers:
|
||||
server = self.servers[0]
|
||||
return "<{cls}({name}@{ip}:{port})>".format(
|
||||
cls=self.__class__.__name__,
|
||||
name=self.name,
|
||||
ip=self.server.ip,
|
||||
port=self.server.port,
|
||||
ip=server.ip,
|
||||
port=server.port,
|
||||
)
|
||||
else:
|
||||
return "<{cls}({name} [unconfigured])>".format(
|
||||
@@ -508,8 +252,65 @@ class Service(Base):
|
||||
"""
|
||||
return db.query(cls).filter(cls.name == name).first()
|
||||
|
||||
class Hashed(object):
|
||||
"""Mixin for tables with hashed tokens"""
|
||||
prefix_length = 4
|
||||
algorithm = "sha512"
|
||||
rounds = 16384
|
||||
salt_bytes = 8
|
||||
min_length = 8
|
||||
|
||||
class APIToken(Base):
|
||||
@property
|
||||
def token(self):
|
||||
raise AttributeError("token is write-only")
|
||||
|
||||
@token.setter
|
||||
def token(self, token):
|
||||
"""Store the hashed value and prefix for a token"""
|
||||
self.prefix = token[:self.prefix_length]
|
||||
self.hashed = hash_token(token, rounds=self.rounds, salt=self.salt_bytes, algorithm=self.algorithm)
|
||||
|
||||
def match(self, token):
|
||||
"""Is this my token?"""
|
||||
return compare_token(self.hashed, token)
|
||||
|
||||
@classmethod
|
||||
def check_token(cls, db, token):
|
||||
"""Check if a token is acceptable"""
|
||||
if len(token) < cls.min_length:
|
||||
raise ValueError("Tokens must be at least %i characters, got %r" % (
|
||||
cls.min_length, token)
|
||||
)
|
||||
found = cls.find(db, token)
|
||||
if found:
|
||||
raise ValueError("Collision on token: %s..." % token[:cls.prefix_length])
|
||||
|
||||
@classmethod
|
||||
def find_prefix(cls, db, token):
|
||||
"""Start the query for matching token.
|
||||
|
||||
Returns an SQLAlchemy query already filtered by prefix-matches.
|
||||
"""
|
||||
prefix = token[:cls.prefix_length]
|
||||
# since we can't filter on hashed values, filter on prefix
|
||||
# so we aren't comparing with all tokens
|
||||
return db.query(cls).filter(bindparam('prefix', prefix).startswith(cls.prefix))
|
||||
|
||||
@classmethod
|
||||
def find(cls, db, token):
|
||||
"""Find a token object by value.
|
||||
|
||||
Returns None if not found.
|
||||
|
||||
`kind='user'` only returns API tokens for users
|
||||
`kind='service'` only returns API tokens for services
|
||||
"""
|
||||
prefix_match = cls.find_prefix(db, token)
|
||||
for orm_token in prefix_match:
|
||||
if orm_token.match(token):
|
||||
return orm_token
|
||||
|
||||
class APIToken(Hashed, Base):
|
||||
"""An API token"""
|
||||
__tablename__ = 'api_tokens'
|
||||
|
||||
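# Illustrative sketch (not from the JupyterHub source above): the Hashed mixin
# stores only a short plaintext prefix plus a salted hash of each token. The
# demo_* helpers below approximate that scheme with hashlib; the real helpers
# are jupyterhub.utils.hash_token / compare_token, and the exact storage format
# used here is an assumption for demonstration only.
import hashlib
import os


def demo_hash_token(token, salt_bytes=8, rounds=16384, algorithm='sha512'):
    """Return '<algorithm>:<rounds>:<salt>:<digest>' for a plaintext token."""
    salt = os.urandom(salt_bytes).hex()
    digest = (salt + token).encode('utf8')
    for _ in range(rounds):
        digest = hashlib.new(algorithm, digest).digest()
    return ':'.join([algorithm, str(rounds), salt, digest.hex()])


def demo_compare_token(hashed, token):
    """Check a plaintext token against a hash produced by demo_hash_token."""
    algorithm, rounds, salt, expected = hashed.split(':')
    digest = (salt + token).encode('utf8')
    for _ in range(int(rounds)):
        digest = hashlib.new(algorithm, digest).digest()
    return digest.hex() == expected


token = os.urandom(16).hex()    # a fresh random token, like utils.new_token()
prefix = token[:4]              # Hashed.prefix_length: stored to narrow lookups
hashed = demo_hash_token(token)
# only `prefix` and `hashed` would be stored on the database row
assert demo_compare_token(hashed, token)
assert not demo_compare_token(hashed, 'not-the-token')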
@@ -523,21 +324,7 @@ class APIToken(Base):
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
hashed = Column(Unicode(1023))
|
||||
prefix = Column(Unicode(1023))
|
||||
prefix_length = 4
|
||||
algorithm = "sha512"
|
||||
rounds = 16384
|
||||
salt_bytes = 8
|
||||
|
||||
@property
|
||||
def token(self):
|
||||
raise AttributeError("token is write-only")
|
||||
|
||||
@token.setter
|
||||
def token(self, token):
|
||||
"""Store the hashed value and prefix for a token"""
|
||||
self.prefix = token[:self.prefix_length]
|
||||
self.hashed = hash_token(token, rounds=self.rounds, salt=self.salt_bytes, algorithm=self.algorithm)
|
||||
prefix = Column(Unicode(16))
|
||||
|
||||
def __repr__(self):
|
||||
if self.user is not None:
|
||||
@@ -566,10 +353,7 @@ class APIToken(Base):
|
||||
`kind='user'` only returns API tokens for users
|
||||
`kind='service'` only returns API tokens for services
|
||||
"""
|
||||
prefix = token[:cls.prefix_length]
|
||||
# since we can't filter on hashed values, filter on prefix
|
||||
# so we aren't comparing with all tokens
|
||||
prefix_match = db.query(cls).filter(bindparam('prefix', prefix).startswith(cls.prefix))
|
||||
prefix_match = cls.find_prefix(db, token)
|
||||
if kind == 'user':
|
||||
prefix_match = prefix_match.filter(cls.user_id != None)
|
||||
elif kind == 'service':
|
||||
@@ -580,10 +364,6 @@ class APIToken(Base):
|
||||
if orm_token.match(token):
|
||||
return orm_token
|
||||
|
||||
def match(self, token):
|
||||
"""Is this my token?"""
|
||||
return compare_token(self.hashed, token)
|
||||
|
||||
@classmethod
|
||||
def new(cls, token=None, user=None, service=None):
|
||||
"""Generate a new API token for a user or service"""
|
||||
@@ -593,12 +373,8 @@ class APIToken(Base):
|
||||
if token is None:
|
||||
token = new_token()
|
||||
else:
|
||||
if len(token) < 8:
|
||||
raise ValueError("Tokens must be at least 8 characters, got %r" % token)
|
||||
found = APIToken.find(db, token)
|
||||
if found:
|
||||
raise ValueError("Collision on token: %s..." % token[:4])
|
||||
orm_token = APIToken(token=token)
|
||||
cls.check_token(db, token)
|
||||
orm_token = cls(token=token)
|
||||
if user:
|
||||
assert user.id is not None
|
||||
orm_token.user_id = user.id
|
||||
@@ -624,19 +400,29 @@ class GrantType(enum.Enum):
|
||||
refresh_token = 'refresh_token'
|
||||
|
||||
|
||||
class OAuthAccessToken(Base):
|
||||
class OAuthAccessToken(Hashed, Base):
|
||||
__tablename__ = 'oauth_access_tokens'
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
|
||||
client_id = Column(Unicode(1023))
|
||||
grant_type = Column(Enum(GrantType), nullable=False)
|
||||
expires_at = Column(Integer)
|
||||
refresh_token = Column(Unicode(36))
|
||||
refresh_token = Column(Unicode(64))
|
||||
refresh_expires_at = Column(Integer)
|
||||
user_id = Column(Integer, ForeignKey('users.id', ondelete='CASCADE'))
|
||||
user = relationship(User)
|
||||
api_token_id = Column(Integer, ForeignKey('api_tokens.id', ondelete='CASCADE'))
|
||||
api_token = relationship(APIToken, backref='oauth_token')
|
||||
session = None # for API-equivalence with APIToken
|
||||
|
||||
# from Hashed
|
||||
hashed = Column(Unicode(64))
|
||||
prefix = Column(Unicode(16))
|
||||
|
||||
def __repr__(self):
|
||||
return "<{cls}('{prefix}...', user='{user}'>".format(
|
||||
cls=self.__class__.__name__,
|
||||
user=self.user and self.user.name,
|
||||
prefix=self.prefix,
|
||||
)
|
||||
|
||||
|
||||
class OAuthCode(Base):
|
||||
|
416
jupyterhub/proxy.py
Normal file
416
jupyterhub/proxy.py
Normal file
@@ -0,0 +1,416 @@
|
||||
"""API for JupyterHub's proxy."""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import json
|
||||
import os
|
||||
from subprocess import Popen
|
||||
import time
|
||||
|
||||
from tornado import gen
|
||||
from tornado.httpclient import AsyncHTTPClient, HTTPRequest
|
||||
from tornado.ioloop import PeriodicCallback
|
||||
|
||||
|
||||
from traitlets import (
|
||||
Any, Bool, Instance, Integer, Unicode,
|
||||
default,
|
||||
)
|
||||
from jupyterhub.traitlets import Command
|
||||
|
||||
from traitlets.config import LoggingConfigurable
|
||||
from .objects import Server
|
||||
from .orm import Service, User
|
||||
from . import utils
|
||||
from .utils import url_path_join
|
||||
|
||||
|
||||
class Proxy(LoggingConfigurable):
|
||||
"""Base class for configurable proxies that JupyterHub can use."""
|
||||
|
||||
db = Any()
|
||||
app = Any()
|
||||
hub = Any()
|
||||
public_url = Unicode()
|
||||
ssl_key = Unicode()
|
||||
ssl_cert = Unicode()
|
||||
|
||||
should_start = Bool(True, config=True,
|
||||
help="""Should the Hub start the proxy.
|
||||
|
||||
If True, the Hub will start the proxy and stop it.
|
||||
Set to False if the proxy is managed externally,
|
||||
such as by systemd, docker, or another service manager.
|
||||
""")
|
||||
|
||||
def start(self):
|
||||
"""Start the proxy.
|
||||
|
||||
Will be called during startup if should_start is True.
|
||||
"""
|
||||
|
||||
def stop(self):
|
||||
"""Stop the proxy.
|
||||
|
||||
Will be called during teardown if should_start is True.
|
||||
"""
|
||||
|
||||
@gen.coroutine
|
||||
def add_route(self, routespec, target, data):
|
||||
"""Add a route to the proxy.
|
||||
|
||||
Args:
|
||||
routespec (str): A specification for which this route will be matched.
|
||||
Could be either a url_prefix or a fqdn.
|
||||
target (str): A URL that will be the target of this route.
|
||||
data (dict): A JSONable dict that will be associated with this route, and will
|
||||
be returned when retrieving information about this route.
|
||||
|
||||
Will raise an appropriate Exception (FIXME: find what?) if the route could
|
||||
not be added.
|
||||
|
||||
The proxy implementation should also have a way to associate the fact that a
|
||||
route came from JupyterHub.
|
||||
"""
|
||||
pass
|
||||
|
||||
@gen.coroutine
|
||||
def delete_route(self, routespec):
|
||||
"""Delete a route with a given routespec if it exists."""
|
||||
pass
|
||||
|
||||
@gen.coroutine
|
||||
def get_route(self, routespec):
|
||||
"""Return the route info for a given routespec.
|
||||
|
||||
Args:
|
||||
routespec (str): The route specification that was used to add this routespec
|
||||
|
||||
Returns:
|
||||
result (dict): with the following keys:
|
||||
`routespec`: The normalized route specification passed in to add_route
|
||||
`target`: The target for this route
|
||||
`data`: The arbitrary data that was passed in by JupyterHub when adding this
|
||||
route.
|
||||
None: if there are no routes matching the given routespec
|
||||
"""
|
||||
pass
|
||||
|
||||
@gen.coroutine
|
||||
def get_all_routes(self):
|
||||
"""Fetch and return all the routes associated by JupyterHub from the
|
||||
proxy.
|
||||
|
||||
Should return a dictionary of routes, where the keys are
|
||||
routespecs and each value is the dict that would be returned by
|
||||
`get_route(routespec)`.
|
||||
"""
|
||||
pass
|
||||
|
||||
# Most basic implementers must only implement above methods
|
||||
|
||||
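# Illustrative sketch (not from the JupyterHub source above): a minimal
# subclass showing the contract the docstrings above describe. Implementing
# add_route / delete_route / get_route / get_all_routes is enough for the Hub
# to drive a proxy; this toy class only records routes in a dict and does no
# actual proxying, and its name is made up for the example.
from tornado import gen


class DictProxy(Proxy):
    """Keep the routing table in memory; a sketch, not a working proxy."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._routes = {}

    @gen.coroutine
    def add_route(self, routespec, target, data):
        # remember everything the Hub passed so get_route can echo it back
        self._routes[routespec] = {
            'routespec': routespec,
            'target': target,
            'data': data,
        }

    @gen.coroutine
    def delete_route(self, routespec):
        self._routes.pop(routespec, None)

    @gen.coroutine
    def get_route(self, routespec):
        return self._routes.get(routespec)

    @gen.coroutine
    def get_all_routes(self):
        # keys are routespecs, values match what get_route returns
        return dict(self._routes)

# A real implementation would forward these calls to the proxy's own API
# instead of a dict, as ConfigurableHTTPProxy does below.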
@gen.coroutine
|
||||
def add_service(self, service, client=None):
|
||||
"""Add a service's server to the proxy table."""
|
||||
if not service.server:
|
||||
raise RuntimeError(
|
||||
"Service %s does not have an http endpoint to add to the proxy.", service.name)
|
||||
|
||||
self.log.info("Adding service %s to proxy %s => %s",
|
||||
service.name, service.proxy_path, service.server.host,
|
||||
)
|
||||
|
||||
yield self.add_route(
|
||||
service.proxy_path,
|
||||
service.server.host,
|
||||
{'service': service.name}
|
||||
)
|
||||
|
||||
@gen.coroutine
|
||||
def delete_service(self, service, client=None):
|
||||
"""Remove a service's server from the proxy table."""
|
||||
self.log.info("Removing service %s from proxy", service.name)
|
||||
yield self.delete_route(service.proxy_path)
|
||||
|
||||
@gen.coroutine
|
||||
def add_user(self, user, client=None):
|
||||
"""Add a user's server to the proxy table."""
|
||||
self.log.info("Adding user %s to proxy %s => %s",
|
||||
user.name, user.proxy_path, user.server.host,
|
||||
)
|
||||
|
||||
if user.spawn_pending:
|
||||
raise RuntimeError(
|
||||
"User %s's spawn is pending, shouldn't be added to the proxy yet!", user.name)
|
||||
|
||||
yield self.add_route(
|
||||
user.proxy_path,
|
||||
user.server.host,
|
||||
{'user': user.name}
|
||||
)
|
||||
|
||||
@gen.coroutine
|
||||
def delete_user(self, user):
|
||||
"""Remove a user's server from the proxy table."""
|
||||
self.log.info("Removing user %s from proxy", user.name)
|
||||
yield self.delete_route(user.proxy_path)
|
||||
|
||||
@gen.coroutine
|
||||
def add_all_services(self, service_dict):
|
||||
"""Update the proxy table from the database.
|
||||
|
||||
Used when loading up a new proxy.
|
||||
"""
|
||||
db = self.db
|
||||
futures = []
|
||||
for orm_service in db.query(Service):
|
||||
service = service_dict[orm_service.name]
|
||||
if service.server:
|
||||
futures.append(self.add_service(service))
|
||||
# wait after submitting them all
|
||||
for f in futures:
|
||||
yield f
|
||||
|
||||
@gen.coroutine
|
||||
def add_all_users(self, user_dict):
|
||||
"""Update the proxy table from the database.
|
||||
|
||||
Used when loading up a new proxy.
|
||||
"""
|
||||
db = self.db
|
||||
futures = []
|
||||
for orm_user in db.query(User):
|
||||
user = user_dict[orm_user]
|
||||
if user.running:
|
||||
futures.append(self.add_user(user))
|
||||
# wait after submitting them all
|
||||
for f in futures:
|
||||
yield f
|
||||
|
||||
@gen.coroutine
|
||||
def check_routes(self, user_dict, service_dict, routes=None):
|
||||
"""Check that all users are properly routed on the proxy."""
|
||||
if not routes:
|
||||
routes = yield self.get_all_routes()
|
||||
|
||||
user_routes = {r['data']['user'] for r in routes.values() if 'user' in r['data']}
|
||||
futures = []
|
||||
db = self.db
|
||||
for orm_user in db.query(User):
|
||||
user = user_dict[orm_user]
|
||||
if user.running:
|
||||
if user.name not in user_routes:
|
||||
self.log.warning(
|
||||
"Adding missing route for %s (%s)", user.name, user.server)
|
||||
futures.append(self.add_user(user))
|
||||
else:
|
||||
# User not running, make sure it's not in the table
|
||||
if user.name in user_routes:
|
||||
self.log.warning(
|
||||
"Removing route for not running %s", user.name)
|
||||
futures.append(self.delete_user(user))
|
||||
|
||||
# check service routes
|
||||
service_routes = {r['data']['service']
|
||||
for r in routes.values() if 'service' in r['data']}
|
||||
for orm_service in db.query(Service).filter(
|
||||
Service.server != None):
|
||||
service = service_dict[orm_service.name]
|
||||
if service.server is None:
|
||||
# This should never be True, but seems to be on rare occasion.
|
||||
# catch filter bug, either in sqlalchemy or my understanding of
|
||||
# its behavior
|
||||
self.log.error(
|
||||
"Service %s has no server, but wasn't filtered out.", service)
|
||||
continue
|
||||
if service.name not in service_routes:
|
||||
self.log.warning("Adding missing route for %s (%s)",
|
||||
service.name, service.server)
|
||||
futures.append(self.add_service(service))
|
||||
for f in futures:
|
||||
yield f
|
||||
|
||||
@gen.coroutine
|
||||
def restore_routes(self):
|
||||
self.log.info("Setting up routes on new proxy")
|
||||
yield self.add_all_users(self.app.users)
|
||||
yield self.add_all_services(self.app.services)
|
||||
self.log.info("New proxy back up, and good to go")
|
||||
|
||||
|
||||
class ConfigurableHTTPProxy(Proxy):
    """Proxy implementation for the default configurable-http-proxy."""

    proxy_process = Any()
    client = Instance(AsyncHTTPClient, ())

    debug = Bool(False, help="Add debug-level logging to the Proxy", config=True)
    auth_token = Unicode(
        help="""The Proxy Auth token.

        Loaded from the CONFIGPROXY_AUTH_TOKEN env variable by default.
        """,
    ).tag(config=True)
    check_running_interval = Integer(5, config=True)

    @default('auth_token')
    def _auth_token_default(self):
        token = os.environ.get('CONFIGPROXY_AUTH_TOKEN', None)
        if not token:
            self.log.warning('\n'.join([
                "",
                "Generating CONFIGPROXY_AUTH_TOKEN. Restarting the Hub will require restarting the proxy.",
                "Set CONFIGPROXY_AUTH_TOKEN env or JupyterHub.proxy_auth_token config to avoid this message.",
                "",
            ]))
            token = utils.new_token()
        return token

    api_url = Unicode('http://127.0.0.1:8001', config=True,
        help="""The ip (or hostname) of the proxy's API endpoint"""
    )
    command = Command('configurable-http-proxy', config=True,
        help="""The command to start the proxy"""
    )

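# Illustrative sketch (not from the JupyterHub source above): how the traitlets
# above are typically set from jupyterhub_config.py when configurable-http-proxy
# is managed outside the Hub. The concrete values are placeholders.
c = get_config()  # provided when JupyterHub loads this config file

# the proxy is started by systemd/docker/etc., not by the Hub
c.ConfigurableHTTPProxy.should_start = False
# where the externally started proxy exposes its REST API
c.ConfigurableHTTPProxy.api_url = 'http://127.0.0.1:8001'
# shared secret; must match the CONFIGPROXY_AUTH_TOKEN given to the proxy process
c.ConfigurableHTTPProxy.auth_token = 'super-secret-token'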
@gen.coroutine
|
||||
def start(self):
|
||||
public_server = Server.from_url(self.public_url)
|
||||
api_server = Server.from_url(self.api_url)
|
||||
env = os.environ.copy()
|
||||
env['CONFIGPROXY_AUTH_TOKEN'] = self.auth_token
|
||||
cmd = self.command + [
|
||||
'--ip', public_server.ip,
|
||||
'--port', str(public_server.port),
|
||||
'--api-ip', api_server.ip,
|
||||
'--api-port', str(api_server.port),
|
||||
'--default-target', self.hub.host,
|
||||
'--error-target', url_path_join(self.hub.url, 'error'),
|
||||
]
|
||||
if self.app.subdomain_host:
|
||||
cmd.append('--host-routing')
|
||||
if self.debug:
|
||||
cmd.extend(['--log-level', 'debug'])
|
||||
if self.ssl_key:
|
||||
cmd.extend(['--ssl-key', self.ssl_key])
|
||||
if self.ssl_cert:
|
||||
cmd.extend(['--ssl-cert', self.ssl_cert])
|
||||
if self.app.statsd_host:
|
||||
cmd.extend([
|
||||
'--statsd-host', self.app.statsd_host,
|
||||
'--statsd-port', str(self.app.statsd_port),
|
||||
'--statsd-prefix', self.app.statsd_prefix + '.chp'
|
||||
])
|
||||
# Warn if SSL is not used
|
||||
if ' --ssl' not in ' '.join(cmd):
|
||||
self.log.warning("Running JupyterHub without SSL."
|
||||
" I hope there is SSL termination happening somewhere else...")
|
||||
self.log.info("Starting proxy @ %s", public_server.bind_url)
|
||||
self.log.debug("Proxy cmd: %s", cmd)
|
||||
try:
|
||||
self.proxy_process = Popen(cmd, env=env, start_new_session=True)
|
||||
except FileNotFoundError as e:
|
||||
self.log.error(
|
||||
"Failed to find proxy %r\n"
|
||||
"The proxy can be installed with `npm install -g configurable-http-proxy`"
|
||||
% self.command
|
||||
)
|
||||
self.exit(1)
|
||||
|
||||
def _check_process():
|
||||
status = self.proxy_process.poll()
|
||||
if status is not None:
|
||||
e = RuntimeError(
|
||||
"Proxy failed to start with exit code %i" % status)
|
||||
# py2-compatible `raise e from None`
|
||||
e.__cause__ = None
|
||||
raise e
|
||||
|
||||
for server in (public_server, api_server):
|
||||
for i in range(10):
|
||||
_check_process()
|
||||
try:
|
||||
yield server.wait_up(1)
|
||||
except TimeoutError:
|
||||
continue
|
||||
else:
|
||||
break
|
||||
yield server.wait_up(1)
|
||||
time.sleep(1)
|
||||
_check_process()
|
||||
self.log.debug("Proxy started and appears to be up")
|
||||
pc = PeriodicCallback(self.check_running, 1e3 * self.check_running_interval)
|
||||
pc.start()
|
||||
|
||||
def stop(self):
|
||||
self.log.info("Cleaning up proxy[%i]...", self.proxy_process.pid)
|
||||
if self.proxy_process.poll() is None:
|
||||
try:
|
||||
self.proxy_process.terminate()
|
||||
except Exception as e:
|
||||
self.log.error("Failed to terminate proxy process: %s", e)
|
||||
|
||||
@gen.coroutine
|
||||
def check_running(self):
|
||||
"""Check if the proxy is still running"""
|
||||
if self.proxy_process.poll() is None:
|
||||
return
|
||||
self.log.error("Proxy stopped with exit code %r",
|
||||
'unknown' if self.proxy_process is None else self.proxy_process.poll()
|
||||
)
|
||||
yield self.start()
|
||||
yield self.restore_routes()
|
||||
|
||||
    def api_request(self, path, method='GET', body=None, client=None):
        """Make an authenticated API request of the proxy."""
        client = client or AsyncHTTPClient()
        url = url_path_join(self.api_url, 'api/routes', path)

        if isinstance(body, dict):
            body = json.dumps(body)
        self.log.debug("Proxy: Fetching %s %s", method, url)
        req = HTTPRequest(url,
            method=method,
            headers={'Authorization': 'token {}'.format(
                self.auth_token)},
            body=body,
        )

        return client.fetch(req)

    def add_route(self, routespec, target, data=None):
        body = data or {}
        body['target'] = target
        return self.api_request(routespec,
            method='POST',
            body=body,
        )

    def delete_route(self, routespec):
        return self.api_request(routespec, method='DELETE')

    def _reformat_routespec(self, routespec, chp_data):
        """Reformat CHP data format to JupyterHub's proxy API."""
        target = chp_data.pop('target')
        return {
            'routespec': routespec,
            'target': target,
            'data': chp_data,
        }

    @gen.coroutine
    def get_route(self, routespec):
        resp = yield self.api_request(routespec, method='GET')
        chp_data = json.loads(resp.body.decode('utf8', 'replace'))
        return self._reformat_routespec(routespec, chp_data)

    @gen.coroutine
    def get_all_routes(self, client=None):
        """Fetch the proxy's routes."""
        resp = yield self.api_request('', client=client)
        chp_routes = json.loads(resp.body.decode('utf8', 'replace'))
        all_routes = {}
        for routespec, chp_data in chp_routes.items():
            all_routes[routespec] = self._reformat_routespec(
                routespec, chp_data)
        return all_routes
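# Illustrative sketch (not from the JupyterHub source above): what the
# translation in _reformat_routespec amounts to, using a made-up payload shaped
# like configurable-http-proxy's GET /api/routes response (field names other
# than 'target' are illustrative).
example_chp_routes = {
    '/': {'target': 'http://127.0.0.1:8081', 'hub': True},
    '/user/river/': {'target': 'http://127.0.0.1:53423', 'user': 'river'},
}


def reformat(routespec, chp_data):
    # same shape as ConfigurableHTTPProxy._reformat_routespec above
    chp_data = dict(chp_data)       # don't mutate the caller's dict
    target = chp_data.pop('target')
    return {'routespec': routespec, 'target': target, 'data': chp_data}


all_routes = {spec: reformat(spec, data) for spec, data in example_chp_routes.items()}
# all_routes['/user/river/'] == {
#     'routespec': '/user/river/',
#     'target': 'http://127.0.0.1:53423',
#     'data': {'user': 'river'},
# }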
@@ -7,7 +7,6 @@ HubAuth can be used in any application, even outside tornado.
|
||||
HubAuthenticated is a mixin class for tornado handlers that should authenticate with the Hub.
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
@@ -494,6 +493,18 @@ class HubOAuth(HubAuth):
|
||||
handler.clear_cookie(self.cookie_name, path=self.base_url)
|
||||
|
||||
|
||||
class UserNotAllowed(Exception):
    """Exception raised when a user is identified and not allowed"""
    def __init__(self, model):
        self.model = model

    def __str__(self):
        return '<{cls} {kind}={name}>'.format(
            cls=self.__class__.__name__,
            kind=self.model['kind'],
            name=self.model['name'],
        )


class HubAuthenticated(object):
    """Mixin for tornado handlers that are authenticated with JupyterHub
@@ -568,7 +579,7 @@ class HubAuthenticated(object):
|
||||
"""
|
||||
|
||||
name = model['name']
|
||||
kind = model.get('kind', 'user')
|
||||
kind = model.setdefault('kind', 'user')
|
||||
if self.allow_all:
|
||||
app_log.debug("Allowing Hub %s %s (all Hub users and services allowed)", kind, name)
|
||||
return model
|
||||
@@ -584,7 +595,7 @@ class HubAuthenticated(object):
|
||||
return model
|
||||
else:
|
||||
app_log.warning("Not allowing Hub service %s", name)
|
||||
return None
|
||||
raise UserNotAllowed(model)
|
||||
|
||||
if self.hub_users and name in self.hub_users:
|
||||
# user in whitelist
|
||||
@@ -597,7 +608,7 @@ class HubAuthenticated(object):
|
||||
return model
|
||||
else:
|
||||
app_log.warning("Not allowing Hub user %s", name)
|
||||
return None
|
||||
raise UserNotAllowed(model)
|
||||
|
||||
def get_current_user(self):
|
||||
"""Tornado's authentication method
|
||||
@@ -611,7 +622,15 @@ class HubAuthenticated(object):
|
||||
        if not user_model:
            self._hub_auth_user_cache = None
            return
        try:
            self._hub_auth_user_cache = self.check_hub_user(user_model)
        except UserNotAllowed as e:
            # cache None, in case get_user is called again while processing the error
            self._hub_auth_user_cache = None
            raise HTTPError(403, "{kind} {name} is not allowed.".format(**e.model))
        except Exception:
            self._hub_auth_user_cache = None
            raise
        return self._hub_auth_user_cache

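# Illustrative sketch (not from the JupyterHub source above): how a service
# handler sees the behavior added here. An identified Hub user who is not in
# the whitelist now surfaces as a 403 from get_current_user() rather than a
# redirect to login. The handler name and whitelist entries are examples only.
import json

from tornado import web
from jupyterhub.services.auth import HubAuthenticated


class WhoAmIHandler(HubAuthenticated, web.RequestHandler):
    hub_users = {'river', 'kaylee'}   # whitelist; other Hub users get the 403

    @web.authenticated
    def get(self):
        user_model = self.current_user   # dict with at least 'name' and 'kind'
        self.set_header('content-type', 'application/json')
        self.write(json.dumps(user_model, indent=1))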
@@ -638,6 +657,8 @@ class HubOAuthCallbackHandler(HubOAuthenticated, RequestHandler):
|
||||
# TODO: make async (in a Thread?)
|
||||
token = self.hub_auth.token_for_code(code)
|
||||
user_model = self.hub_auth.user_for_token(token)
|
||||
if user_model is None:
|
||||
raise HTTPError(500, "oauth callback failed to identify a user")
|
||||
app_log.info("Logged-in user %s", user_model)
|
||||
self.hub_auth.set_cookie(self, token)
|
||||
next_url = self.get_argument('next', '') or self.hub_auth.base_url
|
||||
|
@@ -39,7 +39,6 @@ A hub-managed service with no URL:
|
||||
}
|
||||
"""
|
||||
|
||||
from getpass import getuser
|
||||
import pipes
|
||||
import shutil
|
||||
from subprocess import Popen
|
||||
@@ -52,6 +51,7 @@ from traitlets import (
|
||||
from traitlets.config import LoggingConfigurable
|
||||
|
||||
from .. import orm
|
||||
from ..objects import Server
|
||||
from ..traitlets import Command
|
||||
from ..spawner import LocalProcessSpawner, set_user_setuid
|
||||
from ..utils import url_path_join
|
||||
@@ -60,7 +60,7 @@ class _MockUser(HasTraits):
|
||||
name = Unicode()
|
||||
server = Instance(orm.Server, allow_none=True)
|
||||
state = Dict()
|
||||
service = Instance(__module__ + '.Service')
|
||||
service = Instance(__name__ + '.Service')
|
||||
host = Unicode()
|
||||
|
||||
@property
|
||||
@@ -72,6 +72,12 @@ class _MockUser(HasTraits):
|
||||
else:
|
||||
return self.server.base_url
|
||||
|
||||
@property
|
||||
def base_url(self):
|
||||
if not self.server:
|
||||
return ''
|
||||
return self.server.base_url
|
||||
|
||||
# We probably shouldn't use a Spawner here,
|
||||
# but there are too many concepts to share.
|
||||
|
||||
@@ -84,11 +90,17 @@ class _ServiceSpawner(LocalProcessSpawner):
|
||||
cmd = Command(minlen=0)
|
||||
|
||||
def make_preexec_fn(self, name):
|
||||
if not name or name == getuser():
|
||||
if not name:
|
||||
# no setuid if no name
|
||||
return
|
||||
return set_user_setuid(name, chdir=False)
|
||||
|
||||
def user_env(self, env):
|
||||
if not self.user.name:
|
||||
return env
|
||||
else:
|
||||
return super().user_env(env)
|
||||
|
||||
def start(self):
|
||||
"""Start the process"""
|
||||
env = self.get_env()
|
||||
@@ -188,7 +200,7 @@ class Service(LoggingConfigurable):
|
||||
Only used if the Hub is spawning the service.
|
||||
"""
|
||||
).tag(input=True)
|
||||
user = Unicode(getuser(),
|
||||
user = Unicode("",
|
||||
help="""The user to become when launching the service.
|
||||
|
||||
If unspecified, run the service as the same user as the Hub.
|
||||
@@ -221,7 +233,10 @@ class Service(LoggingConfigurable):
|
||||
|
||||
@property
|
||||
def server(self):
|
||||
return self.orm.server
|
||||
if self.orm.server:
|
||||
return Server(orm_server=self.orm.server)
|
||||
else:
|
||||
return None
|
||||
|
||||
@property
|
||||
def prefix(self):
|
||||
@@ -252,9 +267,6 @@ class Service(LoggingConfigurable):
|
||||
env.update(self.environment)
|
||||
|
||||
env['JUPYTERHUB_SERVICE_NAME'] = self.name
|
||||
env['JUPYTERHUB_API_TOKEN'] = self.api_token
|
||||
env['JUPYTERHUB_API_URL'] = self.hub_api_url
|
||||
env['JUPYTERHUB_BASE_URL'] = self.base_url
|
||||
if self.url:
|
||||
env['JUPYTERHUB_SERVICE_URL'] = self.url
|
||||
env['JUPYTERHUB_SERVICE_PREFIX'] = self.server.base_url
|
||||
|
@@ -5,12 +5,13 @@
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import os
|
||||
from textwrap import dedent
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from jinja2 import ChoiceLoader, FunctionLoader
|
||||
|
||||
from tornado import ioloop
|
||||
from textwrap import dedent
|
||||
from tornado.web import HTTPError
|
||||
|
||||
try:
|
||||
import notebook
|
||||
@@ -37,6 +38,7 @@ from notebook.auth.logout import LogoutHandler
|
||||
from notebook.base.handlers import IPythonHandler
|
||||
|
||||
from jupyterhub import __version__
|
||||
from .log import log_request
|
||||
from .services.auth import HubOAuth, HubOAuthenticated, HubOAuthCallbackHandler
|
||||
from .utils import url_path_join
|
||||
|
||||
@@ -119,6 +121,8 @@ class OAuthCallbackHandler(HubOAuthCallbackHandler, IPythonHandler):
|
||||
# TODO: make async (in a Thread?)
|
||||
token = self.hub_auth.token_for_code(code)
|
||||
user_model = self.hub_auth.user_for_token(token)
|
||||
if user_model is None:
|
||||
raise HTTPError(500, "oauth callback failed to identify a user")
|
||||
self.log.info("Logged-in user %s", user_model)
|
||||
self.hub_auth.set_cookie(self, token)
|
||||
next_url = self.get_argument('next', '') or self.base_url
|
||||
@@ -190,6 +194,14 @@ class SingleUserNotebookApp(NotebookApp):
|
||||
user = CUnicode().tag(config=True)
|
||||
group = CUnicode().tag(config=True)
|
||||
|
||||
@default('user')
|
||||
def _default_user(self):
|
||||
return os.environ.get('JUPYTERHUB_USER') or ''
|
||||
|
||||
@default('group')
|
||||
def _default_group(self):
|
||||
return os.environ.get('JUPYTERHUB_GROUP') or ''
|
||||
|
||||
@observe('user')
|
||||
def _user_changed(self, change):
|
||||
self.log.name = change.new
|
||||
@@ -225,23 +237,25 @@ class SingleUserNotebookApp(NotebookApp):
|
||||
value = value + '/'
|
||||
return value
|
||||
|
||||
@default('cookie_name')
|
||||
def _cookie_name_default(self):
|
||||
if os.environ.get('JUPYTERHUB_SERVICE_NAME'):
|
||||
# if I'm a service, use the services cookie name
|
||||
return 'jupyterhub-services'
|
||||
|
||||
@default('port')
|
||||
def _port_default(self):
|
||||
if os.environ.get('JUPYTERHUB_SERVICE_URL'):
|
||||
url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL'])
|
||||
if url.port:
|
||||
return url.port
|
||||
elif url.scheme == 'http':
|
||||
return 80
|
||||
elif url.scheme == 'https':
|
||||
return 443
|
||||
return 8888
|
||||
|
||||
@default('ip')
|
||||
def _ip_default(self):
|
||||
if os.environ.get('JUPYTERHUB_SERVICE_URL'):
|
||||
url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL'])
|
||||
if url.hostname:
|
||||
return url.hostname
|
||||
return '127.0.0.1'
|
||||
|
||||
aliases = aliases
|
||||
flags = flags
|
||||
@@ -348,6 +362,7 @@ class SingleUserNotebookApp(NotebookApp):
|
||||
# load the hub-related settings into the tornado settings dict
|
||||
self.init_hub_auth()
|
||||
s = self.tornado_settings
|
||||
s['log_function'] = log_request
|
||||
s['user'] = self.user
|
||||
s['group'] = self.group
|
||||
s['hub_prefix'] = self.hub_prefix
|
||||
|
@@ -65,7 +65,7 @@ class Spawner(LoggingConfigurable):
|
||||
"""
|
||||
)
|
||||
|
||||
ip = Unicode('127.0.0.1',
|
||||
ip = Unicode('',
|
||||
help="""
|
||||
The IP address (or hostname) the single-user server should listen on.
|
||||
|
||||
@@ -431,10 +431,17 @@ class Spawner(LoggingConfigurable):
|
||||
env['JUPYTERHUB_ADMIN_ACCESS'] = '1'
|
||||
# OAuth settings
|
||||
env['JUPYTERHUB_CLIENT_ID'] = self.oauth_client_id
|
||||
env['JUPYTERHUB_HOST'] = self.hub.host
|
||||
env['JUPYTERHUB_HOST'] = self.hub.public_host
|
||||
env['JUPYTERHUB_OAUTH_CALLBACK_URL'] = \
|
||||
url_path_join(self.user.url, 'oauth_callback')
|
||||
|
||||
# Info previously passed on args
|
||||
env['JUPYTERHUB_USER'] = self.user.name
|
||||
env['JUPYTERHUB_API_URL'] = self.hub.api_url
|
||||
env['JUPYTERHUB_BASE_URL'] = self.hub.base_url[:-4]
|
||||
if self.server:
|
||||
env['JUPYTERHUB_SERVICE_PREFIX'] = self.server.base_url
|
||||
|
||||
# Put in limit and guarantee info if they exist.
|
||||
# Note that this is for use by the humans / notebook extensions in the
|
||||
# single-user notebook server, and not for direct usage by the spawners
|
||||
@@ -493,13 +500,6 @@ class Spawner(LoggingConfigurable):
|
||||
|
||||
Doesn't expect shell expansion to happen.
|
||||
"""
|
||||
args = [
|
||||
'--user="%s"' % self.user.name,
|
||||
'--base-url="%s"' % self.server.base_url,
|
||||
'--hub-host="%s"' % self.hub.host,
|
||||
'--hub-prefix="%s"' % self.hub.server.base_url,
|
||||
'--hub-api-url="%s"' % self.hub.api_url,
|
||||
]
|
||||
if self.ip:
|
||||
args.append('--ip="%s"' % self.ip)
|
||||
|
||||
@@ -539,10 +539,13 @@ class Spawner(LoggingConfigurable):
|
||||
def stop(self, now=False):
|
||||
"""Stop the single-user server
|
||||
|
||||
If `now` is set to `False`, do not wait for the server to stop. Otherwise, wait for
|
||||
the server to stop before returning.
|
||||
If `now` is False (default), shutdown the server as gracefully as possible,
|
||||
e.g. starting with SIGINT, then SIGTERM, then SIGKILL.
|
||||
If `now` is True, terminate the server immediately.
|
||||
|
||||
Must be a Tornado coroutine.
|
||||
The coroutine should return when the single-user server process is no longer running.
|
||||
|
||||
Must be a coroutine.
|
||||
"""
|
||||
raise NotImplementedError("Override in subclass. Must be a Tornado gen.coroutine.")
|
||||
|
||||
@@ -616,7 +619,10 @@ class Spawner(LoggingConfigurable):
|
||||
|
||||
self.stop_polling()
|
||||
|
||||
for callback in self._callbacks:
|
||||
# clear callbacks list
|
||||
self._callbacks, callbacks = ([], self._callbacks)
|
||||
|
||||
for callback in callbacks:
|
||||
try:
|
||||
yield gen.maybe_future(callback())
|
||||
except Exception:
|
||||
@@ -917,8 +923,11 @@ class LocalProcessSpawner(Spawner):
|
||||
def stop(self, now=False):
|
||||
"""Stop the single-user server process for the current user.
|
||||
|
||||
If `now` is set to True, do not wait for the process to die.
|
||||
Otherwise, it'll wait.
|
||||
If `now` is False (default), shutdown the server as gracefully as possible,
|
||||
e.g. starting with SIGINT, then SIGTERM, then SIGKILL.
|
||||
If `now` is True, terminate the server immediately.
|
||||
|
||||
The coroutine should return when the process is no longer running.
|
||||
"""
|
||||
if not now:
|
||||
status = yield self.poll()
|
||||
|
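# Illustrative sketch (not from the JupyterHub source above): the graceful-stop
# escalation the docstring above describes (SIGINT, then SIGTERM, then SIGKILL),
# shown for a plain subprocess.Popen object. This is not LocalProcessSpawner's
# actual implementation.
import signal
from subprocess import TimeoutExpired


def demo_graceful_stop(proc, grace=5):
    """Escalate signals until the child process exits; return its exit code."""
    for sig in (signal.SIGINT, signal.SIGTERM, signal.SIGKILL):
        if proc.poll() is not None:
            return proc.returncode
        proc.send_signal(sig)
        try:
            return proc.wait(timeout=grace)
        except TimeoutExpired:
            continue
    return proc.wait()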
@@ -9,7 +9,7 @@ from subprocess import TimeoutExpired
|
||||
import time
|
||||
from unittest import mock
|
||||
from pytest import fixture, raises
|
||||
from tornado import ioloop
|
||||
from tornado import ioloop, gen
|
||||
|
||||
from .. import orm
|
||||
from ..utils import random_port
|
||||
@@ -32,11 +32,7 @@ def db():
|
||||
name=getuser(),
|
||||
)
|
||||
user.servers.append(orm.Server())
|
||||
hub = orm.Hub(
|
||||
server=orm.Server(),
|
||||
)
|
||||
_db.add(user)
|
||||
_db.add(hub)
|
||||
_db.commit()
|
||||
return _db
|
||||
|
||||
@@ -57,6 +53,9 @@ def app(request):
|
||||
|
||||
|
||||
def fin():
|
||||
# disconnect logging during cleanup because pytest closes captured FDs prematurely
|
||||
mocked_app.log.handlers = []
|
||||
|
||||
MockHub.clear_instance()
|
||||
mocked_app.stop()
|
||||
request.addfinalizer(fin)
|
||||
@@ -85,10 +84,14 @@ def _mockservice(request, app, url=False):
|
||||
with mock.patch.object(jupyterhub.services.service, '_ServiceSpawner', MockServiceSpawner):
|
||||
app.services = [spec]
|
||||
app.init_services()
|
||||
app.io_loop.add_callback(app.proxy.add_all_services, app._service_map)
|
||||
assert name in app._service_map
|
||||
service = app._service_map[name]
|
||||
app.io_loop.add_callback(service.start)
|
||||
@gen.coroutine
|
||||
def start():
|
||||
# wait for proxy to be updated before starting the service
|
||||
yield app.proxy.add_all_services(app._service_map)
|
||||
service.start()
|
||||
app.io_loop.add_callback(start)
|
||||
def cleanup():
|
||||
service.stop()
|
||||
app.services[:] = []
|
||||
@@ -100,6 +103,8 @@ def _mockservice(request, app, url=False):
|
||||
# ensure process finishes starting
|
||||
with raises(TimeoutExpired):
|
||||
service.proc.wait(1)
|
||||
if url:
|
||||
ioloop.IOLoop().run_sync(service.server.wait_up)
|
||||
return service
|
||||
|
||||
|
||||
|
@@ -4,7 +4,6 @@ import os
|
||||
import sys
|
||||
from tempfile import NamedTemporaryFile
|
||||
import threading
|
||||
|
||||
from unittest import mock
|
||||
|
||||
import requests
|
||||
@@ -18,9 +17,10 @@ from traitlets import default
|
||||
from ..app import JupyterHub
|
||||
from ..auth import PAMAuthenticator
|
||||
from .. import orm
|
||||
from ..objects import Server
|
||||
from ..spawner import LocalProcessSpawner
|
||||
from ..singleuser import SingleUserNotebookApp
|
||||
from ..utils import random_port
|
||||
from ..utils import random_port, url_path_join
|
||||
|
||||
from pamela import PAMError
|
||||
|
||||
@@ -165,7 +165,7 @@ class MockHub(JupyterHub):
|
||||
self.db.add(user)
|
||||
self.db.commit()
|
||||
yield super(MockHub, self).start()
|
||||
yield self.hub.server.wait_up(http=True)
|
||||
yield self.hub.wait_up(http=True)
|
||||
self.io_loop.add_callback(evt.set)
|
||||
|
||||
def _start():
|
||||
@@ -207,19 +207,24 @@ def public_host(app):
|
||||
if app.subdomain_host:
|
||||
return app.subdomain_host
|
||||
else:
|
||||
return app.proxy.public_server.host
|
||||
return Server.from_url(app.proxy.public_url).host
|
||||
|
||||
|
||||
def public_url(app, user_or_service=None):
|
||||
def public_url(app, user_or_service=None, path=''):
|
||||
"""Return the full, public base URL (including prefix) of the given JupyterHub instance."""
|
||||
if user_or_service:
|
||||
if app.subdomain_host:
|
||||
host = user_or_service.host
|
||||
else:
|
||||
host = public_host(app)
|
||||
return host + user_or_service.server.base_url
|
||||
prefix = user_or_service.server.base_url
|
||||
else:
|
||||
return public_host(app) + app.proxy.public_server.base_url
|
||||
host = public_host(app)
|
||||
prefix = Server.from_url(app.proxy.public_url).base_url
|
||||
if path:
|
||||
return host + url_path_join(prefix, path)
|
||||
else:
|
||||
return host + prefix
|
||||
|
||||
|
||||
# single-user-server mocking:
|
||||
@@ -241,7 +246,8 @@ class StubSingleUserSpawner(MockSpawner):
|
||||
_thread = None
|
||||
@gen.coroutine
|
||||
def start(self):
|
||||
self.user.server.port = random_port()
|
||||
ip = self.ip = '127.0.0.1'
|
||||
port = self.port = random_port()
|
||||
env = self.get_env()
|
||||
args = self.get_args()
|
||||
evt = threading.Event()
|
||||
@@ -262,6 +268,7 @@ class StubSingleUserSpawner(MockSpawner):
|
||||
self._thread.start()
|
||||
ready = evt.wait(timeout=3)
|
||||
assert ready
|
||||
return (ip, port)
|
||||
|
||||
@gen.coroutine
|
||||
def stop(self):
|
||||
|
@@ -83,7 +83,7 @@ def auth_header(db, name):
|
||||
@check_db_locks
|
||||
def api_request(app, *api_path, **kwargs):
|
||||
"""Make an API request"""
|
||||
base_url = app.hub.server.url
|
||||
base_url = app.hub.url
|
||||
headers = kwargs.setdefault('headers', {})
|
||||
|
||||
if 'Authorization' not in headers:
|
||||
@@ -94,7 +94,7 @@ def api_request(app, *api_path, **kwargs):
|
||||
f = getattr(requests, method)
|
||||
resp = f(url, **kwargs)
|
||||
assert "frame-ancestors 'self'" in resp.headers['Content-Security-Policy']
|
||||
assert ujoin(app.hub.server.base_url, "security/csp-report") in resp.headers['Content-Security-Policy']
|
||||
assert ujoin(app.hub.base_url, "security/csp-report") in resp.headers['Content-Security-Policy']
|
||||
assert 'http' not in resp.headers['Content-Security-Policy']
|
||||
return resp
|
||||
|
||||
@@ -132,7 +132,7 @@ def test_auth_api(app):
|
||||
|
||||
|
||||
def test_referer_check(app, io_loop):
|
||||
url = ujoin(public_host(app), app.hub.server.base_url)
|
||||
url = ujoin(public_host(app), app.hub.base_url)
|
||||
host = urlparse(url).netloc
|
||||
user = find_user(app.db, 'admin')
|
||||
if user is None:
|
||||
@@ -423,10 +423,13 @@ def test_spawn(app, io_loop):
|
||||
r = requests.get(ujoin(url, 'args'))
|
||||
assert r.status_code == 200
|
||||
argv = r.json()
|
||||
for expected in ['--user="%s"' % name, '--base-url="%s"' % user.server.base_url]:
|
||||
assert expected in argv
|
||||
assert '--port' in ' '.join(argv)
|
||||
r = requests.get(ujoin(url, 'env'))
|
||||
env = r.json()
|
||||
for expected in ['JUPYTERHUB_USER', 'JUPYTERHUB_BASE_URL', 'JUPYTERHUB_API_TOKEN']:
|
||||
assert expected in env
|
||||
if app.subdomain_host:
|
||||
assert '--hub-host="%s"' % app.subdomain_host in argv
|
||||
assert env['JUPYTERHUB_HOST'] == app.subdomain_host
|
||||
|
||||
r = api_request(app, 'users', name, 'server', method='delete')
|
||||
assert r.status_code == 204
|
||||
@@ -779,7 +782,7 @@ def test_get_service(app, mockservice_url):
|
||||
|
||||
|
||||
def test_root_api(app):
|
||||
base_url = app.hub.server.url
|
||||
base_url = app.hub.url
|
||||
url = ujoin(base_url, 'api')
|
||||
r = requests.get(url)
|
||||
r.raise_for_status()
|
||||
|
@@ -3,10 +3,13 @@
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import socket
|
||||
|
||||
import pytest
|
||||
from tornado import gen
|
||||
|
||||
from .. import orm
|
||||
from .. import objects
|
||||
from ..user import User
|
||||
from .mocking import MockSpawner
|
||||
|
||||
@@ -20,53 +23,25 @@ def test_server(db):
|
||||
assert server.proto == 'http'
|
||||
assert isinstance(server.port, int)
|
||||
assert isinstance(server.cookie_name, str)
|
||||
assert server.host == 'http://127.0.0.1:%i' % server.port
|
||||
|
||||
# test wrapper
|
||||
server = objects.Server(orm_server=server)
|
||||
assert server.host == 'http://%s:%i' % (socket.gethostname(), server.port)
|
||||
assert server.url == server.host + '/'
|
||||
assert server.bind_url == 'http://*:%i/' % server.port
|
||||
server.ip = '127.0.0.1'
|
||||
assert server.host == 'http://127.0.0.1:%i' % server.port
|
||||
assert server.url == server.host + '/'
|
||||
|
||||
|
||||
def test_proxy(db):
|
||||
proxy = orm.Proxy(
|
||||
auth_token='abc-123',
|
||||
public_server=orm.Server(
|
||||
ip='192.168.1.1',
|
||||
port=8000,
|
||||
),
|
||||
api_server=orm.Server(
|
||||
ip='127.0.0.1',
|
||||
port=8001,
|
||||
),
|
||||
)
|
||||
db.add(proxy)
|
||||
db.commit()
|
||||
assert proxy.public_server.ip == '192.168.1.1'
|
||||
assert proxy.api_server.ip == '127.0.0.1'
|
||||
assert proxy.auth_token == 'abc-123'
|
||||
|
||||
|
||||
def test_hub(db):
|
||||
hub = orm.Hub(
|
||||
server=orm.Server(
|
||||
ip = '1.2.3.4',
|
||||
port = 1234,
|
||||
base_url='/hubtest/',
|
||||
),
|
||||
|
||||
)
|
||||
db.add(hub)
|
||||
db.commit()
|
||||
assert hub.server.ip == '1.2.3.4'
|
||||
assert hub.server.port == 1234
|
||||
assert hub.api_url == 'http://1.2.3.4:1234/hubtest/api'
|
||||
server.connect_ip = 'hub'
|
||||
assert server.host == 'http://hub:%i' % server.port
|
||||
assert server.url == server.host + '/'
|
||||
|
||||
|
||||
def test_user(db):
|
||||
user = orm.User(name='kaylee',
|
||||
user = User(orm.User(name='kaylee',
|
||||
state={'pid': 4234},
|
||||
)
|
||||
))
|
||||
server = orm.Server()
|
||||
user.servers.append(server)
|
||||
db.add(user)
|
||||
|
@@ -3,9 +3,12 @@
|
||||
from urllib.parse import urlencode, urlparse
|
||||
|
||||
import requests
|
||||
from tornado import gen
|
||||
|
||||
from ..handlers import BaseHandler
|
||||
from ..utils import url_path_join as ujoin
|
||||
from .. import orm
|
||||
from ..auth import Authenticator
|
||||
|
||||
import mock
|
||||
from .mocking import FormSpawner, public_url, public_host
|
||||
@@ -13,7 +16,7 @@ from .test_api import api_request
|
||||
|
||||
def get_page(path, app, hub=True, **kw):
|
||||
if hub:
|
||||
prefix = app.hub.server.base_url
|
||||
prefix = app.hub.base_url
|
||||
else:
|
||||
prefix = app.base_url
|
||||
base_url = ujoin(public_host(app), prefix)
|
||||
@@ -21,11 +24,11 @@ def get_page(path, app, hub=True, **kw):
|
||||
return requests.get(ujoin(base_url, path), **kw)
|
||||
|
||||
def test_root_no_auth(app, io_loop):
|
||||
print(app.hub.server.is_up())
|
||||
routes = io_loop.run_sync(app.proxy.get_routes)
|
||||
print(app.hub.is_up())
|
||||
routes = io_loop.run_sync(app.proxy.get_all_routes)
|
||||
print(routes)
|
||||
print(app.hub.server)
|
||||
url = ujoin(public_host(app), app.hub.server.base_url)
|
||||
url = ujoin(public_host(app), app.hub.base_url)
|
||||
print(url)
|
||||
r = requests.get(url)
|
||||
r.raise_for_status()
|
||||
@@ -120,7 +123,7 @@ def test_spawn_page(app):
|
||||
|
||||
def test_spawn_form(app, io_loop):
|
||||
with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}):
|
||||
base_url = ujoin(public_host(app), app.hub.server.base_url)
|
||||
base_url = ujoin(public_host(app), app.hub.base_url)
|
||||
cookies = app.login_user('jones')
|
||||
orm_u = orm.User.find(app.db, 'jones')
|
||||
u = app.users[orm_u]
|
||||
@@ -142,7 +145,7 @@ def test_spawn_form(app, io_loop):
|
||||
|
||||
def test_spawn_form_with_file(app, io_loop):
|
||||
with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}):
|
||||
base_url = ujoin(public_host(app), app.hub.server.base_url)
|
||||
base_url = ujoin(public_host(app), app.hub.base_url)
|
||||
cookies = app.login_user('jones')
|
||||
orm_u = orm.User.find(app.db, 'jones')
|
||||
u = app.users[orm_u]
|
||||
@@ -178,7 +181,7 @@ def test_user_redirect(app):
|
||||
assert path == ujoin(app.base_url, '/hub/login')
|
||||
query = urlparse(r.url).query
|
||||
assert query == urlencode({
|
||||
'next': ujoin(app.hub.server.base_url, '/user-redirect/tree/top/')
|
||||
'next': ujoin(app.hub.base_url, '/user-redirect/tree/top/')
|
||||
})
|
||||
|
||||
r = get_page('/user-redirect/notebooks/test.ipynb', app, cookies=cookies)
|
||||
@@ -229,6 +232,27 @@ def test_login_fail(app):
|
||||
assert not r.cookies
|
||||
|
||||
|
||||
def test_login_strip(app):
|
||||
"""Test that login form doesn't strip whitespace from passwords"""
|
||||
form_data = {
|
||||
'username': 'spiff',
|
||||
'password': ' space man ',
|
||||
}
|
||||
base_url = public_url(app)
|
||||
called_with = []
|
||||
@gen.coroutine
|
||||
def mock_authenticate(handler, data):
|
||||
called_with.append(data)
|
||||
|
||||
with mock.patch.object(app.authenticator, 'authenticate', mock_authenticate):
|
||||
r = requests.post(base_url + 'hub/login',
|
||||
data=form_data,
|
||||
allow_redirects=False,
|
||||
)
|
||||
|
||||
assert called_with == [form_data]
|
||||
|
||||
|
||||
def test_login_redirect(app, io_loop):
|
||||
cookies = app.login_user('river')
|
||||
user = app.users['river']
|
||||
@@ -253,6 +277,28 @@ def test_login_redirect(app, io_loop):
|
||||
assert r.headers['Location'].endswith('/hub/admin')
|
||||
|
||||
|
||||
def test_auto_login(app, io_loop, request):
|
||||
class DummyLoginHandler(BaseHandler):
|
||||
def get(self):
|
||||
self.write('ok!')
|
||||
base_url = public_url(app) + '/'
|
||||
app.tornado_application.add_handlers(".*$", [
|
||||
(ujoin(app.hub.server.base_url, 'dummy'), DummyLoginHandler),
|
||||
])
|
||||
# no auto_login: end up at /hub/login
|
||||
r = requests.get(base_url)
|
||||
assert r.url == public_url(app, path='hub/login')
|
||||
# enable auto_login: redirect from /hub/login to /hub/dummy
|
||||
authenticator = Authenticator(auto_login=True)
|
||||
authenticator.login_url = lambda base_url: ujoin(base_url, 'dummy')
|
||||
|
||||
with mock.patch.dict(app.tornado_application.settings, {
|
||||
'authenticator': authenticator,
|
||||
}):
|
||||
r = requests.get(base_url)
|
||||
assert r.url == public_url(app, path='hub/dummy')
|
||||
|
||||
|
||||
def test_logout(app):
|
||||
name = 'wash'
|
||||
cookies = app.login_user(name)
|
||||
@@ -274,7 +320,7 @@ def test_login_no_whitelist_adds_user(app):
|
||||
|
||||
|
||||
def test_static_files(app):
|
||||
base_url = ujoin(public_host(app), app.hub.server.base_url)
|
||||
base_url = ujoin(public_host(app), app.hub.base_url)
|
||||
r = requests.get(ujoin(base_url, 'logo'))
|
||||
r.raise_for_status()
|
||||
assert r.headers['content-type'] == 'image/png'
|
||||
|
@@ -6,6 +6,8 @@ from queue import Queue
|
||||
from subprocess import Popen
|
||||
from urllib.parse import urlparse, unquote
|
||||
|
||||
from traitlets.config import Config
|
||||
|
||||
import pytest
|
||||
|
||||
from .. import orm
|
||||
@@ -19,12 +21,12 @@ def test_external_proxy(request, io_loop):
|
||||
auth_token = 'secret!'
|
||||
proxy_ip = '127.0.0.1'
|
||||
proxy_port = 54321
|
||||
cfg = Config()
|
||||
cfg.ConfigurableHTTPProxy.auth_token = auth_token
|
||||
cfg.ConfigurableHTTPProxy.api_url = 'http://%s:%i' % (proxy_ip, proxy_port)
|
||||
cfg.ConfigurableHTTPProxy.should_start = False
|
||||
|
||||
app = MockHub.instance(
|
||||
proxy_api_ip=proxy_ip,
|
||||
proxy_api_port=proxy_port,
|
||||
proxy_auth_token=auth_token,
|
||||
)
|
||||
app = MockHub.instance(config=cfg)
|
||||
|
||||
def fin():
|
||||
MockHub.clear_instance()
|
||||
@@ -35,7 +37,8 @@ def test_external_proxy(request, io_loop):
|
||||
# configures and starts proxy process
|
||||
env = os.environ.copy()
|
||||
env['CONFIGPROXY_AUTH_TOKEN'] = auth_token
|
||||
    cmd = app.proxy_cmd + [
    cmd = [
        'configurable-http-proxy',
        '--ip', app.ip,
        '--port', str(app.port),
        '--api-ip', proxy_ip,
@@ -57,10 +60,10 @@ def test_external_proxy(request, io_loop):
    wait_for_proxy()

    app.start([])
    assert app.proxy_process is None
    assert app.proxy.proxy_process is None

    # test if api service has a root route '/'
    routes = io_loop.run_sync(app.proxy.get_routes)
    routes = io_loop.run_sync(app.proxy.get_all_routes)
    assert list(routes.keys()) == ['/']

    # add user to the db and start a single user server
@@ -70,7 +73,7 @@ def test_external_proxy(request, io_loop):
    r = api_request(app, 'users', name, 'server', method='post')
    r.raise_for_status()

    routes = io_loop.run_sync(app.proxy.get_routes)
    routes = io_loop.run_sync(app.proxy.get_all_routes)
    # sets the desired path result
    user_path = unquote(ujoin(app.base_url, 'user/river'))
    if app.subdomain_host:
@@ -83,7 +86,8 @@ def test_external_proxy(request, io_loop):
    proxy = Popen(cmd, env=env)
    wait_for_proxy()

    routes = io_loop.run_sync(app.proxy.get_routes)
    routes = io_loop.run_sync(app.proxy.get_all_routes)

    assert list(routes.keys()) == ['/']

    # poke the server to update the proxy
@@ -91,7 +95,7 @@ def test_external_proxy(request, io_loop):
    r.raise_for_status()

    # check that the routes are correct
    routes = io_loop.run_sync(app.proxy.get_routes)
    routes = io_loop.run_sync(app.proxy.get_all_routes)
    assert sorted(routes.keys()) == ['/', user_path]

    # teardown the proxy, and start a new one with different auth and port
@@ -99,10 +103,10 @@ def test_external_proxy(request, io_loop):
    new_auth_token = 'different!'
    env['CONFIGPROXY_AUTH_TOKEN'] = new_auth_token
    proxy_port = 55432
    cmd = app.proxy_cmd + [
    cmd = ['configurable-http-proxy',
        '--ip', app.ip,
        '--port', str(app.port),
        '--api-ip', app.proxy_api_ip,
        '--api-ip', proxy_ip,
        '--api-port', str(proxy_port),
        '--default-target', 'http://%s:%i' % (app.hub_ip, app.hub_port),
    ]
@@ -112,14 +116,13 @@ def test_external_proxy(request, io_loop):
    wait_for_proxy()

    # tell the hub where the new proxy is
    new_api_url = 'http://{}:{}'.format(proxy_ip, proxy_port)
    r = api_request(app, 'proxy', method='patch', data=json.dumps({
        'port': proxy_port,
        'protocol': 'http',
        'ip': app.ip,
        'api_url': new_api_url,
        'auth_token': new_auth_token,
    }))
    r.raise_for_status()
    assert app.proxy.api_server.port == proxy_port
    assert app.proxy.api_url == new_api_url

    # get updated auth token from main thread
    def get_app_proxy_token():
@@ -131,7 +134,7 @@ def test_external_proxy(request, io_loop):
    app.proxy.auth_token = new_auth_token

    # check that the routes are correct
    routes = io_loop.run_sync(app.proxy.get_routes)
    routes = io_loop.run_sync(app.proxy.get_all_routes)
    assert sorted(routes.keys()) == ['/', user_path]


@@ -152,18 +155,18 @@ def test_check_routes(app, io_loop, username, endpoints):

    # check a valid route exists for user
    test_user = app.users[username]
    before = sorted(io_loop.run_sync(app.proxy.get_routes))
    before = sorted(io_loop.run_sync(app.proxy.get_all_routes))
    assert unquote(test_user.proxy_path) in before

    # check if a route is removed when user deleted
    io_loop.run_sync(lambda: app.proxy.check_routes(app.users, app._service_map))
    io_loop.run_sync(lambda: proxy.delete_user(test_user))
    during = sorted(io_loop.run_sync(app.proxy.get_routes))
    during = sorted(io_loop.run_sync(app.proxy.get_all_routes))
    assert unquote(test_user.proxy_path) not in during

    # check if a route exists for user
    io_loop.run_sync(lambda: app.proxy.check_routes(app.users, app._service_map))
    after = sorted(io_loop.run_sync(app.proxy.get_routes))
    after = sorted(io_loop.run_sync(app.proxy.get_all_routes))
    assert unquote(test_user.proxy_path) in after

    # check that before and after state are the same
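
These hunks track the proxy refactor: the subprocess handle moves onto the proxy object (app.proxy.proxy_process) and route listing is renamed from get_routes to get_all_routes. A minimal sketch of a coroutine consuming the renamed call, assuming a 0.8-style proxy object like the one the tests drive through io_loop.run_sync (the printing is illustrative, not part of the diff):

from tornado import gen

@gen.coroutine
def dump_routes(proxy):
    # get_all_routes() resolves to a dict keyed by routespec, e.g. '/' or '/user/river/'
    routes = yield proxy.get_all_routes()
    for routespec in sorted(routes):
        print(routespec, routes[routespec])
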
@@ -25,7 +25,7 @@ def external_service(app, name='mockservice'):
    env = {
        'JUPYTERHUB_API_TOKEN': hexlify(os.urandom(5)),
        'JUPYTERHUB_SERVICE_NAME': name,
        'JUPYTERHUB_API_URL': url_path_join(app.hub.server.url, 'api/'),
        'JUPYTERHUB_API_URL': url_path_join(app.hub.url, 'api/'),
        'JUPYTERHUB_SERVICE_URL': 'http://127.0.0.1:%i' % random_port(),
    }
    proc = Popen(mockservice_cmd, env=env)
@@ -64,7 +64,7 @@ def test_managed_service(mockservice):
def test_proxy_service(app, mockservice_url, io_loop):
    service = mockservice_url
    name = service.name
    io_loop.run_sync(app.proxy.get_routes)
    io_loop.run_sync(app.proxy.get_all_routes)
    url = public_url(app, service) + '/foo'
    r = requests.get(url, allow_redirects=False)
    path = '/services/{}/foo'.format(name)
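
The environment above is what the Hub hands to a managed service; the API URL is now built from app.hub.url rather than app.hub.server.url. A rough sketch of a service picking those variables up, assuming it runs under a Hub that set them (the users endpoint call is only an example and requires the service to have admin rights):

import os
import requests

# Injected by the Hub, as in the env dict above.
api_url = os.environ['JUPYTERHUB_API_URL']      # ends with 'api/'
api_token = os.environ['JUPYTERHUB_API_TOKEN']

session = requests.Session()
# every Hub API request from the service authenticates with its token
session.headers['Authorization'] = 'token %s' % api_token
r = session.get(api_url + 'users')
r.raise_for_status()
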
@@ -195,9 +195,7 @@ def test_hub_authenticated(request):
        cookies={'jubal': 'early'},
        allow_redirects=False,
    )
    r.raise_for_status()
    assert r.status_code == 302
    assert auth.login_url in r.headers['Location']
    assert r.status_code == 403

    # pass group whitelist
    TestHandler.hub_groups = {'lions'}
@@ -214,9 +212,7 @@ def test_hub_authenticated(request):
        cookies={'jubal': 'early'},
        allow_redirects=False,
    )
    r.raise_for_status()
    assert r.status_code == 302
    assert auth.login_url in r.headers['Location']
    assert r.status_code == 403


def test_hubauth_cookie(app, mockservice_url):
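
The two hunks above change the expectation for a user who authenticates with the Hub but is not in the handler's whitelist: a 403 instead of a redirect back to the login URL. A minimal tornado handler in the style these tests exercise, assuming jupyterhub.services.auth.HubAuthenticated is configured from the service environment:

from tornado import web
from jupyterhub.services.auth import HubAuthenticated

class WhoAmIHandler(HubAuthenticated, web.RequestHandler):
    # only members of this group are allowed through;
    # an authenticated user outside it now gets 403, not a login redirect
    hub_groups = {'lions'}

    @web.authenticated
    def get(self):
        # the user model dict returned by the Hub
        self.write(self.get_current_user())
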
@@ -37,6 +37,12 @@ def test_singleuser_auth(app, io_loop):
    r = requests.get(url_path_join(url, 'logout'), cookies=cookies)
    assert len(r.cookies) == 0

    # another user accessing should get 403, not redirect to login
    cookies = app.login_user('burgess')
    r = requests.get(url, cookies=cookies)
    assert r.status_code == 403
    assert 'burgess' in r.text


def test_disable_user_config(app, io_loop):
    # use StubSingleUserSpawner to launch a single-user app in a thread
@@ -17,6 +17,7 @@ import requests
from tornado import gen

from ..user import User
from ..objects import Hub
from .. import spawner as spawnermod
from ..spawner import LocalProcessSpawner
from .. import orm
@@ -43,8 +44,8 @@ def setup():

def new_spawner(db, **kwargs):
    kwargs.setdefault('cmd', [sys.executable, '-c', _echo_sleep])
    kwargs.setdefault('hub', Hub())
    kwargs.setdefault('user', User(db.query(orm.User).first(), {}))
    kwargs.setdefault('hub', db.query(orm.Hub).first())
    kwargs.setdefault('notebook_dir', os.getcwd())
    kwargs.setdefault('default_url', '/user/{username}/lab')
    kwargs.setdefault('INTERRUPT_TIMEOUT', 1)
@@ -9,9 +9,10 @@ from sqlalchemy import inspect
from tornado import gen
from tornado.log import app_log

from .utils import url_path_join, default_server_name, new_token
from .utils import url_path_join, default_server_name

from . import orm
from .objects import Server
from traitlets import HasTraits, Any, Dict, observe, default
from .spawner import LocalProcessSpawner

@@ -112,24 +113,21 @@ class User(HasTraits):
        return self.settings.get('spawner_class', LocalProcessSpawner)

    def __init__(self, orm_user, settings, **kwargs):
    def __init__(self, orm_user, settings=None, **kwargs):
        self.orm_user = orm_user
        self.settings = settings
        self.settings = settings or {}
        self._instances = {}
        super().__init__(**kwargs)

        hub = self.db.query(orm.Hub).first()

        self.allow_named_servers = self.settings.get('allow_named_servers', False)

        self.cookie_name = '%s-%s' % (hub.server.cookie_name, quote(self.name, safe=''))
        self.base_url = url_path_join(
            self.settings.get('base_url', '/'), 'user', self.escaped_name)

        self.spawner = self.spawner_class(
            user=self,
            db=self.db,
            hub=hub,
            hub=self.settings.get('hub'),
            authenticator=self.authenticator,
            config=self.settings.get('config'),
        )
@@ -174,6 +172,13 @@ class User(HasTraits):
            return False
        return True

    @property
    def server(self):
        if len(self.servers) == 0:
            return None
        else:
            return Server(orm_server=self.servers[0])

    @property
    def escaped_name(self):
        """My name, escaped for use in URLs, cookies, etc."""
@@ -239,18 +244,17 @@ class User(HasTraits):
        server_name = ''
        base_url = self.base_url

        server = orm.Server(
        orm_server = orm.Server(
            name=server_name,
            cookie_name=self.cookie_name,
            base_url=base_url,
        )
        self.servers.append(server)
        db.add(self)
        db.commit()
        self.servers.append(orm_server)

        api_token = self.new_api_token()
        db.commit()

        server = Server(orm_server=orm_server)

        spawner = self.spawner

        # Save spawner's instance inside self._instances
@@ -299,7 +303,7 @@ class User(HasTraits):
            ip_port = yield gen.with_timeout(timedelta(seconds=spawner.start_timeout), f)
            if ip_port:
                # get ip, port info from return value of start()
                self.server.ip, self.server.port = ip_port
                server.ip, server.port = ip_port
            else:
                # prior to 0.7, spawners had to store this info in user.server themselves.
                # Handle < 0.7 behavior with a warning, assuming info was stored in db by the Spawner.
@@ -337,14 +341,14 @@ class User(HasTraits):
        db.commit()
        self.waiting_for_response = True
        try:
            yield self.server.wait_up(http=True, timeout=spawner.http_timeout)
            yield server.wait_up(http=True, timeout=spawner.http_timeout)
        except Exception as e:
            if isinstance(e, TimeoutError):
                self.log.warning(
                    "{user}'s server never showed up at {url} "
                    "after {http_timeout} seconds. Giving up".format(
                        user=self.name,
                        url=self.server.url,
                        url=server.url,
                        http_timeout=spawner.http_timeout,
                    )
                )
@@ -352,7 +356,7 @@ class User(HasTraits):
            else:
                e.reason = 'error'
                self.log.error("Unhandled error waiting for {user}'s server to show up at {url}: {error}".format(
                    user=self.name, url=self.server.url, error=e,
                    user=self.name, url=server.url, error=e,
                ))
                try:
                    yield self.stop()
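
Throughout spawn(), direct self.server access gives way to a local server wrapper built from the new orm.Server row, and both Spawner.start() and the follow-up HTTP readiness check sit behind timeouts. A condensed sketch of that control flow with tornado coroutines, assuming a spawner and a Server-like object with wait_up() as in the hunks (the function name and error handling are illustrative):

from datetime import timedelta
from tornado import gen

@gen.coroutine
def start_and_wait(spawner, server):
    f = spawner.start()
    # bound how long Spawner.start() may take
    ip_port = yield gen.with_timeout(timedelta(seconds=spawner.start_timeout), f)
    if ip_port:
        # new-style spawners return (ip, port); store it on the wrapper
        server.ip, server.port = ip_port
    # then bound how long the single-user server may take to answer HTTP
    yield server.wait_up(http=True, timeout=spawner.http_timeout)
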
@@ -37,8 +37,10 @@ def can_connect(ip, port):

    Return True if we can connect, False otherwise.
    """
    if ip in {'', '0.0.0.0'}:
        ip = '127.0.0.1'
    try:
        socket.create_connection((ip, port))
        socket.create_connection((ip, port)).close()
    except socket.error as e:
        if e.errno not in {errno.ECONNREFUSED, errno.ETIMEDOUT}:
            app_log.error("Unexpected error connecting to %s:%i %s", ip, port, e)
@@ -50,6 +52,8 @@ def can_connect(ip, port):
@gen.coroutine
def wait_for_server(ip, port, timeout=10):
    """Wait for any server to show up at ip:port."""
    if ip in {'', '0.0.0.0'}:
        ip = '127.0.0.1'
    loop = ioloop.IOLoop.current()
    tic = loop.time()
    while loop.time() - tic < timeout:
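
can_connect now closes the probe socket it opens, and both helpers normalize wildcard IPs before probing. A small usage sketch, assuming the helper is imported from jupyterhub.utils as in this tree (the port is arbitrary):

from jupyterhub.utils import can_connect

# '' and '0.0.0.0' are treated as 127.0.0.1, and the probe connection
# is closed as soon as it succeeds
if can_connect('0.0.0.0', 8081):
    print("something is already listening on the hub API port")
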
@@ -11,7 +11,7 @@
{{ custom_html }}
{% elif login_service %}
<div class="service-login">
  <a class='btn btn-jupyter btn-lg' href='{{login_url}}'>
  <a class='btn btn-jupyter btn-lg' href='{{authenticator_login_url}}'>
    Sign in with {{login_service}}
  </a>
</div>
9
share/jupyter/hub/templates/logout.html
Normal file
9
share/jupyter/hub/templates/logout.html
Normal file
@@ -0,0 +1,9 @@
{% extends "page.html" %}
{% block main %}
<div id="logout-main" class="container">
  <p>
    Successfully logged out.
  </p>
</div>
{% endblock %}