Mirror of https://github.com/jupyterhub/jupyterhub.git (synced 2025-10-18 15:33:02 +00:00)
add JupyterHub.public_url config
When specified, this removes all guessing based on the request, which isn't always correct in proxy situations (especially the Host and protocol).
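For context, a minimal configuration sketch of the new option; the hostname and prefix below are illustrative assumptions, not taken from this commit:

# jupyterhub_config.py -- minimal sketch with a hypothetical hostname/prefix
c = get_config()  # noqa -- provided by JupyterHub when it loads this file
# Full public-facing URL, including the base_url prefix seen by browsers:
c.JupyterHub.public_url = "https://hub.example.org/jupyter/"
c.JupyterHub.base_url = "/jupyter/"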
@@ -110,14 +110,20 @@ class OAuthHandler:
         redirect_uri = self.get_argument('redirect_uri')
         if not redirect_uri or not redirect_uri.startswith('/'):
             return uri

         # make absolute local redirects full URLs
         # to satisfy oauthlib's absolute URI requirement
-        redirect_uri = (
-            get_browser_protocol(self.request)
-            + "://"
-            + self.request.host
-            + redirect_uri
-        )
+        public_url = self.settings.get("public_url")
+        if public_url:
+            proto = public_url.scheme
+            host = public_url.netloc
+        else:
+            # guess from request
+            proto = get_browser_protocol(self.request)
+            host = self.request.host
+        redirect_uri = f"{proto}://{host}{redirect_uri}"

         parsed_url = urlparse(uri)
         query_list = parse_qsl(parsed_url.query, keep_blank_values=True)
         for idx, item in enumerate(query_list):
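The resolution order in the hunk above can be read as a small standalone helper. This is only an illustrative sketch: the function name is invented, and it uses request.protocol where the Hub calls get_browser_protocol (which also consults forwarded headers).

from urllib.parse import urlparse

def resolve_proto_host(settings, request):
    # Illustrative only: prefer the configured public_url, else guess from the request.
    public_url = settings.get("public_url")  # a parsed URL when configured, "" otherwise
    if public_url:
        return public_url.scheme, public_url.netloc
    # guessing from the request can be wrong behind multiple proxy layers
    return request.protocol, request.host

# e.g. settings = {"public_url": urlparse("https://hub.example.org/jupyter/")}
# resolve_proto_host(settings, request) -> ("https", "hub.example.org")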
@@ -704,6 +704,21 @@ class JupyterHub(Application):
         proto = 'https' if self.ssl_cert else 'http'
         return proto + '://:8000'

+    public_url = Unicode(
+        "",
+        config=True,
+        help="""Set the public URL of JupyterHub
+
+        This will skip any detection of URL and protocol from requests,
+        which isn't always correct when JupyterHub is behind
+        multiple layers of proxies, etc.
+        Usually the failure is detecting http when it's really https.
+
+        Should include the full, public URL of JupyterHub,
+        including the public-facing base_url prefix.
+        """,
+    )
+
     subdomain_host = Unicode(
         '',
         help="""Run single-user servers on subdomains of this host.
@@ -2755,6 +2770,7 @@ class JupyterHub(Application):
             spawner_class=self.spawner_class,
             base_url=self.base_url,
             default_url=self.default_url,
+            public_url=urlparse(self.public_url) if self.public_url else "",
             cookie_secret=self.cookie_secret,
             cookie_max_age_days=self.cookie_max_age_days,
             redirect_to_server=self.redirect_to_server,
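Note that the value stored in the tornado settings above is the already-parsed URL, so handlers read .scheme and .netloc directly. A quick illustration with a hypothetical hostname:

from urllib.parse import urlparse

public_url = urlparse("https://hub.example.org/jupyter/")
print(public_url.scheme)  # 'https' -- drives the secure-cookie and redirect logic
print(public_url.netloc)  # 'hub.example.org' -- the Host part of absolute URLs
print(public_url.path)    # '/jupyter/' -- the public-facing base_url prefix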
@@ -569,10 +569,15 @@ class BaseHandler(RequestHandler):
         # tornado <4.2 have a bug that consider secure==True as soon as
         # 'secure' kwarg is passed to set_secure_cookie
         kwargs = {'httponly': True}
-        if self.request.protocol == 'https':
-            kwargs['secure'] = True
-        if self.subdomain_host:
-            kwargs['domain'] = self.domain
+        public_url = self.settings.get("public_url")
+        if public_url:
+            if public_url.scheme == 'https':
+                kwargs['secure'] = True
+        else:
+            if self.request.protocol == 'https':
+                kwargs['secure'] = True
+        if self.subdomain_host:
+            kwargs['domain'] = self.domain

         kwargs.update(self.settings.get('cookie_options', {}))
         kwargs.update(overrides)
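With an https public_url configured, the cookie keyword arguments assembled above end up roughly as follows; this is a standalone sketch, not Hub code, and the URL is hypothetical:

from urllib.parse import urlparse

# settings["public_url"] is "" when unset, or a parsed URL when configured
public_url = urlparse("https://hub.example.org/")
kwargs = {'httponly': True}
if public_url and public_url.scheme == 'https':
    kwargs['secure'] = True
print(kwargs)  # {'httponly': True, 'secure': True}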
@@ -654,8 +659,15 @@ class BaseHandler(RequestHandler):
         next_url = self.get_argument('next', default='')
         # protect against some browsers' buggy handling of backslash as slash
         next_url = next_url.replace('\\', '%5C')
-        proto = get_browser_protocol(self.request)
-        host = self.request.host
+        public_url = self.settings.get("public_url")
+        if public_url:
+            proto = public_url.scheme
+            host = public_url.netloc
+        else:
+            # guess from request
+            proto = get_browser_protocol(self.request)
+            host = self.request.host
+
         if next_url.startswith("///"):
             # strip more than 2 leading // down to 2
             # because urlparse treats that as empty netloc,
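The backslash replacement above guards against open redirects: some browsers treat a backslash as a forward slash, so an attacker-supplied next=\\evil.com would otherwise behave like a protocol-relative //evil.com URL. A standalone illustration (not JupyterHub code):

from urllib.parse import urlparse

next_url = "\\\\evil.com/phish"            # raw value: \\evil.com/phish
browser_view = next_url.replace("\\", "/")
print(urlparse(browser_view).netloc)        # 'evil.com' -- what a lenient browser sees
sanitized = next_url.replace("\\", "%5C")
print(urlparse(sanitized).netloc)           # '' -- backslashes escaped, no netloc left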
@@ -37,7 +37,7 @@ import uuid
 import warnings
 from http import HTTPStatus
 from unittest import mock
-from urllib.parse import urlencode
+from urllib.parse import urlencode, urlparse

 from tornado.httpclient import AsyncHTTPClient, HTTPRequest
 from tornado.httputil import url_concat
@@ -891,8 +891,13 @@ class HubOAuth(HubAuth):
             # OAuth that doesn't complete shouldn't linger too long.
             'max_age': 600,
         }
-        if get_browser_protocol(handler.request) == 'https':
-            kwargs['secure'] = True
+        public_url = os.getenv("JUPYTERHUB_PUBLIC_URL")
+        if public_url:
+            if urlparse(public_url).scheme == 'https':
+                kwargs['secure'] = True
+        else:
+            if get_browser_protocol(handler.request) == 'https':
+                kwargs['secure'] = True
         # load user cookie overrides
         kwargs.update(self.cookie_options)
         handler.set_secure_cookie(cookie_name, b64_state, **kwargs)
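On the service side, HubOAuth reads the same information from the environment rather than from tornado settings. A sketch of the resulting check; the URL is a hypothetical example and is set by hand here only so the snippet runs standalone:

import os
from urllib.parse import urlparse

# In a real deployment the Hub is expected to provide this variable;
# it is set here only for illustration.
os.environ.setdefault("JUPYTERHUB_PUBLIC_URL", "https://hub.example.org/")

public_url = os.getenv("JUPYTERHUB_PUBLIC_URL")
secure = bool(public_url) and urlparse(public_url).scheme == "https"
print(secure)  # True -> the OAuth state cookie is marked Secure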