add JupyterHub.public_url config

When specified, removes all guesses based on the request,
which aren't always correct in proxy situations (especially Host and protocol)
Min RK
2023-06-14 14:07:36 +02:00
parent 0e4deec714
commit 64d237a89e
4 changed files with 54 additions and 15 deletions
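
A minimal configuration sketch (not part of the commit), based on the help text of the new option below; the hostname and URL prefix are placeholders:

# jupyterhub_config.py
c = get_config()  # noqa - name injected when JupyterHub loads this config file
# Full public URL of the Hub, including the public-facing base_url prefix:
c.JupyterHub.public_url = "https://hub.example.com/jupyter/"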

View File

@@ -110,14 +110,20 @@ class OAuthHandler:
         redirect_uri = self.get_argument('redirect_uri')
         if not redirect_uri or not redirect_uri.startswith('/'):
             return uri
         # make absolute local redirects full URLs
         # to satisfy oauthlib's absolute URI requirement
-        redirect_uri = (
-            get_browser_protocol(self.request)
-            + "://"
-            + self.request.host
-            + redirect_uri
-        )
+        public_url = self.settings.get("public_url")
+        if public_url:
+            proto = public_url.scheme
+            host = public_url.netloc
+        else:
+            # guess from request
+            proto = get_browser_protocol(self.request)
+            host = self.request.host
+        redirect_uri = f"{proto}://{host}{redirect_uri}"
         parsed_url = urlparse(uri)
         query_list = parse_qsl(parsed_url.query, keep_blank_values=True)
         for idx, item in enumerate(query_list):
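
An illustrative, standalone sketch (not from the commit) of how the new branch above assembles an absolute redirect URI when a public URL is configured; all values are placeholders:

from urllib.parse import urlparse

public_url = urlparse("https://hub.example.com/")  # stands in for settings["public_url"]
redirect_uri = "/user/alice/oauth_callback"  # hypothetical relative redirect target
proto, host = public_url.scheme, public_url.netloc
print(f"{proto}://{host}{redirect_uri}")
# prints: https://hub.example.com/user/alice/oauth_callback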

View File

@@ -704,6 +704,21 @@ class JupyterHub(Application):
         proto = 'https' if self.ssl_cert else 'http'
         return proto + '://:8000'
 
+    public_url = Unicode(
+        "",
+        config=True,
+        help="""Set the public URL of JupyterHub
+
+        This will skip any detection of URL and protocol from requests,
+        which isn't always correct when JupyterHub is behind
+        multiple layers of proxies, etc.
+        Usually the failure is detecting http when it's really https.
+
+        Should include the full, public URL of JupyterHub,
+        including the public-facing base_url prefix.
+        """,
+    )
+
     subdomain_host = Unicode(
         '',
         help="""Run single-user servers on subdomains of this host.
@@ -2755,6 +2770,7 @@ class JupyterHub(Application):
             spawner_class=self.spawner_class,
             base_url=self.base_url,
             default_url=self.default_url,
+            public_url=urlparse(self.public_url) if self.public_url else "",
             cookie_secret=self.cookie_secret,
             cookie_max_age_days=self.cookie_max_age_days,
             redirect_to_server=self.redirect_to_server,
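
A short sketch (not from the commit) of what handlers receive from the settings line above: an unconfigured public_url stays a falsy empty string, while a configured one becomes a ParseResult with .scheme and .netloc, which is what the "if public_url:" branches in the handler changes below rely on. Values are placeholders:

from urllib.parse import urlparse

for configured in ("", "https://hub.example.com/jupyter/"):
    public_url = urlparse(configured) if configured else ""
    if public_url:
        print("configured:", public_url.scheme, public_url.netloc)
    else:
        print("unset: guess from the request instead")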

View File

@@ -569,10 +569,15 @@ class BaseHandler(RequestHandler):
         # tornado <4.2 have a bug that consider secure==True as soon as
         # 'secure' kwarg is passed to set_secure_cookie
         kwargs = {'httponly': True}
-        if self.request.protocol == 'https':
-            kwargs['secure'] = True
-        if self.subdomain_host:
-            kwargs['domain'] = self.domain
+        public_url = self.settings.get("public_url")
+        if public_url:
+            if public_url.scheme == 'https':
+                kwargs['secure'] = True
+        else:
+            if self.request.protocol == 'https':
+                kwargs['secure'] = True
+        if self.subdomain_host:
+            kwargs['domain'] = self.domain
 
         kwargs.update(self.settings.get('cookie_options', {}))
         kwargs.update(overrides)
@@ -654,8 +659,15 @@ class BaseHandler(RequestHandler):
         next_url = self.get_argument('next', default='')
         # protect against some browsers' buggy handling of backslash as slash
         next_url = next_url.replace('\\', '%5C')
-        proto = get_browser_protocol(self.request)
-        host = self.request.host
+        public_url = self.settings.get("public_url")
+        if public_url:
+            proto = public_url.scheme
+            host = public_url.netloc
+        else:
+            # guess from request
+            proto = get_browser_protocol(self.request)
+            host = self.request.host
+
         if next_url.startswith("///"):
             # strip more than 2 leading // down to 2
             # because urlparse treats that as empty netloc,

View File

@@ -37,7 +37,7 @@ import uuid
 import warnings
 from http import HTTPStatus
 from unittest import mock
-from urllib.parse import urlencode
+from urllib.parse import urlencode, urlparse
 
 from tornado.httpclient import AsyncHTTPClient, HTTPRequest
 from tornado.httputil import url_concat
@@ -891,8 +891,13 @@ class HubOAuth(HubAuth):
             # OAuth that doesn't complete shouldn't linger too long.
             'max_age': 600,
         }
-        if get_browser_protocol(handler.request) == 'https':
-            kwargs['secure'] = True
+        public_url = os.getenv("JUPYTERHUB_PUBLIC_URL")
+        if public_url:
+            if urlparse(public_url).scheme == 'https':
+                kwargs['secure'] = True
+        else:
+            if get_browser_protocol(handler.request) == 'https':
+                kwargs['secure'] = True
         # load user cookie overrides
         kwargs.update(self.cookie_options)
         handler.set_secure_cookie(cookie_name, b64_state, **kwargs)
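
An illustrative sketch (not from the commit) exercising the branch above, which checks a JUPYTERHUB_PUBLIC_URL environment variable. How that variable is provided is not shown in this diff, so the value here is a placeholder:

import os
from urllib.parse import urlparse

os.environ["JUPYTERHUB_PUBLIC_URL"] = "https://hub.example.com/"  # placeholder

public_url = os.getenv("JUPYTERHUB_PUBLIC_URL")
secure = bool(public_url) and urlparse(public_url).scheme == "https"
print("secure cookie:", secure)  # True for an https public URL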