move redirect loop fallback to /user/name handler

- add user_redirect_limit to settings, default 5 (that's still a whole minute before failing)
- limit sleep to ten seconds
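For reference, a rough tally of the worst-case wait this change allows. This is an illustrative sketch, not code from the diff: DT_SCALE is imported from jupyterhub's utils in the patch below and its real value is not shown here, so 2 is used as a stand-in base.

# Illustrative sketch only: DT_SCALE's real value lives in jupyterhub's utils
# and is not shown in this diff; 2 is a stand-in exponential base.
DT_SCALE = 2
user_redirect_limit = 5

# The patched handler sleeps min(1 * DT_SCALE ** redirects, 10) on each
# redirected request (redirects = 1 .. limit - 1) and raises HTTP 500 at the limit.
waits = [min(1 * (DT_SCALE ** r), 10) for r in range(1, user_redirect_limit)]
print(waits, sum(waits))  # [2, 4, 8, 10] -> 24 seconds of sleep, plus request round-trips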
@@ -20,7 +20,7 @@ from .. import __version__
 from .. import orm
 from ..objects import Server
 from ..spawner import LocalProcessSpawner
-from ..utils import url_path_join
+from ..utils import url_path_join, DT_SCALE
 
 # pattern for the authentication token header
 auth_header_pat = re.compile(r'^(?:token|bearer)\s+([^\s]+)$', flags=re.IGNORECASE)
@@ -503,32 +503,15 @@ class PrefixRedirectHandler(BaseHandler):
 
     Redirects /foo to /prefix/foo, etc.
     """
-    @gen.coroutine
     def get(self):
-        # We do exponential backoff here - since otherwise we can get stuck in a redirect loop!
-        # This is important in many distributed proxy implementations - those are often eventually
-        # consistent and can take upto a couple of seconds to actually apply throughout the cluster.
-        times = int(self.get_argument('times', 0))
-        # FIXME: should this be customizable? At times=7, it'll wait for about 12s the last time.
-        if times > 7:
-            # We stop if we've been redirected 12 times.
-            raise web.HTTPError(500)
-        # We want this to be 0 the first time, and then increase after that. This formula seems to work,
-        # but we might wanna make it customizable?
-        yield gen.sleep(( (2 ** times) - 1)/10)
         uri = self.request.uri
         if uri.startswith(self.base_url):
             path = self.request.uri[len(self.base_url):]
         else:
             path = self.request.path
-
-        url_parts = urlparse(url_path_join(self.hub.base_url, path))
-        query_parts = parse_qs(url_parts.query)
-        query_parts['times'] = times + 1
-        url_parts = url_parts._replace(query=urlencode(query_parts))
-        new_url = urlunparse(url_parts)
-
-        self.redirect(new_url, permanent=False)
+        self.redirect(url_path_join(
+            self.hub.base_url, path,
+        ), permanent=False)
 
 
 class UserSpawnHandler(BaseHandler):
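The simplified PrefixRedirectHandler now just computes a single redirect target and leaves all loop bookkeeping to UserSpawnHandler. A minimal standalone sketch of that target computation, using a toy join helper rather than jupyterhub's url_path_join:

# Minimal sketch of the simplified redirect: strip base_url when present,
# then prepend the Hub prefix. _join is a toy stand-in for url_path_join.
def _join(*parts):
    return '/' + '/'.join(p.strip('/') for p in parts if p.strip('/'))

def prefix_redirect_target(request_uri, base_url='/', hub_base_url='/hub/'):
    if request_uri.startswith(base_url):
        path = request_uri[len(base_url):]
    else:
        path = request_uri  # the real handler uses request.path here
    return _join(hub_base_url, path)

assert prefix_redirect_target('/user/alice/tree') == '/hub/user/alice/tree'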
@@ -544,6 +527,7 @@ class UserSpawnHandler(BaseHandler):
     @gen.coroutine
     def get(self, name, user_path):
         current_user = self.get_current_user()
+
         if current_user and current_user.name == name:
             # If people visit /user/:name directly on the Hub,
             # the redirects will just loop, because the proxy is bypassed.
@@ -578,12 +562,40 @@ class UserSpawnHandler(BaseHandler):
                         return
             else:
                 yield self.spawn_single_user(current_user)
+
+            # We do exponential backoff here - since otherwise we can get stuck in a redirect loop!
+            # This is important in many distributed proxy implementations - those are often eventually
+            # consistent and can take upto a couple of seconds to actually apply throughout the cluster.
+            try:
+                redirects = int(self.get_argument('redirects', 0))
+            except ValueError:
+                self.log.warning("Invalid redirects argument %r", self.get_argument('redirects'))
+                redirects = 0
+
+            if redirects >= self.settings.get('user_redirect_limit', 5):
+                # We stop if we've been redirected too many times.
+                raise web.HTTPError(500, "Redirect loop detected.")
+
             # set login cookie anew
             self.set_login_cookie(current_user)
             without_prefix = self.request.uri[len(self.hub.base_url):]
             target = url_path_join(self.base_url, without_prefix)
             if self.subdomain_host:
                 target = current_user.host + target
+
+            # record redirect count in query parameter
+            if redirects:
+                self.log.warning("Redirect loop detected on %s", self.request.uri)
+                yield gen.sleep(min(1 * (DT_SCALE ** redirects), 10))
+                # rewrite target url with new `redirects` query value
+                url_parts = urlparse(target)
+                query_parts = parse_qs(url_parts.query)
+                query_parts['redirects'] = redirects + 1
+                url_parts = url_parts._replace(query=urlencode(query_parts))
+                target = urlunparse(url_parts)
+            else:
+                target = url_concat(target, {'redirects': 1})
+
             self.redirect(target)
             self.statsd.incr('redirects.user_after_login')
         elif current_user:
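The new loop detection rides entirely on a `redirects` query parameter. Below is a standalone sketch of the urlparse/parse_qs/urlencode round-trip the handler performs when bumping that counter; the function name is illustrative, not jupyterhub API.

from urllib.parse import urlparse, urlunparse, parse_qs, urlencode

def bump_redirects(target, redirects):
    # parse the target URL, bump the counter, and re-encode the query string
    url_parts = urlparse(target)
    query_parts = parse_qs(url_parts.query)
    query_parts['redirects'] = [str(redirects + 1)]
    # doseq=True flattens the list values that parse_qs produces
    return urlunparse(url_parts._replace(query=urlencode(query_parts, doseq=True)))

print(bump_redirects('/user/alice/?redirects=1', 1))  # /user/alice/?redirects=2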
|
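Since the handler reads the limit from the tornado application settings with a default of 5, a deployment can presumably override it through JupyterHub's tornado_settings trait. A hedged jupyterhub_config.py sketch, assuming those settings are merged into the handler's self.settings:

# jupyterhub_config.py (sketch, assuming tornado_settings entries reach
# self.settings in the handler; the key name comes from the diff above)
c.JupyterHub.tornado_settings = {
    'user_redirect_limit': 8,
}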