diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2a4d5760..b7785e68 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -9,8 +9,6 @@ name: Run tests on: pull_request: push: - branches: [main, master, pr/migrate-to-gh-actions-from-travis] - tags: defaults: run: @@ -79,6 +77,10 @@ jobs: # Tests everything when the user instances are started with # jupyter_server instead of notebook. # + # ssl: + # Tests everything using internal SSL connections instead of + # unencrypted HTTP + # # main_dependencies: # Tests everything when the we use the latest available dependencies # from: ipytraitlets. @@ -87,10 +89,14 @@ jobs: # GitHub UI when the workflow run, we avoid using true/false as # values by instead duplicating the name to signal true. include: + - python: "3.6" + oldest_dependencies: oldest_dependencies - python: "3.6" subdomain: subdomain - python: "3.7" db: mysql + - python: "3.7" + ssl: ssl - python: "3.8" db: postgres - python: "3.8" @@ -111,6 +117,9 @@ jobs: echo "MYSQL_HOST=127.0.0.1" >> $GITHUB_ENV echo "JUPYTERHUB_TEST_DB_URL=mysql+mysqlconnector://root@127.0.0.1:3306/jupyterhub" >> $GITHUB_ENV fi + if [ "${{ matrix.ssl }}" == "ssl" ]; then + echo "SSL_ENABLED=1" >> $GITHUB_ENV + fi if [ "${{ matrix.db }}" == "postgres" ]; then echo "PGHOST=127.0.0.1" >> $GITHUB_ENV echo "PGUSER=test_user" >> $GITHUB_ENV @@ -145,6 +154,14 @@ jobs: pip install --upgrade pip pip install --upgrade . 
-r dev-requirements.txt + if [ "${{ matrix.oldest_dependencies }}" != "" ]; then + # take any dependencies in requirements.txt such as tornado>=5.0 + # and transform them to tornado==5.0 so we can run tests with + # the earliest-supported versions + cat requirements.txt | grep '>=' | sed -e 's@>=@==@g' > oldest-requirements.txt + pip install -r oldest-requirements.txt + fi + if [ "${{ matrix.main_dependencies }}" != "" ]; then pip install git+https://github.com/ipython/traitlets#egg=traitlets --force fi diff --git a/.gitignore b/.gitignore index 44a7b3d4..5ff18d83 100644 --- a/.gitignore +++ b/.gitignore @@ -28,3 +28,4 @@ htmlcov .pytest_cache pip-wheel-metadata docs/source/reference/metrics.rst +oldest-requirements.txt diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e8f6663e..67297a8e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,7 +4,7 @@ repos: hooks: - id: reorder-python-imports - repo: https://github.com/psf/black - rev: 19.10b0 + rev: 20.8b1 hooks: - id: black - repo: https://github.com/pre-commit/pre-commit-hooks diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 0f5ec05a..326b4899 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,7 +1,7 @@ # Contributing to JupyterHub Welcome! As a [Jupyter](https://jupyter.org) project, -you can follow the [Jupyter contributor guide](https://jupyter.readthedocs.io/en/latest/contributor/content-contributor.html). +you can follow the [Jupyter contributor guide](https://jupyter.readthedocs.io/en/latest/contributing/content-contributor.html). Make sure to also follow [Project Jupyter's Code of Conduct](https://github.com/jupyter/governance/blob/master/conduct/code_of_conduct.md) for a friendly and welcoming collaborative environment. 
diff --git a/docs/rest-api.yml b/docs/rest-api.yml index 0c07e640..869fa85b 100644 --- a/docs/rest-api.yml +++ b/docs/rest-api.yml @@ -115,6 +115,21 @@ paths: - oauth2: - users - read:users + parameters: + - name: state + in: query + required: false + type: string + enum: ["inactive", "active", "ready"] + description: | + Return only users who have servers in the given state. + If unspecified, return all users. + + active: all users with any active servers (ready OR pending) + ready: all users who have any ready servers (running, not pending) + inactive: all users who have *no* active servers (complement of active) + + Added in JupyterHub 1.3 responses: '200': description: The Hub's user list diff --git a/docs/source/changelog.md b/docs/source/changelog.md index 637d2c83..6a032337 100644 --- a/docs/source/changelog.md +++ b/docs/source/changelog.md @@ -9,6 +9,37 @@ command line for details. ## 1.2 +### [1.2.2] 2020-11-27 + +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/1.2.1...41f291c0c973223c33a6aa1fa86d5d57f297be78)) + +#### Enhancements made + +* Standardize "Sign in" capitalization on the login page [#3252](https://github.com/jupyterhub/jupyterhub/pull/3252) ([@cmd-ntrf](https://github.com/cmd-ntrf)) + +#### Bugs fixed + +* Fix RootHandler when default_url is a callable [#3265](https://github.com/jupyterhub/jupyterhub/pull/3265) ([@danlester](https://github.com/danlester)) +* Only preserve params when ?next= is unspecified [#3261](https://github.com/jupyterhub/jupyterhub/pull/3261) ([@minrk](https://github.com/minrk)) +* \[Windows\] Improve robustness when detecting and closing existing proxy processes [#3237](https://github.com/jupyterhub/jupyterhub/pull/3237) ([@alexweav](https://github.com/alexweav)) + +#### Maintenance and upkeep improvements + +* Environment marker on pamela [#3255](https://github.com/jupyterhub/jupyterhub/pull/3255) ([@fcollonval](https://github.com/fcollonval)) +* remove push-branch conditions for CI 
[#3250](https://github.com/jupyterhub/jupyterhub/pull/3250) ([@minrk](https://github.com/minrk)) +* Migrate from travis to GitHub actions [#3246](https://github.com/jupyterhub/jupyterhub/pull/3246) ([@consideRatio](https://github.com/consideRatio)) + +#### Documentation improvements + +* Update services-basics.md to use jupyterhub_idle_culler [#3257](https://github.com/jupyterhub/jupyterhub/pull/3257) ([@manics](https://github.com/manics)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2020-10-30&to=2020-11-27&type=c)) + +[@alexweav](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aalexweav+updated%3A2020-10-30..2020-11-27&type=Issues) | [@belfhi](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abelfhi+updated%3A2020-10-30..2020-11-27&type=Issues) | [@betatim](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abetatim+updated%3A2020-10-30..2020-11-27&type=Issues) | [@cmd-ntrf](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acmd-ntrf+updated%3A2020-10-30..2020-11-27&type=Issues) | [@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2020-10-30..2020-11-27&type=Issues) | [@danlester](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adanlester+updated%3A2020-10-30..2020-11-27&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Afcollonval+updated%3A2020-10-30..2020-11-27&type=Issues) | [@GeorgianaElena](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AGeorgianaElena+updated%3A2020-10-30..2020-11-27&type=Issues) | [@ianabc](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aianabc+updated%3A2020-10-30..2020-11-27&type=Issues) | 
[@IvanaH8](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AIvanaH8+updated%3A2020-10-30..2020-11-27&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2020-10-30..2020-11-27&type=Issues) | [@meeseeksmachine](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ameeseeksmachine+updated%3A2020-10-30..2020-11-27&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2020-10-30..2020-11-27&type=Issues) | [@mriedem](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amriedem+updated%3A2020-10-30..2020-11-27&type=Issues) | [@olifre](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aolifre+updated%3A2020-10-30..2020-11-27&type=Issues) | [@rcthomas](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arcthomas+updated%3A2020-10-30..2020-11-27&type=Issues) | [@rgbkrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Argbkrk+updated%3A2020-10-30..2020-11-27&type=Issues) | [@rkdarst](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arkdarst+updated%3A2020-10-30..2020-11-27&type=Issues) | [@slemonide](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aslemonide+updated%3A2020-10-30..2020-11-27&type=Issues) | [@support](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asupport+updated%3A2020-10-30..2020-11-27&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awelcome+updated%3A2020-10-30..2020-11-27&type=Issues) | [@yuvipanda](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ayuvipanda+updated%3A2020-10-30..2020-11-27&type=Issues) + + ### [1.2.1] 2020-10-30 ([full changelog](https://github.com/jupyterhub/jupyterhub/compare/1.2.0...1.2.1)) diff --git a/docs/source/getting-started/services-basics.md 
b/docs/source/getting-started/services-basics.md index 9014fb9b..a5246350 100644 --- a/docs/source/getting-started/services-basics.md +++ b/docs/source/getting-started/services-basics.md @@ -5,7 +5,7 @@ that interacts with the Hub's REST API. A Service may perform a specific or action or task. For example, shutting down individuals' single user notebook servers that have been idle for some time is a good example of a task that could be automated by a Service. Let's look at how the -[cull_idle_servers][] script can be used as a Service. +[jupyterhub_idle_culler][] script can be used as a Service. ## Real-world example to cull idle servers @@ -15,11 +15,11 @@ document will: - explain some basic information about API tokens - clarify that API tokens can be used to authenticate to single-user servers as of [version 0.8.0](../changelog) -- show how the [cull_idle_servers][] script can be: +- show how the [jupyterhub_idle_culler][] script can be: - used in a Hub-managed service - run as a standalone script -Both examples for `cull_idle_servers` will communicate tasks to the +Both examples for `jupyterhub_idle_culler` will communicate tasks to the Hub via the REST API. ## API Token basics @@ -78,17 +78,23 @@ single-user servers, and only cookies can be used for authentication. 0.8 supports using JupyterHub API tokens to authenticate to single-user servers. 
-## Configure `cull-idle` to run as a Hub-Managed Service +## Configure the idle culler to run as a Hub-Managed Service + +Install the idle culler: + +``` +pip install jupyterhub-idle-culler +``` In `jupyterhub_config.py`, add the following dictionary for the -`cull-idle` Service to the `c.JupyterHub.services` list: +`idle-culler` Service to the `c.JupyterHub.services` list: ```python c.JupyterHub.services = [ { - 'name': 'cull-idle', + 'name': 'idle-culler', 'admin': True, - 'command': [sys.executable, 'cull_idle_servers.py', '--timeout=3600'], + 'command': [sys.executable, '-m', 'jupyterhub_idle_culler', '--timeout=3600'], } ] ``` @@ -101,21 +107,21 @@ where: ## Run `cull-idle` manually as a standalone script -Now you can run your script, i.e. `cull_idle_servers`, by providing it +Now you can run your script by providing it the API token and it will authenticate through the REST API to interact with it. -This will run `cull-idle` manually. `cull-idle` can be run as a standalone +This will run the idle culler service manually. It can be run as a standalone script anywhere with access to the Hub, and will periodically check for idle servers and shut them down via the Hub's REST API. In order to shutdown the servers, the token given to cull-idle must have admin privileges. Generate an API token and store it in the `JUPYTERHUB_API_TOKEN` environment -variable. Run `cull_idle_servers.py` manually. +variable. Run `jupyterhub_idle_culler` manually. 
```bash export JUPYTERHUB_API_TOKEN='token' - python3 cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api] + python -m jupyterhub_idle_culler [--timeout=900] [--url=http://127.0.0.1:8081/hub/api] ``` -[cull_idle_servers]: https://github.com/jupyterhub/jupyterhub/blob/master/examples/cull-idle/cull_idle_servers.py +[jupyterhub_idle_culler]: https://github.com/jupyterhub/jupyterhub-idle-culler diff --git a/docs/source/reference/authenticators.md b/docs/source/reference/authenticators.md index 0775809f..f0db22f0 100644 --- a/docs/source/reference/authenticators.md +++ b/docs/source/reference/authenticators.md @@ -235,10 +235,9 @@ to Spawner environment: ```python class MyAuthenticator(Authenticator): - @gen.coroutine - def authenticate(self, handler, data=None): - username = yield identify_user(handler, data) - upstream_token = yield token_for_user(username) + async def authenticate(self, handler, data=None): + username = await identify_user(handler, data) + upstream_token = await token_for_user(username) return { 'name': username, 'auth_state': { @@ -246,10 +245,9 @@ class MyAuthenticator(Authenticator): }, } - @gen.coroutine - def pre_spawn_start(self, user, spawner): + async def pre_spawn_start(self, user, spawner): """Pass upstream_token to spawner via environment variable""" - auth_state = yield user.get_auth_state() + auth_state = await user.get_auth_state() if not auth_state: # auth_state not enabled return diff --git a/docs/source/reference/services.md b/docs/source/reference/services.md index 77d6cb25..d477163b 100644 --- a/docs/source/reference/services.md +++ b/docs/source/reference/services.md @@ -50,7 +50,7 @@ A Service may have the following properties: If a service is also to be managed by the Hub, it has a few extra options: -- `command: (str/Popen list`) - Command for JupyterHub to spawn the service. +- `command: (str/Popen list)` - Command for JupyterHub to spawn the service. 
- Only use this if the service should be a subprocess. - If command is not specified, the Service is assumed to be managed externally. diff --git a/examples/service-announcement/README.md b/examples/service-announcement/README.md index 4cdf7bbb..1476a57d 100644 --- a/examples/service-announcement/README.md +++ b/examples/service-announcement/README.md @@ -27,7 +27,7 @@ that environment variable is set or `/` if it is not. Admin users can set the announcement text with an API token: $ curl -X POST -H "Authorization: token " \ - -d "{'announcement':'JupyterHub will be upgraded on August 14!'}" \ + -d '{"announcement":"JupyterHub will be upgraded on August 14!"}' \ https://.../services/announcement Anyone can read the announcement: diff --git a/jupyterhub/apihandlers/auth.py b/jupyterhub/apihandlers/auth.py index 1fd30b3e..76fcd8b8 100644 --- a/jupyterhub/apihandlers/auth.py +++ b/jupyterhub/apihandlers/auth.py @@ -253,7 +253,7 @@ class OAuthAuthorizeHandler(OAuthHandler, BaseHandler): # Render oauth 'Authorize application...' page auth_state = await self.current_user.get_auth_state() self.write( - self.render_template( + await self.render_template( "oauth.html", auth_state=auth_state, scopes=scopes, diff --git a/jupyterhub/apihandlers/hub.py b/jupyterhub/apihandlers/hub.py index afededb8..f2bfd17e 100644 --- a/jupyterhub/apihandlers/hub.py +++ b/jupyterhub/apihandlers/hub.py @@ -17,9 +17,9 @@ class ShutdownAPIHandler(APIHandler): @needs_scope('shutdown') def post(self): """POST /api/shutdown triggers a clean shutdown - + POST (JSON) parameters: - + - servers: specify whether single-user servers should be terminated - proxy: specify whether the proxy should be terminated """ @@ -58,7 +58,7 @@ class RootAPIHandler(APIHandler): """GET /api/ returns info about the Hub and its API. It is not an authenticated endpoint. - + For now, it just returns the version of JupyterHub itself. 
""" data = {'version': __version__} @@ -70,7 +70,7 @@ class InfoAPIHandler(APIHandler): """GET /api/info returns detailed info about the Hub and its API. It is not an authenticated endpoint. - + For now, it just returns the version of JupyterHub itself. """ diff --git a/jupyterhub/apihandlers/users.py b/jupyterhub/apihandlers/users.py index e82fa5ae..8f35fe8b 100644 --- a/jupyterhub/apihandlers/users.py +++ b/jupyterhub/apihandlers/users.py @@ -9,6 +9,7 @@ from datetime import timezone from async_generator import aclosing from dateutil.parser import parse as parse_date +from sqlalchemy import func from tornado import web from tornado.iostream import StreamClosedError @@ -41,11 +42,61 @@ class SelfAPIHandler(APIHandler): class UserListAPIHandler(APIHandler): + def _user_has_ready_spawner(self, orm_user): + """Return True if a user has *any* ready spawners + + Used for filtering from active -> ready + """ + user = self.users[orm_user] + return any(spawner.ready for spawner in user.spawners.values()) + @needs_scope('read:users') def get(self): - users = self.db.query(orm.User) + state_filter = self.get_argument("state", None) + + # post_filter + post_filter = None + + if state_filter in {"active", "ready"}: + # only get users with active servers + # an 'active' Spawner has a server record in the database + # which means Spawner.server != None + # it may still be in a pending start/stop state. 
+ # join filters out users with no Spawners + query = ( + self.db.query(orm.User) + # join filters out any Users with no Spawners + .join(orm.Spawner) + # this implicitly gets Users with *any* active server + .filter(orm.Spawner.server != None) + ) + if state_filter == "ready": + # have to post-process query results because active vs ready + # can only be distinguished with in-memory Spawner properties + post_filter = self._user_has_ready_spawner + + elif state_filter == "inactive": + # only get users with *no* active servers + # as opposed to users with *any inactive servers* + # this is the complement to the above query. + # how expensive is this with lots of servers? + query = ( + self.db.query(orm.User) + .outerjoin(orm.Spawner) + .outerjoin(orm.Server) + .group_by(orm.User.id) + .having(func.count(orm.Server.id) == 0) + ) + elif state_filter: + raise web.HTTPError(400, "Unrecognized state filter: %r" % state_filter) + else: + # no filter, return all users + query = self.db.query(orm.User) + data = [ - self.user_model(u, include_servers=True, include_state=True) for u in users + self.user_model(u, include_servers=True, include_state=True) + for u in query + if (post_filter is None or post_filter(u)) ] self.write(json.dumps(data)) diff --git a/jupyterhub/app.py b/jupyterhub/app.py index 407f6bdb..b6d8571a 100644 --- a/jupyterhub/app.py +++ b/jupyterhub/app.py @@ -2120,7 +2120,7 @@ class JupyterHub(Application): self.log.debug( "Awaiting checks for %i possibly-running spawners", len(check_futures) ) - await gen.multi(check_futures) + await asyncio.gather(*check_futures) db.commit() # only perform this query if we are going to log it @@ -2187,7 +2187,7 @@ class JupyterHub(Application): def init_tornado_settings(self): """Set up the tornado settings dict.""" base_url = self.hub.base_url - jinja_options = dict(autoescape=True) + jinja_options = dict(autoescape=True, enable_async=True) jinja_options.update(self.jinja_environment_options) base_path = 
self._template_paths_default()[0] if base_path not in self.template_paths: @@ -2199,6 +2199,14 @@ class JupyterHub(Application): ] ) jinja_env = Environment(loader=loader, **jinja_options) + # We need a sync jinja environment too, for the times we *must* use sync + # code - particularly in RequestHandler.write_error. Since *that* + # is called from inside the asyncio event loop, we can't actually just + # schedule it on the loop - without starting another thread with its + # own loop, which seems not worth the trouble. Instead, we create another + # environment, exactly like this one, but sync + del jinja_options['enable_async'] + jinja_env_sync = Environment(loader=loader, **jinja_options) login_url = url_path_join(base_url, 'login') logout_url = self.authenticator.logout_url(base_url) @@ -2245,6 +2253,7 @@ class JupyterHub(Application): template_path=self.template_paths, template_vars=self.template_vars, jinja2_env=jinja_env, + jinja2_env_sync=jinja_env_sync, version_hash=version_hash, subdomain_host=self.subdomain_host, domain=self.domain, diff --git a/jupyterhub/auth.py b/jupyterhub/auth.py index bc9c1ef8..33a00bef 100644 --- a/jupyterhub/auth.py +++ b/jupyterhub/auth.py @@ -101,7 +101,10 @@ class Authenticator(LoggingConfigurable): """ ).tag(config=True) - whitelist = Set(help="Deprecated, use `Authenticator.allowed_users`", config=True,) + whitelist = Set( + help="Deprecated, use `Authenticator.allowed_users`", + config=True, + ) allowed_users = Set( help=""" @@ -715,7 +718,9 @@ for _old_name, _new_name, _version in [ ("check_blacklist", "check_blocked_users", "1.2"), ]: setattr( - Authenticator, _old_name, _deprecated_method(_old_name, _new_name, _version), + Authenticator, + _old_name, + _deprecated_method(_old_name, _new_name, _version), ) @@ -778,7 +783,9 @@ class LocalAuthenticator(Authenticator): """ ).tag(config=True) - group_whitelist = Set(help="""DEPRECATED: use allowed_groups""",).tag(config=True) + group_whitelist = Set( + help="""DEPRECATED: use 
allowed_groups""", + ).tag(config=True) allowed_groups = Set( help=""" diff --git a/jupyterhub/handlers/base.py b/jupyterhub/handlers/base.py index f86d0cb3..b98355eb 100644 --- a/jupyterhub/handlers/base.py +++ b/jupyterhub/handlers/base.py @@ -40,6 +40,7 @@ from ..metrics import SERVER_STOP_DURATION_SECONDS from ..metrics import ServerPollStatus from ..metrics import ServerSpawnStatus from ..metrics import ServerStopStatus +from ..metrics import TOTAL_USERS from ..objects import Server from ..spawner import LocalProcessSpawner from ..user import User @@ -456,6 +457,7 @@ class BaseHandler(RequestHandler): # not found, create and register user u = orm.User(name=username) self.db.add(u) + TOTAL_USERS.inc() self.db.commit() user = self._user_from_orm(u) return user @@ -492,7 +494,7 @@ class BaseHandler(RequestHandler): self.clear_cookie( 'jupyterhub-services', path=url_path_join(self.base_url, 'services'), - **kwargs + **kwargs, ) # Reset _jupyterhub_user self._jupyterhub_user = None @@ -637,6 +639,12 @@ class BaseHandler(RequestHandler): next_url, ) + # this is where we know if next_url is coming from ?next= param or we are using a default url + if next_url: + next_url_from_param = True + else: + next_url_from_param = False + if not next_url: # custom default URL, usually passed because user landed on that page but was not logged in if default: @@ -662,7 +670,10 @@ class BaseHandler(RequestHandler): else: next_url = url_path_join(self.hub.base_url, 'home') - next_url = self.append_query_parameters(next_url, exclude=['next']) + if not next_url_from_param: + # when a request made with ?next=... 
assume all the params have already been encoded + # otherwise, preserve params from the current request across the redirect + next_url = self.append_query_parameters(next_url, exclude=['next']) return next_url def append_query_parameters(self, url, exclude=None): @@ -1159,16 +1170,36 @@ class BaseHandler(RequestHandler): "home page.".format(home=home) ) - def get_template(self, name): - """Return the jinja template object for a given name""" - return self.settings['jinja2_env'].get_template(name) + def get_template(self, name, sync=False): + """ + Return the jinja template object for a given name - def render_template(self, name, **ns): + If sync is True, we return a Template that is compiled without async support. + Only those can be used in synchronous code. + + If sync is False, we return a Template that is compiled with async support + """ + if sync: + key = 'jinja2_env_sync' + else: + key = 'jinja2_env' + return self.settings[key].get_template(name) + + def render_template(self, name, sync=False, **ns): + """ + Render jinja2 template + + If sync is set to True, we return an awaitable + If sync is set to False, we render the template & return a string + """ template_ns = {} template_ns.update(self.template_namespace) template_ns.update(ns) - template = self.get_template(name) - return template.render(**template_ns) + template = self.get_template(name, sync) + if sync: + return template.render(**template_ns) + else: + return template.render_async(**template_ns) @property def template_namespace(self): @@ -1243,17 +1274,19 @@ class BaseHandler(RequestHandler): # Content-Length must be recalculated. self.clear_header('Content-Length') - # render the template + # render_template is async, but write_error can't be! 
+ # so we run it sync here, instead of making a sync version of render_template + try: - html = self.render_template('%s.html' % status_code, **ns) + html = self.render_template('%s.html' % status_code, sync=True, **ns) except TemplateNotFound: self.log.debug("No template for %d", status_code) try: - html = self.render_template('error.html', **ns) + html = self.render_template('error.html', sync=True, **ns) except: # In this case, any side effect must be avoided. ns['no_spawner_check'] = True - html = self.render_template('error.html', **ns) + html = self.render_template('error.html', sync=True, **ns) self.write(html) @@ -1457,10 +1490,14 @@ class UserUrlHandler(BaseHandler): # if request is expecting JSON, assume it's an API request and fail with 503 # because it won't like the redirect to the pending page - if get_accepted_mimetype( - self.request.headers.get('Accept', ''), - choices=['application/json', 'text/html'], - ) == 'application/json' or 'api' in user_path.split('/'): + if ( + get_accepted_mimetype( + self.request.headers.get('Accept', ''), + choices=['application/json', 'text/html'], + ) + == 'application/json' + or 'api' in user_path.split('/') + ): self._fail_api_request(user_name, server_name) return @@ -1486,7 +1523,7 @@ class UserUrlHandler(BaseHandler): self.set_status(503) auth_state = await user.get_auth_state() - html = self.render_template( + html = await self.render_template( "not_running.html", user=user, server_name=server_name, @@ -1541,7 +1578,7 @@ class UserUrlHandler(BaseHandler): if redirects: self.log.warning("Redirect loop detected on %s", self.request.uri) # add capped exponential backoff where cap is 10s - await gen.sleep(min(1 * (2 ** redirects), 10)) + await asyncio.sleep(min(1 * (2 ** redirects), 10)) # rewrite target url with new `redirects` query value url_parts = urlparse(target) query_parts = parse_qs(url_parts.query) diff --git a/jupyterhub/handlers/login.py b/jupyterhub/handlers/login.py index f1bd7c45..605cd580 100644 --- 
a/jupyterhub/handlers/login.py +++ b/jupyterhub/handlers/login.py @@ -72,14 +72,14 @@ class LogoutHandler(BaseHandler): Override this function to set a custom logout page. """ if self.authenticator.auto_login: - html = self.render_template('logout.html') + html = await self.render_template('logout.html') self.finish(html) else: self.redirect(self.settings['login_url'], permanent=False) async def get(self): """Log the user out, call the custom action, forward the user - to the logout page + to the logout page """ await self.default_handle_logout() await self.handle_logout() @@ -132,7 +132,7 @@ class LoginHandler(BaseHandler): self.redirect(auto_login_url) return username = self.get_argument('username', default='') - self.finish(self._render(username=username)) + self.finish(await self._render(username=username)) async def post(self): # parse the arguments dict @@ -149,7 +149,7 @@ class LoginHandler(BaseHandler): self._jupyterhub_user = user self.redirect(self.get_next_url(user)) else: - html = self._render( + html = await self._render( login_error='Invalid username or password', username=data['username'] ) self.finish(html) diff --git a/jupyterhub/handlers/pages.py b/jupyterhub/handlers/pages.py index e336bcc7..98ebddec 100644 --- a/jupyterhub/handlers/pages.py +++ b/jupyterhub/handlers/pages.py @@ -40,11 +40,15 @@ class RootHandler(BaseHandler): def get(self): user = self.current_user if self.default_url: - url = self.default_url + # As set in jupyterhub_config.py + if callable(self.default_url): + url = self.default_url(self) + else: + url = self.default_url elif user: url = self.get_next_url(user) else: - url = self.settings['login_url'] + url = url_concat(self.settings["login_url"], dict(next=self.request.uri)) self.redirect(url) @@ -67,7 +71,7 @@ class HomeHandler(BaseHandler): url = url_path_join(self.hub.base_url, 'spawn', user.escaped_name) auth_state = await user.get_auth_state() - html = self.render_template( + html = await self.render_template( 
'home.html', auth_state=auth_state, user=user, @@ -94,7 +98,7 @@ class SpawnHandler(BaseHandler): async def _render_form(self, for_user, spawner_options_form, message=''): auth_state = await for_user.get_auth_state() - return self.render_template( + return await self.render_template( 'spawn.html', for_user=for_user, auth_state=auth_state, @@ -378,7 +382,7 @@ class SpawnPendingHandler(BaseHandler): self.hub.base_url, "spawn", user.escaped_name, server_name ) self.set_status(500) - html = self.render_template( + html = await self.render_template( "not_running.html", user=user, auth_state=auth_state, @@ -402,7 +406,7 @@ class SpawnPendingHandler(BaseHandler): page = "stop_pending.html" else: page = "spawn_pending.html" - html = self.render_template( + html = await self.render_template( page, user=user, spawner=spawner, @@ -429,7 +433,7 @@ class SpawnPendingHandler(BaseHandler): spawn_url = url_path_join( self.hub.base_url, "spawn", user.escaped_name, server_name ) - html = self.render_template( + html = await self.render_template( "not_running.html", user=user, auth_state=auth_state, @@ -519,7 +523,7 @@ class AdminHandler(BaseHandler): ) auth_state = await self.current_user.get_auth_state() - html = self.render_template( + html = await self.render_template( 'admin.html', current_user=self.current_user, auth_state=auth_state, @@ -609,7 +613,7 @@ class TokenPageHandler(BaseHandler): oauth_clients = sorted(oauth_clients, key=sort_key, reverse=True) auth_state = await self.current_user.get_auth_state() - html = self.render_template( + html = await self.render_template( 'token.html', api_tokens=api_tokens, oauth_clients=oauth_clients, @@ -621,7 +625,7 @@ class TokenPageHandler(BaseHandler): class ProxyErrorHandler(BaseHandler): """Handler for rendering proxy error pages""" - def get(self, status_code_s): + async def get(self, status_code_s): status_code = int(status_code_s) status_message = responses.get(status_code, 'Unknown HTTP Error') # build template namespace @@ 
-645,10 +649,10 @@ class ProxyErrorHandler(BaseHandler): self.set_header('Content-Type', 'text/html') # render the template try: - html = self.render_template('%s.html' % status_code, **ns) + html = await self.render_template('%s.html' % status_code, **ns) except TemplateNotFound: self.log.debug("No template for %d", status_code) - html = self.render_template('error.html', **ns) + html = await self.render_template('error.html', **ns) self.write(html) diff --git a/jupyterhub/handlers/static.py b/jupyterhub/handlers/static.py index 662af997..e9c54199 100644 --- a/jupyterhub/handlers/static.py +++ b/jupyterhub/handlers/static.py @@ -7,7 +7,7 @@ from tornado.web import StaticFileHandler class CacheControlStaticFilesHandler(StaticFileHandler): """StaticFileHandler subclass that sets Cache-Control: no-cache without `?v=` - + rather than relying on default browser cache behavior. """ diff --git a/jupyterhub/oauth/provider.py b/jupyterhub/oauth/provider.py index 7649f1ad..ea275a45 100644 --- a/jupyterhub/oauth/provider.py +++ b/jupyterhub/oauth/provider.py @@ -256,7 +256,7 @@ class JupyterHubRequestValidator(RequestValidator): self.db.commit() def get_authorization_code_scopes(self, client_id, code, redirect_uri, request): - """ Extracts scopes from saved authorization code. + """Extracts scopes from saved authorization code. The scopes returned by this method is used to route token requests based on scopes passed to Authorization Code requests. With that the token endpoint knows when to include OpenIDConnect diff --git a/jupyterhub/pagination.py b/jupyterhub/pagination.py index 8d4b912d..dc315e7e 100644 --- a/jupyterhub/pagination.py +++ b/jupyterhub/pagination.py @@ -107,14 +107,14 @@ class Pagination(Configurable): def calculate_pages_window(self): """Calculates the set of pages to render later in links() method. 
- It returns the list of pages to render via links for the pagination - By default, as we've observed in other applications, we're going to render - only a finite and predefined number of pages, avoiding visual fatigue related - to a long list of pages. By default, we render 7 pages plus some inactive links with the characters '...' - to point out that there are other pages that aren't explicitly rendered. - The primary way of work is to provide current webpage and 5 next pages, the last 2 ones - (in case the current page + 5 does not overflow the total lenght of pages) and the first one for reference. - """ + It returns the list of pages to render via links for the pagination + By default, as we've observed in other applications, we're going to render + only a finite and predefined number of pages, avoiding visual fatigue related + to a long list of pages. By default, we render 7 pages plus some inactive links with the characters '...' + to point out that there are other pages that aren't explicitly rendered. + The primary way of work is to provide current webpage and 5 next pages, the last 2 ones + (in case the current page + 5 does not overflow the total length of pages) and the first one for reference. + """ before_page = 2 after_page = 2 @@ -158,9 +158,9 @@ class Pagination(Configurable): @property def links(self): """Get the links for the pagination. - Getting the input from calculate_pages_window(), generates the HTML code - for the pages to render, plus the arrows to go onwards and backwards (if needed). - """ + Getting the input from calculate_pages_window(), generates the HTML code + for the pages to render, plus the arrows to go onwards and backwards (if needed). 
+ """ if self.total_pages == 1: return [] diff --git a/jupyterhub/proxy.py b/jupyterhub/proxy.py index 5b8d386a..077ed820 100644 --- a/jupyterhub/proxy.py +++ b/jupyterhub/proxy.py @@ -25,7 +25,6 @@ from functools import wraps from subprocess import Popen from urllib.parse import quote -from tornado import gen from tornado.httpclient import AsyncHTTPClient from tornado.httpclient import HTTPError from tornado.httpclient import HTTPRequest @@ -292,7 +291,7 @@ class Proxy(LoggingConfigurable): if service.server: futures.append(self.add_service(service)) # wait after submitting them all - await gen.multi(futures) + await asyncio.gather(*futures) async def add_all_users(self, user_dict): """Update the proxy table from the database. @@ -305,7 +304,7 @@ class Proxy(LoggingConfigurable): if spawner.ready: futures.append(self.add_user(user, name)) # wait after submitting them all - await gen.multi(futures) + await asyncio.gather(*futures) @_one_at_a_time async def check_routes(self, user_dict, service_dict, routes=None): @@ -391,7 +390,7 @@ class Proxy(LoggingConfigurable): self.log.warning("Deleting stale route %s", routespec) futures.append(self.delete_route(routespec)) - await gen.multi(futures) + await asyncio.gather(*futures) stop = time.perf_counter() # timer stops here when user is deleted CHECK_ROUTES_DURATION_SECONDS.observe(stop - start) # histogram metric @@ -497,6 +496,19 @@ class ConfigurableHTTPProxy(Proxy): if not psutil.pid_exists(pid): raise ProcessLookupError + + try: + process = psutil.Process(pid) + if self.command and self.command[0]: + process_cmd = process.cmdline() + if process_cmd and not any( + self.command[0] in clause for clause in process_cmd + ): + raise ProcessLookupError + except (psutil.AccessDenied, psutil.NoSuchProcess): + # If there is a process at the proxy's PID but we don't have permissions to see it, + # then it is unlikely to actually be the proxy. 
+ raise ProcessLookupError else: os.kill(pid, 0) @@ -574,6 +586,34 @@ class ConfigurableHTTPProxy(Proxy): self.log.debug("PID file %s already removed", self.pid_file) pass + def _get_ssl_options(self): + """List of cmd proxy options to use internal SSL""" + cmd = [] + proxy_api = 'proxy-api' + proxy_client = 'proxy-client' + api_key = self.app.internal_proxy_certs[proxy_api][ + 'keyfile' + ] # Check content in next test and just patch manually or in the config of the file + api_cert = self.app.internal_proxy_certs[proxy_api]['certfile'] + api_ca = self.app.internal_trust_bundles[proxy_api + '-ca'] + + client_key = self.app.internal_proxy_certs[proxy_client]['keyfile'] + client_cert = self.app.internal_proxy_certs[proxy_client]['certfile'] + client_ca = self.app.internal_trust_bundles[proxy_client + '-ca'] + + cmd.extend(['--api-ssl-key', api_key]) + cmd.extend(['--api-ssl-cert', api_cert]) + cmd.extend(['--api-ssl-ca', api_ca]) + cmd.extend(['--api-ssl-request-cert']) + cmd.extend(['--api-ssl-reject-unauthorized']) + + cmd.extend(['--client-ssl-key', client_key]) + cmd.extend(['--client-ssl-cert', client_cert]) + cmd.extend(['--client-ssl-ca', client_ca]) + cmd.extend(['--client-ssl-request-cert']) + cmd.extend(['--client-ssl-reject-unauthorized']) + return cmd + async def start(self): """Start the proxy process""" # check if there is a previous instance still around @@ -605,27 +645,7 @@ class ConfigurableHTTPProxy(Proxy): if self.ssl_cert: cmd.extend(['--ssl-cert', self.ssl_cert]) if self.app.internal_ssl: - proxy_api = 'proxy-api' - proxy_client = 'proxy-client' - api_key = self.app.internal_proxy_certs[proxy_api]['keyfile'] - api_cert = self.app.internal_proxy_certs[proxy_api]['certfile'] - api_ca = self.app.internal_trust_bundles[proxy_api + '-ca'] - - client_key = self.app.internal_proxy_certs[proxy_client]['keyfile'] - client_cert = self.app.internal_proxy_certs[proxy_client]['certfile'] - client_ca = self.app.internal_trust_bundles[proxy_client + '-ca'] - 
cmd.extend(['--api-ssl-key', api_key]) - cmd.extend(['--api-ssl-cert', api_cert]) - cmd.extend(['--api-ssl-ca', api_ca]) - cmd.extend(['--api-ssl-request-cert']) - cmd.extend(['--api-ssl-reject-unauthorized']) - - cmd.extend(['--client-ssl-key', client_key]) - cmd.extend(['--client-ssl-cert', client_cert]) - cmd.extend(['--client-ssl-ca', client_ca]) - cmd.extend(['--client-ssl-request-cert']) - cmd.extend(['--client-ssl-reject-unauthorized']) + cmd.extend(self._get_ssl_options()) if self.app.statsd_host: cmd.extend( [ @@ -692,8 +712,17 @@ class ConfigurableHTTPProxy(Proxy): parent = psutil.Process(pid) children = parent.children(recursive=True) for child in children: - child.kill() - psutil.wait_procs(children, timeout=5) + child.terminate() + gone, alive = psutil.wait_procs(children, timeout=5) + for p in alive: + p.kill() + # Clear the shell, too, if it still exists. + try: + parent.terminate() + parent.wait(timeout=5) + parent.kill() + except psutil.NoSuchProcess: + pass def _terminate(self): """Terminate our process""" diff --git a/jupyterhub/services/auth.py b/jupyterhub/services/auth.py index c9e6974f..6eaa4caa 100644 --- a/jupyterhub/services/auth.py +++ b/jupyterhub/services/auth.py @@ -23,7 +23,6 @@ from urllib.parse import quote from urllib.parse import urlencode import requests -from tornado.gen import coroutine from tornado.httputil import url_concat from tornado.log import app_log from tornado.web import HTTPError @@ -288,7 +287,7 @@ class HubAuth(SingletonConfigurable): def _check_hub_authorization(self, url, cache_key=None, use_cache=True): """Identify a user with the Hub - + Args: url (str): The API URL to check the Hub for authorization (e.g. http://127.0.0.1:8081/hub/api/authorizations/token/abc-def) @@ -604,10 +603,10 @@ class HubOAuth(HubAuth): def token_for_code(self, code): """Get token for OAuth temporary code - + This is the last step of OAuth login. Should be called in OAuth Callback handler. 
- + Args: code (str): oauth code for finishing OAuth login Returns: @@ -950,8 +949,7 @@ class HubOAuthCallbackHandler(HubOAuthenticated, RequestHandler): .. versionadded: 0.8 """ - @coroutine - def get(self): + async def get(self): error = self.get_argument("error", False) if error: msg = self.get_argument("error_description", error) diff --git a/jupyterhub/singleuser/mixins.py b/jupyterhub/singleuser/mixins.py index 1703e9a0..c0912710 100755 --- a/jupyterhub/singleuser/mixins.py +++ b/jupyterhub/singleuser/mixins.py @@ -20,7 +20,6 @@ from urllib.parse import urlparse from jinja2 import ChoiceLoader from jinja2 import FunctionLoader -from tornado import gen from tornado import ioloop from tornado.httpclient import AsyncHTTPClient from tornado.httpclient import HTTPRequest @@ -434,7 +433,7 @@ class SingleUserNotebookAppMixin(Configurable): i, RETRIES, ) - await gen.sleep(min(2 ** i, 16)) + await asyncio.sleep(min(2 ** i, 16)) else: break else: diff --git a/jupyterhub/spawner.py b/jupyterhub/spawner.py index ad07fa44..7dea35d7 100644 --- a/jupyterhub/spawner.py +++ b/jupyterhub/spawner.py @@ -16,8 +16,7 @@ from tempfile import mkdtemp if os.name == 'nt': import psutil -from async_generator import async_generator -from async_generator import yield_ +from async_generator import aclosing from sqlalchemy import inspect from tornado.ioloop import PeriodicCallback from traitlets import Any @@ -1024,7 +1023,6 @@ class Spawner(LoggingConfigurable): def _progress_url(self): return self.user.progress_url(self.name) - @async_generator async def _generate_progress(self): """Private wrapper of progress generator @@ -1036,21 +1034,17 @@ class Spawner(LoggingConfigurable): ) return - await yield_({"progress": 0, "message": "Server requested"}) - from async_generator import aclosing + yield {"progress": 0, "message": "Server requested"} async with aclosing(self.progress()) as progress: async for event in progress: - await yield_(event) + yield event - @async_generator async def 
progress(self): """Async generator for progress events Must be an async generator - For Python 3.5-compatibility, use the async_generator package - Should yield messages of the form: :: @@ -1067,7 +1061,7 @@ class Spawner(LoggingConfigurable): .. versionadded:: 0.9 """ - await yield_({"progress": 50, "message": "Spawning server..."}) + yield {"progress": 50, "message": "Spawning server..."} async def start(self): """Start the single-user server @@ -1078,9 +1072,7 @@ class Spawner(LoggingConfigurable): .. versionchanged:: 0.7 Return ip, port instead of setting on self.user.server directly. """ - raise NotImplementedError( - "Override in subclass. Must be a Tornado gen.coroutine." - ) + raise NotImplementedError("Override in subclass. Must be a coroutine.") async def stop(self, now=False): """Stop the single-user server @@ -1093,9 +1085,7 @@ class Spawner(LoggingConfigurable): Must be a coroutine. """ - raise NotImplementedError( - "Override in subclass. Must be a Tornado gen.coroutine." - ) + raise NotImplementedError("Override in subclass. Must be a coroutine.") async def poll(self): """Check if the single-user process is running @@ -1121,9 +1111,7 @@ class Spawner(LoggingConfigurable): process has not yet completed. """ - raise NotImplementedError( - "Override in subclass. Must be a Tornado gen.coroutine." - ) + raise NotImplementedError("Override in subclass. 
Must be a coroutine.") def add_poll_callback(self, callback, *args, **kwargs): """Add a callback to fire when the single-user server stops""" diff --git a/jupyterhub/tests/conftest.py b/jupyterhub/tests/conftest.py index 7126baa7..104ca635 100644 --- a/jupyterhub/tests/conftest.py +++ b/jupyterhub/tests/conftest.py @@ -36,7 +36,6 @@ from unittest import mock from pytest import fixture from pytest import raises -from tornado import gen from tornado import ioloop from tornado.httpclient import HTTPError from tornado.platform.asyncio import AsyncIOMainLoop @@ -56,13 +55,16 @@ _db = None def pytest_collection_modifyitems(items): - """add asyncio marker to all async tests""" + """This function is automatically run by pytest passing all collected test + functions. + + We use it to add asyncio marker to all async tests and assert we don't use + test functions that are async generators which wouldn't make sense. + """ for item in items: if inspect.iscoroutinefunction(item.obj): item.add_marker('asyncio') - if hasattr(inspect, 'isasyncgenfunction'): - # double-check that we aren't mixing yield and async def - assert not inspect.isasyncgenfunction(item.obj) + assert not inspect.isasyncgenfunction(item.obj) @fixture(scope='module') @@ -74,7 +76,9 @@ def ssl_tmpdir(tmpdir_factory): def app(request, io_loop, ssl_tmpdir): """Mock a jupyterhub app for testing""" mocked_app = None - ssl_enabled = getattr(request.module, "ssl_enabled", False) + ssl_enabled = getattr( + request.module, 'ssl_enabled', os.environ.get('SSL_ENABLED', False) + ) kwargs = dict() if ssl_enabled: kwargs.update(dict(internal_ssl=True, internal_certs_location=str(ssl_tmpdir))) @@ -214,7 +218,7 @@ def admin_user(app, username): class MockServiceSpawner(jupyterhub.services.service._ServiceSpawner): """mock services for testing. - Shorter intervals, etc. + Shorter intervals, etc. 
""" poll_interval = 1 @@ -244,17 +248,14 @@ def _mockservice(request, app, url=False): assert name in app._service_map service = app._service_map[name] - @gen.coroutine - def start(): + async def start(): # wait for proxy to be updated before starting the service - yield app.proxy.add_all_services(app._service_map) + await app.proxy.add_all_services(app._service_map) service.start() io_loop.run_sync(start) def cleanup(): - import asyncio - asyncio.get_event_loop().run_until_complete(service.stop()) app.services[:] = [] app._service_map.clear() diff --git a/jupyterhub/tests/mocking.py b/jupyterhub/tests/mocking.py index 6e1b6879..8adfc63f 100644 --- a/jupyterhub/tests/mocking.py +++ b/jupyterhub/tests/mocking.py @@ -36,13 +36,12 @@ from unittest import mock from urllib.parse import urlparse from pamela import PAMError -from tornado import gen -from tornado.concurrent import Future from tornado.ioloop import IOLoop from traitlets import Bool from traitlets import default from traitlets import Dict +from .. import metrics from .. 
import orm from ..app import JupyterHub from ..auth import PAMAuthenticator @@ -118,19 +117,17 @@ class SlowSpawner(MockSpawner): delay = 2 _start_future = None - @gen.coroutine - def start(self): - (ip, port) = yield super().start() + async def start(self): + (ip, port) = await super().start() if self._start_future is not None: - yield self._start_future + await self._start_future else: - yield gen.sleep(self.delay) + await asyncio.sleep(self.delay) return ip, port - @gen.coroutine - def stop(self): - yield gen.sleep(self.delay) - yield super().stop() + async def stop(self): + await asyncio.sleep(self.delay) + await super().stop() class NeverSpawner(MockSpawner): @@ -142,14 +139,12 @@ class NeverSpawner(MockSpawner): def start(self): """Return a Future that will never finish""" - return Future() + return asyncio.Future() - @gen.coroutine - def stop(self): + async def stop(self): pass - @gen.coroutine - def poll(self): + async def poll(self): return 0 @@ -223,8 +218,7 @@ class MockPAMAuthenticator(PAMAuthenticator): # skip the add-system-user bit return not user.name.startswith('dne') - @gen.coroutine - def authenticate(self, *args, **kwargs): + async def authenticate(self, *args, **kwargs): with mock.patch.multiple( 'pamela', authenticate=mock_authenticate, @@ -232,7 +226,7 @@ class MockPAMAuthenticator(PAMAuthenticator): close_session=mock_open_session, check_account=mock_check_account, ): - username = yield super(MockPAMAuthenticator, self).authenticate( + username = await super(MockPAMAuthenticator, self).authenticate( *args, **kwargs ) if username is None: @@ -329,14 +323,13 @@ class MockHub(JupyterHub): self.db.delete(group) self.db.commit() - @gen.coroutine - def initialize(self, argv=None): + async def initialize(self, argv=None): self.pid_file = NamedTemporaryFile(delete=False).name self.db_file = NamedTemporaryFile() self.db_url = os.getenv('JUPYTERHUB_TEST_DB_URL') or self.db_file.name if 'mysql' in self.db_url: self.db_kwargs['connect_args'] = 
{'auth_plugin': 'mysql_native_password'} - yield super().initialize([]) + await super().initialize([]) # add an initial user user = self.db.query(orm.User).filter(orm.User.name == 'user').first() @@ -344,6 +337,7 @@ class MockHub(JupyterHub): user = orm.User(name='user') self.db.add(user) self.db.commit() + metrics.TOTAL_USERS.inc() def stop(self): super().stop() @@ -367,14 +361,13 @@ class MockHub(JupyterHub): self.cleanup = lambda: None self.db_file.close() - @gen.coroutine - def login_user(self, name): + async def login_user(self, name): """Login a user by name, returning her cookies.""" base_url = public_url(self) external_ca = None if self.internal_ssl: external_ca = self.external_certs['files']['ca'] - r = yield async_requests.post( + r = await async_requests.post( base_url + 'hub/login', data={'username': name, 'password': name}, allow_redirects=False, @@ -416,8 +409,7 @@ class StubSingleUserSpawner(MockSpawner): _thread = None - @gen.coroutine - def start(self): + async def start(self): ip = self.ip = '127.0.0.1' port = self.port = random_port() env = self.get_env() @@ -444,14 +436,12 @@ class StubSingleUserSpawner(MockSpawner): assert ready return (ip, port) - @gen.coroutine - def stop(self): + async def stop(self): self._app.stop() self._thread.join(timeout=30) assert not self._thread.is_alive() - @gen.coroutine - def poll(self): + async def poll(self): if self._thread is None: return 0 if self._thread.is_alive(): diff --git a/jupyterhub/tests/mockservice.py b/jupyterhub/tests/mockservice.py index 6194d844..415f512f 100644 --- a/jupyterhub/tests/mockservice.py +++ b/jupyterhub/tests/mockservice.py @@ -60,7 +60,7 @@ class APIHandler(web.RequestHandler): class WhoAmIHandler(HubAuthenticated, web.RequestHandler): """Reply with the name of the user who made the request. 
- + Uses "deprecated" cookie login """ @@ -71,7 +71,7 @@ class WhoAmIHandler(HubAuthenticated, web.RequestHandler): class OWhoAmIHandler(HubOAuthenticated, web.RequestHandler): """Reply with the name of the user who made the request. - + Uses OAuth login flow """ diff --git a/jupyterhub/tests/test_api.py b/jupyterhub/tests/test_api.py index fa37037a..c740820e 100644 --- a/jupyterhub/tests/test_api.py +++ b/jupyterhub/tests/test_api.py @@ -1,22 +1,20 @@ """Tests for the REST API.""" +import asyncio import json import re import sys import uuid -from concurrent.futures import Future from datetime import datetime from datetime import timedelta from unittest import mock from urllib.parse import quote from urllib.parse import urlparse -from async_generator import async_generator -from async_generator import yield_ from pytest import mark -from tornado import gen import jupyterhub from .. import orm +from ..objects import Server from ..utils import url_path_join as ujoin from ..utils import utcnow from .mocking import mock_role @@ -28,7 +26,6 @@ from .utils import async_requests from .utils import auth_header from .utils import find_user - # -------------------- # Authentication tests # -------------------- @@ -183,6 +180,71 @@ async def test_get_users(app): assert r.status_code == 403 +@mark.user +@mark.parametrize( + "state", + ("inactive", "active", "ready", "invalid"), +) +async def test_get_users_state_filter(app, state): + db = app.db + + # has_one_active: one active, one inactive, zero ready + has_one_active = add_user(db, app=app, name='has_one_active') + # has_two_active: two active, ready servers + has_two_active = add_user(db, app=app, name='has_two_active') + # has_two_inactive: two spawners, neither active + has_two_inactive = add_user(db, app=app, name='has_two_inactive') + # has_zero: no Spawners registered at all + has_zero = add_user(db, app=app, name='has_zero') + + test_usernames = set( + ("has_one_active", "has_two_active", "has_two_inactive", 
"has_zero") + ) + + user_states = { + "inactive": ["has_two_inactive", "has_zero"], + "ready": ["has_two_active"], + "active": ["has_one_active", "has_two_active"], + "invalid": [], + } + expected = user_states[state] + + def add_spawner(user, name='', active=True, ready=True): + """Add a spawner in a requested state + + If active, should turn up in an active query + If active and ready, should turn up in a ready query + If not active, should turn up in an inactive query + """ + spawner = user.spawners[name] + db.commit() + if active: + orm_server = orm.Server() + db.add(orm_server) + db.commit() + spawner.server = Server(orm_server=orm_server) + db.commit() + if not ready: + spawner._spawn_pending = True + return spawner + + for name in ("", "secondary"): + add_spawner(has_two_active, name, active=True) + add_spawner(has_two_inactive, name, active=False) + + add_spawner(has_one_active, active=True, ready=False) + add_spawner(has_one_active, "inactive", active=False) + + r = await api_request(app, 'users?state={}'.format(state)) + if state == "invalid": + assert r.status_code == 400 + return + assert r.status_code == 200 + + usernames = sorted(u["name"] for u in r.json() if u["name"] in test_usernames) + assert usernames == expected + + @mark.user async def test_get_self(app): db = app.db @@ -206,14 +268,22 @@ async def test_get_self(app): ) db.add(oauth_token) db.commit() - r = await api_request(app, 'user', headers={'Authorization': 'token ' + token},) + r = await api_request( + app, + 'user', + headers={'Authorization': 'token ' + token}, + ) r.raise_for_status() model = r.json() assert model['name'] == u.name # invalid auth gets 403 with mock_role(app, 'user'): - r = await api_request(app, 'user', headers={'Authorization': 'token notvalid'},) + r = await api_request( + app, + 'user', + headers={'Authorization': 'token notvalid'}, + ) assert r.status_code == 403 @@ -234,7 +304,11 @@ async def test_get_user(app): name = 'user' _ = await api_request(app, 'users', 
name, headers=auth_header(app.db, name)) with mock_role(app, role=name, name=name): - r = await api_request(app, 'users', name,) + r = await api_request( + app, + 'users', + name, + ) assert r.status_code == 200 user = normalize_user(r.json()) @@ -616,7 +690,7 @@ async def test_slow_spawn(app, no_patience, slow_spawn): async def wait_spawn(): while not app_user.running: - await gen.sleep(0.1) + await asyncio.sleep(0.1) await wait_spawn() assert not app_user.spawner._spawn_pending @@ -625,7 +699,7 @@ async def test_slow_spawn(app, no_patience, slow_spawn): async def wait_stop(): while app_user.spawner._stop_pending: - await gen.sleep(0.1) + await asyncio.sleep(0.1) r = await api_request(app, 'users', name, 'server', method='delete') r.raise_for_status() @@ -659,7 +733,7 @@ async def test_never_spawn(app, no_patience, never_spawn): assert app.users.count_active_users()['pending'] == 1 while app_user.spawner.pending: - await gen.sleep(0.1) + await asyncio.sleep(0.1) print(app_user.spawner.pending) assert not app_user.spawner._spawn_pending @@ -685,7 +759,7 @@ async def test_slow_bad_spawn(app, no_patience, slow_bad_spawn): r = await api_request(app, 'users', name, 'server', method='post') r.raise_for_status() while user.spawner.pending: - await gen.sleep(0.1) + await asyncio.sleep(0.1) # spawn failed assert not user.running assert app.users.count_active_users()['pending'] == 0 @@ -821,32 +895,12 @@ async def test_progress_bad_slow(request, app, no_patience, slow_bad_spawn): } -@async_generator async def progress_forever(): """progress function that yields messages forever""" for i in range(1, 10): - await yield_({'progress': i, 'message': 'Stage %s' % i}) + yield {'progress': i, 'message': 'Stage %s' % i} # wait a long time before the next event - await gen.sleep(10) - - -if sys.version_info >= (3, 6): - # additional progress_forever defined as native - # async generator - # to test for issues with async_generator wrappers - exec( - """ -async def 
progress_forever_native(): - for i in range(1, 10): - yield { - 'progress': i, - 'message': 'Stage %s' % i, - } - # wait a long time before the next event - await gen.sleep(10) -""", - globals(), - ) + await asyncio.sleep(10) async def test_spawn_progress_cutoff(request, app, no_patience, slow_spawn): @@ -857,11 +911,7 @@ async def test_spawn_progress_cutoff(request, app, no_patience, slow_spawn): db = app.db name = 'geddy' app_user = add_user(db, app=app, name=name) - if sys.version_info >= (3, 6): - # Python >= 3.6, try native async generator - app_user.spawner.progress = globals()['progress_forever_native'] - else: - app_user.spawner.progress = progress_forever + app_user.spawner.progress = progress_forever app_user.spawner.delay = 1 r = await api_request(app, 'users', name, 'server', method='post') @@ -888,8 +938,8 @@ async def test_spawn_limit(app, no_patience, slow_spawn, request): # start two pending spawns names = ['ykka', 'hjarka'] users = [add_user(db, app=app, name=name) for name in names] - users[0].spawner._start_future = Future() - users[1].spawner._start_future = Future() + users[0].spawner._start_future = asyncio.Future() + users[1].spawner._start_future = asyncio.Future() for name in names: await api_request(app, 'users', name, 'server', method='post') assert app.users.count_active_users()['pending'] == 2 @@ -897,7 +947,7 @@ async def test_spawn_limit(app, no_patience, slow_spawn, request): # ykka and hjarka's spawns are both pending. 
Essun should fail with 429 name = 'essun' user = add_user(db, app=app, name=name) - user.spawner._start_future = Future() + user.spawner._start_future = asyncio.Future() r = await api_request(app, 'users', name, 'server', method='post') assert r.status_code == 429 @@ -905,7 +955,7 @@ async def test_spawn_limit(app, no_patience, slow_spawn, request): users[0].spawner._start_future.set_result(None) # wait for ykka to finish while not users[0].running: - await gen.sleep(0.1) + await asyncio.sleep(0.1) assert app.users.count_active_users()['pending'] == 1 r = await api_request(app, 'users', name, 'server', method='post') @@ -916,7 +966,7 @@ async def test_spawn_limit(app, no_patience, slow_spawn, request): for user in users[1:]: user.spawner._start_future.set_result(None) while not all(u.running for u in users): - await gen.sleep(0.1) + await asyncio.sleep(0.1) # everybody's running, pending count should be back to 0 assert app.users.count_active_users()['pending'] == 0 @@ -925,7 +975,7 @@ async def test_spawn_limit(app, no_patience, slow_spawn, request): r = await api_request(app, 'users', u.name, 'server', method='delete') r.raise_for_status() while any(u.spawner.active for u in users): - await gen.sleep(0.1) + await asyncio.sleep(0.1) @mark.slow @@ -1003,7 +1053,7 @@ async def test_start_stop_race(app, no_patience, slow_spawn): r = await api_request(app, 'users', user.name, 'server', method='delete') assert r.status_code == 400 while not spawner.ready: - await gen.sleep(0.1) + await asyncio.sleep(0.1) spawner.delay = 3 # stop the spawner @@ -1011,7 +1061,7 @@ async def test_start_stop_race(app, no_patience, slow_spawn): assert r.status_code == 202 assert spawner.pending == 'stop' # make sure we get past deleting from the proxy - await gen.sleep(1) + await asyncio.sleep(1) # additional stops while stopping shouldn't trigger a new stop with mock.patch.object(spawner, 'stop') as m: r = await api_request(app, 'users', user.name, 'server', method='delete') @@ -1023,7 
+1073,7 @@ async def test_start_stop_race(app, no_patience, slow_spawn): assert r.status_code == 400 while spawner.active: - await gen.sleep(0.1) + await asyncio.sleep(0.1) # start after stop is okay r = await api_request(app, 'users', user.name, 'server', method='post') assert r.status_code == 202 diff --git a/jupyterhub/tests/test_internal_ssl_api.py b/jupyterhub/tests/test_internal_ssl_api.py deleted file mode 100644 index 17349ae6..00000000 --- a/jupyterhub/tests/test_internal_ssl_api.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Tests for the SSL enabled REST API.""" -# Copyright (c) Jupyter Development Team. -# Distributed under the terms of the Modified BSD License. -from jupyterhub.tests.test_api import * - -ssl_enabled = True diff --git a/jupyterhub/tests/test_internal_ssl_app.py b/jupyterhub/tests/test_internal_ssl_app.py deleted file mode 100644 index cae0519b..00000000 --- a/jupyterhub/tests/test_internal_ssl_app.py +++ /dev/null @@ -1,7 +0,0 @@ -"""Test the JupyterHub entry point with internal ssl""" -# Copyright (c) Jupyter Development Team. -# Distributed under the terms of the Modified BSD License. 
-import jupyterhub.tests.mocking -from jupyterhub.tests.test_app import * - -ssl_enabled = True diff --git a/jupyterhub/tests/test_internal_ssl_connections.py b/jupyterhub/tests/test_internal_ssl_connections.py index a650806c..47845651 100644 --- a/jupyterhub/tests/test_internal_ssl_connections.py +++ b/jupyterhub/tests/test_internal_ssl_connections.py @@ -20,8 +20,7 @@ ssl_enabled = True SSL_ERROR = (SSLError, ConnectionError) -@gen.coroutine -def wait_for_spawner(spawner, timeout=10): +async def wait_for_spawner(spawner, timeout=10): """Wait for an http server to show up polling at shorter intervals for early termination @@ -32,15 +31,15 @@ def wait_for_spawner(spawner, timeout=10): return spawner.server.wait_up(timeout=1, http=True) while time.monotonic() < deadline: - status = yield spawner.poll() + status = await spawner.poll() assert status is None try: - yield wait() + await wait() except TimeoutError: continue else: break - yield wait() + await wait() async def test_connection_hub_wrong_certs(app): diff --git a/jupyterhub/tests/test_internal_ssl_spawner.py b/jupyterhub/tests/test_internal_ssl_spawner.py deleted file mode 100644 index 85ea5ccd..00000000 --- a/jupyterhub/tests/test_internal_ssl_spawner.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Tests for process spawning with internal_ssl""" -# Copyright (c) Jupyter Development Team. -# Distributed under the terms of the Modified BSD License. 
-from jupyterhub.tests.test_spawner import * - -ssl_enabled = True diff --git a/jupyterhub/tests/test_metrics.py b/jupyterhub/tests/test_metrics.py new file mode 100644 index 00000000..29c22122 --- /dev/null +++ b/jupyterhub/tests/test_metrics.py @@ -0,0 +1,34 @@ +import json + +from .utils import add_user +from .utils import api_request +from jupyterhub import metrics +from jupyterhub import orm + + +async def test_total_users(app): + num_users = app.db.query(orm.User).count() + sample = metrics.TOTAL_USERS.collect()[0].samples[0] + assert sample.value == num_users + + await api_request( + app, "/users", method="post", data=json.dumps({"usernames": ["incrementor"]}) + ) + + sample = metrics.TOTAL_USERS.collect()[0].samples[0] + assert sample.value == num_users + 1 + + # GET /users used to double-count + await api_request(app, "/users") + + # populate the Users cache dict if any are missing: + for user in app.db.query(orm.User): + _ = app.users[user.id] + + sample = metrics.TOTAL_USERS.collect()[0].samples[0] + assert sample.value == num_users + 1 + + await api_request(app, "/users/incrementor", method="delete") + + sample = metrics.TOTAL_USERS.collect()[0].samples[0] + assert sample.value == num_users diff --git a/jupyterhub/tests/test_orm.py b/jupyterhub/tests/test_orm.py index 0c125c5a..0e31cc85 100644 --- a/jupyterhub/tests/test_orm.py +++ b/jupyterhub/tests/test_orm.py @@ -222,8 +222,7 @@ async def test_spawn_fails(db): db.commit() class BadSpawner(MockSpawner): - @gen.coroutine - def start(self): + async def start(self): raise RuntimeError("Split the party") user = User( diff --git a/jupyterhub/tests/test_pages.py b/jupyterhub/tests/test_pages.py index fbb67e8c..7140b823 100644 --- a/jupyterhub/tests/test_pages.py +++ b/jupyterhub/tests/test_pages.py @@ -8,7 +8,6 @@ from urllib.parse import urlparse import pytest from bs4 import BeautifulSoup -from tornado import gen from tornado.escape import url_escape from tornado.httputil import url_concat @@ -31,7 +30,7 
@@ async def test_root_no_auth(app): url = ujoin(public_host(app), app.hub.base_url) r = await async_requests.get(url) r.raise_for_status() - assert r.url == ujoin(url, 'login') + assert r.url == url_concat(ujoin(url, 'login'), dict(next=app.hub.base_url)) async def test_root_auth(app): @@ -266,7 +265,8 @@ async def test_spawn_with_query_arguments(app): next_url = ujoin(app.base_url, 'user/jones/tree') r = await async_requests.get( url_concat( - ujoin(base_url, 'spawn'), {'next': next_url, 'energy': '510keV'}, + ujoin(base_url, 'spawn'), + {'next': next_url, 'energy': '510keV'}, ), cookies=cookies, ) @@ -332,6 +332,12 @@ async def test_spawn_form_admin_access(app, admin_access): data={'bounds': ['-3', '3'], 'energy': '938MeV'}, ) r.raise_for_status() + + while '/spawn-pending/' in r.url: + await asyncio.sleep(0.1) + r = await async_requests.get(r.url, cookies=cookies) + r.raise_for_status() + assert r.history assert r.url.startswith(public_url(app, u)) assert u.spawner.user_options == { @@ -586,8 +592,7 @@ async def test_login_strip(app): base_url = public_url(app) called_with = [] - @gen.coroutine - def mock_authenticate(handler, data): + async def mock_authenticate(handler, data): called_with.append(data) with mock.patch.object(app.authenticator, 'authenticate', mock_authenticate): @@ -616,9 +621,16 @@ async def test_login_strip(app): (False, '//other.domain', '', None), (False, '///other.domain/triple', '', None), (False, '\\\\other.domain/backslashes', '', None), - # params are handled correctly - (True, '/hub/admin', 'hub/admin?left=1&right=2', [('left', 1), ('right', 2)]), - (False, '/hub/admin', 'hub/admin?left=1&right=2', [('left', 1), ('right', 2)]), + # params are handled correctly (ignored if ?next= specified) + ( + True, + '/hub/admin?left=1&right=2', + 'hub/admin?left=1&right=2', + {"left": "abc"}, + ), + (False, '/hub/admin', 'hub/admin', [('left', 1), ('right', 2)]), + (True, '', '', {"keep": "yes"}), + (False, '', '', {"keep": "yes"}), ], ) async 
def test_login_redirect(app, running, next_url, location, params): @@ -627,10 +639,15 @@ async def test_login_redirect(app, running, next_url, location, params): if location: location = ujoin(app.base_url, location) elif running: + # location not specified, location = user.url + if params: + location = url_concat(location, params) else: # use default url location = ujoin(app.base_url, 'hub/spawn') + if params: + location = url_concat(location, params) url = 'login' if params: @@ -649,7 +666,73 @@ async def test_login_redirect(app, running, next_url, location, params): r = await get_page(url, app, cookies=cookies, allow_redirects=False) r.raise_for_status() assert r.status_code == 302 - assert location == r.headers['Location'] + assert r.headers["Location"] == location + + +@pytest.mark.parametrize( + 'location, next, extra_params', + [ + ( + "{base_url}hub/spawn?a=5", + None, + {"a": "5"}, + ), # no ?next= given, preserve params + ("/x", "/x", {"a": "5"}), # ?next=given, params ignored + ( + "/x?b=10", + "/x?b=10", + {"a": "5"}, + ), # ?next=given with params, additional params ignored + ], +) +async def test_next_url(app, user, location, next, extra_params): + params = {} + if extra_params: + params.update(extra_params) + if next: + params["next"] = next + url = url_concat("/", params) + cookies = await app.login_user("monster") + + # location can be a string template + location = location.format(base_url=app.base_url) + + r = await get_page(url, app, cookies=cookies, allow_redirects=False) + r.raise_for_status() + assert r.status_code == 302 + assert r.headers["Location"] == location + + +async def test_next_url_params_sequence(app, user): + """Test each step of / -> login -> spawn + + and whether they preserve url params + """ + params = {"xyz": "5"} + # first request: root page, with params, not logged in + r = await get_page("/?xyz=5", app, allow_redirects=False) + r.raise_for_status() + location = r.headers["Location"] + + # next page: login + cookies = await 
app.login_user(user.name) + assert location == url_concat( + ujoin(app.base_url, "/hub/login"), {"next": ujoin(app.base_url, "/hub/?xyz=5")} + ) + r = await async_requests.get( + public_host(app) + location, cookies=cookies, allow_redirects=False + ) + r.raise_for_status() + location = r.headers["Location"] + + # after login, redirect back + assert location == ujoin(app.base_url, "/hub/?xyz=5") + r = await async_requests.get( + public_host(app) + location, cookies=cookies, allow_redirects=False + ) + r.raise_for_status() + location = r.headers["Location"] + assert location == ujoin(app.base_url, "/hub/spawn?xyz=5") async def test_auto_login(app, request): @@ -663,14 +746,18 @@ async def test_auto_login(app, request): ) # no auto_login: end up at /hub/login r = await async_requests.get(base_url) - assert r.url == public_url(app, path='hub/login') + assert r.url == url_concat( + public_url(app, path="hub/login"), {"next": app.hub.base_url} + ) # enable auto_login: redirect from /hub/login to /hub/dummy authenticator = Authenticator(auto_login=True) authenticator.login_url = lambda base_url: ujoin(base_url, 'dummy') with mock.patch.dict(app.tornado_settings, {'authenticator': authenticator}): r = await async_requests.get(base_url) - assert r.url == public_url(app, path='hub/dummy') + assert r.url == url_concat( + public_url(app, path="hub/dummy"), {"next": app.hub.base_url} + ) async def test_auto_login_logout(app): @@ -943,8 +1030,7 @@ async def test_pre_spawn_start_exc_no_form(app): exc = "pre_spawn_start error" # throw exception from pre_spawn_start - @gen.coroutine - def mock_pre_spawn_start(user, spawner): + async def mock_pre_spawn_start(user, spawner): raise Exception(exc) with mock.patch.object(app.authenticator, 'pre_spawn_start', mock_pre_spawn_start): @@ -959,8 +1045,7 @@ async def test_pre_spawn_start_exc_options_form(app): exc = "pre_spawn_start error" # throw exception from pre_spawn_start - @gen.coroutine - def mock_pre_spawn_start(user, spawner): + 
async def mock_pre_spawn_start(user, spawner): raise Exception(exc) with mock.patch.dict( diff --git a/jupyterhub/tests/test_proxy.py b/jupyterhub/tests/test_proxy.py index e912bc62..60f2d0e0 100644 --- a/jupyterhub/tests/test_proxy.py +++ b/jupyterhub/tests/test_proxy.py @@ -9,12 +9,12 @@ from urllib.parse import urlparse import pytest from traitlets.config import Config -from .. import orm from ..utils import url_path_join as ujoin from ..utils import wait_for_http_server from .mocking import MockHub from .test_api import add_user from .test_api import api_request +from .utils import skip_if_ssl @pytest.fixture @@ -27,6 +27,7 @@ def disable_check_routes(app): app.last_activity_callback.start() +@skip_if_ssl async def test_external_proxy(request): auth_token = 'secret!' proxy_ip = '127.0.0.1' diff --git a/jupyterhub/tests/test_services.py b/jupyterhub/tests/test_services.py index 127a9f45..fd46dd0f 100644 --- a/jupyterhub/tests/test_services.py +++ b/jupyterhub/tests/test_services.py @@ -6,9 +6,7 @@ from binascii import hexlify from contextlib import contextmanager from subprocess import Popen -from async_generator import async_generator from async_generator import asynccontextmanager -from async_generator import yield_ from tornado.ioloop import IOLoop from ..utils import maybe_future @@ -17,6 +15,7 @@ from ..utils import url_path_join from ..utils import wait_for_http_server from .mocking import public_url from .utils import async_requests +from .utils import skip_if_ssl mockservice_path = os.path.dirname(os.path.abspath(__file__)) mockservice_py = os.path.join(mockservice_path, 'mockservice.py') @@ -24,7 +23,6 @@ mockservice_cmd = [sys.executable, mockservice_py] @asynccontextmanager -@async_generator async def external_service(app, name='mockservice'): env = { 'JUPYTERHUB_API_TOKEN': hexlify(os.urandom(5)), @@ -35,7 +33,7 @@ async def external_service(app, name='mockservice'): proc = Popen(mockservice_cmd, env=env) try: await 
wait_for_http_server(env['JUPYTERHUB_SERVICE_URL']) - await yield_(env) + yield env finally: proc.terminate() @@ -63,6 +61,7 @@ async def test_managed_service(mockservice): assert service.proc.poll() is None +@skip_if_ssl async def test_proxy_service(app, mockservice_url): service = mockservice_url name = service.name @@ -76,6 +75,7 @@ async def test_proxy_service(app, mockservice_url): assert r.text.endswith(path) +@skip_if_ssl async def test_external_service(app): name = 'external' async with external_service(app, name=name) as env: diff --git a/jupyterhub/tests/test_services_auth.py b/jupyterhub/tests/test_services_auth.py index 867d1d97..fb657385 100644 --- a/jupyterhub/tests/test_services_auth.py +++ b/jupyterhub/tests/test_services_auth.py @@ -35,6 +35,7 @@ from .utils import AsyncSession # mock for sending monotonic counter way into the future monotonic_future = mock.patch('time.monotonic', lambda: sys.maxsize) +ssl_enabled = False def test_expiring_dict(): diff --git a/jupyterhub/tests/test_spawner.py b/jupyterhub/tests/test_spawner.py index 99b84393..7a37cc58 100644 --- a/jupyterhub/tests/test_spawner.py +++ b/jupyterhub/tests/test_spawner.py @@ -1,6 +1,7 @@ """Tests for process spawning""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +import asyncio import logging import os import signal @@ -12,7 +13,6 @@ from unittest import mock from urllib.parse import urlparse import pytest -from tornado import gen from .. import orm from .. 
import spawner as spawnermod @@ -123,7 +123,7 @@ async def test_stop_spawner_sigint_fails(db): await spawner.start() # wait for the process to get to the while True: loop - await gen.sleep(1) + await asyncio.sleep(1) status = await spawner.poll() assert status is None @@ -138,7 +138,7 @@ async def test_stop_spawner_stop_now(db): await spawner.start() # wait for the process to get to the while True: loop - await gen.sleep(1) + await asyncio.sleep(1) status = await spawner.poll() assert status is None @@ -165,7 +165,7 @@ async def test_spawner_poll(db): spawner.start_polling() # wait for the process to get to the while True: loop - await gen.sleep(1) + await asyncio.sleep(1) status = await spawner.poll() assert status is None @@ -173,12 +173,12 @@ async def test_spawner_poll(db): proc.terminate() for i in range(10): if proc.poll() is None: - await gen.sleep(1) + await asyncio.sleep(1) else: break assert proc.poll() is not None - await gen.sleep(2) + await asyncio.sleep(2) status = await spawner.poll() assert status is not None @@ -258,8 +258,7 @@ async def test_shell_cmd(db, tmpdir, request): def test_inherit_overwrite(): - """On 3.6+ we check things are overwritten at import time - """ + """On 3.6+ we check things are overwritten at import time""" if sys.version_info >= (3, 6): with pytest.raises(NotImplementedError): diff --git a/jupyterhub/tests/test_utils.py b/jupyterhub/tests/test_utils.py index 49155e92..c75c9051 100644 --- a/jupyterhub/tests/test_utils.py +++ b/jupyterhub/tests/test_utils.py @@ -1,21 +1,22 @@ """Tests for utilities""" import asyncio +import time +from concurrent.futures import ThreadPoolExecutor import pytest from async_generator import aclosing -from async_generator import async_generator -from async_generator import yield_ +from tornado import gen +from tornado.concurrent import run_on_executor from ..utils import iterate_until -@async_generator async def yield_n(n, delay=0.01): """Yield n items with a delay between each""" for i in 
range(n): if delay: await asyncio.sleep(delay) - await yield_(i) + yield i def schedule_future(io_loop, *, delay, result=None): @@ -50,13 +51,40 @@ async def test_iterate_until(io_loop, deadline, n, delay, expected): async def test_iterate_until_ready_after_deadline(io_loop): f = schedule_future(io_loop, delay=0) - @async_generator async def gen(): for i in range(5): - await yield_(i) + yield i yielded = [] async with aclosing(iterate_until(f, gen())) as items: async for item in items: yielded.append(item) assert yielded == list(range(5)) + + +@gen.coroutine +def tornado_coroutine(): + yield gen.sleep(0.05) + return "ok" + + +class TornadoCompat: + def __init__(self): + self.executor = ThreadPoolExecutor(1) + + @run_on_executor + def on_executor(self): + time.sleep(0.05) + return "executor" + + @gen.coroutine + def tornado_coroutine(self): + yield gen.sleep(0.05) + return "gen.coroutine" + + +async def test_tornado_coroutines(): + t = TornadoCompat() + # verify that tornado gen and executor methods return awaitables + assert (await t.on_executor()) == "executor" + assert (await t.tornado_coroutine()) == "gen.coroutine" diff --git a/jupyterhub/tests/utils.py b/jupyterhub/tests/utils.py index 7db84a79..8876705a 100644 --- a/jupyterhub/tests/utils.py +++ b/jupyterhub/tests/utils.py @@ -1,9 +1,12 @@ import asyncio +import os from concurrent.futures import ThreadPoolExecutor +import pytest import requests from certipy import Certipy +from jupyterhub import metrics from jupyterhub import orm from jupyterhub.objects import Server from jupyterhub.utils import url_path_join as ujoin @@ -52,6 +55,12 @@ def ssl_setup(cert_dir, authority_name): return external_certs +"""Skip tests that don't work under internal-ssl when testing under internal-ssl""" +skip_if_ssl = pytest.mark.skipif( + os.environ.get('SSL_ENABLED', False), reason="Does not use internal SSL" +) + + def check_db_locks(func): """Decorator that verifies no locks are held on database upon exit. 
@@ -97,6 +106,7 @@ def add_user(db, app=None, **kwargs): if orm_user is None: orm_user = orm.User(**kwargs) db.add(orm_user) + metrics.TOTAL_USERS.inc() else: for attr, value in kwargs.items(): setattr(orm_user, attr, value) diff --git a/jupyterhub/user.py b/jupyterhub/user.py index a970c120..bf8603f8 100644 --- a/jupyterhub/user.py +++ b/jupyterhub/user.py @@ -69,7 +69,6 @@ class UserDict(dict): """Add a user to the UserDict""" if orm_user.id not in self: self[orm_user.id] = self.from_orm(orm_user) - TOTAL_USERS.inc() return self[orm_user.id] def __contains__(self, key): diff --git a/jupyterhub/utils.py b/jupyterhub/utils.py index 5ee824d1..26bae333 100644 --- a/jupyterhub/utils.py +++ b/jupyterhub/utils.py @@ -23,10 +23,6 @@ from hmac import compare_digest from operator import itemgetter from async_generator import aclosing -from async_generator import async_generator -from async_generator import asynccontextmanager -from async_generator import yield_ -from tornado import gen from tornado import ioloop from tornado import web from tornado.httpclient import AsyncHTTPClient @@ -81,8 +77,7 @@ def can_connect(ip, port): def make_ssl_context(keyfile, certfile, cafile=None, verify=True, check_hostname=True): - """Setup context for starting an https server or making requests over ssl. - """ + """Setup context for starting an https server or making requests over ssl.""" if not keyfile or not certfile: return None purpose = ssl.Purpose.SERVER_AUTH if verify else ssl.Purpose.CLIENT_AUTH @@ -102,7 +97,7 @@ async def exponential_backoff( timeout=10, timeout_tolerance=0.1, *args, - **kwargs + **kwargs, ): """ Exponentially backoff until `pass_func` is true. 
@@ -177,7 +172,7 @@ async def exponential_backoff( dt = min(max_wait, remaining, random.uniform(0, start_wait * scale)) if dt < max_wait: scale *= scale_factor - await gen.sleep(dt) + await asyncio.sleep(dt) raise TimeoutError(fail_message) @@ -638,28 +633,12 @@ def maybe_future(obj): return asyncio.wrap_future(obj) else: # could also check for tornado.concurrent.Future - # but with tornado >= 5 tornado.Future is asyncio.Future + # but with tornado >= 5.1 tornado.Future is asyncio.Future f = asyncio.Future() f.set_result(obj) return f -@asynccontextmanager -@async_generator -async def not_aclosing(coro): - """An empty context manager for Python < 3.5.2 - which lacks the `aclose` method on async iterators - """ - await yield_(await coro) - - -if sys.version_info < (3, 5, 2): - # Python 3.5.1 is missing the aclose method on async iterators, - # so we can't close them - aclosing = not_aclosing - - -@async_generator async def iterate_until(deadline_future, generator): """An async generator that yields items from a generator until a deadline future resolves @@ -684,7 +663,7 @@ async def iterate_until(deadline_future, generator): ) if item_future.done(): try: - await yield_(item_future.result()) + yield item_future.result() except (StopAsyncIteration, asyncio.CancelledError): break elif deadline_future.done(): diff --git a/pyproject.toml b/pyproject.toml index 0097e9f6..a7630d48 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,2 +1,7 @@ [tool.black] skip-string-normalization = true +target_version = [ + "py36", + "py37", + "py38", +] diff --git a/requirements.txt b/requirements.txt index a74d1d16..e54e8f11 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,15 +1,15 @@ alembic -async_generator>=1.8 +async_generator>=1.9 certipy>=0.1.2 entrypoints jinja2 jupyter_telemetry>=0.1.0 oauthlib>=3.0 -pamela -prometheus_client>=0.0.21 +pamela; sys_platform != 'win32' +prometheus_client>=0.4.0 psutil>=5.6.5; sys_platform == 'win32' python-dateutil requests 
SQLAlchemy>=1.1 -tornado>=5.0 +tornado>=5.1 traitlets>=4.3.2 diff --git a/setup.py b/setup.py index dbd1f3d3..f6d545c6 100755 --- a/setup.py +++ b/setup.py @@ -17,8 +17,8 @@ from setuptools.command.bdist_egg import bdist_egg v = sys.version_info -if v[:2] < (3, 5): - error = "ERROR: JupyterHub requires Python version 3.5 or above." +if v[:2] < (3, 6): + error = "ERROR: JupyterHub requires Python version 3.6 or above." print(error, file=sys.stderr) sys.exit(1) @@ -94,7 +94,7 @@ setup_args = dict( license="BSD", platforms="Linux, Mac OS X", keywords=['Interactive', 'Interpreter', 'Shell', 'Web'], - python_requires=">=3.5", + python_requires=">=3.6", entry_points={ 'jupyterhub.authenticators': [ 'default = jupyterhub.auth:PAMAuthenticator', diff --git a/share/jupyterhub/templates/login.html b/share/jupyterhub/templates/login.html index 5c00d31d..9359d9ec 100644 --- a/share/jupyterhub/templates/login.html +++ b/share/jupyterhub/templates/login.html @@ -61,7 +61,7 @@ id="login_submit" type="submit" class='btn btn-jupyter' - value='Sign In' + value='Sign in' tabindex="3" />