Compare commits

787 Commits
0.2.0 ... 0.7.0

Author SHA1 Message Date
Min RK
c3faef8e2a release 0.7.0 2016-12-02 18:02:20 +01:00
Carol Willing
d2175635af Merge pull request #895 from minrk/release-0.7
Update changelog for 0.7 final
2016-12-02 10:47:21 -06:00
Min RK
1f7401cd14 Update changelog for 0.7 final 2016-12-02 17:35:19 +01:00
Min RK
c94b3e34d2 Merge pull request #894 from minrk/disable-token
disable unused token on singleuser-server
2016-12-02 17:01:50 +01:00
Carol Willing
566e1d05ea Merge pull request #893 from minrk/expanduser
call expanduser on singleuser notebook_dir
2016-12-01 21:53:24 -06:00
Min RK
0488d0bd73 call expanduser on singleuser notebook_dir
This copies validate_notebook_dir from notebook with one addition:
calling expanduser.
2016-12-01 22:04:18 +01:00
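
For illustration, the change described above amounts to expanding a leading "~" in the configured directory, e.g.:

    import os

    notebook_dir = '~/notebooks'
    notebook_dir = os.path.expanduser(notebook_dir)  # e.g. /home/<user>/notebooks
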
Min RK
ca31d9b426 disable token on singleuser-server
fixes confusing output about token access in notebook server startup
2016-12-01 21:59:44 +01:00
Min RK
8721f9010f Merge pull request #892 from yuvipanda/maybe-async
Document that authenticator's add_user may be a coroutine
2016-12-01 17:58:02 +01:00
YuviPanda
88de48ebac Document that authenticator's add_user may be a coroutine 2016-12-01 19:31:23 +05:30
Min RK
d5a6e2b2ac Merge pull request #886 from yuvipanda/spawner-docs 2016-11-30 13:48:05 +01:00
Min RK
2152a94156 review pass on spawner docstring changes
- small wording, spelling tweaks
- rst formatting fixes
- remove some spurious, cluttering newlines
- clearer traitlets default values on first line
2016-11-30 13:43:59 +01:00
Min RK
bc3824e9bf review pass on auth docstrings 2016-11-30 13:22:07 +01:00
YuviPanda
60bc92cf78 Spawner doc fixes per @willingc 2016-11-30 14:02:02 +05:30
YuviPanda
3b15467738 Clearer module docstring for spawner.py 2016-11-29 16:26:34 +08:00
YuviPanda
4970fe0a1c Add more docs for spawner base class 2016-11-29 16:25:15 +08:00
YuviPanda
7dbe2425b8 Fix typo 2016-11-29 16:00:25 +08:00
YuviPanda
433d44a642 Add docs for PAMAuthenticator 2016-11-29 15:58:35 +08:00
YuviPanda
7733d320d0 Add more docs to LocalAuthenticator 2016-11-29 15:56:16 +08:00
YuviPanda
20d367c2a8 Add more docs for authenticator base class 2016-11-29 15:55:32 +08:00
YuviPanda
4687fbe075 Add extended docs for LocalProcessSpawner too 2016-11-28 23:07:54 -08:00
YuviPanda
b0dc52781e Add info about shell expansion to cmd / args traitlets
We should probably standardize this too
2016-11-28 22:45:06 -08:00
YuviPanda
4f1f7d6b8f Add example use for default_url traitlet 2016-11-28 22:42:10 -08:00
YuviPanda
41f8608f4e Fix port config documentation to match reality 2016-11-28 22:41:47 -08:00
Min RK
ba3a8f2e76 Merge pull request #887 from yuvipanda/rename-spec
Rename MemorySpecification to ByteSpecification
2016-11-28 10:27:41 +01:00
YuviPanda
12e3a5496d Rename MemorySpecification to ByteSpecification 2016-11-27 17:57:34 -08:00
YuviPanda
280644bab5 Expand traitlet documentation for spawner base class 2016-11-27 17:53:41 -08:00
Carol Willing
bf28371356 Merge pull request #882 from minrk/alembic.ini
add alembic.ini to package_data
2016-11-22 08:10:43 -08:00
Carol Willing
ce237181f2 Merge pull request #881 from minrk/more-allow-async-whitelist
handle async check_whitelist in app
2016-11-22 07:46:27 -08:00
Min RK
85ca5a052e add alembic.ini to package_data 2016-11-22 16:21:03 +01:00
Min RK
db8b3dbce9 handle async check_whitelist in app
follow-up to previous PR
2016-11-22 16:06:08 +01:00
Min RK
9c2d56f015 Merge pull request #876 from jbweston/bugfix/whitelist-coroutine
allow `check_whitelist` to be a coroutine
2016-11-22 09:56:28 +01:00
Joseph Weston
d244a1e02f allow check_whitelist to be a coroutine
Some authenticators may have whitelist checking that requires
async operations.
2016-11-21 16:14:02 +01:00
Min RK
9f134277a9 Merge pull request #872 from jupyterhub/willingc-patch-1
Change py.test to newer convention of pytest
2016-11-16 10:18:15 +01:00
Carol Willing
ef9aca7bcb Change py.test to newer convention of pytest 2016-11-15 14:13:03 -08:00
Min RK
32f39f23eb Merge pull request #871 from jupyterhub/willingc-patch-1
Add info on tests to README
2016-11-15 20:39:59 +01:00
Carol Willing
c9b2beb821 Add info on tests to README 2016-11-15 06:35:39 -08:00
Min RK
e9ad82e350 release 0.7b1 2016-11-12 18:36:36 -08:00
Min RK
347dd3cc0f prune docs/node_modules 2016-11-12 18:36:30 -08:00
Min RK
798346dbe8 Merge pull request #869 from willingc/doc-service
Edit Services doc content
2016-11-12 18:28:24 -08:00
Carol Willing
fd94c6de17 Fix missing link target 2016-11-12 18:16:59 -08:00
Carol Willing
3fc6fc32c5 Add review comment by @parente 2016-11-12 18:07:17 -08:00
Carol Willing
a1b6aa5537 Add troubleshooting commands 2016-11-12 18:01:53 -08:00
Carol Willing
f9965bb3c3 Add example of Service per @parente 2016-11-12 17:37:30 -08:00
Carol Willing
541997371c Fix broken or changed links 2016-11-12 17:17:00 -08:00
Carol Willing
522c3e5bee Edit Services doc content 2016-11-12 16:54:57 -08:00
Carol Willing
1baf434695 Merge pull request #868 from minrk/more-changes
Add a few more things in the changelog
2016-11-12 13:40:16 -08:00
Carol Willing
92db71f293 Merge pull request #867 from minrk/upgrade-db-backup
backup db file during upgrade-db
2016-11-12 13:39:39 -08:00
Min RK
b985f8384d Add a few more things in the changelog 2016-11-12 12:54:44 -08:00
Min RK
4c2d049e70 backup db file during upgrade-db 2016-11-12 12:44:59 -08:00
Carol Willing
605c4f121c Merge pull request #866 from parente/note-about-db-secrets
Add two short notes about db security
2016-11-12 12:03:21 -08:00
Peter Parente
4baf5035cb Reflow markdown for easier editing 2016-11-12 11:57:45 -08:00
Peter Parente
f8a57eb7d9 Add two short notes about db security 2016-11-12 11:49:17 -08:00
Min RK
93ac343493 Merge pull request #865 from willingc/doc-tidbits
Add documentation prior to 0.7 beta
2016-11-12 11:40:53 -08:00
Carol Willing
dc092186f0 Edit example for clarity 2016-11-12 11:27:36 -08:00
Carol Willing
6b7c319351 Add intro and standardize code format 2016-11-12 11:15:44 -08:00
Carol Willing
ef5885f769 Make minor edits 2016-11-12 11:15:02 -08:00
Peter Parente
0ffd53424d Merge pull request #861 from willingc/issuetempl
Add initial issue template
2016-11-11 13:36:11 -08:00
Carol Willing
5f464d01b4 Soften tone 2016-11-11 10:44:20 -08:00
Yuvi Panda
0a054cc651 Merge pull request #858 from willingc/post855-edits
Reflow text and minor edits following PR #855
2016-11-11 09:54:14 -08:00
Carol Willing
348af48d45 Merge pull request #863 from minrk/checklist-checklist
Make the release checklist a GFM checklist
2016-11-11 09:31:47 -08:00
Min RK
4d03c00dab Make the release checklist a GFM checklist
so we can paste into a new issue when preparing for a release
2016-11-11 09:14:07 -08:00
Min RK
7a71074a55 Merge pull request #860 from willingc/release-checklist
Add a high-level release checklist
2016-11-11 08:23:12 -08:00
Carol Willing
5527a3e7dd Fix spacing 2016-11-11 07:39:23 -08:00
Carol Willing
f961800fa4 Add troubleshoot command per @parente review 2016-11-11 07:37:43 -08:00
Peter Parente
adbf961433 Merge pull request #859 from willingc/contrib-thanks
Update contributor thank you list
2016-11-11 07:30:02 -08:00
Carol Willing
73e130cb2c Add initial issue template 2016-11-11 07:03:05 -08:00
Carol Willing
a44f178b64 Fix typo 2016-11-11 03:56:42 -08:00
Carol Willing
057fe32e3b Add release checklist 2016-11-11 03:54:33 -08:00
Carol Willing
cad9ffa453 Update contributor thank you list 2016-11-11 03:29:42 -08:00
Carol Willing
a11193a240 Reflow text and minor edits 2016-11-11 03:13:51 -08:00
Carol Willing
ea61a580b3 Merge pull request #855 from yuvipanda/limits-env
Add docs for the LIMIT_ and GUARANTEE_ conventions
2016-11-11 02:43:05 -08:00
Min RK
0bf6db92dd typo in example 2016-11-10 17:07:48 -08:00
YuviPanda
b0f38e7626 Fix docs to match reality 2016-11-10 14:38:09 -08:00
YuviPanda
0f237f28e7 Rename the env variables
Match the traitlet names
2016-11-10 14:37:50 -08:00
YuviPanda
d63bd944ac Add clarifying comment about limit / guarantee env variables 2016-11-10 10:39:44 -08:00
YuviPanda
54e28d759d Some inline doc fixups 2016-11-10 10:31:04 -08:00
YuviPanda
a00c13ba67 Set allow_none=True for limit/guarantee floats 2016-11-09 09:41:54 -08:00
YuviPanda
b4bc5437dd Set allow_none = True as default for MemorySpecification 2016-11-08 22:43:47 -08:00
Min RK
13bc0397f6 Merge pull request #767 from willingc/upgrade723
Add document on upgrading JupyterHub and its db
2016-11-08 18:13:27 -08:00
YuviPanda
9eb30f6ff6 Add resource limits / guarantees consistently to jupyterhub
- Allows us to standardize this on the spawner base class,
  so there's a consistent interface for different spawners
  to implement this.
- Specify the supported suffixes and various units we accept
  for memory and cpu units.
- Standardize the way we expose resource limit / guarantees
  to single-user servers
2016-11-08 17:17:10 -08:00
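
A hedged config sketch of the convention described above, assuming the limit/guarantee traitlets on the Spawner base class and the corresponding environment variables; values are illustrative, and not all spawners enforce them:

    # jupyterhub_config.py -- byte values accept suffixes (e.g. K/M/G) per the
    # ByteSpecification traitlet mentioned earlier in this log
    c.Spawner.mem_limit = '1G'        # exposed to the single-user server as MEM_LIMIT
    c.Spawner.mem_guarantee = '512M'  # exposed as MEM_GUARANTEE
    c.Spawner.cpu_limit = 2.0         # exposed as CPU_LIMIT
    c.Spawner.cpu_guarantee = 0.5     # exposed as CPU_GUARANTEE
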
YuviPanda
17f20d8593 Add docs for the LIMIT_ and GUARANTEE_ conventions
https://github.com/jupyterhub/jupyterhub/issues/854 has
rationale for why, and links to PRs.
2016-11-08 16:19:25 -08:00
Carol Willing
cd23e086a8 Add an upgrade checklist 2016-11-08 12:04:57 -08:00
Carol Willing
03087f20fe Add additional database content from @minrk review 2016-11-08 11:51:42 -08:00
Carol Willing
f536eb4629 Change title 2016-11-08 10:50:49 -08:00
Carol Willing
f3e814aa8a Minor edits 2016-11-08 10:50:49 -08:00
Carol Willing
5fb0a6dffe Add note on databases 2016-11-08 10:50:49 -08:00
Carol Willing
c7ba86d1d8 Add upgrade instructions 2016-11-08 10:50:49 -08:00
Carol Willing
38dcc694b7 Add shutdown and upgrade steps 2016-11-08 10:50:49 -08:00
Carol Willing
fdfffefefa Update process steps 2016-11-08 10:50:49 -08:00
Carol Willing
4e7704afd9 Edit heading levels 2016-11-08 10:50:49 -08:00
Carol Willing
b52fcf4936 Add structure to upgrading doc 2016-11-08 10:50:49 -08:00
Carol Willing
539be2f08e Add basics for alembic 2016-11-08 10:50:49 -08:00
Carol Willing
29b2836c50 Add wip upgrade doc 2016-11-08 10:50:49 -08:00
Min RK
3a757d003a Merge pull request #852 from parente/use-conda-forge
[WIP] Update Dockerfile
2016-11-08 09:58:36 -08:00
Peter Parente
236802be1f Update Dockerfile
* Use nodejs, CHP from condaforge
* Bump the version of conda used
2016-11-07 18:46:04 -08:00
Carol Willing
4a2c9e97c6 Merge pull request #844 from willingc/secure-doc
Reflow text in websecurity doc
2016-11-01 15:15:16 -07:00
Carol Willing
0444d8465c Reflow text in doc 2016-11-01 14:27:49 -07:00
Carol Willing
faef34e4ff Merge pull request #838 from minrk/ensure-strings
quotes around single-user CLI args
2016-11-01 14:05:56 -07:00
Carol Willing
c174ec42f0 Merge pull request #842 from minrk/generate-path-error
finish error message when generate-config path does not exist
2016-11-01 09:26:52 -07:00
Min RK
d484728de9 check directory existence when writing config file
rather than file

and put output on stderr with exit message
2016-11-01 14:47:44 +01:00
Min RK
7da7f7e074 quotes around single-user CLI args
avoids mishandling things such as integer-literals
2016-11-01 12:07:25 +01:00
Min RK
53bdcd7d74 Merge pull request #840 from parente/clear-services-cookie
Fix jupyter-services cookie reset on logout
2016-10-31 13:25:43 +01:00
Peter Parente
1849964699 Fix jupyter-services cookie reset on logout
It currently remains set after logout from the hub allowing the user to
continue to access any services.
2016-10-30 22:36:31 -04:00
Carol Willing
5163c7a97f Merge pull request #824 from minrk/allow-empty-state
Don't assume empty state means not running
2016-10-27 08:33:54 -07:00
Min RK
b9daef9947 docstring review 2016-10-27 11:41:23 +02:00
Carol Willing
f16e0488ab Merge pull request #837 from Scrypy/issue-821
Updated authenticators and spawner docs
2016-10-26 15:44:09 -07:00
Daniel Martinez
adc16be4dc Updated spawners docs 2016-10-26 16:50:25 -05:00
Daniel Martinez
3e4b4149de Updated authenticators docs 2016-10-26 16:48:15 -05:00
Min RK
c392bae7e4 Merge pull request #835 from willingc/check-return
Edit model check to be consistent for user and group
2016-10-26 23:25:24 +02:00
Carol Willing
2e5373aa37 Edit model check to be consistent for user and group 2016-10-26 12:03:53 -07:00
Min RK
5412cd414f Merge pull request #832 from willingc/replace-warn
Use warning instead of warn for logs
2016-10-26 13:26:41 +02:00
Carol Willing
d957c5158f Use warning instead of warn for logs 2016-10-26 04:06:29 -07:00
Carol Willing
4a622cb964 Merge pull request #831 from jupyterhub/willingc-patch-1
Remove duplicate word in docstring
2016-10-26 02:24:39 -07:00
Carol Willing
69e721de46 Remove duplicate word in docstring 2016-10-26 02:19:49 -07:00
Carol Willing
f3f130f452 Merge pull request #830 from minrk/services-todo
Flesh out custom services examples
2016-10-26 02:16:39 -07:00
Min RK
fd4a04e3f3 docs review 2016-10-26 10:22:54 +02:00
Min RK
85c040ab8e flesh out custom services doc 2016-10-25 13:28:13 +02:00
Min RK
2bb4cd4739 allow HubAuthenticated to check groups 2016-10-25 13:27:57 +02:00
Min RK
4c3b134f10 add flask whoami service
for a non-tornado example
2016-10-25 13:24:46 +02:00
Carol Willing
bb8536b553 Merge pull request #826 from Scrypy/issue-822
Updated spawner docs
2016-10-24 23:21:00 -07:00
Carol Willing
8998fd480c Merge pull request #829 from Todd-Z-Li/issue-823
Added funky ascii art to previous TODO messages.
2016-10-24 18:40:25 -07:00
Carol Willing
d948fed0b5 Merge pull request #828 from temogen/deldoc
Deleted IPython from howitworks doc.
2016-10-24 18:38:17 -07:00
Daniel Anthony Noventa
fcfe6314ac Deleted IPython from howitworks docs. 2016-10-24 19:13:57 -05:00
Todd
dcfe2aa792 Added funky ascii art to previous TODO messages. 2016-10-24 19:03:21 -05:00
Danowsky
85790ab9d8 Updated spawner docs 2016-10-24 18:57:17 -05:00
Min RK
adda2fcd90 Don't assume empty state means not running
Some Spawners may not need state,
and they should be allowed to resume on Hub restart as well.

Adds some detail about when .poll may be called and how it should behave in less obvious circumstances
2016-10-21 16:28:40 +02:00
Min RK
5604e983db Merge pull request #818 from minrk/unmanaged-no-start
don’t try to start unmanaged services
2016-10-19 10:44:14 +02:00
Min RK
386563a10a don’t try to start unmanaged services 2016-10-18 16:18:03 +02:00
Min RK
0e3c5cf625 statsd typo 2016-10-18 16:17:49 +02:00
Min RK
a3eb2d2b9a Merge pull request #815 from kinuax/fix-setting-in-configuration-example
Fix setting in configuration example
2016-10-14 13:03:45 +02:00
Asier
b6a8860a44 Fix setting in configuration example 2016-10-13 13:45:23 -05:00
Carol Willing
b8a649ae86 Add error message when generate config path does not exist 2016-10-13 07:20:38 -07:00
Min RK
7774bfc612 Merge pull request #811 from willingc/quick-install
Sync quick install steps with PyData tutorial
2016-10-13 10:56:55 +02:00
Carol Willing
9f76613aed Sync quick install steps with PyData tutorial 2016-10-12 18:06:11 -07:00
Min RK
f1ccbe4bed Merge pull request #807 from willingc/normalize-whitelist
Add tests for username normalization
2016-10-12 16:21:19 +02:00
Carol Willing
668d78f729 Add tests for username normalization 2016-10-11 16:44:24 -07:00
Min RK
0009b9a3d6 Merge pull request #805 from danielballan/template-vars
MNT: Add hub host and prefix to template vars in prep for JLab extension
2016-10-11 18:18:21 +02:00
danielballan
b2be07ea6a MNT: Add hub host and prefix to template vars in prep for JLab extension. 2016-10-11 11:27:50 -04:00
Min RK
74649eaad0 Merge pull request #804 from willingc/ssl-termination
Clarify deprecation of --no-ssl
2016-10-11 12:08:55 +02:00
Carol Willing
f33086aa13 Clarify deprecation of --no-ssl 2016-10-10 12:05:39 -07:00
Min RK
9c1cd960fc Merge pull request #801 from minrk/warn-about-direct-connect
try to detect and warn about connecting directly to the Hub
2016-10-10 10:36:50 +02:00
Min RK
3a5226ffa0 Merge pull request #802 from minrk/spawn-pending-finish
add User.waiting_for_response
2016-10-07 11:53:04 +02:00
Min RK
96a53f9921 Merge pull request #797 from ianabc/redirection_loop
spawn_pending set too soon causing redirect loop
2016-10-07 11:13:09 +02:00
Min RK
ff92ac9dad more mocking in tests
avoids no_patience state leaking into other tests
2016-10-07 10:59:32 +02:00
Min RK
933478bfff add waiting_for_response indicator on User
.spawn_pending used for the *whole* window, from request to responsive (added to proxy)
.waiting_for_response is just used for the window between Spawner.start returning (process started, http endpoint known) and http endpoint becoming responsive

.waiting_for_response will never be True while .spawn_pending is False
2016-10-07 10:59:05 +02:00
Min RK
7d996f91b0 try to detect and warn about connecting directly to the Hub
This is guaranteed to result in a redirect loop.
2016-10-07 10:16:21 +02:00
Min RK
c818cbb644 Merge pull request #799 from willingc/doc-install
Move README installation instructions to docs
2016-10-06 19:46:51 +02:00
Carol Willing
e638e5b684 Move README installation instructions to docs 2016-10-06 04:37:57 -07:00
Ian Allison
625e76ea40 spawn_pending set too soon causing redirect loop
Signed-off-by: Ian Allison <iana@pims.math.ca>
2016-10-05 13:28:52 -07:00
Min RK
f8229c9fb6 Merge pull request #793 from willingc/slimconfpy
Slim conf.py comments and options cruft
2016-10-04 15:04:32 +02:00
Min RK
47da422a93 Merge pull request #758 from willingc/update-changes
Add changes for 0.7 release
2016-10-04 14:47:00 +02:00
Carol Willing
3dd98bc0fc Slim conf.py comments and options cruft 2016-10-04 05:28:03 -07:00
Carol Willing
fa6e4aa449 Add pr 789 deprecate --no-ssl 2016-09-30 09:02:58 -07:00
Carol Willing
182472f921 Changes per @minrk review 2016-09-30 08:57:35 -07:00
Carol Willing
d99afe531d Add changes for 0.7 release 2016-09-30 08:57:35 -07:00
Carol Willing
b6b238073f Merge pull request #789 from minrk/deprecate-no-ssl
Deprecate `--no-ssl`
2016-09-30 08:42:07 -07:00
Min RK
a4c696d3bd Merge pull request #788 from willingc/warehouse
Update link to docs
2016-09-30 17:03:34 +02:00
Min RK
bce767120c Merge pull request #785 from willingc/devclarity
Clarify docstring
2016-09-30 16:58:23 +02:00
Min RK
6a9f346b21 Deprecate --no-ssl
it's unnecessarily pedantic. Just warn instead.
2016-09-30 16:16:33 +02:00
Carol Willing
d4646e1caa Update link 2016-09-28 20:54:57 -07:00
Carol Willing
77f0e00695 Clarify docstring 2016-09-28 07:36:29 -07:00
Carol Willing
26a6c89b3a Merge pull request #778 from minrk/shutdown-services
cleanup managed services in shutdown
2016-09-27 09:53:50 -07:00
Carol Willing
34297b82b3 Merge pull request #777 from minrk/service-cookie
Work on service authentication
2016-09-27 09:53:12 -07:00
Carol Willing
70727c4940 Merge pull request #776 from minrk/cleanup-on-start
remove stopped users from proxy on startup
2016-09-27 09:51:01 -07:00
Min RK
56080e5436 Merge pull request #782 from spoorthyv/master
Updated Logos To Match New Brand Guidelines
2016-09-27 15:41:44 +02:00
spoorthyv
309b1bda75 Updated Logos 2016-09-26 15:56:11 -07:00
Min RK
f3ebb694b4 Merge pull request #780 from minrk/travis-no-verbose-pip
remove -v from pip install on travis
2016-09-26 17:06:44 +02:00
Min RK
f35c14318a Merge pull request #779 from minrk/docker-cdn
Dockerfile: set debian CDN
2016-09-26 17:06:01 +02:00
Min RK
b60f2e8233 remove -v from pip install on travis
it makes way too much noise
2016-09-26 17:03:51 +02:00
Min RK
f1a55e31ce Dockerfile: set debian CDN
because the default httpredir fails with some regularity

based on info from http://deb.debian.org
2016-09-26 16:58:31 +02:00
Min RK
2432611264 cleanup managed services in shutdown
don’t leave them running
2016-09-26 15:20:34 +02:00
Min RK
729b608eff Fix setting cookie for services
and exercise it in tests
2016-09-26 14:30:00 +02:00
Min RK
eb3252da28 simplify whoami service example
rely on defaults in HubAuthenticated to show how simple it can be
2016-09-26 14:18:54 +02:00
Min RK
a9e9338ee4 get HubAuth defaults from service env variables
allows use of HubAuthenticated without any arguments
2016-09-26 14:13:04 +02:00
Min RK
aad063e3cd remove stopped users from proxy on startup
We already added running users, but we didn't handle removing users from the proxy
if the user's server was stopped (e.g. while the Hub was restarting).
2016-09-26 13:20:42 +02:00
Min RK
be00265d1a Merge pull request #762 from willingc/swagger
Edit descriptions in API spec for user clarity
2016-09-25 14:39:42 +02:00
Min RK
335ba4f453 Merge pull request #771 from willingc/faq-adds
Added navigation links and workshop best practices
2016-09-25 14:38:45 +02:00
Carol Willing
5a4f3a4910 Added navigation links and workshop best practices 2016-09-22 10:02:29 -07:00
Carol Willing
7ee4be0f13 Remove api review notes doc 2016-09-22 09:13:39 -07:00
Carol Willing
10c3fbe5cf Add changes per @minrk 2016-09-22 09:12:26 -07:00
Carol Willing
13826a41a1 Merge pull request #769 from minrk/service-start-yield
service.start is not a coroutine
2016-09-22 03:21:47 -07:00
Min RK
cb35026637 service.start is not a coroutine
don’t yield it
2016-09-22 12:04:31 +02:00
Min RK
24c080cf4a Merge pull request #768 from minrk/service-url
only set service URL env if there's a URL to set
2016-09-22 11:57:44 +02:00
Min RK
e9fc629285 only set service URL env if there's a URL to set
These fields are only relevant for services with a web endpoint
2016-09-21 12:39:07 +02:00
Min RK
150b67c1c9 Merge pull request #761 from willingc/apidocs
Update API docs
2016-09-21 10:34:57 +02:00
Carol Willing
acdee0ac29 Change notes from txt to md 2016-09-19 12:05:26 -07:00
Carol Willing
193b236ef1 Add additional review questions re: API 2016-09-19 11:52:43 -07:00
Carol Willing
1851e6a29d Edit descriptions in API spec for user clarity 2016-09-19 10:49:56 -07:00
Carol Willing
74f086629c Update API docs 2016-09-19 08:42:28 -07:00
Min RK
33a59c8352 Merge pull request #757 from willingc/doc-contrib
Add contributor list to the docs and update the contents
2016-09-19 09:01:54 +02:00
Carol Willing
08644fea74 Add services to index/table of contents 2016-09-18 15:33:01 -07:00
Carol Willing
f878bf6ad3 Add contribution list to documentation 2016-09-18 15:29:54 -07:00
Carol Willing
651c457266 Add contributor list 2016-09-18 15:28:51 -07:00
Carol Willing
2dd3463ea8 Merge pull request #748 from minrk/string-formatting
Deprecate `%U` username substitution
2016-09-18 06:34:01 -04:00
Carol Willing
ad93af8cc8 Merge pull request #749 from minrk/single-user-help-all
exercise single-user help output
2016-09-18 06:33:38 -04:00
Min RK
080cf7a29b exercise single-user help output
and tweak some of its output
2016-09-15 13:04:09 +02:00
Min RK
b8f4803ef4 Deprecate %U username substitution
use Python format-strings instead.
2016-09-15 12:05:46 +02:00
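
A minimal sketch of the migration implied above, with an illustrative config value:

    # jupyterhub_config.py
    # c.Spawner.notebook_dir = '/home/%U/notebooks'        # old %U substitution, deprecated
    c.Spawner.notebook_dir = '/home/{username}/notebooks'  # Python format-string style
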
Min RK
4a8f51ed6d Merge pull request #741 from willingc/add-lab
Add info on trying out JupyterLab
2016-09-13 10:31:49 +02:00
Carol Willing
7923074ed5 Add info on trying out JupyterLab 2016-09-12 22:22:22 -07:00
Min RK
834b2ba77d Merge pull request #739 from jupyterhub/fix-readme-1
Add --no-ssl to docker run command in README
2016-09-10 10:55:41 +02:00
Yuvi Panda
7897a13ca5 Add --no-ssl to docker run command
Otherwise this doesn't run by default, and someone in gitter ran into this earlier.
2016-09-09 10:57:19 -07:00
Min RK
7987011372 Merge pull request #738 from willingc/inspired-zulip
Add navigation to README
2016-09-09 13:38:22 +02:00
Min RK
d7a76077bd Merge pull request #734 from minrk/deprecated-local-spawner-subclasses
backward-compat for ip, port in LocalProcessSpawner subclasses
2016-09-09 13:37:58 +02:00
Min RK
62731cf489 Merge pull request #727 from willingc/servicespec
Edit Services document
2016-09-09 10:29:10 +02:00
Carol Willing
5d501bc465 Add navigation to README 2016-09-08 22:41:44 -07:00
Kyle Kelley
63a6841848 Merge pull request #737 from willingc/issue438
Add info on configuring pySpark executors on YARN
2016-09-08 17:59:24 -05:00
Carol Willing
403241bd98 Add reference to official pySpark docs 2016-09-08 15:28:54 -07:00
Carol Willing
de3fe88df6 Fix code indentation for markdown 2016-09-08 15:11:25 -07:00
Carol Willing
6a370286e1 Add info on setting pySpark executors on YARN 2016-09-08 15:09:20 -07:00
Carol Willing
491b7e7d11 Use Hub-Managed and Externally-Managed 2016-09-08 08:23:47 -07:00
Min RK
0b0db97117 Merge pull request #728 from willingc/cull-idle
Add readme to cull-idle example to demonstrate managed services
2016-09-08 16:57:16 +02:00
Min RK
42a993fd08 backward-compat for ip, port in LocalProcessSpawner subclasses
Subclasses prior to 0.6 may assume return value
of LocalProcessSpawner.start can be ignored
instead of passing it through.

For these cases, keep setting ip/port in the deprecated way
so that it still works with a warning,
rather than failing with the wrong port.
2016-09-08 16:54:52 +02:00
Carol Willing
fd1544bf41 Edits per @minrk's review 2016-09-08 07:16:25 -07:00
Carol Willing
ed36207328 Merge pull request #731 from willingc/issue654
Add config for default URL to FAQ
2016-09-08 07:05:56 -07:00
Carol Willing
a0b8ccf805 Add config for whole filesystem access and user home directory as default 2016-09-08 06:54:23 -07:00
Min RK
9d2278d29b Merge pull request #733 from willingc/issue594
Add troubleshooting info about sudospawner
2016-09-08 14:51:57 +02:00
Min RK
df42385d7e Merge pull request #732 from willingc/issue632
Add info on updates and Qualsys SSL analyzer to docs
2016-09-08 14:51:42 +02:00
Min RK
02796d4daa Merge pull request #730 from willingc/issue661
Add install instructions with no network to FAQ
2016-09-08 14:50:44 +02:00
Carol Willing
80c5f67335 Add troubleshooting info about sudospawner 2016-09-08 00:57:17 -07:00
Carol Willing
0b14e89404 Add info on updates and Qualsys SSL analyzer to docs 2016-09-07 22:00:33 -07:00
Carol Willing
f595b1ad59 Add clarification re: run on hub not each single user server 2016-09-07 21:21:10 -07:00
Carol Willing
80ca1eacc5 Add install instructions with no network to FAQ 2016-09-07 21:06:56 -07:00
Carol Willing
5b3ac6c840 Add readme to cull-idle example 2016-09-07 14:01:46 -07:00
Carol Willing
0000b7447a Make command consistent with examples/cull-idle 2016-09-07 13:52:33 -07:00
Carol Willing
a22060ca7f Edit Services document 2016-09-07 11:39:15 -07:00
Min RK
8ca321ecc3 Merge pull request #705 from minrk/actual-services
WIP: implement services API
2016-09-07 13:43:54 +02:00
Min RK
862cb3640b Merge pull request #722 from minrk/setuptools-no-egg
always install with setuptools
2016-09-07 13:38:06 +02:00
Min RK
51908c9673 clarifications from review 2016-09-07 13:19:09 +02:00
Min RK
9aa4046093 always install with setuptools
but not eggs (effectively require pip install .)
2016-09-05 15:46:20 +02:00
Min RK
acb49adfea Merge pull request #719 from Mistobaan/patch-1
fix docker repository
2016-09-05 10:38:12 +02:00
Fabrizio Milo
f345ad5422 fix docker repository 2016-09-02 14:45:16 -07:00
Min RK
5ad618bfc1 add API endpoint for services 2016-09-02 15:19:45 +02:00
Min RK
26b00578a1 remove redundant user_url utility
public_url works for users now
2016-09-02 13:22:49 +02:00
Min RK
c3111b04bb support services subdomain
- all services are on the 'services' domain, share the same cookie
2016-09-02 13:21:46 +02:00
Min RK
a61ba74360 Merge pull request #717 from minrk/hubauth-defaults
HubAuth login_url changes:
2016-09-02 12:05:17 +02:00
Min RK
4de93fd1d5 Merge pull request #718 from willingc/sdist-one
Remove zip from sdist build per PEP 527
2016-09-02 11:45:44 +02:00
Min RK
46bb7b05f4 strict host matching by including / 2016-09-02 11:44:51 +02:00
Carol Willing
1aa2cb1921 Remove zip from sdist build per PEP 527 2016-09-01 07:33:10 -07:00
Min RK
c4bfa63fd6 allow full URLs for login redirects iff they are for our host 2016-09-01 15:10:02 +02:00
Min RK
4c5d6167bd use just path for default hub auth login_url 2016-09-01 15:07:00 +02:00
Min RK
9a002c2445 update services doc with some feedback 2016-09-01 15:01:02 +02:00
Min RK
f97d32c5bd add services to the proxy
and start test coverage
2016-09-01 14:46:34 +02:00
Min RK
bac311677f Merge pull request #711 from willingc/update-change
Update changelog format
2016-08-29 12:01:08 +02:00
Carol Willing
94cb5b3a05 Update changelog format 2016-08-29 02:39:39 -07:00
Carol Willing
ed4f0ba014 Merge pull request #707 from willingc/mytheme
Update conda env and conf.py for clean build
2016-08-28 10:42:49 -07:00
Carol Willing
fd219b5fff Update conda env and conf.py for clean build 2016-08-28 10:08:00 -07:00
Min RK
140c4f2909 use services API in cull-idle example 2016-08-27 13:23:45 +02:00
Min RK
a1c787ba5f basic implementation of managed services
- managed services are automatically restarted
- proxied services not there yet
2016-08-27 12:59:26 +02:00
Min RK
54c808fe98 Service specification document 2016-08-26 17:25:53 +02:00
Min RK
eaeec9f19b Merge pull request #693 from willingc/doc-revise
Documentation refresh
2016-08-24 23:05:54 +02:00
Min RK
21d25ac130 Merge pull request #689 from minrk/log-add-user-error
log errors adding users already in db
2016-08-21 22:19:39 +02:00
Min RK
eda21642bd log errors adding users already in db
avoids removal of system users preventing Hub startup
2016-08-21 22:07:46 +02:00
Carol Willing
aace54d5b2 Merge pull request #699 from jhamrick/swarm-docs
Remove link to SwarmSpawner
2016-08-19 20:41:18 -07:00
Jessica B. Hamrick
e460c00759 Remove link to SwarmSpawner 2016-08-20 02:02:08 +01:00
Carol Willing
678fd1cd08 Shorten name 2016-08-18 10:36:40 -07:00
Carol Willing
42c78f3c43 Drop back to old environment 2016-08-18 10:20:57 -07:00
Carol Willing
548e0f6153 Edits to technical overview 2016-08-18 09:58:37 -07:00
Carol Willing
31f63c737f Add image of JupyterHub parts to index 2016-08-18 09:15:48 -07:00
Carol Willing
71b35602d7 Edit grammar in index 2016-08-18 08:36:46 -07:00
Carol Willing
7c41a024ba Fix typo 2016-08-18 05:05:46 -07:00
Carol Willing
51097de43d Update contents format 2016-08-18 04:52:51 -07:00
Carol Willing
44e16d538d Edit and corrections 2016-08-18 04:52:22 -07:00
Carol Willing
f6517d01db Move 'Using API' to user guide 2016-08-18 04:51:48 -07:00
Carol Willing
039b925cf6 Edit config-examples 2016-08-18 04:50:11 -07:00
Carol Willing
bba5460236 Simplify formatting of troubleshooting doc 2016-08-18 04:49:30 -07:00
Carol Willing
e5d3705a1a Edit headings for authenticators and spawners docs 2016-08-18 04:48:43 -07:00
Carol Willing
7b80b95a49 Add checks for spelling 2016-08-18 04:47:21 -07:00
Carol Willing
75cb487ab3 Update conf 2016-08-17 15:11:57 -07:00
Carol Willing
eba4b3e8c7 More doc edits 2016-08-17 15:11:17 -07:00
Carol Willing
712b895d8e WIP refresh 2016-08-15 19:18:38 -07:00
Carol Willing
635fd9b2c3 Fix typo 2016-08-15 18:50:26 -07:00
Min RK
afcbdd9bc4 Merge pull request #678 from vilhelmen/swagger_fix
Swagger spec conformance
2016-08-04 13:06:00 +02:00
Will Starms
80fa5418b7 Fix missing description for response 2016-08-03 16:59:14 -05:00
Will Starms
b0a09c027d Fix invalid type bool->boolean 2016-08-03 16:57:17 -05:00
Kyle Kelley
4edf59efeb Merge pull request #675 from minrk/api-info
Add /api/ and /api/info endpoints
2016-08-02 22:26:10 -05:00
Min RK
9f0dec1247 ignore generated rest-api html 2016-08-01 15:16:26 +02:00
Min RK
2c47fd4a02 Add /api/ and /api/info endpoints
/api/ is not authenticated, and just reports JupyterHub's version for now.
/api/info is admin-only, and reports more detailed info about Python, authenticators/spawners in use, etc.
2016-08-01 15:15:59 +02:00
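
A minimal sketch of querying both endpoints described above; the base URL and token are placeholders:

    import requests

    api = 'http://127.0.0.1:8081/hub/api'
    print(requests.get(api + '/').json())   # unauthenticated: reports the JupyterHub version
    r = requests.get(api + '/info',
                     headers={'Authorization': 'token ADMIN_API_TOKEN'})  # admin-only
    print(r.json())
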
Min RK
9878f1e32d Document parameters to shutdown API 2016-08-01 15:12:05 +02:00
Min RK
5c396668ff Merge pull request #671 from vilhelmen/swagger_fix
Fix timestamp type in API spec
2016-08-01 13:39:10 +02:00
Min RK
5f12f9f2c3 Merge pull request #667 from vilhelmen/master
Proxy will no longer receive Hub's SIGINT
2016-08-01 11:04:39 +02:00
Will Starms
4974775cd9 Fix timestamp type 2016-07-31 19:23:59 -05:00
Will Starms
0cb777cd0f Switch to start_new_session 2016-07-29 13:43:09 -05:00
Min RK
a4bb25a75f Merge pull request #604 from minrk/service-token
add Services to db
2016-07-29 10:32:15 +02:00
Min RK
b3f117bc59 Merge pull request #669 from vilhelmen/swagger_fix
Fix invalid license object and bad JSON pointers in API spec
2016-07-29 10:25:39 +02:00
Will Starms
499ba89f07 Correct invalid JSON pointers 2016-07-28 22:25:38 -05:00
Will Starms
05d743f725 Correct invalid license object in API spec 2016-07-28 22:19:05 -05:00
Carol Willing
a347d56623 Merge pull request #668 from willingc/toc-tweak
Minor additions to work done by @iamed18 in PR#602
2016-07-28 14:33:36 -07:00
Carol Willing
172976208e Minor additions to work done by @iamed18 in PR#602 2016-07-28 14:27:19 -07:00
Carol Willing
b6db3f59a2 Merge pull request #602 from iamed18/master
Added nginx reverse proxy example to GettingStarted.md
2016-07-28 14:09:31 -07:00
Carol Willing
4b31279fc8 Merge branch 'iamed18-master'
Closes #509
2016-07-28 14:01:29 -07:00
Edward Leonard
bfef83cefc separated configuration examples into their own document
Merge conflict resolved by @willingc
2016-07-28 13:58:20 -07:00
Edward Leonard
07d599fed2 added code-block ends
forgot them in last commit
2016-07-28 13:56:15 -07:00
Edward Leonard
0412407558 added example config with nginx reverse proxy 2016-07-28 13:56:15 -07:00
Edward Leonard
4c568b46d6 separated configuration examples into their own document 2016-07-28 14:17:51 -05:00
Michael Milligan
d92fcf5827 batchspawner URL change 2016-07-28 13:54:34 -05:00
Will Starms
36f3abbfc7 Proxy will no longer receive Hub's SIGINT #665 2016-07-28 13:04:55 -05:00
Min RK
49a45b13e6 debug installation on travis 2016-07-28 17:23:44 +02:00
Min RK
dfa13cb2c5 avoid creating duplicate users in test_api
now that we check!
2016-07-28 17:23:44 +02:00
Min RK
fd3b959771 add api_tokens.service_id column with alembic 2016-07-28 17:23:44 +02:00
Min RK
39a80edb74 async fixes in test_init_tokens 2016-07-28 17:23:44 +02:00
Min RK
2a35d1c8a6 add service API tokens
service_tokens supersedes api_tokens,
since they now map to a new services collection,
rather than regular Hub usernames.

Services in the ORM have:

- API tokens
- servers (multiple, can be 0)
- pid (0 if not managed)
2016-07-28 17:23:44 +02:00
Min RK
81350322d7 Merge pull request #660 from willingc/remove-badge
Remove requires.io badge
2016-07-26 12:02:33 +02:00
Min RK
50c2528359 Merge pull request #659 from willingc/fix-restlink
Remove link and reflow text
2016-07-26 12:02:24 +02:00
Min RK
77bac30654 Merge pull request #650 from minrk/return-ip-port
return (ip, port) from Spawner.start
2016-07-26 12:02:13 +02:00
Carol Willing
41fafc74cf Merge pull request #662 from mwmarkland/master
Fix typo regarding user's interactions with PATH
2016-07-25 17:58:48 -07:00
Matthew Markland
c6281160fa Fix typo regarding user 2016-07-25 14:55:17 -05:00
Min RK
3159b61ae7 return (ip, port) from Spawner.start
removes the need for Spawners to set db state themselves in most cases

Should be backward-compatible with warnings.
2016-07-25 16:54:15 +02:00
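
A minimal sketch, assuming only what the commit above states; the class name and launch details are hypothetical:

    from jupyterhub.spawner import Spawner

    class ExampleSpawner(Spawner):
        async def start(self):
            # ... launch the single-user server here (details omitted) ...
            ip, port = '127.0.0.1', 8888
            # returning (ip, port) lets the Hub record the server location,
            # instead of the spawner writing it into the db itself
            return ip, port
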
Carol Willing
11278ddb26 Remove requires.io badge 2016-07-25 07:45:34 -07:00
Carol Willing
e299a6c279 Remove link and reflow text 2016-07-25 07:41:28 -07:00
Min RK
22ff5f3d91 Merge pull request #635 from minrk/traitlets-4.2-singleuser
use traitlets 4.2 API in singleuser script
2016-07-25 16:29:07 +02:00
Carol Willing
a3e8bd346f Merge pull request #656 from minrk/rest-api-docs
Add REST API to docs
2016-07-25 07:09:16 -07:00
Min RK
592a084a28 set API token in single-user-spawner test 2016-07-25 15:57:43 +02:00
Min RK
c27e59b0f9 better exit message if JPY_API_TOKEN is undefined. 2016-07-25 15:27:32 +02:00
Min RK
1c9bc1b133 traitlets 4.2 API in singleuser script 2016-07-25 15:27:32 +02:00
Min RK
be4f4853cf Merge pull request #655 from willingc/doc-rest
Add links to REST API docs
2016-07-25 10:51:08 +02:00
Carol Willing
7d8895c2fb Add links to swagger docs for REST API 2016-07-23 18:47:23 -07:00
Min RK
5b8913be5b install nodejs with conda on RTD 2016-07-23 12:23:30 +02:00
Min RK
d03a1ee490 build rest-api on RTD 2016-07-23 12:05:50 +02:00
Min RK
19ae38c108 add REST API to docs
include local build, even though it's not as nice as petstore.
Due to that, link to petstore as well.
2016-07-23 12:05:25 +02:00
Carol Willing
9b71f11213 Merge pull request #651 from minrk/check-hub-ip
more informative error if single-user server can't connect to Hub for auth
2016-07-22 07:30:27 -07:00
Min RK
8fbaedf4d7 more informative error if single-user server can't connect to Hub for auth
error message points to hub_ip setting if Hub doesn't appear to be accessible at 127.0.0.1
2016-07-22 15:35:24 +02:00
Min RK
87ab07b322 Merge pull request #646 from datapolitan/fix_juptyter
fixing start_proxy() that misspelled the name of the project
2016-07-18 22:05:22 -07:00
Richard Dunks
f36a1e10e6 fixing start_proxy() that misspelled the name of the project 2016-07-17 23:23:32 -04:00
Carol Willing
5944671663 Merge pull request #644 from JamiesHQ/doctypo
Fix link
2016-07-16 11:21:55 -05:00
Jamie W
27dfd0edca fix link 2016-07-16 11:18:14 -05:00
Min RK
9dfc043352 Merge pull request #639 from ryanlovett/patch-1
Correct Spawner.start typo
2016-07-13 17:10:17 -05:00
Min RK
e8bd1520b2 Merge pull request #640 from minrk/travis-pre-for-nathaniel
install dependencies with pre
2016-07-13 17:10:03 -05:00
Min RK
a30b9976f5 install dependencies with pre
to catch bugs introduced by dependencies during prerelease
2016-07-13 16:19:15 -05:00
Ryan Lovett
954e5b3d5e Correct Spawner.start typo
As documented at https://github.com/jupyterhub/jupyterhub/blob/master/jupyterhub/spawner.py#L103
2016-07-13 10:23:16 -07:00
Min RK
7cd8aa266b Merge pull request #634 from minrk/cleanup-after-yourself
cleanup servers, api tokens after spawner shutdown
2016-07-11 14:29:20 -05:00
Min RK
d0449d136c cascade on API token delete 2016-07-11 10:44:55 -05:00
Min RK
ff9aeb70b4 try ondelete=SET NULL in foreign keys 2016-07-09 12:13:04 +02:00
Min RK
2eaecd22ba cleanup servers, api tokens after spawner shutdown
prevents growing table of unused servers and tokens
2016-07-08 16:50:43 +02:00
Carol Willing
4801d647c1 Merge pull request #627 from minrk/alembic-util
allow running alembic with `python -m jupyterhub.dbutil`
2016-07-01 17:17:54 -07:00
Carol Willing
b7e6fa3abe Merge pull request #626 from minrk/check-permissions
Add a permissions-check hint when spawn fails with PermissionError
2016-07-01 17:02:37 -07:00
Min RK
d590024c47 allow running alembic with python -m jupyterhub.dbutil
since we only have a generated alembic.ini, present a command that generates one and uses it.

enables generating new revisions with:

    python -m jupyterhub.dbutil revision -m msg
2016-07-01 14:38:31 +02:00
Min RK
f3f71c38c3 Merge pull request #620 from Fokko/fd-add-badge
Added requirements badge
2016-07-01 14:37:26 +02:00
Min RK
27125a169c Merge pull request #621 from minrk/user-redirect-handler
Add /user-redirect/ endpoint
2016-07-01 14:37:00 +02:00
Min RK
3f9205d405 Add a permissions-check hint when spawn fails with PermissionError 2016-07-01 14:36:34 +02:00
Carol Willing
96861dc2b0 Merge pull request #622 from minrk/getting-started-log
update log instructions in getting started
2016-06-24 08:52:31 -07:00
Min RK
cedaa184f1 update log instructions in getting started
use recommended output-piping instead of nonexistent log_file config
2016-06-24 17:15:50 +02:00
Fokko Driesprong
f491791081 Added requirements badge 2016-06-24 16:17:51 +02:00
Min RK
6bba1c474f Add /user-redirect/ endpoint
should avoid needing to cram user-detection / intent into other endpoints.
That functionality isn't removed,
but warnings are added indicating that /user-redirect/ should be used instead.
2016-06-24 16:08:30 +02:00
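
For illustration, a shareable link built on the new endpoint; host and path are placeholders:

    hub_base = 'https://hub.example.com'
    shared_path = 'notebooks/tutorial.ipynb'
    # each logged-in user who follows this link lands on the same path on their *own* server
    link = hub_base + '/hub/user-redirect/' + shared_path
    print(link)
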
Carol Willing
357f6799b0 Merge pull request #613 from minrk/user-redirect-560
Finish up cross-user redirects
2016-06-16 08:32:03 -07:00
Carol Willing
ce3ea270f5 Merge pull request #612 from minrk/doc-links
fix links to authenticator/spawner pages from howitworks
2016-06-16 08:28:56 -07:00
Min RK
992717adc0 support cross-user redirects when JupyterHub is on a prefix 2016-06-16 15:42:00 +02:00
Min RK
993101710f fix links to authenticator/spawner pages from howitworks 2016-06-16 15:00:19 +02:00
Matthias Bussonnier
ac6fe61804 Merge pull request #609 from minrk/only-relevant-warning
hide http warning until it's relevant
2016-06-14 09:42:24 -07:00
Min RK
37aa1a291a Merge pull request #606 from minrk/rm-hub-prefix-option
disable hub_prefix config
2016-06-13 12:08:58 +02:00
Min RK
c6294f2763 Merge pull request #607 from Carreau/fix-bad-security-anchor-link
Fix bad anchor
2016-06-13 12:08:34 +02:00
Min RK
6e9a77f55f hide http warning until it's relevant
avoids flash of invalid warning when everything is correct
2016-06-13 12:07:52 +02:00
Min RK
799b407d89 Merge pull request #608 from Carreau/add-ssl-frontend-warning
Add a warning on login if page not over ssl.
2016-06-13 12:00:02 +02:00
Matthias Bussonnier
3ddfa5f939 Add a warning on login if page not over ssl.
The --no-ssl option in the backend makes sense, but too many
deployments are still not over SSL because they underestimate / do not
understand the risks.
2016-06-12 13:24:46 -07:00
Matthias Bussonnier
5968661742 Fix bad anchor 2016-06-11 12:09:00 -07:00
Dara Adib
34592e3da5 Process single-user server redirects
Follow-up to #448.

If single-user notebook is running, it will redirect other
users to hub root with next argument, which was previously
ignored.
2016-06-10 17:15:48 +02:00
Min RK
5aea7eda96 disable hub_prefix config
it shouldn't be configurable
2016-06-10 17:14:47 +02:00
Edward Leonard
08024be1c0 added code-block ends
forgot them in last commit
2016-06-06 22:07:10 -05:00
Edward Leonard
39daff3099 added example config with nginx reverse proxy 2016-06-06 22:05:27 -05:00
Min RK
d4c0fe8679 Merge pull request #597 from minrk/single-user-service-auth
use HubAuth in single-user server
2016-06-06 13:31:40 +02:00
Min RK
c9ae45bef3 Merge pull request #599 from minrk/groups
Add groups
2016-06-04 21:22:29 +02:00
Min RK
503f21fd37 allow initializing groups from config
c.JupyterHub.load_groups creates groups and adds users to them.

It *does not* remove users from groups added previously.
2016-06-01 14:35:34 +02:00
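
A minimal config sketch of the behavior described above; group and user names are placeholders:

    # jupyterhub_config.py
    c.JupyterHub.load_groups = {
        'researchers': ['alice', 'bob'],
        'instructors': ['carol'],
    }
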
Min RK
6d106b24f4 add groups API 2016-06-01 14:04:32 +02:00
Min RK
71f47b7a70 add user groups 2016-06-01 13:47:53 +02:00
Min RK
844381e7c9 use HubAuthenticated in jupyterhub-singleuser 2016-05-31 13:20:21 +02:00
Min RK
267994b191 move singleuser script into the package 2016-05-31 12:59:38 +02:00
Min RK
cc2202c188 Merge pull request #554 from minrk/service-auth
Add HubAuth for authenticating services with JupyterHub
2016-05-31 12:58:51 +02:00
Min RK
4996a84ca0 Merge pull request #596 from minrk/hub-prefix-traitlet
fix _hub_prefix_changed signature
2016-05-31 12:58:37 +02:00
Min RK
3cefc2951c fix _hub_prefix_changed signature
wasn't updated for the new traitlets API

c/o @Milly
2016-05-31 11:33:12 +02:00
Min RK
835b4afc06 Merge pull request #593 from minrk/redirect-typo
only strip base_url if it's actually there
2016-05-31 11:32:42 +02:00
Min RK
146bef1d88 test hub-authenticated tornado handler 2016-05-30 13:32:10 +02:00
Min RK
ef9656eb8b add example service 2016-05-30 13:32:10 +02:00
Min RK
84868a6475 add login_url to HubAuth
needed for tornado redirects. Still not sure the best way to pass it to tornado app settings.
2016-05-30 13:32:10 +02:00
Min RK
9e9c6f2761 document services.auth 2016-05-30 13:32:10 +02:00
Min RK
19e8bdacfe Add HubAuth for authenticating tornado-based services with JupyterHub
- HubAuth implements request to identify users with the Hub
- HubAuthenticated is a mixin for tornado handlers
2016-05-30 13:32:10 +02:00
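
A minimal sketch of a Hub-authenticated tornado handler along the lines described above:

    from tornado import web
    from jupyterhub.services.auth import HubAuthenticated

    class WhoAmIHandler(HubAuthenticated, web.RequestHandler):
        @web.authenticated                        # redirects to the Hub login if not identified
        def get(self):
            user_model = self.get_current_user()  # user info resolved via the Hub
            self.write(user_model)
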
Min RK
c6640aa51d only strip base_url if it's actually there 2016-05-30 10:34:33 +02:00
Min RK
1514a2f2e2 ignore .cache 2016-05-27 16:12:54 +02:00
Min RK
9edb282067 Merge pull request #591 from minrk/clean-css-fix
require clean-css 3.4.13
2016-05-27 13:12:09 +02:00
Min RK
9ffe5e6187 require clean-css 3.4.13
fixes node 6 compatibility
2016-05-26 18:31:10 +02:00
Min RK
14662111a8 Merge pull request #508 from minrk/alembic
Use alembic for database migrations
2016-05-26 15:40:44 +02:00
Min RK
a7ea5774d9 test database upgrades with alembic 2016-05-26 15:32:57 +02:00
Min RK
c998458362 include old-jupyterhub.sqlite generated by
I don't like including this, but I don't know a better way to get a starting db
other than doing a complete installation of old JupyterHub in an env.

At least it's small.
2016-05-26 15:32:50 +02:00
Min RK
07ddede40c typo: write db url in alembic 2016-05-26 15:31:49 +02:00
Min RK
b8a6ac62e8 include alembic in package_data 2016-05-26 14:23:10 +02:00
Min RK
86e9a3217c add jupyterhub upgrade-db entry point
don't do automatic upgrades (yet)

I'm not sure if auto upgrades are a good idea or not.
2016-05-26 14:17:41 +02:00
Min RK
f591e6e3fb require alembic 2016-05-26 14:17:41 +02:00
Min RK
64dd1db327 add User.auth_state column
a place for storing authenticator state,
and a simple test case for alembic.
2016-05-26 14:17:41 +02:00
Min RK
b68569f61c init alembic
prepare for database migration
2016-05-26 14:17:40 +02:00
Min RK
3a52e3f4df Merge pull request #589 from minrk/test-with-base-url
Run tests with an encoded base_url
2016-05-26 14:14:18 +02:00
Min RK
05c268e190 Run tests with an encoded base_url
to ensure we get our escaping right

Mostly revealed fixes needed in tests so far, not code,
but should catch regressions.
2016-05-26 13:56:20 +02:00
Carol Willing
98937de278 Merge pull request #586 from jupyterhub/willingc-patch-1
Fix typo in README link
2016-05-25 09:36:41 -07:00
Carol Willing
ff35e3b93e Fix typo in README link
Extra 's' on jupyter-notebook removed.
2016-05-25 09:12:10 -07:00
Carol Willing
4eebc95109 Merge pull request #585 from willingc/link-me
Fix RTD links to PDF
2016-05-25 09:02:39 -07:00
Carol Willing
c708c2a3a0 Fix RTD links to PDF back to org 2016-05-25 08:46:54 -07:00
Kyle Kelley
35f8190128 Merge pull request #584 from willingc/wtd-readme
Edit README based on Write The Docs tips
2016-05-24 13:31:46 -07:00
Carol Willing
78b268ddef Edit README based on Write The Docs tips 2016-05-24 12:40:52 -07:00
Min RK
eb99060a25 Merge pull request #578 from willingc/doc-570
Add docs on chain certificates and configuration re: 570
2016-05-19 08:24:54 +02:00
Carol Willing
8e99f659f5 Fix link 2016-05-18 10:59:04 -07:00
Carol Willing
5c9e9d65b5 Add links to SSL section in docs 2016-05-18 10:52:49 -07:00
Carol Willing
3e768b7297 Add chained cert doc info from @ryanlovett and @LMtx 2016-05-18 10:34:38 -07:00
Kyle Kelley
aa2999210d Merge pull request #572 from PeterDaveHelloKitchen/image-optimize
Losslessly optimize images using Google zopflipng
2016-05-16 09:33:59 -05:00
Peter Dave Hello
be95a27597 optimize images 2016-05-16 21:46:53 +08:00
Kyle Kelley
5edcdd4fb2 Merge pull request #567 from minrk/less-271
require less-2.7.1
2016-05-12 07:12:32 -05:00
Min RK
b81586de0a require less-2.7.1
2.7.0 has compatibility problems with source maps
2016-05-12 10:32:52 +02:00
Carol Willing
e0f3e3b954 Merge pull request #559 from ozancaglayan/doc-fix-apitoken
doc: Add section about API tokens
2016-05-09 05:57:56 -07:00
Ozan Çağlayan
3037d264c3 docs: Fix last two typos 2016-05-05 15:41:38 +03:00
Ozan Çağlayan
17f1346c08 doc: address reviewers comments 2016-05-04 18:15:15 +03:00
Ozan Çağlayan
276aba9f85 doc: Add section about API tokens 2016-05-04 17:35:54 +03:00
Min RK
0ba63c42fd back to dev 2016-05-04 14:09:47 +02:00
Min RK
2985562c2f release 0.6.1 2016-05-04 14:08:39 +02:00
Min RK
754f850e95 changelog for 0.6.1 2016-05-04 14:08:39 +02:00
Min RK
dccb85d225 plural add-users ids 2016-05-04 13:57:16 +02:00
Min RK
a0e401bc87 Merge pull request #551 from minrk/proxy-error
Serve proxy error pages from the Hub
2016-05-04 12:34:10 +02:00
Min RK
c6885a2124 Merge pull request #552 from minrk/poll-and-notify
notice dead servers more often
2016-05-04 12:33:09 +02:00
Min RK
7528fb7d9b notice dead servers more often
call poll_and_notify to ensure triggering of dead-server events in a few places:

- `/hub/home` page view
- user start and stop API endpoints

This should avoid the failure to stop a server that's died uncleanly because the server hasn't noticed yet
2016-05-04 11:07:28 +02:00
Carol Willing
e7df5a299c Merge pull request #556 from minrk/shutdown-all
Add Stop All button to admin page
2016-05-03 05:58:41 -07:00
Min RK
ff997bbce5 Add Stop All button to admin page
for stopping all single-user servers at once
2016-05-03 13:25:12 +02:00
Min RK
1e21e00e1a return status from poll_and_notify
allows calling it directly
2016-04-27 14:28:23 +02:00
Min RK
77d3ee98f9 allow logo_url in template namespace to set the logo link 2016-04-27 14:06:51 +02:00
Min RK
1f861b2c90 serve proxy error pages from the Hub 2016-04-27 14:06:29 +02:00
Carol Willing
14a00e67b4 Merge pull request #550 from daradib/typo
Fix docs typo for Spawner.disable_user_config
2016-04-26 15:28:02 -07:00
Dara Adib
14f63c168d Fix docs typo for Spawner.disable_user_config 2016-04-26 11:36:48 -07:00
Kyle Kelley
e70dbb3d32 Merge pull request #549 from minrk/optional-statsd
Make statsd an optional dependency
2016-04-26 07:46:28 -05:00
Min RK
b679275a68 remove unneeded codecov.yml
codecov team config suffices
2016-04-26 13:44:29 +02:00
Min RK
0c1478a67e Make statsd an optional dependency
only import it if it's used
2016-04-26 13:37:39 +02:00
Min RK
d26e2346a2 Merge pull request #548 from minrk/jupyterhub-urls
fix a few more jupyter->jupyterhub URLs
2016-04-26 12:41:19 +02:00
Min RK
9a09c841b9 Merge pull request #547 from minrk/disable-codecov-comments
disable codecov PR comments
2016-04-26 12:41:02 +02:00
Min RK
f1d4f5a733 fix a few more jupyter->jupyterhub URLs
in README
2016-04-26 11:58:27 +02:00
Min RK
d970dd4c89 disable CodeCov PR comments
They've removed web app config, in favor of codecov.yml,
discarding our existing config,
which means coverage reports are showing up in most Jupyter PRs now.
2016-04-26 11:55:52 +02:00
Min RK
f3279bf849 Merge pull request #544 from rafael-ladislau/master
Fix multiple windows logout error
2016-04-26 11:41:53 +02:00
Rafael Ladislau
db0878a495 Fix multiple windows logout error
When you have two JupyterHub windows and log out successfully in one of them, clicking the logout button in the other window returns a 500 error.

It happened because operations were being done on a None user object.
2016-04-25 13:31:39 -04:00
Min RK
c9b1042791 back to dev 2016-04-25 14:34:15 +02:00
Min RK
cd81320d8f push tags on circleci 2016-04-25 14:25:34 +02:00
Min RK
3046971064 release 0.6 2016-04-25 14:10:29 +02:00
Min RK
30498f97c4 Merge pull request #543 from robnagler:master
Allow jupyterhub-singleuser to run on python 2 install

closes #543
2016-04-25 11:35:40 +02:00
robnagler
d9d68efa55 run with default python, which might be python 2 2016-04-25 11:31:17 +02:00
Min RK
4125dc7ad0 Merge pull request #542 from willingc/doc-addition
Add troubleshooting documentation for 500 issue
2016-04-22 15:23:28 +02:00
Carol Willing
13600894fb Changed link re: Min's tip 2016-04-22 06:11:53 -07:00
Carol Willing
1b796cd871 Add links 2016-04-22 05:37:30 -07:00
Carol Willing
e7889dc12e Add 500 error to troubleshooting docs 2016-04-22 05:36:15 -07:00
Carol Willing
244a3b1000 Merge pull request #541 from minrk/cookie-referer
check referer only if there is a valid user cookie
2016-04-22 05:03:05 -07:00
Carol Willing
05dfda469f Merge pull request #540 from minrk/0.6
Changelog for 0.6
2016-04-22 04:21:44 -07:00
Min RK
6b19ee792d check referer only if there is a valid user cookie
avoids misleading "Blocking Cross Origin..." message
when there's no logged-in user for API requests.
2016-04-22 13:16:13 +02:00
Min RK
ace38d744a Changelog for 0.6 2016-04-22 12:50:49 +02:00
Min RK
56a5ed8c87 Merge pull request #539 from minrk/unused-email
add ignored -e arg to docker login
2016-04-22 10:50:15 +02:00
Min RK
60e8a76476 add ignored -e arg to docker login
doesn't appear to be needed on more recent docker
2016-04-22 10:24:40 +02:00
Min RK
552800ceb7 add sec doc
reviewed on security list
2016-04-22 10:20:18 +02:00
Carol Willing
7dd1900f5f Merge pull request #521 from minrk/docker-onbuild
Move docker onbuild step to jupyterhub-onbuild
2016-04-21 18:26:43 -07:00
Min RK
35c261d0ed better Dockerfile comments from Carol 2016-04-21 21:32:36 +02:00
Min RK
fa34ce64b7 include dockerfiles in manifest 2016-04-21 13:50:35 +02:00
Min RK
f0504420a9 move docker onbuild to directory 2016-04-21 13:50:27 +02:00
Min RK
8666f3a46c push onbuild image to docker hub with circle-ci 2016-04-21 13:38:15 +02:00
Carol Willing
60d6019cf7 Merge pull request #534 from jupyterhub/willingc-patch-1
Fix post move links to jupyterhub org in README
2016-04-20 20:05:39 -07:00
Carol Willing
173daeeb09 Fix post move links to jupyterhub org in README 2016-04-20 20:00:18 -07:00
Carol Willing
cf988dca4d Merge pull request #531 from minrk/extra-log-file-doc
[DOC] Note that extra_log_file only affects Hub's logs
2016-04-20 06:00:01 -07:00
Min RK
ffc2faabf7 [DOC] Note that extra_log_file only affects Hub's logs
not single-user server logs, or anything else.
2016-04-20 14:45:22 +02:00
Min RK
9fed0334c8 jupyterhub path in dockerfiles 2016-04-20 14:43:25 +02:00
Min RK
8b61eb7347 install from miniconda 4.0.5
- checksum miniconda installer
- move jupyterhub src to /src/jupyterhub
2016-04-19 13:21:25 +02:00
Min RK
9cdda101c7 Move onbuild step to jupyterhub-onbuild
Removes onbuild from the jupyter/jupyterhub image,
though it remains incomplete and will not run without a config file.
2016-04-19 13:21:24 +02:00
Carol Willing
f3bbca80ea Merge pull request #528 from minrk/test-cookie-secret
exercise cookie secret loading in tests
2016-04-19 04:01:36 -07:00
Carol Willing
ce30f28449 Merge pull request #527 from minrk/polish-520
polish cookie-secret PR
2016-04-19 03:56:41 -07:00
Min RK
6cb58c17e7 exercise cookie secret loading in tests 2016-04-19 11:15:48 +02:00
Min RK
183e244490 polish cookie-secret PR
- fix a couple of typos
- use ValueError instead of assert to ensure error is raised even when Python optimizes-out asserts
2016-04-19 10:15:10 +02:00
Min RK
d5cd5115a5 Merge pull request #520 from robnagler/master
cookie_secret file must be base64
2016-04-19 10:10:58 +02:00
robnagler
bbd3b22490 incorrect log call in previous checkin 2016-04-18 16:25:03 +00:00
robnagler
e02daf01ad Fix jupyter/jupyterhub#520: exit if there are any errors parsing the file; also, fix abstraction use of getenv/os.environ (use one or the other, not both) 2016-04-18 15:35:31 +00:00
robnagler
af1e253f8a Fix jupyter/jupyterhub#522 2016-04-18 15:16:01 +00:00
Min RK
491da69994 typo 2016-04-18 12:51:35 +02:00
Min RK
0737600d3c Merge pull request #515 from proversity-org/master
Post handler for  requesting authorization tokens; authenticated via form.
2016-04-18 11:23:24 +02:00
dominic
c7f542e79e Add tests for form based token generation 2016-04-18 10:27:38 +02:00
robnagler
21213c97c6 cookie_secret file is decoded by binascii.a2b_base64, so we need to document that it must be Base64. Added better docs for other values, and included a description of the "cookie_secret" parameter as well 2016-04-17 23:35:06 +00:00
Carol Willing
b36cd92ae6 Merge pull request #517 from minrk/load-tokens
allow pre-loading API tokens from config
2016-04-15 06:49:40 -07:00
Min RK
094ac451c7 Don't allow bad tokens to create tokens in the db 2016-04-15 12:42:52 +02:00
Min RK
fa4b666693 allow pre-loading API tokens from config
This is the first small part of easing the pain of services,
which is generating the API tokens,
and used to require initializing the JupyterHub database.
2016-04-14 16:45:40 +02:00
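
A hedged config sketch of pre-loading a token as described above; the token and username are placeholders:

    # jupyterhub_config.py
    c.JupyterHub.api_tokens = {
        'generate-a-long-random-token-here': 'service-admin-user',
    }
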
Carol Willing
ce9dc2093c Merge pull request #514 from minrk/docker-readme
revisions to docker notes
2016-04-14 07:05:02 -07:00
dominic
9fd97a8d63 Keep line spacing consistent. Don't do anything if authenticator not defined. 2016-04-14 15:39:26 +02:00
Min RK
2261a0e21d revisions to docker notes
- link to Docker docs on volumes
- name container `jupyterhub`
- wording
2016-04-14 15:35:58 +02:00
dominic
a7a1c32a03 Add post handler for form based auth 2016-04-14 09:32:42 +02:00
Matthias Bussonnier
dfd01bbf5f Merge pull request #503 from minrk/disable-npm-progress
disable npm progress when installing
2016-04-08 17:36:02 -07:00
Min RK
b11a5be781 disable npm progress when installing
apparently faster, but should also fix unicode errors
2016-04-08 16:35:23 -07:00
Kyle Kelley
8b6950055b Merge pull request #501 from minrk/set-login-at-root-redirect
set login cookie when redirecting
2016-04-06 12:35:50 -05:00
Min RK
e8a298be00 set login cookie when redirecting
should avoid one possible redirect-loop case when the single-user cookie is invalid, but the Hub cookie is valid.
2016-04-06 10:18:23 -07:00
Min RK
69f24acac2 Merge pull request #499 from yuvipanda/statsd
Emit metrics via statsd
2016-04-05 09:23:20 -07:00
YuviPanda
9ffebd0c5e Send metrics about various redirects from User spawning 2016-04-01 14:05:02 -07:00
YuviPanda
2dd3d3c448 Send timing info about spawner success / failure 2016-04-01 10:20:37 -07:00
YuviPanda
4644e7019e Send metrics about running and active users
Uses the standard user last-updated activity callback
2016-04-01 10:20:37 -07:00
YuviPanda
5a15d7a219 Actually start the timer 2016-04-01 10:20:37 -07:00
YuviPanda
788129da12 Send metrics for login and logout actions 2016-04-01 10:20:37 -07:00
YuviPanda
cac5175c9b Send CSP metrics to statsd 2016-04-01 10:20:37 -07:00
YuviPanda
80556360ac Add statsd to the base request handler 2016-04-01 10:20:37 -07:00
YuviPanda
3dca0df55f Add statsd to the base JupyterHub app
Not actually emitting any metrics yet
2016-04-01 10:20:37 -07:00
Min RK
62a5e9dbce Merge pull request #497 from yuvipanda/env-callable
Allow environment config values to be callable
2016-04-01 09:57:52 -07:00
YuviPanda
45fcdc75c0 Add docs about callables in environment configurable 2016-03-31 23:44:08 -07:00
Min RK
f1bdf6247a Merge pull request #500 from yuvipanda/fix-url-encoding-4
Use User.url instead of constructing it manually
2016-03-31 22:15:58 -07:00
YuviPanda
80932a51f4 Use User.url instead of constructing it manually
This fixes issues with URL encoding when redirecting users to
their own notebook instances
2016-03-31 17:28:33 -07:00
Min RK
c8774c44d4 Merge pull request #498 from yuvipanda/statsd-configurable
Mark statsd_prefix as configurable as well
2016-03-31 15:08:10 -07:00
YuviPanda
bf2629450c Mark statsd_prefix as configurable as well 2016-03-31 13:46:37 -07:00
YuviPanda
705ff78715 Allow environment config values to be callable
This allows deployments to configure environment variables
that need to be different for each user / container (such as
credentials for various services, etc).
2016-03-31 11:52:53 -07:00
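A hedged sketch of the pattern this enables, assuming the callable is invoked with the spawner instance as described in the docs added in this PR (the lookup helper is hypothetical):

```python
# jupyterhub_config.py
def per_user_credentials(spawner):
    # hypothetical lookup: return a secret specific to this spawner's user
    return 'secret-for-' + spawner.user.name

c.Spawner.environment = {
    'MY_SERVICE_TOKEN': per_user_credentials,  # called at spawn time
    'DEPLOYMENT_TIER': 'production',           # plain strings are passed through as-is
}
```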
Min RK
a13119a79f Merge pull request #496 from yuvipanda/statsd
Allow specifying statsd host/port/prefix info
2016-03-31 11:18:09 -07:00
YuviPanda
6932719e4e Convert port into string (so that .join works) 2016-03-31 10:32:49 -07:00
YuviPanda
68a750fc7a Use 'Integer' rather than 'Int' for config traitlet 2016-03-30 19:04:57 -07:00
YuviPanda
c6d05d0840 Allow specifying statsd host/port/prefix info
Currently only passes it through to CHP. This is needed
for the cases when JupyterHub spawns and maintains CHP.
2016-03-30 18:59:32 -07:00
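The settings passed through to CHP, as a minimal sketch (the host and prefix are placeholders; no metrics are emitted unless a host is set):

```python
# jupyterhub_config.py
c.JupyterHub.statsd_host = 'statsd.example.com'
c.JupyterHub.statsd_port = 8125            # default statsd UDP port
c.JupyterHub.statsd_prefix = 'jupyterhub'  # prefix prepended to emitted metric names
```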
Carol Willing
2bbfd75f4d Merge pull request #495 from Carreau/add-import
Import warnings, used on line 215, not imported.
2016-03-29 15:52:39 -07:00
Matthias Bussonnier
26f0e8ea5c Import warnings, used on line 215, not imported. 2016-03-29 15:36:22 -07:00
Carol Willing
552e5caa11 Merge pull request #494 from jupyter/Codecov-badge
Add codecov Badge.
2016-03-29 15:23:54 -07:00
Matthias Bussonnier
7753187e51 Add codecov Badge. 2016-03-29 15:04:17 -07:00
Carol Willing
bddadc7522 Merge pull request #493 from minrk/traitlets-4-1-again
use traitlets 4.1 APIs
2016-03-29 14:51:51 -07:00
Min RK
195eea55f3 log.warning 2016-03-29 09:22:32 -07:00
Min RK
7a2794af7c use traitlets-4.1 observe/default decorators 2016-03-27 10:41:36 -07:00
Min RK
fa48620076 use traitlets-4.1 .tag(config=True) API 2016-03-27 10:29:36 -07:00
Min RK
e4cfe01c4a require traitlets 4.1 2016-03-27 10:21:41 -07:00
Carol Willing
b35e506220 Merge pull request #479 from minrk/config-env
Make Spawner.env configurable
2016-03-24 07:59:44 -07:00
Carol Willing
dd3ed1bf75 Merge pull request #490 from minrk/disable-pam-session
Allow disabling PAM sessions
2016-03-24 07:57:00 -07:00
Min RK
40368b8f55 Allow disabling PAM sessions
it's often buggy and rarely necessary,
so allow it to be disabled when it's causing problems.

It's still on by default for backward-compatibility,
though maybe it shouldn't be.
2016-03-23 23:24:54 +01:00
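A one-line sketch for deployments where PAM sessions cause trouble (assuming the `PAMAuthenticator.open_sessions` switch this PR introduces):

```python
# jupyterhub_config.py
# authenticate with PAM but skip opening/closing PAM sessions around spawn
c.PAMAuthenticator.open_sessions = False
```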
Min RK
d0f1520642 Add Spawner.environment configurable
instead of making existing Spawner.env configurable

Spawner.env is deprecated
2016-03-22 13:48:26 +01:00
Carol Willing
28c8265c3d Merge pull request #487 from minrk/fix-failed-login-for-none
Fix 'failed login for None' message
2016-03-21 04:51:20 -07:00
Min RK
1d1a8ba78b Fix 'failed login for None' message
on failed login, get username from form data, not the guaranteed-None return value of authenticate
2016-03-21 12:01:31 +01:00
Min RK
a1c764593c travis_retry tests
to hide intermittent failures and enable laziness
2016-03-15 10:37:03 +01:00
Min RK
06902afa2d Merge pull request #481 from willingc/issue-417
Add additional documentation on --no-SSL option
2016-03-15 10:12:18 +01:00
Min RK
6d46f10cfa Merge pull request #480 from willingc/issue-458
Update the configuration section of docs
2016-03-15 10:11:54 +01:00
Carol Willing
b71f34eb3c Fix transposed version number 2016-03-14 16:57:12 -07:00
Carol Willing
11df935f34 Fix awkward wording 2016-03-14 16:54:04 -07:00
Carol Willing
19b6468889 Add no-SSL option to docs 2016-03-14 16:48:49 -07:00
Carol Willing
d2dddd6c82 Update the configuration section of docs, add example 2016-03-14 16:21:24 -07:00
Min RK
5d140fb889 Merge pull request #478 from willingc/readme-docker
Update README re: docker image contents
2016-03-11 22:24:27 +01:00
Matthias Bussonnier
2bf8683905 Merge pull request #477 from willingc/doc-sphinx
Use latest version of Sphinx to fix RTD "Edit on GitHub"
2016-03-11 10:46:52 -08:00
Carol Willing
2dba7f4f61 Update README re: docker image contents 2016-03-11 10:05:13 -08:00
Carol Willing
2820ba319f Update sphinx version for md on rtd 2016-03-11 07:55:49 -08:00
Min RK
be7a627c11 Make Spawner.env configurable
moves `_env_default` logic to `get_env`,
so that `Spawner.env` can be safely configurable
2016-03-11 12:34:49 +01:00
Matthias Bussonnier
2cb1618937 Merge pull request #467 from minrk/add-user-more-often
Call `add_user` more often
2016-03-10 14:45:56 -08:00
Min RK
c9e0c5fe04 Merge pull request #474 from minrk/user.url
allow user.url to be accessed without the server running
2016-03-10 10:28:11 +01:00
Min RK
922956def2 allow user.url to be accessed without the server running
Reduces the number of different ways we need to build the same URLs.
2016-03-09 09:30:50 +01:00
Min RK
c6c699ea89 Merge pull request #472 from yuvipanda/fix-user-encoding
Use encoded URL when redirecting user notebooks
2016-03-09 09:20:43 +01:00
YuviPanda
e0219d0363 Use encoded URL when redirecting user notebooks
Otherwise it breaks for usernames that contain URL-unsafe
characters.
2016-03-08 18:41:35 -08:00
Matthias Bussonnier
f7dab558e4 Merge pull request #468 from minrk/clean-the-pool
set default pool_recycle if using mysql
2016-03-08 11:28:59 -08:00
Min RK
74e558dad2 set default pool_recycle if using mysql 2016-03-08 10:58:18 +01:00
Min RK
96269fac0f Call add_user more often
- Ensures add_user is called as part of startup *for all users*.
  This was previously only true for users not already in the db.
- Normalize usernames in whitelist and admin sets
- Call add_user on new users logged in when there is no whitelist.
2016-03-08 10:49:02 +01:00
Min RK
a0501c6ee4 set patch version to 0 on release 2016-03-08 09:55:44 +01:00
Min RK
ea2ed75ab2 back to dev 2016-03-08 09:00:41 +01:00
Min RK
fc6435825c release 0.5.0 2016-03-08 08:57:33 +01:00
Min RK
b3ab48eb68 Merge pull request #463 from minrk/moar-coverage
Increase some test coverage
2016-03-07 17:13:20 +01:00
Carol Willing
a212151c09 Merge pull request #461 from minrk/0.5
0.5 changelog
2016-03-07 08:07:19 -08:00
Min RK
67ccfc7eb7 increase some test coverage 2016-03-07 16:13:57 +01:00
Min RK
9af103c673 fixes for handling failed chdir in spawners 2016-03-07 15:12:30 +01:00
Min RK
82643adfb6 stop_pending also counts as not running 2016-03-07 14:27:40 +01:00
Min RK
74df94d15a 0.5 changelog 2016-03-07 13:54:40 +01:00
Min RK
da1b9bdd80 Merge pull request #460 from yuvipanda/mysql-fix
Add lengths to all Unicode() columns
2016-03-07 10:36:17 +01:00
Min RK
18675ef6df Merge pull request #453 from minrk/timeout-in-is-up
use the same connection check everywhere
2016-03-07 10:35:12 +01:00
YuviPanda
bf9dea5522 Add lengths to all Unicode() columns
- Otherwise does not work with MySQL
- Change JSONDict to be TEXT (Unbounded) rather than VARCHAR.
  This makes most sense, since you can't index these anyway.
- The 'ip' field in Server is set to 255, since that is the
  max allowed length of DNS entries.
- Most of the remaining Unicode columns get generously high
  limits that most people should never run into
  (famous last words).
2016-03-06 18:26:25 -08:00
Min RK
62e30c1d79 Merge pull request #457 from shreddd/default_url
Enable default_url to pass in to notebook server
2016-03-06 10:33:52 +01:00
shreddd
1316196542 Update spawner.py
typo
2016-03-05 12:24:39 -08:00
Shreyas Cholia
1a377bd03a comment on default_url being used with notebook_dir 2016-03-05 12:16:10 -08:00
Shreyas Cholia
66a99ce881 Add support for default_url 2016-03-05 12:05:58 -08:00
shreddd
481debcb80 Merge pull request #1 from jupyter/master
sync master
2016-03-05 12:04:09 -08:00
Carol Willing
03c25b5cac Merge pull request #452 from minrk/redundant-use-subdomain
remove redundant use_subdomains
2016-03-05 11:52:43 -08:00
Carol Willing
26c060d2c5 Merge pull request #456 from willingc/readme-clarify
Add minor clarification to README
2016-03-05 10:57:13 -08:00
Carol Willing
7ff42f9b55 Add @betatim's suggested wording 2016-03-05 10:43:45 -08:00
Carol Willing
a35d8a6262 Add minor clarification 2016-03-05 10:14:44 -08:00
Carol Willing
8f39e1f8f9 Merge pull request #455 from betatim/readme-fix
README uses two different names for docker container
2016-03-05 10:08:34 -08:00
Tim Head
ff19b799c4 container -> cont for consistency 2016-03-05 09:19:15 +01:00
Kyle Kelley
e547949aee Merge pull request #433 from minrk/disable-user-config
allow disabling user configuration of single-user servers
2016-03-04 09:57:45 -06:00
Min RK
31be00b49f failure to connect may be a timeout 2016-03-04 16:28:57 +01:00
Min RK
4533d96002 use the same connection check everywhere
avoids inconsistencies in error handling
2016-03-04 16:28:57 +01:00
Min RK
7f89f1a2a0 expose disable_user_config as Spawner.disable_user_config 2016-03-04 14:41:40 +01:00
Min RK
aed29e1db8 Simplify filter to exclude config in the home directory 2016-03-04 11:43:45 +01:00
Min RK
49bee25820 allow disabling user configuration of single-user servers 2016-03-04 11:43:45 +01:00
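A minimal sketch of turning this on:

```python
# jupyterhub_config.py
# single-user servers skip config files in users' home directories
c.Spawner.disable_user_config = True
```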
Min RK
838c8eb057 Merge pull request #448 from daradib/redirect
Redirect requests to logged in user
2016-03-04 11:15:56 +01:00
Min RK
be5860822d remove redundant use_subdomains
non-empty subdomain_host is enough
2016-03-04 11:11:41 +01:00
Dara Adib
5a10d304c9 Redirect user to login page when not logged in 2016-03-02 16:55:33 -08:00
Dara Adib
fdd3746f54 Add test for user redirect 2016-03-02 16:18:02 -08:00
Dara Adib
4d55a48a79 Redirect requests to logged in user
If a user, alice, requests /user/bob/notebooks/mynotebook.ipynb,
redirect her to /user/alice/notebooks/mynotebook.ipynb.
Currently, such requests get stuck in a redirect loop because
the request will be redirected to login page with a next parameter
that when followed is again redirected.

When notebook_dir is consistent across users, this will allow
users to share notebook URLs. Fixes #424.
2016-03-02 16:15:50 -08:00
Min RK
b2ece48239 reverse arguments in check_routes 2016-03-01 19:42:55 +01:00
Kyle Kelley
6375ba30b7 Merge pull request #445 from minrk/check-routes-pending
Don't add users with spawn_pending to the proxy
2016-03-01 09:19:42 -06:00
Min RK
f565f8ac53 Don't add users with spawn_pending to the proxy
check_routes checks for missing routes for running users.
This is meant for when the proxy has been relaunched outside the Hub.

If spawners are slow to start, it's possible for check_routes to fire in the middle of spawning,
triggering addition of the user's server (which has no defined location yet) to the proxy before it's up.
If the spawning fails, the route will remain indefinitely (because it never should have been added in the first place), and the user will see 503 until their server is launched manually again.

Checking `spawn_pending` in user.running prevents this.
2016-03-01 15:18:51 +01:00
Kyle Kelley
5ec05822f1 Merge pull request #436 from minrk/subdomains
allow running single-user servers on subdomains
2016-02-28 09:49:45 -06:00
Min RK
335b47d7c1 include protocol in subdomain_host
makes everything easier, and tests are passing with and without subdomains (yay!)
2016-02-28 11:12:41 +01:00
Min RK
f922561003 Tests are passing with subdomains 2016-02-26 17:32:55 +01:00
Min RK
79df83f0d3 Allow getting users by name 2016-02-26 17:32:55 +01:00
Min RK
29416463ff proxy needs user dict, which has proxy path
this won't be needed if/when I make a schema change, where domain is included in the Server table.
2016-02-26 17:32:55 +01:00
Min RK
dd2e1ef758 turn off subdomains by default 2016-02-26 17:32:55 +01:00
Min RK
a9b8542ec7 pass hub's host to single-user servers via hub_host 2016-02-26 17:32:54 +01:00
Min RK
a4ae2ec2d8 consolidate cookie setting in _set_user_cookie 2016-02-26 17:32:54 +01:00
Min RK
b54bfad8c2 [WIP]: allow running single-user servers on subdomains
relies on CHP's host-based routing (a feature I didn't add!)

requires wildcard DNS and wildcard SSL for a proper setup

still lots to work out and clean up in terms of cookies and where to use host, domain, path, but it works locally.
2016-02-26 17:32:54 +01:00
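A hedged config sketch of the subdomain mode described here (the hostname is a placeholder; wildcard DNS and a wildcard SSL certificate are assumed to be set up separately):

```python
# jupyterhub_config.py
# route each user's server to its own subdomain, e.g. https://alice.hub.example.com,
# via CHP's host-based routing; include the protocol in the value
c.JupyterHub.subdomain_host = 'https://hub.example.com'
```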
Min RK
724bf7c4ce Merge pull request #441 from jupyter/revert-440-master
Revert "Do not consider `@` character url-safe"
2016-02-26 09:06:46 +01:00
Kyle Kelley
fccc954fb4 Merge pull request #442 from minrk/never-poll-before-start-is-done
avoid calling Spawner.poll during Spawner.start
2016-02-25 08:20:35 -06:00
Kyle Kelley
74385a6906 Merge pull request #443 from minrk/catch-options-from-form
catch exceptions in options_from_form
2016-02-25 08:19:16 -06:00
Min RK
dd66fe63c0 catch exceptions in options_from_form
Allows form validation to be implemented in options_from_form, as well as start.
2016-02-25 12:02:23 +01:00
Min RK
e74934cb17 avoid calling Spawner.poll during Spawner.start
moves `spawn_pending` flag to only around start, not the HTTP wait.

Some Spawners may not know how to poll until start has finished (DockerSpawner).
Let's not require that they do.
2016-02-25 10:13:51 +01:00
Min RK
450281a90a Revert "Do not consider @ character url-safe" 2016-02-25 09:04:25 +01:00
Kyle Kelley
6e7fc0574e Merge pull request #440 from ResearchComputing/master
Do not consider `@` character url-safe
2016-02-24 23:58:45 -06:00
Jonathon Anderson
fc49aac02b Do not consider `@` character url-safe
Usernames with an `@`-separated domain component
break JupyterHub when the server expects to see query
strings containing a literal `@`, while browsers and other
clients send `%40`.
2016-02-24 16:48:23 -07:00
Kyle Kelley
097d883905 Merge pull request #435 from minrk/debug-no-server
add debug logging for adding users with no running server
2016-02-20 06:04:12 -08:00
Min RK
cb55118f70 add debug logging for adding users with no running server
It has been reported that, in check_routes, users without a running server are being added.

So something is wrong, either in sqlalchemy or in my understanding of what it does (likely the latter),
because a filter for users with a non-None server is returning at least one result whose server is None.
2016-02-20 14:22:50 +01:00
Carol Willing
2a3c87945e Merge pull request #434 from rgbkrk/ssl
Don't let the default include `--no-ssl`.
2016-02-18 16:48:06 -08:00
Kyle Kelley
2b2aacedc6 Don't let the default include --no-ssl. 2016-02-18 16:27:53 -08:00
Kyle Kelley
8ebec52827 Merge pull request #431 from ObiWahn/master
Update README.md
2016-02-18 16:25:56 -08:00
Jan Christoph Uhde
1642cc30c8 fix: run vs exec and split sentence 2016-02-19 00:13:02 +01:00
Kyle Kelley
1645d8f0c0 Merge pull request #432 from minrk/no-port-retries
disable port_retries in single-user server
2016-02-18 06:40:51 -08:00
Min RK
8d390819a1 disable port_retries in single-user server
since Spawners won't notice that the server has started somewhere other than where it was asked to
2016-02-18 09:03:45 +01:00
Jan Christoph Uhde
c7dd18bb03 Update README.md 2016-02-16 22:58:27 +01:00
Min RK
84b7de4d21 set x bit on jupyterhub-singleuser 2016-02-15 21:50:55 +01:00
Carol Willing
161df53143 Merge pull request #426 from takluyver/docs-intro
Add overview to landing page
2016-02-13 11:12:35 -08:00
Thomas Kluyver
1cfd6cf12e Fix grammaros 2016-02-13 18:18:23 +00:00
Thomas Kluyver
d40dcc35fb Reword intro 2016-02-13 16:44:41 +00:00
Thomas Kluyver
a570e95602 Add my overview to intro
Closes gh-425
2016-02-13 15:29:08 +00:00
Thomas Kluyver
e4e43521ee Close code block 2016-02-13 15:28:37 +00:00
Min RK
1b2c21a99c Merge pull request #423 from minrk/custom-logo
allow overriding logo
2016-02-11 15:03:02 +01:00
Min RK
e28eda6386 exercise some static file handlers in tests 2016-02-09 15:38:44 +01:00
Min RK
39c171cce7 allow overriding logo
by specifying JupyterHub.logo_file

also ensures single-user server always has the same logo image as the Hub
2016-02-09 15:38:34 +01:00
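A minimal sketch (the path is a placeholder):

```python
# jupyterhub_config.py
# custom logo served by the Hub and reused by single-user servers
c.JupyterHub.logo_file = '/srv/jupyterhub/custom-logo.png'
```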
Min RK
c81cefd768 Merge pull request #372 from minrk/require-notebook-4
drop support for single-user server from IPython 3.x
2016-02-09 14:42:12 +01:00
Min RK
325f137265 Merge pull request #421 from Fokko/add-docker-label
Added label to dockerfile for referencing
2016-02-09 14:41:48 +01:00
Fokko Driesprong
1ae795df18 Changed domain of the label to .org 2016-02-09 14:16:50 +01:00
Fokko Driesprong
2aacd5e28b Added label to dockerfile for referencing 2016-02-08 17:16:20 +01:00
Kyle Kelley
6e1425e2c0 Merge pull request #417 from minrk/require-confirm-insecure
require confirmation for JupyterHub to run without SSL
2016-02-05 19:27:37 -06:00
Carol Willing
010db6ce72 Merge pull request #416 from willingc/doc-warn
Add more prominent message for https
2016-02-04 14:21:52 -08:00
Min RK
ce8d782220 no-ssl in changelog 2016-02-04 23:00:54 +01:00
Min RK
90c2b23fc0 require confirmation for JupyterHub to run without SSL
ensures folks deploying JupyterHub on HTTP have been told what's up.
2016-02-04 23:00:54 +01:00
Carol Willing
32685aeac1 Add more prominent message for https 2016-02-04 13:42:13 -08:00
Min RK
01c5608104 update version requirements in README 2016-02-04 22:41:18 +01:00
Min RK
a35f6298f0 drop support for single-user server from IPython 3.x 2016-02-04 22:40:44 +01:00
Min RK
8955d6aed4 Merge pull request #411 from minrk/one-two-seven
use 127.0.0.1 instead of localhost
2016-02-04 20:37:09 +01:00
Min RK
cafbf8b990 back to dev 2016-02-03 21:05:48 +01:00
Min RK
7837a9cf68 release 0.4.1 2016-02-03 21:04:32 +01:00
Min RK
65a019e05b Merge pull request #413 from minrk/login_url
Restore /login handler
2016-02-03 21:00:03 +01:00
Min RK
f2014c5687 note that login/logout should always be registered 2016-02-03 20:54:01 +01:00
Min RK
109c315336 changelog for 0.4.1 2016-02-03 16:55:25 +01:00
Min RK
941fc7e627 restore /login page
erroneously removed in 0.4
2016-02-03 16:52:43 +01:00
Min RK
f626d2f6e5 use 127.0.0.1 instead of localhost
localhost can cause some issues on badly behaved or misconfigured systems,
and 127 seems simpler.
2016-02-03 10:30:09 +01:00
Min RK
80215f6b3c Merge pull request #407 from willingc/doc-proxy
Add doc details for #406
2016-02-02 09:06:35 +01:00
Carol Willing
84916062f0 Edit per @minrk and added troubleshooting 2016-02-01 14:17:14 -08:00
Carol Willing
641154bf06 Add doc details for #406 2016-02-01 11:47:08 -08:00
Min RK
14b0dbde0e Merge pull request #405 from willingc/doc-link
Update documentation link to source code
2016-02-01 19:58:08 +01:00
Carol Willing
cd85766441 Update link to source code 2016-02-01 08:42:49 -08:00
Min RK
6c072bdb3d nonempty long_description
avoids dumping README.md garbage onto PyPI
2016-02-01 11:05:58 +01:00
Min RK
35f080458e Upload with twine 2016-02-01 10:41:51 +01:00
Min RK
feac4f6bc4 Changelog for 0.4 2016-02-01 10:41:51 +01:00
Min RK
1bbabbb989 back to dev 2016-02-01 10:37:46 +01:00
Min RK
ad5624c7ce release 0.4.0 2016-02-01 10:37:16 +01:00
Min RK
a7d6c37d26 Merge pull request #400 from willingc/juphub-spawner
Edit tone and grammar in Spawners document
2016-01-29 21:57:51 +01:00
Min RK
b8d9954c28 Merge pull request #402 from mistercrunch/fix_custom_html
Fixing the custom_html feature in the login form
2016-01-29 21:02:45 +01:00
Maxime Beauchemin
927a341764 Fixing the custom_html feature in the login form 2016-01-28 11:25:19 -08:00
Carol Willing
83d092b0ad Minor edit 2016-01-27 22:57:42 -08:00
Carol Willing
95f7889803 Edit the custom spawners doc 2016-01-27 16:48:59 -08:00
Matthias Bussonnier
ceacd72d63 Merge pull request #399 from willingc/newci-badge
Add circleci badge with status only API token
2016-01-25 17:02:29 -08:00
Carol Willing
49c0fa4f08 Add circleci badge with status only API token 2016-01-25 10:56:55 -08:00
Min RK
223318bfff Merge pull request #396 from minrk/test-fixes
If spawner fails to start, show error page
2016-01-25 14:55:07 +01:00
Min RK
9c3f953682 mock pam close session
bug revealed by change in slow_spawn test
2016-01-25 14:29:20 +01:00
Min RK
cc4c65bd0b fix possible loss of port info due to mixed db sessions 2016-01-25 14:28:54 +01:00
Min RK
c4fad21850 If spawner fails to start, show error page
instead of slow-spawner page
2016-01-25 13:32:54 +01:00
Min RK
665907afd3 remove login from default handlers
rely on getting it from LoginHandler
2016-01-25 13:21:21 +01:00
Min RK
8a4305a15c s/chose/choose/ typo 2016-01-25 12:57:19 +01:00
Min RK
7e59148168 ignore node_modules 2016-01-25 12:56:51 +01:00
Min RK
98b44d59c4 Merge pull request #395 from minrk/docker-test
Test docker builds on CircleCI
2016-01-25 12:55:01 +01:00
Min RK
aac357b715 Merge pull request #392 from evanlinde/master
username parameter for notebook_dir
2016-01-25 12:50:47 +01:00
Min RK
2632d03dc2 Merge pull request #391 from minrk/form-error
show error messages on spawn form
2016-01-25 12:46:04 +01:00
Min RK
babb2cf908 test docker builds on circle-ci 2016-01-25 12:32:32 +01:00
Min RK
6a3d790f49 install locale in Dockerfile
and do a little cleanup of temporary installation files
2016-01-25 12:32:32 +01:00
Min RK
9cae91aeb0 Merge pull request #393 from willingc/fix-mdlink
Use relative html link instead of local md
2016-01-22 22:52:01 +01:00
Carol Willing
84f8f8f322 Use relative html link instead of local md 2016-01-22 08:06:30 -08:00
evanlinde
bc4973fb43 username parameter for notebook_dir
Allow specifying user-specific notebook directories outside of the user's home folder
2016-01-22 09:47:48 -06:00
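A hedged sketch of the template this adds, assuming `{username}` is expanded to each user's name when the server is spawned:

```python
# jupyterhub_config.py
# per-user notebook directories outside the users' home folders
c.Spawner.notebook_dir = '/srv/notebooks/{username}'
```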
Min RK
1a21e822b6 Merge pull request #389 from willingc/fix-docstring
Fix docstrings *ix -> Linux/UNIX to prevent Sphinx build warnings

closes #389
2016-01-22 16:05:31 +01:00
Carol Willing
d437a8f06a Fix docstrings *ix -> Linux/UNIX 2016-01-22 16:05:15 +01:00
Min RK
0555ee44e7 turn on jinja autoescape
now that we are putting user content on the page
2016-01-22 16:02:51 +01:00
Min RK
ef40bd230e Show error messages on spawn form
when spawning fails

instead of 500
2016-01-22 16:02:11 +01:00
Min RK
818510c2ca Merge pull request #381 from minrk/rtd-yml
add preliminary API docs
2016-01-22 12:03:57 +01:00
Min RK
caaab40944 Merge pull request #386 from minrk/dockerfile-jessie
Base Dockerfile on debian:jessie
2016-01-21 13:05:35 +01:00
Min RK
0fb80d43b6 Merge pull request #387 from minrk/single-user-script
make jupyterhub-singleuser a script
2016-01-21 12:55:21 +01:00
Min RK
8146af7240 make jupyterhub-singleuser a script
instead of a module in the package

makes it easier to do `/path/to/python $(which jupyterhub-singleuser)`
2016-01-20 15:41:54 +01:00
Min RK
b9df681115 Merge pull request #353 from minrk/try-localhost
Ensure that we can bind and connect to localhost
2016-01-20 15:37:42 +01:00
Min RK
40a3ebde84 Merge pull request #354 from zoltan-fedor/master
IPv6 ready /etc/hosts file without IPv6 enabled causing localhost issue
2016-01-20 15:37:32 +01:00
Min RK
fbf3b45d52 needs sphinx 1.3 2016-01-20 15:36:26 +01:00
Min RK
eb0a38c136 add preliminary API docs 2016-01-20 15:36:24 +01:00
Min RK
37d42a336f put repo on path
allows autodoc to import jupyterhub without installing it
2016-01-20 15:35:49 +01:00
Min RK
51a04258d1 build on readthedocs 2016-01-20 15:35:49 +01:00
Min RK
1a4226419f Base Dockerfile on debian:jessie
rather than jupyter/notebook

and use conda to get Python 3.5

No longer includes single-user server dependencies
2016-01-20 14:33:39 +01:00
Min RK
ce4cc62c05 Merge pull request #383 from minrk/start-new-session
use start_new_session to detach single-user servers
2016-01-15 17:55:37 +01:00
Min RK
614a0806f5 use start_new_session to detach single-user servers
instead of setpgrp, which causes various problems
2016-01-15 14:21:45 +01:00
Min RK
ff2fef1617 Merge pull request #373 from minrk/normalize-username
Username normalization and validation
2016-01-14 10:16:45 +01:00
Carol Willing
2e6f08268b Merge pull request #380 from minrk/installation
move install commands around a bit
2016-01-13 06:42:36 -08:00
Min RK
ff4019128a move install commands around a bit
npm/less notes are only relevant for dev installs
2016-01-13 15:10:22 +01:00
Min RK
6fd18840a7 Merge pull request #378 from willingc/readme-rtd
Update project README to reflect docs on RTD
2016-01-13 15:07:21 +01:00
Min RK
108d710dcb doc: username normalization and validation 2016-01-13 14:02:51 +01:00
Min RK
aa93384f47 Include system-user creation error message in API reply
when system-user creation fails
2016-01-13 14:02:50 +01:00
Min RK
9441fa37c5 validate usernames
via Authenticator.validate_username

base class configurable with Authenticator.username_pattern
2016-01-13 14:02:50 +01:00
Min RK
beb2dae6ce add username_map 2016-01-13 14:02:50 +01:00
Min RK
887fdaf9d3 add username normalization
Handlers call `get_authenticated_user`, which in turn calls

- authenticate
- normalize_username
- check_whitelist

get_authenticated_user shouldn't need to be overridden.

Normalization can be handled via overriding normalize_username.
2016-01-13 14:02:50 +01:00
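A hedged sketch tying the pieces of this series together (`username_map`, `username_pattern`, and a `normalize_username` override); the authenticator subclass and the values shown are illustrative:

```python
# jupyterhub_config.py
from jupyterhub.auth import PAMAuthenticator

class NormalizingAuthenticator(PAMAuthenticator):
    def normalize_username(self, username):
        # illustrative: strip an email-style domain before the base normalization
        username = username.split('@')[0]
        return super().normalize_username(username)

c.JupyterHub.authenticator_class = NormalizingAuthenticator
c.Authenticator.username_map = {'service-alias': 'localname'}  # applied by the base normalize_username
c.Authenticator.username_pattern = r'[a-z][a-z0-9_-]*'         # usernames must match or login is rejected
```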
Min RK
8a5a85a489 Merge pull request #377 from minrk/swagger-spec
add swagger spec for REST API
2016-01-13 13:08:01 +01:00
Carol Willing
2cc49d317b Add more wording tweaks 2016-01-12 13:55:10 -08:00
Carol Willing
4afa358201 Add some minor formatting 2016-01-12 13:49:04 -08:00
Carol Willing
50a58e5e81 Update README after docs move to RTD 2016-01-12 13:44:08 -08:00
Min RK
479b40d840 add swagger spec for REST API 2016-01-12 16:32:50 +01:00
Min RK
931c2d6f8a Merge pull request #368 from willingc/doc-wip
Sphinx documentation that converts markdown using recommonmark
2016-01-12 16:31:55 +01:00
Min RK
f5746d0765 Merge pull request #375 from betatim/form-file-upload
Handle file upload in spawner form
2016-01-09 23:25:52 +01:00
Tim Head
a59f57e095 Handle file upload in spawner form
Allow files to be uploaded in the spawner form.
2016-01-09 13:53:45 +01:00
Min RK
47549e752d Merge pull request #371 from minrk/delete-user
delete users via UserDict API
2016-01-08 10:32:10 +01:00
Min RK
4534bea86e delete users via UserDict API
avoids reusing user IDs when user creation fails
2016-01-06 15:14:28 +01:00
Carol Willing
2815f72250 Change mocking of slowspawner to match nospawner 2016-01-05 19:45:49 -08:00
Carol Willing
131b695fbb Correct some links 2016-01-05 19:45:49 -08:00
Carol Willing
1bc0d208d3 Move image files 2016-01-05 19:45:49 -08:00
Carol Willing
46a9e8b1c3 Update doc requirements 2016-01-05 19:45:49 -08:00
Carol Willing
04cb5fe503 Add recommonmark parser for markdown 2016-01-05 19:45:49 -08:00
Carol Willing
0ad110f7de Add parsers 2016-01-05 19:45:49 -08:00
Carol Willing
0c5c3eb8b1 Add recommonmark 2016-01-05 19:45:49 -08:00
Carol Willing
bd8b8c55b2 Add initial index file 2016-01-05 19:45:49 -08:00
Carol Willing
e52d2eb27d Add Jupyter customizations 2016-01-05 19:45:49 -08:00
Carol Willing
0b4fbee418 Add sphinx skeleton 2016-01-05 19:45:49 -08:00
Carol Willing
9ee92a3984 Add a requirements for building docs 2016-01-05 19:45:49 -08:00
Carol Willing
f4de573198 Set up docs directory for Sphinx 2016-01-05 19:45:49 -08:00
Min RK
26e00718f9 Merge pull request #366 from minrk/double-redirect
return after redirect to spawner form
2016-01-05 17:19:04 +01:00
Min RK
c878e137aa try codecov for coverage 2016-01-05 14:05:59 +01:00
Min RK
53785a985d return after redirect to spawner form
avoids double-call to redirect, which fails
2016-01-05 14:02:20 +01:00
Min RK
b0cc47984b Merge pull request #364 from minrk/spawn-typo
s/users/user typo in spawn redirect
2015-12-31 17:30:56 +01:00
Min RK
91168fc22b s/users/user typo in spawn redirect 2015-12-31 12:06:04 +01:00
Min RK
66cbb8a614 more testing of spawn page redirects 2015-12-31 12:05:55 +01:00
Min RK
0fbd69be9b Merge pull request #355 from minrk/spawner-options
Add Spawner form page
2015-12-30 16:40:16 +01:00
Min RK
872005f852 document spawner options form 2015-12-30 14:17:58 +01:00
Min RK
647dd09f40 add spawn-form example 2015-12-30 13:55:39 +01:00
Min RK
041c1a4a1e remove always-False else branch 2015-12-30 13:55:38 +01:00
Min RK
d2e3a73f53 set login cookie after starting server
avoids redirect loop
2015-12-30 13:55:38 +01:00
Min RK
2bd7192e89 add extensible get_env hook on Spawner
to make it easier for subclasses to modify the env
2015-12-30 13:55:38 +01:00
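A hedged sketch of the hook described here: a Spawner subclass extends the environment the base class builds instead of replacing it (the added variable is illustrative):

```python
from jupyterhub.spawner import LocalProcessSpawner

class MySpawner(LocalProcessSpawner):
    def get_env(self):
        # start from the env the base class prepares (Hub API URL, auth token, ...)
        env = super().get_env()
        env['EXAMPLE_FLAG'] = '1'  # illustrative extra variable
        return env
```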
Min RK
28f5f33a76 add bootstrap form-control to spawner form inputs 2015-12-30 13:55:38 +01:00
Min RK
f9c9c2b471 options_form is a regular configurable
now that we can assume User.spawner exists at all times
2015-12-30 13:55:38 +01:00
Min RK
41ea696546 Instantiate Spawner on User init
shrinks `User.spawn` to take single argument, grants User more direct access to state.
2015-12-30 13:55:36 +01:00
Min RK
54f9a296de test Spawner.user_options and spawn form 2015-12-30 13:55:01 +01:00
Min RK
ba634354dd Add Spawner form
If Spawner.options_form is specified, a form providing input controls is shown to the user prior to launch.

Spawners access the result via the `self.user_options` dict.

The default spawners offer no form.
2015-12-30 13:55:01 +01:00
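A hedged sketch of the pieces described above, modelled on the spawn-form example added in this series (the form field and parsing are illustrative):

```python
# jupyterhub_config.py
from jupyterhub.spawner import LocalProcessSpawner

class FormSpawner(LocalProcessSpawner):
    def options_from_form(self, formdata):
        # form values arrive as lists of strings; validate/convert here.
        # The returned dict becomes self.user_options, readable in start().
        return {'args': formdata.get('args', [''])[0].split()}

c.JupyterHub.spawner_class = FormSpawner
c.Spawner.options_form = """
<label for="args">Extra notebook arguments</label>
<input name="args" class="form-control" placeholder="--debug">
"""
```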
Min RK
675f19b5cb Merge pull request #358 from minrk/ipython-traitlets
import base traitlets
2015-12-27 22:26:31 +01:00
Min RK
1eed96193d import base traitlets
missed IPython.utils.traitlets import from old User PR
2015-12-24 12:25:43 +01:00
Zoltan Fedor
faa259e97b IPv6-ready hosts file localhost issue
This resolves the 'Network is Unreachable' error experienced by a few users when JupyterHub connects to localhost.

On most recent Linux OS versions such as CentOS 6/7, Red Hat 6/7, Oracle Linux 6/7, etc., the hosts file (/etc/hosts) usually has a line to make the server IPv6-ready:
    ::1 localhost
even if the server does not actually have IPv6 enabled. In that case the Python socket library, when connecting to 'localhost', will try to connect via the IPv6 protocol, which fails with the 'Network is Unreachable' error.

To solve this we catch the error and retry on 127.0.0.1 instead of localhost, thus forcing use of the IPv4 protocol.
2015-12-15 10:53:06 -05:00
Min RK
4785a1ef87 Ensure that we can bind and connect to localhost
otherwise fallback to 127.0.0.1 for defaults
2015-12-15 13:37:30 +01:00
Min RK
aa529f3aba Merge pull request #352 from minrk/sqlalchemy-1.0
require sqlalchemy 1.0
2015-12-14 14:11:52 +01:00
Min RK
98955a5702 require sqlalchemy 1.0
we know 0.7.9 is too old. We might work on 0.8,
but 1.0 is current.
2015-12-14 10:37:48 +01:00
Min RK
2f1a203699 Merge pull request #349 from minrk/adduser
create users with adduser
2015-12-13 13:43:02 +01:00
Brian E. Granger
77b31d8542 Minor fixes to the PR on docs and the default command. 2015-12-13 12:15:31 +01:00
Min RK
8fca4e859d create users with adduser
instead of useradd (on Linux).

- still use `pw useradd` on BSD
- allow complete custom add_user_cmd for specifying directories, etc.
2015-12-13 12:15:31 +01:00
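A hedged sketch of customizing the creation command mentioned above: `add_user_cmd` is an argv list in which the literal string `USERNAME` is replaced by the user's name, and the name is appended as the final argument (the flags shown are illustrative):

```python
# jupyterhub_config.py
c.LocalAuthenticator.add_user_cmd = [
    'adduser', '-q', '--gecos', '""',
    '--home', '/srv/home/USERNAME',
    '--disabled-password',
]
```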
Brian E. Granger
8d90a92ef3 Merge pull request #351 from ellisonbg/token-docs
Edits to the security part of the docs
2015-12-12 15:29:09 -08:00
Brian E. Granger
37424acabf Adding actual secret key... 2015-12-12 15:28:12 -08:00
Brian E. Granger
86a450da77 Edits to the security part of the docs 2015-12-12 14:30:19 -08:00
Min RK
151dcbafb4 Merge pull request #347 from dblockow/feature/log-failed-auth-ip
Log Remote IP Address for Failed Authentication Attempts
2015-12-10 22:51:48 +01:00
David Blockow
d512ee9f65 Fixed to cope with a None handler passed in tests 2015-12-08 15:50:54 +10:30
David Blockow
e59b3f3ab1 Remote IP logged for failed authentication attempts 2015-12-08 15:00:29 +10:30
Min RK
2e7af82865 Merge pull request #185 from minrk/outer-user
move non-persisted User objects (spawner-related) off of orm.User
2015-12-02 12:44:47 +01:00
Min RK
49d4be002b Merge pull request #344 from minrk/system-user-home
add LocalAuthenticator.system_user_home
2015-12-02 10:29:14 +01:00
Min RK
fa8756767d add LocalAuthenticator.system_user_home 2015-12-01 10:36:06 +01:00
Min RK
6f128758db move non-persisted User objects (spawner-related) off of orm.User
adds higher level User object, which handles spawning.
This object has running, spawner, etc. attributes.
2015-11-30 14:05:00 +01:00
Min RK
235746a484 Merge pull request #338 from minrk/dockerfile
install nodejs with one command in Dockerfile
2015-11-25 15:01:57 +01:00
Min RK
37f736cf45 install nodejs with one command in Dockerfile 2015-11-17 14:54:06 +01:00
Min RK
5376291eaa Merge pull request #336 from Fokko/master
Added npm/node which fixes the Dockerfile
2015-11-17 14:52:54 +01:00
Fokko Driesprong
9e738a62d1 Added npm/node which fixes the Dockerfile 2015-11-16 15:55:41 +01:00
Min RK
8bfe52df4f Merge pull request #334 from cwaldbieser/custom_login
Load Authenticator handlers before default handlers
2015-11-10 12:40:57 +01:00
Carl Waldbieser
91ff31f688 Change the order for handlers so that the authenticator handler is added before the default handlers. 2015-11-09 14:17:26 -05:00
Min RK
b7fe3463cf back to dev 2015-11-04 17:13:41 +01:00
Min RK
4931684a2c release 0.3 2015-11-04 17:10:36 +01:00
Min RK
62d3cc53ef changelog for 0.3 2015-11-04 17:09:34 +01:00
Min RK
bd002e5340 Merge pull request #325 from minrk/authenticator-hooks
add pre/post-spawn hooks for Authenticators
2015-11-04 16:07:01 +00:00
Min RK
6f2aefb990 add pre/post-spawn hooks for Authenticators
allows setup/cleanup to be performed by the authenticator

use this to open PAM sessions at spawn
and close them at stop,
rather than open at login and never close.
2015-10-16 12:02:44 +02:00
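A hedged sketch of the hook pair this adds, layered on `PAMAuthenticator` so the PAM-session behaviour described above is kept (the provisioning lines are illustrative placeholders):

```python
from tornado import gen
from jupyterhub.auth import PAMAuthenticator

class ProvisioningAuthenticator(PAMAuthenticator):
    @gen.coroutine
    def pre_spawn_start(self, user, spawner):
        # runs before the user's server starts; keep the PAM session handling,
        # then perform any extra per-user setup
        yield gen.maybe_future(super().pre_spawn_start(user, spawner))
        self.log.info("preparing resources for %s", user.name)  # illustrative

    @gen.coroutine
    def post_spawn_stop(self, user, spawner):
        # runs after the user's server stops; undo what pre_spawn_start did
        self.log.info("releasing resources for %s", user.name)  # illustrative
        yield gen.maybe_future(super().post_spawn_stop(user, spawner))
```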
Min RK
bd3c878c67 Merge pull request #320 from minrk/authenticator-username
get username from authenticator
2015-10-06 15:43:22 +02:00
Min RK
c1de376b6a Merge pull request #310 from minrk/singleuser-notebook
single-user imports notebook package directly
2015-10-06 14:08:35 +02:00
Min RK
4cc74d287e get username from authenticator 2015-10-06 13:36:34 +02:00
Min RK
411a7a0bd8 single-user imports notebook package directly
instead of relying on IPython.html shims

when should we drop support for IPython 3?
2015-09-24 16:13:28 +02:00
Min RK
498c062ee0 Merge pull request #309 from minrk/gen.sleep
use gen.sleep
2015-09-24 16:09:55 +02:00
Min RK
d1edbddb77 use gen.sleep
instead of elaborate `gen.Task(add_timeout...)`

requires tornado 4.1
2015-09-23 17:04:01 +02:00
Min RK
0c9214ffb7 Merge pull request #307 from minrk/test-3.5
test on 3.5
2015-09-22 14:17:30 +02:00
Min RK
db0aaf1027 test on 3.5
requires pytest >= 2.8
2015-09-22 14:09:23 +02:00
Min RK
42681f8512 Merge pull request #306 from minrk/test-token-username
update token app test
2015-09-22 14:08:41 +02:00
Min RK
e5c1414b6a update token app test
now that admin user isn't added by default
2015-09-22 10:14:11 +02:00
Min RK
d857c20de0 Merge pull request #304 from minrk/rm-default-admin
Remove implicit admin of launching user
2015-09-22 08:59:28 +02:00
Min RK
a267174a03 Remove implicit admin of launching user
instead, warn about missing admins and point to config.
2015-09-21 10:52:19 +02:00
Min RK
768eeee470 Merge pull request #298 from minrk/spawner-authenticator
give Spawners a handle on the Authenticator
2015-09-11 14:24:38 +02:00
Min RK
a451f11cd3 give Spawners a handle on the Authenticator
band-aid for spawner-authenticator pairs
2015-09-11 11:57:41 +02:00
Min RK
63a476f9a6 remove some unused cruft from spawner 2015-09-11 11:23:00 +02:00
Min RK
100b17819d Merge pull request #296 from minrk/pamela
use pamela instead of simplepam
2015-09-11 11:02:14 +02:00
Min RK
024d8d7378 update mocking for pamela 2015-09-09 14:24:53 +02:00
Min RK
15e50529ff use pamela instead of simplepam
and open PAM sessions after successful auth
2015-09-09 13:55:02 +02:00
Min RK
a1a10be747 Merge pull request #290 from jhamrick/clear-login-cookies
Unset all login cookies
2015-08-22 18:55:30 -07:00
Jessica B. Hamrick
a91ee67e74 Reset other_user_cookies after clearing them 2015-08-22 13:14:05 -07:00
Jessica B. Hamrick
ea5bfa9999 Unset all login cookies 2015-08-21 19:24:44 -07:00
Min RK
bea58ee622 Merge pull request #288 from minrk/dont-auto-redirect-root
redirect unauthenticated root to *regular* login page
2015-08-19 21:44:00 -07:00
Min RK
b698d4d226 redirect root to *regular* login page
shows "Login with..." button for external services
instead of auto-redirecting to login service
(no good for oauth)
2015-08-19 12:43:32 -07:00
Min RK
139c7ecacb always render login page at /login 2015-08-19 12:30:10 -07:00
Min RK
eefa8fcad7 Merge pull request #284 from minrk/double-base-url
remove double base_url in login redirect
2015-08-06 21:48:49 -07:00
Min RK
acaedcd898 remove double base_url in login redirect
user.server.base_url is already correct,
and shouldn't be joined with the hub url
2015-08-06 21:37:06 -07:00
Min RK
a075661bfb Merge pull request #276 from Crunch-io/redirect-to-login
Redirect unauthenticated root to login
2015-07-23 13:00:16 -07:00
Joseph Tate
f2246df5bb Fix logging and comments 2015-07-23 15:08:53 -04:00
Joseph Tate
1a3c062512 Fix broken test 2015-07-23 15:06:20 -04:00
Joseph Tate
05e4ab41fe Redirect to the loginurl when not logged in, fix the user.running redirect to fix a redirect loop 2015-07-23 15:06:03 -04:00
Min RK
6f3ccb2d3d Merge pull request #275 from jhamrick/installation-instructions
Update installation instructions
2015-07-14 22:06:52 -07:00
Jessica B. Hamrick
6e5ce236c1 Update installation instructions 2015-07-14 15:36:35 -07:00
Min RK
58437057a1 back to dev 2015-07-12 15:30:47 -05:00
126 changed files with 10646 additions and 1872 deletions


@@ -1,4 +1,4 @@
[run]
omit =
jupyterhub/tests/*
jupyterhub/singleuser.py
jupyterhub/alembic/*


@@ -3,3 +3,4 @@ bench
jupyterhub_cookie_secret
jupyterhub.sqlite
jupyterhub_config.py
node_modules

29
.github/issue_template.md vendored Normal file

@@ -0,0 +1,29 @@
Hi! Thanks for using JupyterHub.
If you are reporting an issue with JupyterHub:
- Please use the [GitHub issue](https://github.com/jupyterhub/jupyterhub/issues)
search feature to check if your issue has been asked already. If it has,
please add your comments to the existing issue.
- Where applicable, please fill out the details below to help us troubleshoot
the issue that you are facing. Please be as thorough as you can in
providing details on the issue.
**How to reproduce the issue**
**What you expected to happen**
**What actually happens**
**Share what version of JupyterHub you are using**
Running `jupyter troubleshoot` from the command line, if possible, and posting
its output would also be helpful.
```
Insert jupyter troubleshoot output here
```

4
.gitignore vendored

@@ -1,9 +1,13 @@
node_modules
*.py[co]
*~
.cache
.DS_Store
build
dist
docs/_build
docs/source/_static/rest-api
.ipynb_checkpoints
# ignore config file at the top-level of the repo
# but not sub-dirs
/jupyterhub_config.py


@@ -2,6 +2,7 @@
language: python
sudo: false
python:
- 3.5
- 3.4
- 3.3
before_install:
@@ -9,9 +10,12 @@ before_install:
- npm install -g configurable-http-proxy
- git clone --quiet --depth 1 https://github.com/minrk/travis-wheels travis-wheels
install:
- pip install -f travis-wheels/wheelhouse -r dev-requirements.txt .
- pip install -f travis-wheels/wheelhouse ipython[notebook]
- pip install --pre -f travis-wheels/wheelhouse -r dev-requirements.txt .
script:
- py.test --cov jupyterhub jupyterhub/tests -v
- travis_retry py.test --cov jupyterhub jupyterhub/tests -v
after_success:
- coveralls
- codecov
matrix:
include:
- python: 3.5
env: JUPYTERHUB_TEST_SUBDOMAIN_HOST=http://127.0.0.1.xip.io:8000

26
CHECKLIST-Release.md Normal file

@@ -0,0 +1,26 @@
# Release checklist
- [ ] Upgrade Docs prior to Release
- [ ] Change log
- [ ] New features documented
- [ ] Update the contributor list - thank you page
- [ ] Upgrade and test Reference Deployments
- [ ] Release software
- [ ] Make sure 0 issues in milestone
- [ ] Follow release process steps
- [ ] Send builds to PyPI (Warehouse) and Conda Forge
- [ ] Blog post and/or release note
- [ ] Notify users of release
- [ ] Email Jupyter and Jupyter In Education mailing lists
- [ ] Tweet (optional)
- [ ] Increment the version number for the next release
- [ ] Update roadmap


@@ -1,35 +1,62 @@
# A base docker image that includes juptyerhub and IPython master
# An incomplete base Docker image for running JupyterHub
#
# Build your own derivative images starting with
# Add your configuration to create a complete derivative Docker image.
#
# FROM jupyter/jupyterhub:latest
# Include your configuration settings by starting with one of two options:
#
# Option 1:
#
# FROM jupyterhub/jupyterhub:latest
#
# And put your configuration file jupyterhub_config.py in /srv/jupyterhub/jupyterhub_config.py.
#
# Option 2:
#
# Or you can create your jupyterhub config and database on the host machine, and mount it with:
#
# docker run -v $PWD:/srv/jupyterhub -t jupyterhub/jupyterhub
#
# NOTE
# If you base on jupyterhub/jupyterhub-onbuild
# your jupyterhub_config.py will be added automatically
# from your docker directory.
FROM jupyter/notebook
FROM debian:jessie
MAINTAINER Jupyter Project <jupyter@googlegroups.com>
# install js dependencies
RUN npm install -g configurable-http-proxy
# install nodejs, utf8 locale, set CDN because default httpredir is unreliable
ENV DEBIAN_FRONTEND noninteractive
RUN REPO=http://cdn-fastly.deb.debian.org && \
echo "deb $REPO/debian jessie main\ndeb $REPO/debian-security jessie/updates main" > /etc/apt/sources.list && \
apt-get -y update && \
apt-get -y upgrade && \
apt-get -y install wget locales git bzip2 &&\
/usr/sbin/update-locale LANG=C.UTF-8 && \
locale-gen C.UTF-8 && \
apt-get remove -y locales && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
ENV LANG C.UTF-8
RUN mkdir -p /srv/
# install Python + NodeJS with conda
RUN wget -q https://repo.continuum.io/miniconda/Miniconda3-4.2.12-Linux-x86_64.sh -O /tmp/miniconda.sh && \
echo 'd0c7c71cc5659e54ab51f2005a8d96f3 */tmp/miniconda.sh' | md5sum -c - && \
bash /tmp/miniconda.sh -f -b -p /opt/conda && \
/opt/conda/bin/conda install --yes -c conda-forge python=3.5 sqlalchemy tornado jinja2 traitlets requests pip nodejs configurable-http-proxy && \
/opt/conda/bin/pip install --upgrade pip && \
rm /tmp/miniconda.sh
ENV PATH=/opt/conda/bin:$PATH
# install jupyterhub
ADD requirements.txt /tmp/requirements.txt
RUN pip3 install -r /tmp/requirements.txt
ADD . /src/jupyterhub
WORKDIR /src/jupyterhub
WORKDIR /srv/
ADD . /srv/jupyterhub
RUN python setup.py js && pip install . && \
rm -rf $PWD ~/.cache ~/.npm
RUN mkdir -p /srv/jupyterhub/
WORKDIR /srv/jupyterhub/
RUN pip3 install .
WORKDIR /srv/jupyterhub/
# Derivative containers should add jupyterhub config,
# which will be used when starting the application.
EXPOSE 8000
ONBUILD ADD jupyterhub_config.py /srv/jupyterhub/jupyterhub_config.py
CMD ["jupyterhub", "-f", "/srv/jupyterhub/jupyterhub_config.py"]
LABEL org.jupyter.service="jupyterhub"
CMD ["jupyterhub"]


@@ -4,13 +4,16 @@ include setupegg.py
include bower.json
include package.json
include *requirements.txt
include Dockerfile
graft onbuild
graft jupyterhub
graft scripts
graft share
# Documentation
graft docs
prune docs/node_modules
# prune some large unused files from components
prune share/jupyter/hub/static/components/bootstrap/css

239
README.md

@@ -1,113 +1,123 @@
# JupyterHub: A multi-user server for Jupyter notebooks
**[Technical overview](#technical-overview)** |
**[Prerequisites](#prerequisites)** |
**[Installation](#installation)** |
**[Running the Hub Server](#running-the-hub-server)** |
**[Configuration](#configuration)** |
**[Docker](#docker)** |
**[Contributing](#contributing)** |
**[License](#license)** |
**[Getting help](#getting-help)**
Questions, comments? Visit our Google Group:
# [JupyterHub](https://github.com/jupyterhub/jupyterhub)
[![Build Status](https://travis-ci.org/jupyterhub/jupyterhub.svg?branch=master)](https://travis-ci.org/jupyterhub/jupyterhub)
[![Circle CI](https://circleci.com/gh/jupyterhub/jupyterhub.svg?style=shield&circle-token=b5b65862eb2617b9a8d39e79340b0a6b816da8cc)](https://circleci.com/gh/jupyterhub/jupyterhub)
[![codecov.io](https://codecov.io/github/jupyterhub/jupyterhub/coverage.svg?branch=master)](https://codecov.io/github/jupyterhub/jupyterhub?branch=master)
"
[![Documentation Status](https://readthedocs.org/projects/jupyterhub/badge/?version=latest)](http://jupyterhub.readthedocs.org/en/latest/?badge=latest)
"
[![Google Group](https://img.shields.io/badge/-Google%20Group-lightgrey.svg)](https://groups.google.com/forum/#!forum/jupyter)
JupyterHub is a multi-user server that manages and proxies multiple instances of the single-user <del>IPython</del> Jupyter notebook server.
With [JupyterHub](https://jupyterhub.readthedocs.io) you can create a
**multi-user Hub** which spawns, manages, and proxies multiple instances of the
single-user [Jupyter notebook *(IPython notebook)* ](https://jupyter-notebook.readthedocs.io) server.
Three actors:
JupyterHub provides **single-user notebook servers to many users**. For example,
JupyterHub could serve notebooks to a class of students, a corporate
workgroup, or a science research group.
- multi-user Hub (tornado process)
- configurable http proxy (node-http-proxy)
- multiple single-user IPython notebook servers (Python/IPython/tornado)
by [Project Jupyter](https://jupyter.org)
Basic principles:
----
- Hub spawns proxy
- Proxy forwards ~all requests to hub by default
## Technical overview
Three main actors make up JupyterHub:
- multi-user **Hub** (tornado process)
- configurable http **proxy** (node-http-proxy)
- multiple **single-user Jupyter notebook servers** (Python/IPython/tornado)
JupyterHub's basic principles for operation are:
- Hub spawns a proxy
- Proxy forwards all requests to Hub by default
- Hub handles login, and spawns single-user servers on demand
- Hub configures proxy to forward url prefixes to single-user servers
- Hub configures proxy to forward url prefixes to the single-user servers
JupyterHub also provides a
[REST API](http://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyter/jupyterhub/master/docs/rest-api.yml#/default)
for administration of the Hub and users.
## Dependencies
----
JupyterHub requires IPython >= 3.0 (current master) and Python >= 3.3.
## Prerequisites
Before installing JupyterHub, you need:
You will need nodejs/npm, which you can get from your package manager:
- [Python](https://www.python.org/downloads/) 3.3 or greater
sudo apt-get install npm nodejs-legacy
An understanding of using [`pip`](https://pip.pypa.io/en/stable/) for installing
Python packages is recommended.
(The `nodejs-legacy` package installs the `node` executable,
which is required for npm to work on Debian/Ubuntu at this point)
- [nodejs/npm](https://www.npmjs.com/)
Then install javascript dependencies:
[Install nodejs/npm](https://docs.npmjs.com/getting-started/installing-node), which is available from your
package manager. For example, install on Linux (Debian/Ubuntu) using:
sudo npm install -g configurable-http-proxy
sudo apt-get install npm nodejs-legacy
### Optional
(The `nodejs-legacy` package installs the `node` executable and is currently
required for npm to work on Debian/Ubuntu.)
- Notes on `pip` command used in the below installation sections:
- `sudo` may be needed for `pip install`, depending on filesystem permissions.
- JupyterHub requires Python >= 3.3, so it may be required on some machines to use `pip3` instead
of `pip` (especially when you have both Python 2 and Python 3 installed on your machine).
If `pip3` is not found on your machine, you can get it by doing:
- TLS certificate and key for HTTPS communication
sudo apt-get install python3-pip
- Domain name
Before running the single-user notebook servers (which may be on the same system as the Hub or not):
- [Jupyter Notebook](https://jupyter.readthedocs.io/en/latest/install.html) version 4 or greater
## Installation
JupyterHub can be installed with `pip`, and the proxy with `npm`:
As usual start with cloning the code:
```bash
npm install -g configurable-http-proxy
pip3 install jupyterhub
```
git clone https://github.com/jupyter/jupyterhub.git
cd jupyterhub
If you plan to run notebook servers locally, you will need to install the
Jupyter notebook:
Then you can install the Python package by doing:
pip3 install --upgrade notebook
pip3 install -r requirements.txt
pip3 install .
If the `pip3 install .` command fails and complains about `lessc` being unavailable, you may need to explicitly install some additional javascript dependencies:
npm install
If you plan to run notebook servers locally, you may also need to install the IPython notebook:
pip3 install "ipython[notebook]"
This will fetch client-side javascript dependencies and compile CSS,
and install these files to `sys.prefix`/share/jupyter, as well as
install any Python dependencies.
### Development install
For a development install:
pip3 install -r dev-requirements.txt -e .
In which case you may need to manually update javascript and css after some updates, with:
python3 setup.py js # fetch updated client-side js (changes rarely)
python3 setup.py css # recompile CSS from LESS sources
## Running the server
To start the server, run the command:
## Running the Hub server
To start the Hub server, run the command:
jupyterhub
and then visit `http://localhost:8000`, and sign in with your unix credentials.
Visit `https://localhost:8000` in your browser, and sign in with your unix credentials.
If you want multiple users to be able to sign into the server, you will need to run the
`jupyterhub` command as a privileged user, such as root.
The [wiki](https://github.com/jupyter/jupyterhub/wiki/Using-sudo-to-run-JupyterHub-without-root-privileges) describes how to run the server
as a less privileged user, which requires more configuration of the system.
To allow multiple users to sign into the server, you will need to
run the `jupyterhub` command as a *privileged user*, such as root.
The [wiki](https://github.com/jupyterhub/jupyterhub/wiki/Using-sudo-to-run-JupyterHub-without-root-privileges)
describes how to run the server as a *less privileged user*, which requires more
configuration of the system.
## Getting started
----
see the [getting started doc](docs/getting-started.md) for some of the basics of configuring your JupyterHub deployment.
## Configuration
The [getting started document](docs/source/getting-started.md) contains the
basics of configuring a JupyterHub deployment.
### Some examples
The JupyterHub **tutorial** provides a video and documentation that explains and illustrates the fundamental steps for installation and configuration. [Repo](https://github.com/jupyterhub/jupyterhub-tutorial)
| [Tutorial documentation](http://jupyterhub-tutorial.readthedocs.io/en/latest/)
generate a default config file:
#### Generate a default configuration file
Generate a default config file:
jupyterhub --generate-config
spawn the server on 10.0.1.2:443 with https:
#### Customize the configuration, authentication, and process spawning
Spawn the server on ``10.0.1.2:443`` with **https**:
jupyterhub --ip 10.0.1.2 --port 443 --ssl-key my_ssl.key --ssl-cert my_ssl.cert
@@ -115,15 +125,86 @@ The authentication and process spawning mechanisms can be replaced,
which should allow plugging into a variety of authentication or process control environments.
Some examples, meant as illustration and testing of this concept:
- Using GitHub OAuth instead of PAM with [OAuthenticator](https://github.com/jupyter/oauthenticator)
- Spawning single-user servers with docker, using the [DockerSpawner](https://github.com/jupyter/dockerspawner)
- Using GitHub OAuth instead of PAM with [OAuthenticator](https://github.com/jupyterhub/oauthenticator)
- Spawning single-user servers with Docker, using the [DockerSpawner](https://github.com/jupyterhub/dockerspawner)
# Getting help
----
We encourage you to ask questions on the mailing list:
## Docker
A ready to go [docker image for JupyterHub](https://hub.docker.com/r/jupyterhub/jupyterhub/) gives a straightforward deployment of JupyterHub.
[![Google Group](https://img.shields.io/badge/-Google%20Group-lightgrey.svg)](https://groups.google.com/forum/#!forum/jupyter)
*Note: This `jupyterhub/jupyterhub` docker image is only an image for running the Hub service itself.
It does not require the other Jupyter components, such as Notebook installation, which are needed by the single-user servers.
To run the single-user servers, which may be on the same system as the Hub or not, Jupyter Notebook version 4 or greater must be installed.*
but you can participate in development discussions or get live help on Gitter:
#### Starting JupyterHub with docker
The JupyterHub docker image can be started with the following command:
[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/jupyter/jupyterhub?utm_source=badge&utm_medium=badge)
docker run -d --name jupyterhub jupyterhub/jupyterhub jupyterhub
This command will create a container named `jupyterhub` that you can **stop and resume** with `docker stop/start`.
The Hub service will be listening on all interfaces at port 8000, which makes this a good choice for **testing JupyterHub on your desktop or laptop**.
If you want to run docker on a computer that has a public IP then you should (as in MUST) **secure it with ssl** by
adding SSL options to your docker configuration or using an SSL-enabled proxy.
[Mounting volumes](https://docs.docker.com/engine/userguide/containers/dockervolumes/) will
allow you to **store data outside the docker image (host system) so it will be persistent**, even when you start
a new image.
The command `docker exec -it jupyterhub bash` will spawn a root shell in your docker
container. You can **use the root shell to create system users in the container**. These accounts will be used for authentication
in JupyterHub's default configuration.
----
## Contributing
If you would like to contribute to the project, please read our [contributor documentation](http://jupyter.readthedocs.io/en/latest/contributor/content-contributor.html) and the [`CONTRIBUTING.md`](CONTRIBUTING.md).
For a **development install**, clone the [repository](https://github.com/jupyterhub/jupyterhub) and then install from source:
```bash
git clone https://github.com/jupyterhub/jupyterhub
cd jupyterhub
pip3 install -r dev-requirements.txt -e .
```
If the `pip3 install` command fails and complains about `lessc` being unavailable, you may need to explicitly install some additional JavaScript dependencies:
npm install
This will fetch client-side JavaScript dependencies necessary to compile CSS.
You may also need to manually update JavaScript and CSS after some development updates, with:
```bash
python3 setup.py js # fetch updated client-side js
python3 setup.py css # recompile CSS from LESS sources
```
We use [pytest](http://doc.pytest.org/en/latest/) for testing. To run tests:
```bash
pytest jupyterhub/tests
```
----
## License
We use a shared copyright model that enables all contributors to maintain the
copyright on their contributions.
All code is licensed under the terms of the revised BSD license.
## Getting help
We encourage you to ask questions on the [mailing list](https://groups.google.com/forum/#!forum/jupyter),
and you may participate in development discussions or get live help on [Gitter](https://gitter.im/jupyterhub/jupyterhub).
## Resources
- [Reporting Issues](https://github.com/jupyterhub/jupyterhub/issues)
- JupyterHub tutorial | [Repo](https://github.com/jupyterhub/jupyterhub-tutorial)
| [Tutorial documentation](http://jupyterhub-tutorial.readthedocs.io/en/latest/)
- [Documentation for JupyterHub](http://jupyterhub.readthedocs.io/en/latest/) | [PDF (latest)](https://media.readthedocs.org/pdf/jupyterhub/latest/jupyterhub.pdf) | [PDF (stable)](https://media.readthedocs.org/pdf/jupyterhub/stable/jupyterhub.pdf)
- [Documentation for JupyterHub's REST API](http://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyter/jupyterhub/master/docs/rest-api.yml#/default)
- [Documentation for Project Jupyter](http://jupyter.readthedocs.io/en/latest/index.html) | [PDF](https://media.readthedocs.org/pdf/jupyter/latest/jupyter.pdf)
- [Project Jupyter website](https://jupyter.org)

24
circle.yml Normal file

@@ -0,0 +1,24 @@
machine:
services:
- docker
dependencies:
override:
- ls
test:
override:
- docker build -t jupyterhub/jupyterhub .
- docker build -t jupyterhub/jupyterhub-onbuild:${CIRCLE_TAG:-latest} onbuild
deployment:
hub:
branch: master
commands:
- docker login -u $DOCKER_USER -p $DOCKER_PASS -e unused@example.com
- docker push jupyterhub/jupyterhub-onbuild
release:
tag: /.*/
commands:
- docker login -u $DOCKER_USER -p $DOCKER_PASS -e unused@example.com
- docker push jupyterhub/jupyterhub-onbuild:$CIRCLE_TAG


@@ -1,4 +1,7 @@
-r requirements.txt
coveralls
mock
codecov
pytest-cov
pytest
pytest>=2.8
notebook
requests-mock

docs/Makefile Normal file

@@ -0,0 +1,206 @@
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " applehelp to make an Apple Help Book"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " coverage to run coverage check of the documentation (if enabled)"
@echo " spelling to run spell check on documentation"
clean:
rm -rf $(BUILDDIR)/*
node_modules: package.json
npm install && touch node_modules
rest-api: source/_static/rest-api/index.html
source/_static/rest-api/index.html: rest-api.yml node_modules
npm run rest-api
html: rest-api
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/JupyterHub.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/JupyterHub.qhc"
applehelp:
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
@echo
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
@echo "N.B. You won't be able to view it unless you put it in" \
"~/Library/Documentation/Help or install it in your application" \
"bundle."
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/JupyterHub"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/JupyterHub"
@echo "# devhelp"
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
spelling:
$(SPHINXBUILD) -b spelling $(ALLSPHINXOPTS) $(BUILDDIR)/spelling
@echo
@echo "Spell check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/spelling/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
coverage:
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
@echo "Testing of coverage in the sources finished, look at the " \
"results in $(BUILDDIR)/coverage/python.txt."
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

docs/environment.yml Normal file

@@ -0,0 +1,16 @@
name: jhub_docs
channels:
- conda-forge
dependencies:
- nodejs
- python=3
- jinja2
- pamela
- requests
- sqlalchemy>=1
- tornado>=4.1
- traitlets>=4.1
- sphinx>=1.3.6
- sphinx_rtd_theme
- pip:
- recommonmark==0.4.0


@@ -1,389 +0,0 @@
# Getting started with JupyterHub
This document describes some of the basics of configuring JupyterHub to do what you want.
JupyterHub is highly customizable, so there's a lot to cover.
## Installation
See [the readme](../README.md) for help installing JupyterHub.
## Overview
JupyterHub is a set of processes that together provide a multiuser Jupyter Notebook server.
There are three main categories of processes run by the `jupyterhub` command line program:
- *Single User Server*: a dedicated, single-user Jupyter Notebook server is started for each user on the system
when they log in. The object that starts these processes is called a *Spawner*.
- *Proxy*: the public facing part of the server that uses a dynamic proxy to route HTTP requests
to the *Hub* and *Single User Servers*.
- *Hub*: manages user accounts and authentication and coordinates *Single User Servers* using a *Spawner*.
## JupyterHub's default behavior
To start JupyterHub in its default configuration, type the following at the command line:
```bash
sudo jupyterhub
```
The default Authenticator that ships with JupyterHub authenticates users
with their system name and password (via [PAM][]).
Any user on the system with a password will be allowed to start a single-user notebook server.
The default Spawner starts servers locally as each user, one dedicated server per user.
These servers listen on localhost, and start in the given user's home directory.
By default, the *Proxy* listens on all public interfaces on port 8000.
Thus you can reach JupyterHub through:
http://localhost:8000
or any other public IP or domain pointing to your system.
In their default configuration, the other services, the *Hub* and *Single-User Servers*,
all communicate with each other on localhost only.
**NOTE:** In its default configuration, JupyterHub runs without SSL encryption (HTTPS).
You should not run JupyterHub without SSL encryption on a public network.
See [below](#Security) for how to configure JupyterHub to use SSL.
By default, starting JupyterHub will write two files to disk in the current working directory:
- `jupyterhub.sqlite` is the sqlite database containing all of the state of the *Hub*.
This file allows the *Hub* to remember what users are running and where,
as well as other information enabling you to restart parts of JupyterHub separately.
- `jupyterhub_cookie_secret` is the encryption key used for securing cookies.
This file needs to persist so that restarting the Hub server does not invalidate cookies.
Conversely, deleting this file and restarting the server effectively invalidates all login cookies.
The cookie secret file is discussed [below](#Security).
The location of these files can be specified via configuration, discussed below.
## How to configure JupyterHub
JupyterHub is configured in two ways:
1. Command-line arguments
2. Configuration files
Type the following for brief information about the command line arguments:
```bash
jupyterhub -h
```
or:
```bash
jupyterhub --help-all
```
for the full command line help.
By default, JupyterHub will look for a configuration file (can be missing)
named `jupyterhub_config.py` in the current working directory.
You can create an empty configuration file with
```bash
jupyterhub --generate-config
```
This empty configuration file has descriptions of all configuration variables and their default values.
You can load a specific config file with:
```bash
jupyterhub -f /path/to/jupyterhub_config.py
```
See also: [general docs](http://ipython.org/ipython-doc/dev/development/config.html)
on the config system Jupyter uses.
## Networking
In most situations you will want to change the main IP address and port of the Proxy.
This address determines where JupyterHub is available to your users.
The default is all network interfaces (`''`) on port 8000.
This can be done with the following command line arguments:
```bash
jupyterhub --ip=192.168.1.2 --port=443
```
Or you can put the following lines in a configuration file:
```python
c.JupyterHub.ip = '192.168.1.2'
c.JupyterHub.port = 443
```
Port 443 is used in these examples as it is the default port for SSL/HTTPS.
Configuring only the main IP and port of JupyterHub should be sufficient for most deployments of JupyterHub.
However, for more customized scenarios,
you can configure the following additional networking details.
The Hub service talks to the proxy via a REST API on a secondary port,
whose network interface and port can be configured separately.
By default, this REST API listens on port 8081 of localhost only.
If you want to run the Proxy separate from the Hub,
you may need to configure this IP and port with:
```python
# ideally a private network address
c.JupyterHub.proxy_api_ip = '10.0.1.4'
c.JupyterHub.proxy_api_port = 5432
```
The Hub service also listens only on localhost (port 8080) by default.
The Hub needs to be accessible from both the proxy and all Spawners.
When spawning local servers, localhost is fine,
but if *either* the Proxy or (more likely) the Spawners will be remote or isolated in containers,
the Hub must listen on an IP that is accessible.
```python
c.JupyterHub.hub_ip = '10.0.1.4'
c.JupyterHub.hub_port = 54321
```
## Security
First of all, since JupyterHub includes authentication and allows arbitrary code execution,
you should not run it without SSL (HTTPS).
This will require you to obtain an official SSL certificate or create a self-signed certificate.
Once you have obtained and installed a key and certificate
you need to pass their locations to JupyterHub's configuration as follows:
```python
c.JupyterHub.ssl_key = '/path/to/my.key'
c.JupyterHub.ssl_cert = '/path/to/my.cert'
```
Some cert files also contain the key, in which case only the cert is needed.
It is important that these files be put in a secure location on your server.
There are two other aspects of JupyterHub network security.
The cookie secret is an encryption key, used to encrypt the cookies used for authentication.
If this value changes for the Hub,
all single-user servers must also be restarted.
Normally, this value is stored in the file `jupyterhub_cookie_secret`,
which can be specified with:
```python
c.JupyterHub.cookie_secret_file = '/path/to/jupyterhub_cookie_secret'
```
In most deployments of JupyterHub, you should point this to a secure location on the file system.
If the cookie secret file doesn't exist when the Hub starts,
a new cookie secret is generated and stored in the file.
If you would like to avoid the need for files,
the value can be loaded in the Hub process from the `JPY_COOKIE_SECRET` env variable:
```bash
export JPY_COOKIE_SECRET=`openssl rand -hex 1024`
```
For security reasons, this env variable should only be visible to the Hub.
The Hub authenticates its requests to the Proxy via an environment variable, `CONFIGPROXY_AUTH_TOKEN`.
If you want to be able to start or restart the proxy or Hub independently of each other (not always necessary),
you must set this environment variable before starting the server (for both the Hub and Proxy):
```bash
export CONFIGPROXY_AUTH_TOKEN=`openssl rand -hex 32`
```
This env variable needs to be visible to the Hub and Proxy.
If you don't set this, the Hub will generate a random key itself,
which means that any time you restart the Hub you **must also restart the Proxy**.
If the proxy is a subprocess of the Hub,
this should happen automatically (this is the default configuration).
## Configuring Authentication
The default Authenticator uses [PAM][] to authenticate system users with their username and password.
The default behavior of this Authenticator is to allow any user with an account and password on the system to login.
You can restrict which users are allowed to login with `Authenticator.whitelist`:
```python
c.Authenticator.whitelist = {'mal', 'zoe', 'inara', 'kaylee'}
```
Admin users of JupyterHub have the ability to take actions on users' behalf,
such as stopping and restarting their servers,
and adding and removing new users from the whitelist.
Any users in the admin list are automatically added to the whitelist,
if they are not already present.
The set of initial admin users can be configured as follows:
```python
c.Authenticator.admin_users = {'mal', 'zoe'}
```
If `JupyterHub.admin_access` is True (not the default),
then admin users have permission to log in *as other users* on their respective machines, for debugging.
**You should make sure your users know if admin_access is enabled.**
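If you do decide to enable it, it is a single configuration flag (shown here only as an illustration):
```python
c.JupyterHub.admin_access = True
```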
### Adding and removing users
Users can be added to and removed from the Hub via the admin panel or the REST API.
These users will be added to the whitelist and database.
Restarting the Hub will not require manually updating the whitelist in your config file,
as the users will be loaded from the database.
This means that after starting the Hub once,
it is not sufficient to remove users from the whitelist in your config file.
You must also remove them from the database, either by discarding the database file,
or via the admin UI.
The default PAMAuthenticator is one case of a special kind of authenticator,
called a LocalAuthenticator,
indicating that it manages users on the local system.
When you add a user to the Hub, a LocalAuthenticator checks if that user already exists.
Normally, there will be an error telling you that the user doesn't exist.
If you set the configuration value
```python
c.LocalAuthenticator.create_system_users = True
```
however, adding a user to the Hub that doesn't already exist on the system
will result in the Hub creating that user via the system `useradd` mechanism.
This option is typically used on hosted deployments of JupyterHub,
to avoid the need to manually create all your users before launching the service.
It is not recommended when running JupyterHub in situations where JupyterHub users map directly onto UNIX users.
## Configuring single-user servers
Since the single-user server is an instance of `ipython notebook`,
an entire separate multi-process application,
there are many aspects of that server that you can configure,
and a lot of ways to express that configuration.
At the JupyterHub level, you can set some values on the Spawner.
The simplest of these is `Spawner.notebook_dir`,
which lets you set the root directory for a user's server.
This root notebook directory is the highest level directory users will be able to access in the notebook dashboard.
In this example, the root notebook directory is set to `~/notebooks`,
where `~` is expanded to the user's home directory.
```python
c.Spawner.notebook_dir = '~/notebooks'
```
You can also specify extra command-line arguments to the notebook server with:
```python
c.Spawner.args = ['--debug', '--profile=PHYS131']
```
This could be used to set the user's default page for the single-user server:
```python
c.Spawner.args = ['--NotebookApp.default_url=/notebooks/Welcome.ipynb']
```
Since the single-user server extends the notebook server application,
it still loads configuration from the `ipython_notebook_config.py` config file.
Each user may have one of these files in `$HOME/.ipython/profile_default/`.
IPython also supports loading system-wide config files from `/etc/ipython/`,
which is the place to put configuration that you want to affect all of your users.
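For example, a system-wide notebook config might look like the following sketch (the particular option is only an illustration):
```python
# /etc/ipython/ipython_notebook_config.py
# system-wide config, applied to every user's single-user server
c = get_config()
c.NotebookApp.default_url = '/notebooks/Welcome.ipynb'
```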
## External services
JupyterHub has a REST API that can be used to run external services.
More detail on this API will be added in the future.
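As a rough sketch of what such a service could do today, the Hub's API accepts a token in the `Authorization` header; the token and hostname below are placeholders:
```bash
# list the Hub's users via the REST API
# <token> must be an API token known to the Hub, e.g. one preloaded via c.JupyterHub.api_tokens
curl -H "Authorization: token <token>" https://example.com/hub/api/users
```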
## File locations
It is recommended to put all of the files used by JupyterHub into standard UNIX filesystem locations.
* `/srv/jupyterhub` for all security and runtime files
* `/etc/jupyterhub` for all configuration files
* `/var/log` for log files
## Example
In the following example, we show a configuration file for a fairly standard JupyterHub deployment with the following assumptions:
* JupyterHub is running on a single cloud server
* Using SSL on the standard HTTPS port 443
* You want to use [GitHub OAuth][oauthenticator] for login
* You need the users to exist locally on the server
* You want users' notebooks to be served from `~/assignments` to allow users to browse for notebooks within
other users' home directories
* You want the landing page for each user to be a Welcome.ipynb notebook in their assignments directory.
* All runtime files are put into `/srv/jupyterhub` and log files in `/var/log`.
Let's start out with `jupyterhub_config.py`:
```python
# jupyterhub_config.py
c = get_config()
import os
pjoin = os.path.join
runtime_dir = os.path.join('/srv/jupyterhub')
ssl_dir = pjoin(runtime_dir, 'ssl')
if not os.path.exists(ssl_dir):
os.makedirs(ssl_dir)
# https on :443
c.JupyterHub.port = 443
c.JupyterHub.ssl_key = pjoin(ssl_dir, 'ssl.key')
c.JupyterHub.ssl_cert = pjoin(ssl_dir, 'ssl.cert')
# put the JupyterHub cookie secret and state db
# in /var/run/jupyterhub
c.JupyterHub.cookie_secret_file = pjoin(runtime_dir, 'cookie_secret')
c.JupyterHub.db_url = pjoin(runtime_dir, 'jupyterhub.sqlite')
# or `--db=/path/to/jupyterhub.sqlite` on the command-line
# put the log file in /var/log
c.JupyterHub.log_file = '/var/log/jupyterhub.log'
# use GitHub OAuthenticator for local users
c.JupyterHub.authenticator_class = 'oauthenticator.LocalGitHubOAuthenticator'
c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL']
# create system users that don't exist yet
c.LocalAuthenticator.create_system_users = True
# specify users and admin
c.Authenticator.whitelist = {'rgbkrk', 'minrk', 'jhamrick'}
c.Authenticator.admin_users = {'jhamrick', 'rgbkrk'}
# start single-user notebook servers in ~/assignments,
# with ~/assignments/Welcome.ipynb as the default landing page
# this config could also be put in
# /etc/ipython/ipython_notebook_config.py
c.Spawner.notebook_dir = '~/assignments'
c.Spawner.args = ['--NotebookApp.default_url=/notebooks/Welcome.ipynb']
```
Using the GitHub Authenticator [requires a few additional env variables][oauth-setup],
which we will need to set when we launch the server:
```bash
export GITHUB_CLIENT_ID=github_id
export GITHUB_CLIENT_SECRET=github_secret
export OAUTH_CALLBACK_URL=https://example.com/hub/oauth_callback
export CONFIGPROXY_AUTH_TOKEN=super-secret
jupyterhub -f /path/to/aboveconfig.py
```
# Further reading
- TODO: troubleshooting
- [Custom Authenticators](authenticators.md)
- [Custom Spawners](spawners.md)
[oauth-setup]: https://github.com/jupyter/oauthenticator#setup
[oauthenticator]: https://github.com/jupyter/oauthenticator
[PAM]: http://en.wikipedia.org/wiki/Pluggable_authentication_module

docs/make.bat Normal file

@@ -0,0 +1,263 @@
@ECHO OFF
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set BUILDDIR=build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source
set I18NSPHINXOPTS=%SPHINXOPTS% source
if NOT "%PAPER%" == "" (
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)
if "%1" == "" goto help
if "%1" == "help" (
:help
echo.Please use `make ^<target^>` where ^<target^> is one of
echo. html to make standalone HTML files
echo. dirhtml to make HTML files named index.html in directories
echo. singlehtml to make a single large HTML file
echo. pickle to make pickle files
echo. json to make JSON files
echo. htmlhelp to make HTML files and a HTML help project
echo. qthelp to make HTML files and a qthelp project
echo. devhelp to make HTML files and a Devhelp project
echo. epub to make an epub
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
echo. text to make text files
echo. man to make manual pages
echo. texinfo to make Texinfo files
echo. gettext to make PO message catalogs
echo. changes to make an overview over all changed/added/deprecated items
echo. xml to make Docutils-native XML files
echo. pseudoxml to make pseudoxml-XML files for display purposes
echo. linkcheck to check all external links for integrity
echo. doctest to run all doctests embedded in the documentation if enabled
echo. coverage to run coverage check of the documentation if enabled
goto end
)
if "%1" == "clean" (
for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
del /q /s %BUILDDIR%\*
goto end
)
REM Check if sphinx-build is available and fallback to Python version if any
%SPHINXBUILD% 1>NUL 2>NUL
if errorlevel 9009 goto sphinx_python
goto sphinx_ok
:sphinx_python
set SPHINXBUILD=python -m sphinx.__init__
%SPHINXBUILD% 2> nul
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
:sphinx_ok
if "%1" == "html" (
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
goto end
)
if "%1" == "dirhtml" (
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
goto end
)
if "%1" == "singlehtml" (
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
goto end
)
if "%1" == "pickle" (
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the pickle files.
goto end
)
if "%1" == "json" (
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the JSON files.
goto end
)
if "%1" == "htmlhelp" (
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
goto end
)
if "%1" == "qthelp" (
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\JupyterHub.qhcp
echo.To view the help file:
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\JupyterHub.ghc
goto end
)
if "%1" == "devhelp" (
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished.
goto end
)
if "%1" == "epub" (
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The epub file is in %BUILDDIR%/epub.
goto end
)
if "%1" == "latex" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
if errorlevel 1 exit /b 1
echo.
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdf" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf
cd %~dp0
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdfja" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf-ja
cd %~dp0
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "text" (
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The text files are in %BUILDDIR%/text.
goto end
)
if "%1" == "man" (
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The manual pages are in %BUILDDIR%/man.
goto end
)
if "%1" == "texinfo" (
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
goto end
)
if "%1" == "gettext" (
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
goto end
)
if "%1" == "changes" (
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
if errorlevel 1 exit /b 1
echo.
echo.The overview file is in %BUILDDIR%/changes.
goto end
)
if "%1" == "linkcheck" (
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
if errorlevel 1 exit /b 1
echo.
echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
goto end
)
if "%1" == "doctest" (
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
if errorlevel 1 exit /b 1
echo.
echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
goto end
)
if "%1" == "coverage" (
%SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
if errorlevel 1 exit /b 1
echo.
echo.Testing of coverage in the sources finished, look at the ^
results in %BUILDDIR%/coverage/python.txt.
goto end
)
if "%1" == "xml" (
%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The XML files are in %BUILDDIR%/xml.
goto end
)
if "%1" == "pseudoxml" (
%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
goto end
)
:end

docs/package.json Normal file

@@ -0,0 +1,14 @@
{
"name": "jupyterhub-docs-build",
"version": "0.0.0",
"description": "build JupyterHub swagger docs",
"scripts": {
"rest-api": "bootprint openapi ./rest-api.yml source/_static/rest-api"
},
"author": "",
"license": "BSD-3-Clause",
"devDependencies": {
"bootprint": "^0.8.5",
"bootprint-openapi": "^0.17.0"
}
}

docs/requirements.txt Normal file

@@ -0,0 +1,3 @@
-r ../requirements.txt
sphinx>=1.3.6
recommonmark==0.4.0

docs/rest-api.yml Normal file

@@ -0,0 +1,485 @@
# see me at: http://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyter/jupyterhub/master/docs/rest-api.yml#/default
swagger: '2.0'
info:
title: JupyterHub
description: The REST API for JupyterHub
version: 0.7.0
license:
name: BSD-3-Clause
schemes:
- http
- https
securityDefinitions:
token:
type: apiKey
name: Authorization
in: header
security:
- token: []
basePath: /hub/api
produces:
- application/json
consumes:
- application/json
paths:
/:
get:
summary: Get JupyterHub version
description: |
This endpoint is not authenticated, so that clients and users
can identify the JupyterHub version before setting up authentication.
responses:
'200':
description: The JupyterHub version
schema:
type: object
properties:
version:
type: string
description: The version of JupyterHub itself
/info:
get:
summary: Get detailed info about JupyterHub
description: |
Detailed JupyterHub information, including Python version,
JupyterHub's version and executable path,
and which Authenticator and Spawner are active.
responses:
'200':
description: Detailed JupyterHub info
schema:
type: object
properties:
version:
type: string
description: The version of JupyterHub itself
python:
type: string
description: The Python version, as returned by sys.version
sys_executable:
type: string
description: The path to sys.executable running JupyterHub
authenticator:
type: object
properties:
class:
type: string
description: The Python class currently active for JupyterHub Authentication
version:
type: string
description: The version of the currently active Authenticator
spawner:
type: object
properties:
class:
type: string
description: The Python class currently active for spawning single-user notebook servers
version:
type: string
description: The version of the currently active Spawner
/users:
get:
summary: List users
responses:
'200':
description: The Hub's user list
schema:
type: array
items:
$ref: '#/definitions/User'
post:
summary: Create multiple users
parameters:
- name: data
in: body
required: true
schema:
type: object
properties:
usernames:
type: array
description: list of usernames to create on the Hub
items:
type: string
admin:
description: whether the created users should be admins
type: boolean
responses:
'201':
description: The users have been created
schema:
type: array
description: The created users
items:
$ref: '#/definitions/User'
/users/{name}:
get:
summary: Get a user by name
parameters:
- name: name
description: username
in: path
required: true
type: string
responses:
'200':
description: The User model
schema:
$ref: '#/definitions/User'
post:
summary: Create a single user
parameters:
- name: name
description: username
in: path
required: true
type: string
responses:
'201':
description: The user has been created
schema:
$ref: '#/definitions/User'
patch:
summary: Modify a user
description: Change a user's name or admin status
parameters:
- name: name
description: username
in: path
required: true
type: string
- name: data
in: body
required: true
description: Updated user info. At least one key to be updated (name or admin) is required.
schema:
type: object
properties:
name:
type: string
description: the new name (optional, if another key is updated i.e. admin)
admin:
type: boolean
description: update admin (optional, if another key is updated i.e. name)
responses:
'200':
description: The updated user info
schema:
$ref: '#/definitions/User'
delete:
summary: Delete a user
parameters:
- name: name
description: username
in: path
required: true
type: string
responses:
'204':
description: The user has been deleted
/users/{name}/server:
post:
summary: Start a user's single-user notebook server
parameters:
- name: name
description: username
in: path
required: true
type: string
responses:
'201':
description: The user's notebook server has started
'202':
description: The user's notebook server has not yet started, but has been requested
delete:
summary: Stop a user's server
parameters:
- name: name
description: username
in: path
required: true
type: string
responses:
'204':
description: The user's notebook server has stopped
'202':
description: The user's notebook server has not yet stopped as it is taking a while to stop
/users/{name}/admin-access:
post:
summary: Grant admin access to this user's notebook server
parameters:
- name: name
description: username
in: path
required: true
type: string
responses:
'200':
description: Sets a cookie granting the requesting administrator access to the user's notebook server
/groups:
get:
summary: List groups
responses:
'200':
description: The list of groups
schema:
type: array
items:
$ref: '#/definitions/Group'
/groups/{name}:
get:
summary: Get a group by name
parameters:
- name: name
description: group name
in: path
required: true
type: string
responses:
'200':
description: The group model
schema:
$ref: '#/definitions/Group'
post:
summary: Create a group
parameters:
- name: name
description: group name
in: path
required: true
type: string
responses:
'201':
description: The group has been created
schema:
$ref: '#/definitions/Group'
delete:
summary: Delete a group
parameters:
- name: name
description: group name
in: path
required: true
type: string
responses:
'204':
description: The group has been deleted
/groups/{name}/users:
post:
summary: Add users to a group
parameters:
- name: name
description: group name
in: path
required: true
type: string
- name: data
in: body
required: true
description: The users to add to the group
schema:
type: object
properties:
users:
type: array
description: List of usernames to add to the group
items:
type: string
responses:
'200':
description: The users have been added to the group
schema:
$ref: '#/definitions/Group'
delete:
summary: Remove users from a group
parameters:
- name: name
description: group name
in: path
required: true
type: string
- name: data
in: body
required: true
description: The users to remove from the group
schema:
type: object
properties:
users:
type: array
description: List of usernames to remove from the group
items:
type: string
responses:
'200':
description: The users have been removed from the group
/services:
get:
summary: List services
responses:
'200':
description: The service list
schema:
type: array
items:
$ref: '#/definitions/Service'
/services/{name}:
get:
summary: Get a service by name
parameters:
- name: name
description: service name
in: path
required: true
type: string
responses:
'200':
description: The Service model
schema:
$ref: '#/definitions/Service'
/proxy:
get:
summary: Get the proxy's routing table
description: A convenience alias for getting the routing table directly from the proxy
responses:
'200':
description: Routing table
schema:
type: object
description: configurable-http-proxy routing table (see configurable-http-proxy docs for details)
post:
summary: Force the Hub to sync with the proxy
responses:
'200':
description: Success
patch:
summary: Notify the Hub about a new proxy
description: Notifies the Hub of a new proxy to use.
parameters:
- name: data
in: body
required: true
description: Any values that have changed for the new proxy. All keys are optional.
schema:
type: object
properties:
ip:
type: string
description: IP address of the new proxy
port:
type: string
description: Port of the new proxy
protocol:
type: string
description: Protocol of new proxy, if changed
auth_token:
type: string
description: CONFIGPROXY_AUTH_TOKEN for the new proxy
responses:
'200':
description: Success
/authorizations/token/{token}:
get:
summary: Identify a user from an API token
parameters:
- name: token
in: path
required: true
type: string
responses:
'200':
description: The user identified by the API token
schema:
$ref: '#/definitions/User'
/authorizations/cookie/{cookie_name}/{cookie_value}:
get:
summary: Identify a user from a cookie
description: Used by single-user notebook servers to hand off cookie authentication to the Hub
parameters:
- name: cookie_name
in: path
required: true
type: string
- name: cookie_value
in: path
required: true
type: string
responses:
'200':
description: The user identified by the cookie
schema:
$ref: '#/definitions/User'
/shutdown:
post:
summary: Shutdown the Hub
parameters:
- name: proxy
in: body
type: boolean
description: Whether the proxy should be shutdown as well (default from Hub config)
- name: servers
in: body
type: boolean
description: Whether users' notebook servers should be shut down as well (default from Hub config)
responses:
'200':
description: Hub has shutdown
definitions:
User:
type: object
properties:
name:
type: string
description: The user's name
admin:
type: boolean
description: Whether the user is an admin
groups:
type: array
description: The names of groups where this user is a member
items:
type: string
server:
type: string
description: The user's notebook server's base URL, if running; null if not.
pending:
type: string
enum: ["spawn", "stop"]
description: The currently pending action, if any
last_activity:
type: string
format: date-time
description: Timestamp of last-seen activity from the user
Group:
type: object
properties:
name:
type: string
description: The group's name
users:
type: array
description: The names of users who are members of this group
items:
type: string
Service:
type: object
properties:
name:
type: string
description: The service's name
admin:
type: boolean
description: Whether the service is an admin
url:
type: string
description: The internal url where the service is running
prefix:
type: string
description: The proxied URL prefix to the service's url
pid:
type: number
description: The PID of the service process (if managed)
command:
type: array
description: The command used to start the service (if managed)
items:
type: string

docs/source/api/auth.rst Normal file

@@ -0,0 +1,21 @@
==============
Authenticators
==============
Module: :mod:`jupyterhub.auth`
==============================
.. automodule:: jupyterhub.auth
.. currentmodule:: jupyterhub.auth
.. autoclass:: Authenticator
:members:
.. autoclass:: LocalAuthenticator
:members:
.. autoclass:: PAMAuthenticator

docs/source/api/index.rst Normal file

@@ -0,0 +1,33 @@
.. _api-index:
####################
The JupyterHub API
####################
:Release: |release|
:Date: |today|
JupyterHub also provides a REST API for administration of the Hub and users.
The documentation on `Using JupyterHub's REST API <../rest.html>`_ provides
information on:
- Creating an API token
- Adding tokens to the configuration file (optional)
- Making an API request
The same JupyterHub API spec, as found here, is available in an interactive form
`here (on swagger's petstore) <http://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyterhub/jupyterhub/master/docs/rest-api.yml#!/default>`__.
The `OpenAPI Initiative`_ (fka Swagger™) is a project used to describe
and document RESTful APIs.
JupyterHub API Reference:
.. toctree::
auth
spawner
user
services.auth
.. _OpenAPI Initiative: https://www.openapis.org/


@@ -0,0 +1,18 @@
=======================
Authenticating Services
=======================
Module: :mod:`jupyterhub.services.auth`
=======================================
.. automodule:: jupyterhub.services.auth
.. currentmodule:: jupyterhub.services.auth
.. autoclass:: HubAuth
:members:
.. autoclass:: HubAuthenticated
:members:


@@ -0,0 +1,18 @@
==============
Spawners
==============
Module: :mod:`jupyterhub.spawner`
=================================
.. automodule:: jupyterhub.spawner
.. currentmodule:: jupyterhub.spawner
:class:`Spawner`
----------------
.. autoclass:: Spawner
:members: options_from_form, poll, start, stop, get_args, get_env, get_state, template_namespace, format_string
.. autoclass:: LocalProcessSpawner

docs/source/api/user.rst Normal file

@@ -0,0 +1,31 @@
=============
Users
=============
Module: :mod:`jupyterhub.user`
==============================
.. automodule:: jupyterhub.user
.. currentmodule:: jupyterhub.user
:class:`User`
-------------
.. class:: Server
.. autoclass:: User
:members: escaped_name
.. attribute:: name
The user's name
.. attribute:: server
The user's Server data object if running, None otherwise.
Has ``ip``, ``port`` attributes.
.. attribute:: spawner
The user's :class:`~.Spawner` instance.


@@ -1,4 +1,4 @@
# Writing a custom Authenticator
# Authenticators
The [Authenticator][] is the mechanism for authorizing users.
Basic authenticators use simple username and password authentication.
@@ -11,14 +11,13 @@ One such example is using [GitHub OAuth][].
Because the username is passed from the Authenticator to the Spawner,
a custom Authenticator and Spawner are often used together.
See a list of custom Authenticators [on the wiki](https://github.com/jupyter/jupyterhub/wiki/Authenticators).
See a list of custom Authenticators [on the wiki](https://github.com/jupyterhub/jupyterhub/wiki/Authenticators).
## Basics of Authenticators
A basic Authenticator has one central method:
### Authenticator.authenticate
Authenticator.authenticate(handler, data)
@@ -48,14 +47,13 @@ class DictionaryAuthenticator(Authenticator):
passwords = Dict(config=True,
help="""dict of username:password for authentication"""
)
@gen.coroutine
def authenticate(self, handler, data):
if self.passwords.get(data['username']) == data['password']:
return data['username']
```
### Authenticator.whitelist
Authenticators can specify a whitelist of usernames to allow authentication.
@@ -63,6 +61,38 @@ For local user authentication (e.g. PAM), this lets you limit which users
can login.
## Normalizing and validating usernames
Since the Authenticator and Spawner both use the same username,
sometimes you want to transform the name coming from the authentication service
(e.g. turning email addresses into local system usernames) before adding them to the Hub service.
Authenticators can define `normalize_username`, which takes a username.
The default normalization is to cast names to lowercase.
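As a sketch (not taken from the Hub's docs), an Authenticator could override it to turn email-style logins into local names:
```python
from jupyterhub.auth import Authenticator

class EmailAuthenticator(Authenticator):
    # hypothetical example: strip the domain and lowercase email-style usernames
    def normalize_username(self, username):
        return username.split('@')[0].lower()
```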
For simple mappings, a configurable dict `Authenticator.username_map` is used to turn one name into another:
```python
c.Authenticator.username_map = {
'service-name': 'localname'
}
```
### Validating usernames
In most cases, there is a very limited set of acceptable usernames.
Authenticators can define `validate_username(username)`,
which should return True for a valid username and False for an invalid one.
The primary effect this has is improving error messages during user creation.
The default behavior is to use configurable `Authenticator.username_pattern`,
which is a regular expression string for validation.
To only allow usernames that start with 'w':
```python
c.Authenticator.username_pattern = r'w.*'
```
## OAuth and other non-password logins
Some login mechanisms, such as [OAuth][], don't map onto username+password.
@@ -72,9 +102,12 @@ You can see an example implementation of an Authenticator that uses [GitHub OAut
at [OAuthenticator][].
[Authenticator]: ../jupyterhub/auth.py
[PAM]: http://en.wikipedia.org/wiki/Pluggable_authentication_module
[OAuth]: http://en.wikipedia.org/wiki/OAuth
[GitHub OAuth]: https://developer.github.com/v3/oauth/
[OAuthenticator]: https://github.com/jupyter/oauthenticator
## Writing a custom authenticator
If you are interested in writing a custom authenticator, you can read [this tutorial](http://jupyterhub-tutorial.readthedocs.io/en/latest/authenticators.html).
[Authenticator]: https://github.com/jupyterhub/jupyterhub/blob/master/jupyterhub/auth.py
[PAM]: https://en.wikipedia.org/wiki/Pluggable_authentication_module
[OAuth]: https://en.wikipedia.org/wiki/OAuth
[GitHub OAuth]: https://developer.github.com/v3/oauth/
[OAuthenticator]: https://github.com/jupyterhub/oauthenticator

docs/source/changelog.md Normal file

@@ -0,0 +1,129 @@
# Change log summary
For detailed changes from the prior release, click on the version number, and
its link will bring up a GitHub listing of changes. Use `git log` on the
command line for details.
## [Unreleased] 0.8
## 0.7
### [0.7.0] - 2016-12-2
#### Added
- Implement Services API [\#705](https://github.com/jupyterhub/jupyterhub/pull/705)
- Add `/api/` and `/api/info` endpoints [\#675](https://github.com/jupyterhub/jupyterhub/pull/675)
- Add documentation for JupyterLab, pySpark configuration, troubleshooting,
and more.
- Add logging of error if adding users already in database. [\#689](https://github.com/jupyterhub/jupyterhub/pull/689)
- Add HubAuth class for authenticating with JupyterHub. This class can
be used by any application, even outside tornado.
- Add user groups.
- Add `/hub/user-redirect/...` URL for redirecting users to a file on their own server.
#### Changed
- Always install with setuptools but not eggs (effectively require
`pip install .`) [\#722](https://github.com/jupyterhub/jupyterhub/pull/722)
- Updated formatting of changelog. [\#711](https://github.com/jupyterhub/jupyterhub/pull/711)
- Single-user server is provided by JupyterHub package, so single-user servers depend on JupyterHub now.
#### Fixed
- Fix docker repository location [\#719](https://github.com/jupyterhub/jupyterhub/pull/719)
- Fix swagger spec conformance and timestamp type in API spec
- Various redirect-loop-causing bugs have been fixed.
#### Removed
- Deprecate `--no-ssl` command line option. It has no meaning and warns if
used. [\#789](https://github.com/jupyterhub/jupyterhub/pull/789)
- Deprecate `%U` username substitution in favor of `{username}`. [\#748](https://github.com/jupyterhub/jupyterhub/pull/748)
- Removed deprecated SwarmSpawner link. [\#699](https://github.com/jupyterhub/jupyterhub/pull/699)
## 0.6
### [0.6.1] - 2016-05-04
Bugfixes on 0.6:
- statsd is an optional dependency, only needed if in use
- Notice more quickly when servers have crashed
- Better error pages for proxy errors
- Add Stop All button to admin panel for stopping all servers at once
### [0.6.0] - 2016-04-25
- JupyterHub has moved to a new `jupyterhub` namespace on GitHub and Docker. What was `jupyter/jupyterhub` is now `jupyterhub/jupyterhub`, etc.
- `jupyterhub/jupyterhub` image on DockerHub no longer loads the jupyterhub_config.py in an ONBUILD step. A new `jupyterhub/jupyterhub-onbuild` image does this
- Add statsd support, via `c.JupyterHub.statsd_{host,port,prefix}`
- Update to traitlets 4.1 `@default`, `@observe` APIs for traits
- Allow disabling PAM sessions via `c.PAMAuthenticator.open_sessions = False`. This may be needed on SELinux-enabled systems, where our PAM session logic often does not work properly
- Add `Spawner.environment` configurable, for defining extra environment variables to load for single-user servers
- JupyterHub API tokens can be pregenerated and loaded via `JupyterHub.api_tokens`, a dict of `token: username`.
- JupyterHub API tokens can be requested via the REST API, with a POST request to `/api/authorizations/token`.
This can only be used if the Authenticator has a username and password.
- Various fixes for user URLs and redirects
## [0.5] - 2016-03-07
- Single-user server must be run with Jupyter Notebook ≥ 4.0
- Require `--no-ssl` confirmation to allow the Hub to be run without SSL (e.g. behind SSL termination in nginx)
- Add lengths to text fields for MySQL support
- Add `Spawner.disable_user_config` for preventing user-owned configuration from modifying single-user servers.
- Fixes for MySQL support.
- Add ability to run each user's server on its own subdomain. Requires wildcard DNS and wildcard SSL to be feasible. Enable subdomains by setting `JupyterHub.subdomain_host = 'https://jupyterhub.domain.tld[:port]'`.
- Use `127.0.0.1` for local communication instead of `localhost`, avoiding issues with DNS on some systems.
- Fix race that could add users to proxy prematurely if spawning is slow.
## 0.4
### [0.4.1] - 2016-02-03
Fix removal of `/login` page in 0.4.0, breaking some OAuth providers.
### [0.4.0] - 2016-02-01
- Add `Spawner.user_options_form` for specifying an HTML form to present to users,
allowing users to influence the spawning of their own servers.
- Add `Authenticator.pre_spawn_start` and `Authenticator.post_spawn_stop` hooks,
so that Authenticators can do setup or teardown (e.g. passing credentials to Spawner,
mounting data sources, etc.).
These methods are typically used with custom Authenticator+Spawner pairs.
- 0.4 will be the last JupyterHub release where single-user servers running IPython 3 are supported instead of Notebook ≥ 4.0.
## [0.3] - 2015-11-04
- No longer make the user starting the Hub an admin
- start PAM sessions on login
- hooks for Authenticators to fire before spawners start and after they stop,
allowing deeper interaction between Spawner/Authenticator pairs.
- login redirect fixes
## [0.2] - 2015-07-12
- Based on standalone traitlets instead of IPython.utils.traitlets
- multiple users in admin panel
- Fixes for usernames that require escaping
## 0.1 - 2015-03-07
First preview release
[Unreleased]: https://github.com/jupyterhub/jupyterhub/compare/0.7.0...HEAD
[0.7.0]: https://github.com/jupyterhub/jupyterhub/compare/0.6.1...0.7.0
[0.6.1]: https://github.com/jupyterhub/jupyterhub/compare/0.6.0...0.6.1
[0.6.0]: https://github.com/jupyterhub/jupyterhub/compare/0.5.0...0.6.0
[0.5]: https://github.com/jupyterhub/jupyterhub/compare/0.4.1...0.5.0
[0.4.1]: https://github.com/jupyterhub/jupyterhub/compare/0.4.0...0.4.1
[0.4.0]: https://github.com/jupyterhub/jupyterhub/compare/0.3.0...0.4.0
[0.3]: https://github.com/jupyterhub/jupyterhub/compare/0.2.0...0.3.0
[0.2]: https://github.com/jupyterhub/jupyterhub/compare/0.1.0...0.2.0

docs/source/conf.py Normal file

@@ -0,0 +1,187 @@
# -*- coding: utf-8 -*-
#
import sys
import os
import shlex
# For conversion from markdown to html
import recommonmark.parser
# Set paths
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# Minimal Sphinx version
needs_sphinx = '1.4'
# Sphinx extension modules
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon',
]
templates_path = ['_templates']
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'JupyterHub'
copyright = u'2016, Project Jupyter team'
author = u'Project Jupyter team'
# Autopopulate version
from os.path import dirname
docs = dirname(dirname(__file__))
root = dirname(docs)
sys.path.insert(0, root)
import jupyterhub
# The short X.Y version.
version = '%i.%i' % jupyterhub.version_info[:2]
# The full version, including alpha/beta/rc tags.
release = jupyterhub.__version__
language = None
exclude_patterns = []
pygments_style = 'sphinx'
todo_include_todos = False
# -- Source -------------------------------------------------------------
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
source_suffix = ['.rst', '.md']
#source_encoding = 'utf-8-sig'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages.
html_theme = 'sphinx_rtd_theme'
#html_theme_options = {}
#html_theme_path = []
#html_title = None
#html_short_title = None
#html_logo = None
#html_favicon = None
# Paths that contain custom static files (such as style sheets)
html_static_path = ['_static']
#html_extra_path = []
#html_last_updated_fmt = '%b %d, %Y'
#html_use_smartypants = True
#html_sidebars = {}
#html_additional_pages = {}
#html_domain_indices = True
#html_use_index = True
#html_split_index = False
#html_show_sourcelink = True
#html_show_sphinx = True
#html_show_copyright = True
#html_use_opensearch = ''
#html_file_suffix = None
#html_search_language = 'en'
#html_search_options = {'type': 'default'}
#html_search_scorer = 'scorer.js'
htmlhelp_basename = 'JupyterHubdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
#'papersize': 'letterpaper',
#'pointsize': '10pt',
#'preamble': '',
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'JupyterHub.tex', u'JupyterHub Documentation',
u'Project Jupyter team', 'manual'),
]
#latex_logo = None
#latex_use_parts = False
#latex_show_pagerefs = False
#latex_show_urls = False
#latex_appendices = []
#latex_domain_indices = True
# -- manual page output -------------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'jupyterhub', u'JupyterHub Documentation',
[author], 1)
]
#man_show_urls = False
# -- Texinfo output -----------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'JupyterHub', u'JupyterHub Documentation',
author, 'JupyterHub', 'One line description of project.',
'Miscellaneous'),
]
#texinfo_appendices = []
#texinfo_domain_indices = True
#texinfo_show_urls = 'footnote'
#texinfo_no_detailmenu = False
# -- Epub output --------------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Intersphinx ----------------------------------------------------------
intersphinx_mapping = {'https://docs.python.org/': None}
# -- Read The Docs --------------------------------------------------------
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
# only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
# readthedocs.org uses their theme by default, so no need to specify it
# build rest-api, since RTD doesn't run make
from subprocess import check_call as sh
sh(['make', 'rest-api'], cwd=docs)
# -- Spell checking -------------------------------------------------------
try:
import sphinxcontrib.spelling
except ImportError:
pass
else:
extensions.append("sphinxcontrib.spelling")
spelling_word_list_filename='spelling_wordlist.txt'


@@ -0,0 +1,194 @@
# Configuration examples
This section provides configuration files and tips for the following
configurations:
- Example with GitHub OAuth
- Example with nginx reverse proxy
## Example with GitHub OAuth
In the following example, we show a configuration file for a fairly standard JupyterHub deployment with the following assumptions:
* JupyterHub is running on a single cloud server
* Using SSL on the standard HTTPS port 443
* You want to use GitHub OAuth (using oauthenticator) for login
* You need the users to exist locally on the server
* You want users' notebooks to be served from `~/assignments` to allow users to browse for notebooks within
other users' home directories
* You want the landing page for each user to be a Welcome.ipynb notebook in their assignments directory.
* All runtime files are put into `/srv/jupyterhub` and log files in `/var/log`.
Let's start out with `jupyterhub_config.py`:
```python
# jupyterhub_config.py
c = get_config()
import os
pjoin = os.path.join
runtime_dir = os.path.join('/srv/jupyterhub')
ssl_dir = pjoin(runtime_dir, 'ssl')
if not os.path.exists(ssl_dir):
os.makedirs(ssl_dir)
# https on :443
c.JupyterHub.port = 443
c.JupyterHub.ssl_key = pjoin(ssl_dir, 'ssl.key')
c.JupyterHub.ssl_cert = pjoin(ssl_dir, 'ssl.cert')
# put the JupyterHub cookie secret and state db
# in /var/run/jupyterhub
c.JupyterHub.cookie_secret_file = pjoin(runtime_dir, 'cookie_secret')
c.JupyterHub.db_url = pjoin(runtime_dir, 'jupyterhub.sqlite')
# or `--db=/path/to/jupyterhub.sqlite` on the command-line
# put the log file in /var/log
c.JupyterHub.extra_log_file = '/var/log/jupyterhub.log'
# use GitHub OAuthenticator for local users
c.JupyterHub.authenticator_class = 'oauthenticator.LocalGitHubOAuthenticator'
c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL']
# create system users that don't exist yet
c.LocalAuthenticator.create_system_users = True
# specify users and admin
c.Authenticator.whitelist = {'rgbkrk', 'minrk', 'jhamrick'}
c.Authenticator.admin_users = {'jhamrick', 'rgbkrk'}
# start single-user notebook servers in ~/assignments,
# with ~/assignments/Welcome.ipynb as the default landing page
# this config could also be put in
# /etc/ipython/ipython_notebook_config.py
c.Spawner.notebook_dir = '~/assignments'
c.Spawner.args = ['--NotebookApp.default_url=/notebooks/Welcome.ipynb']
```
Using the GitHub Authenticator [requires a few additional env variables][oauth-setup],
which we will need to set when we launch the server:
```bash
export GITHUB_CLIENT_ID=github_id
export GITHUB_CLIENT_SECRET=github_secret
export OAUTH_CALLBACK_URL=https://example.com/hub/oauth_callback
export CONFIGPROXY_AUTH_TOKEN=super-secret
jupyterhub -f /path/to/aboveconfig.py
```
## Example with nginx reverse proxy
In the following example, we show configuration files for a JupyterHub server running locally on port `8000` but accessible from the outside on the standard SSL port `443`. This could be useful if the JupyterHub server machine is also hosting other domains or content on `443`. The goal here is to have the following be true:
* JupyterHub is running on a server, accessed *only* via `HUB.DOMAIN.TLD:443`
* On the same machine, `NO_HUB.DOMAIN.TLD` strictly serves different content, also on port `443`
* `nginx` is used to manage the web servers / reverse proxy (which means that only nginx will bind to port `443`, serving both host names)
* After testing, the server in question should be able to score an A+ on the Qualys SSL Labs [SSL Server Test](https://www.ssllabs.com/ssltest/)
Let's start out with `jupyterhub_config.py`:
```python
# Force the proxy to only listen to connections to 127.0.0.1
c.JupyterHub.ip = '127.0.0.1'
```
The `nginx` server config files are fairly standard fare except for the two `location` blocks within the `HUB.DOMAIN.TLD` config file:
```bash
# HTTP server to redirect all 80 traffic to SSL/HTTPS
server {
listen 80;
server_name HUB.DOMAIN.TLD;
# Tell all requests to port 80 to be 302 redirected to HTTPS
return 302 https://$host$request_uri;
}
# HTTPS server to handle JupyterHub
server {
listen 443;
ssl on;
server_name HUB.DOMAIN.TLD;
ssl_certificate /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem;
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
ssl_prefer_server_ciphers on;
ssl_dhparam /etc/ssl/certs/dhparam.pem;
ssl_ciphers 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA';
ssl_session_timeout 1d;
ssl_session_cache shared:SSL:50m;
ssl_stapling on;
ssl_stapling_verify on;
add_header Strict-Transport-Security max-age=15768000;
# Managing literal requests to the JupyterHub front end
location / {
proxy_pass https://127.0.0.1:8000;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
# Managing WebHook/Socket requests between hub user servers and external proxy
location ~* /(api/kernels/[^/]+/(channels|iopub|shell|stdin)|terminals/websocket)/? {
proxy_pass https://127.0.0.1:8000;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
# WebSocket support
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
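        # note: $connection_upgrade is not a built-in nginx variable; define it
        # with a "map $http_upgrade $connection_upgrade" block in the http context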
}
# Managing requests to verify letsencrypt host
location ~ /.well-known {
allow all;
}
}
```
`nginx` will now be the front-facing element of JupyterHub on `443`, which means it is also free to bind other servers, like `NO_HUB.DOMAIN.TLD`, to the same port on the same machine and network interface. In fact, one can use the same server blocks as above for `NO_HUB` and simply add a line for the root directory of the site as well as the applicable `location` block:
```bash
server {
listen 80;
server_name NO_HUB.DOMAIN.TLD;
# Tell all requests to port 80 to be 302 redirected to HTTPS
return 302 https://$host$request_uri;
}
server {
listen 443;
ssl on;
# INSERT OTHER SSL PARAMETERS HERE AS ABOVE
# Set the appropriate root directory
root /var/www/html;
# Set URI handling
location / {
try_files $uri $uri/ =404;
}
# Managing requests to verify letsencrypt host
location ~ /.well-known {
allow all;
}
}
```
Now just restart `nginx`, restart JupyterHub, and enjoy accessing `https://HUB.DOMAIN.TLD` while serving other content securely on `https://NO_HUB.DOMAIN.TLD`.


@@ -0,0 +1,58 @@
# Contributors
Project Jupyter thanks the following people for their help and
contribution on JupyterHub:
- anderbubble
- betatim
- Carreau
- ckald
- cwaldbieser
- danielballen
- daradib
- datapolitan
- dblockow-d2dcrc
- dietmarw
- DominicFollettSmith
- dsblank
- ellisonbg
- evanlinde
- Fokko
- iamed18
- JamiesHQ
- jdavidheiser
- jhamrick
- josephtate
- kinuax
- KrishnaPG
- ksolan
- mbmilligan
- minrk
- mistercrunch
- Mistobaan
- mwmarkland
- nthiery
- ObiWahn
- ozancaglayan
- parente
- PeterDaveHello
- peterruppel
- rafael-ladislau
- rgbkrk
- robnagler
- ryanlovett
- Scrypy
- shreddd
- spoorthyv
- ssanderson
- takluyver
- temogen
- TimShawver
- Todd-Z-Li
- toobaz
- tsaeger
- vilhelmen
- willingc
- YannBrrd
- yuvipanda
- zoltan-fedor


@@ -0,0 +1,526 @@
# Getting started with JupyterHub
This section contains getting started information on the following topics:
- [Technical Overview](getting-started.html#technical-overview)
- [Installation](getting-started.html#installation)
- [Configuration](getting-started.html#configuration)
- [Networking](getting-started.html#networking)
- [Security](getting-started.html#security)
- [Authentication and users](getting-started.html#authentication-and-users)
- [Spawners and single-user notebook servers](getting-started.html#spawners-and-single-user-notebook-servers)
- [External Services](getting-started.html#external-services)
## Technical Overview
JupyterHub is a set of processes that together provide a single user Jupyter
Notebook server for each person in a group.
### Three subsystems
Three major subsystems are started by the `jupyterhub` command line program:
- **Single-User Notebook Server**: a dedicated, single-user, Jupyter Notebook server is
started for each user on the system when the user logs in. The object that
starts these servers is called a **Spawner**.
- **Proxy**: the public facing part of JupyterHub that uses a dynamic proxy
to route HTTP requests to the Hub and Single User Notebook Servers.
- **Hub**: manages user accounts, authentication, and coordinates Single User
Notebook Servers using a Spawner.
![JupyterHub subsystems](images/jhub-parts.png)
### Deployment server
To use JupyterHub, you need a Unix server (typically Linux) running somewhere
that is accessible to your team on the network. The JupyterHub server can be
on an internal network at your organization, or it can run on the public
internet (in which case, take care with the Hub's
[security](getting-started.html#security)).
### Basic operation
Users access JupyterHub through a web browser, by going to the IP address or
the domain name of the server.
Basic principles of operation:
* Hub spawns proxy
* Proxy forwards all requests to hub by default
* Hub handles login, and spawns single-user servers on demand
* Hub configures proxy to forward url prefixes to single-user servers
Different **[authenticators](authenticators.html)** control access
to JupyterHub. The default one (PAM) uses the user accounts on the server where
JupyterHub is running. If you use this, you will need to create a user account
on the system for each user on your team. Using other authenticators, you can
allow users to sign in with e.g. a GitHub account, or with any single-sign-on
system your organization has.
Next, **[spawners](spawners.html)** control how JupyterHub starts
the individual notebook server for each user. The default spawner will
start a notebook server on the same machine, running under each user's system username.
The other main option is to start each server in a separate container, often
using Docker.
### Default behavior
**IMPORTANT: You should not run JupyterHub without SSL encryption on a public network.**
See [Security documentation](#security) for how to configure JupyterHub to use SSL,
or put it behind SSL termination in another proxy server, such as nginx.
---
**Deprecation note:** Removed `--no-ssl` in version 0.7.
JupyterHub versions 0.5 and 0.6 required extra confirmation via `--no-ssl` to
allow running without SSL using the command `jupyterhub --no-ssl`. The
`--no-ssl` command line option is no longer needed in version 0.7.
---
To start JupyterHub in its default configuration, type the following at the command line:
```bash
sudo jupyterhub
```
The default Authenticator that ships with JupyterHub authenticates users
with their system name and password (via [PAM][]).
Any user on the system with a password will be allowed to start a single-user notebook server.
The default Spawner starts servers locally as each user, one dedicated server per user.
These servers listen on localhost, and start in the given user's home directory.
By default, the **Proxy** listens on all public interfaces on port 8000.
Thus you can reach JupyterHub through either:
- `http://localhost:8000`
- or any other public IP or domain pointing to your system.
In their default configuration, the other services, the **Hub** and **Single-User Servers**,
all communicate with each other on localhost only.
By default, starting JupyterHub will write two files to disk in the current working directory:
- `jupyterhub.sqlite` is the sqlite database containing all of the state of the **Hub**.
This file allows the **Hub** to remember what users are running and where,
as well as other information enabling you to restart parts of JupyterHub separately. It is
important to note that this database contains *no* sensitive information other than **Hub**
usernames.
- `jupyterhub_cookie_secret` is the encryption key used for securing cookies.
This file needs to persist so that restarting the Hub server does not invalidate cookies.
Conversely, deleting this file and restarting the server effectively invalidates all login cookies.
The cookie secret file is discussed in the [Cookie Secret documentation](#cookie-secret).
The location of these files can be specified via configuration, discussed below.
## Installation
See the project's [README](https://github.com/jupyterhub/jupyterhub/blob/master/README.md)
for help installing JupyterHub.
### Planning your installation
Prior to beginning installation, it's helpful to consider some of the following:
- deployment system (bare metal, Docker)
- Authentication (PAM, OAuth, etc.)
- Spawner of singleuser notebook servers (Docker, Batch, etc.)
- Services (nbgrader, etc.)
- JupyterHub database (default SQLite; traditional RDBMS such as PostgreSQL,
  MySQL, or other databases supported by [SQLAlchemy](http://www.sqlalchemy.org))
### Folders and File Locations
It is recommended to put all of the files used by JupyterHub into standard
UNIX filesystem locations.
* `/srv/jupyterhub` for all security and runtime files
* `/etc/jupyterhub` for all configuration files
* `/var/log` for log files
## Configuration
JupyterHub is configured in two ways:
1. Configuration file
2. Command-line arguments
### Configuration file
By default, JupyterHub will look for a configuration file (which may not be created yet)
named `jupyterhub_config.py` in the current working directory.
You can create an empty configuration file with:
```bash
jupyterhub --generate-config
```
The generated configuration file contains commented-out descriptions of all configuration variables and their default
values. You can load a specific config file with:
```bash
jupyterhub -f /path/to/jupyterhub_config.py
```
See also: [general docs](http://ipython.org/ipython-doc/dev/development/config.html)
on the config system Jupyter uses.
### Command-line arguments
Type the following for brief information about the command-line arguments:
```bash
jupyterhub -h
```
or:
```bash
jupyterhub --help-all
```
for the full command line help.
All configurable options are technically configurable on the command-line,
even if some are really inconvenient to type. Just replace the desired option,
`c.Class.trait`, with `--Class.trait`. For example, to configure the
`c.Spawner.notebook_dir` trait from the command-line:
```bash
jupyterhub --Spawner.notebook_dir='~/assignments'
```
## Networking
### Configuring the Proxy's IP address and port
The Proxy's main IP address setting determines where JupyterHub is available to users.
By default, JupyterHub is configured to be available on all network interfaces
(`''`) on port 8000. **Note**: Use of `'*'` is discouraged for IP configuration;
instead, use of `'0.0.0.0'` is preferred.
Changing the IP address and port can be done with the following command line
arguments:
```bash
jupyterhub --ip=192.168.1.2 --port=443
```
Or by placing the following lines in a configuration file:
```python
c.JupyterHub.ip = '192.168.1.2'
c.JupyterHub.port = 443
```
Port 443 is used as an example since 443 is the default port for SSL/HTTPS.
Configuring only the main IP and port of JupyterHub should be sufficient for most deployments of JupyterHub.
However, more customized scenarios may need additional networking details to
be configured.
### Configuring the Proxy's REST API communication IP address and port (optional)
The Hub service talks to the proxy via a REST API on a secondary port,
whose network interface and port can be configured separately.
By default, this REST API listens on port 8081 of localhost only.
If running the Proxy separate from the Hub,
configure the REST API communication IP address and port with:
```python
# ideally a private network address
c.JupyterHub.proxy_api_ip = '10.0.1.4'
c.JupyterHub.proxy_api_port = 5432
```
### Configuring the Hub if Spawners or Proxy are remote or isolated in containers
The Hub service also listens only on localhost (port 8080) by default.
The Hub needs to be accessible from both the proxy and all Spawners.
When spawning local servers, an IP address setting of localhost is fine.
If *either* the Proxy *or* (more likely) the Spawners will be remote or
isolated in containers, the Hub must listen on an IP that is accessible.
```python
c.JupyterHub.hub_ip = '10.0.1.4'
c.JupyterHub.hub_port = 54321
```
## Security
**IMPORTANT: You should not run JupyterHub without SSL encryption on a public network.**
---
**Deprecation note:** Removed `--no-ssl` in version 0.7.
JupyterHub versions 0.5 and 0.6 required extra confirmation via `--no-ssl` to
allow running without SSL using the command `jupyterhub --no-ssl`. The
`--no-ssl` command line option is no longer needed in version 0.7.
---
Security is the most important aspect of configuring Jupyter. There are four main aspects of the
security configuration:
1. SSL encryption (to enable HTTPS)
2. Cookie secret (a key for encrypting browser cookies)
3. Proxy authentication token (used for the Hub and other services to authenticate to the Proxy)
4. Periodic security audits
*Note* that the **Hub** hashes all secrets (e.g., auth tokens) before storing them in its
database. A loss of control over read-access to the database should have no security impact
on your deployment.
### SSL encryption
Since JupyterHub includes authentication and allows arbitrary code execution, you should not run
it without SSL (HTTPS). This will require you to obtain an official, trusted SSL certificate or
create a self-signed certificate. Once you have obtained and installed a key and certificate you
need to specify their locations in the configuration file as follows:
```python
c.JupyterHub.ssl_key = '/path/to/my.key'
c.JupyterHub.ssl_cert = '/path/to/my.cert'
```
It is also possible to use letsencrypt (https://letsencrypt.org/) to obtain
a free, trusted SSL certificate. If you run letsencrypt using the default
options, the needed configuration is (replace `mydomain.tld` by your fully
qualified domain name):
```python
c.JupyterHub.ssl_key = '/etc/letsencrypt/live/{mydomain.tld}/privkey.pem'
c.JupyterHub.ssl_cert = '/etc/letsencrypt/live/{mydomain.tld}/fullchain.pem'
```
If the fully qualified domain name (FQDN) is `example.com`, the following
would be the needed configuration:
```python
c.JupyterHub.ssl_key = '/etc/letsencrypt/live/example.com/privkey.pem'
c.JupyterHub.ssl_cert = '/etc/letsencrypt/live/example.com/fullchain.pem'
```
Some cert files also contain the key, in which case only the cert is needed. It is important that
these files be put in a secure location on your server, where they are not readable by regular
users.
Note on **chain certificates**: If you are using a chain certificate, see also
[chained certificate for SSL](troubleshooting.html#chained-certificates-for-ssl) in the JupyterHub troubleshooting FAQ.
Note: In certain cases, e.g. when the Hub sits **behind SSL termination in nginx**, running the Hub
itself without SSL may be acceptable.
### Cookie secret
The cookie secret is an encryption key, used to encrypt the browser cookies used for
authentication. If this value changes for the Hub, all single-user servers must also be restarted.
Normally, this value is stored in a file, the location of which can be specified in a config file
as follows:
```python
c.JupyterHub.cookie_secret_file = '/srv/jupyterhub/cookie_secret'
```
The content of this file should be a long random string encoded in MIME Base64. For example, you could generate this file with:
```bash
openssl rand -base64 2048 > /srv/jupyterhub/cookie_secret
```
In most deployments of JupyterHub, you should point this to a secure location on the file
system, such as `/srv/jupyterhub/cookie_secret`. If the cookie secret file doesn't exist when
the Hub starts, a new cookie secret is generated and stored in the file. The
file must not be readable by group or other or the server won't start.
The recommended permissions for the cookie secret file are 600 (owner-only rw).
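A rough Python equivalent of the `openssl` command above, including the recommended permissions, is the following sketch:
```python
import base64
import os

# generate a base64-encoded cookie secret and restrict it to owner-only access (0600)
secret_path = '/srv/jupyterhub/cookie_secret'
with open(secret_path, 'wb') as f:
    f.write(base64.b64encode(os.urandom(2048)))
os.chmod(secret_path, 0o600)
```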
If you would like to avoid the need for files, the value can be loaded in the Hub process from
the `JPY_COOKIE_SECRET` environment variable, which is a hex-encoded string. You
can set it this way:
```bash
export JPY_COOKIE_SECRET=`openssl rand -hex 1024`
```
For security reasons, this environment variable should only be visible to the Hub.
If you set it dynamically as above, all users will be logged out each time the
Hub starts.
You can also set the cookie secret in the configuration file itself, `jupyterhub_config.py`,
as a binary string:
```python
c.JupyterHub.cookie_secret = bytes.fromhex('VERY LONG SECRET HEX STRING')
```
### Proxy authentication token
The Hub authenticates its requests to the Proxy using a secret token that
the Hub and Proxy agree upon. The value of this string should be a random
string (for example, generated by `openssl rand -hex 32`). You can pass
this value to the Hub and Proxy using either the `CONFIGPROXY_AUTH_TOKEN`
environment variable:
```bash
export CONFIGPROXY_AUTH_TOKEN=`openssl rand -hex 32`
```
This environment variable needs to be visible to the Hub and Proxy.
Or you can set the value in the configuration file, `jupyterhub_config.py`:
```python
c.JupyterHub.proxy_auth_token = '0bc02bede919e99a26de1e2a7a5aadfaf6228de836ec39a05a6c6942831d8fe5'
```
If you don't set the Proxy authentication token, the Hub will generate a random key itself, which
means that any time you restart the Hub you **must also restart the Proxy**. If the proxy is a
subprocess of the Hub, this should happen automatically (this is the default configuration).
Another time you must set the Proxy authentication token yourself is if
you want other services, such as [nbgrader](https://github.com/jupyter/nbgrader)
to also be able to connect to the Proxy.
### Security audits
We recommend that you do periodic reviews of your deployment's security. It's
good practice to keep JupyterHub, configurable-http-proxy, and nodejs
versions up to date.
A handy website for testing your deployment is
[Qualsys' SSL analyzer tool](https://www.ssllabs.com/ssltest/analyze.html).
## Authentication and users
The default Authenticator uses [PAM][] to authenticate system users with
their username and password. The default behavior of this Authenticator
is to allow any user with an account and password on the system to login.
### Creating a whitelist of users
You can restrict which users are allowed to login with `Authenticator.whitelist`:
```python
c.Authenticator.whitelist = {'mal', 'zoe', 'inara', 'kaylee'}
```
### Managing Hub administrators
Admin users of JupyterHub have the ability to take actions on users' behalf,
such as stopping and restarting their servers,
and adding and removing new users from the whitelist.
Any users in the admin list are automatically added to the whitelist,
if they are not already present.
The set of initial admin users can be configured as follows:
```python
c.Authenticator.admin_users = {'mal', 'zoe'}
```
If `JupyterHub.admin_access` is True (not the default),
then admin users have permission to log in *as other users* on their respective machines, for debugging.
**You should make sure your users know if admin_access is enabled.**
Note: additional configuration examples are provided in this guide's
[Configuration Examples section](./config-examples.html).
### Add or remove users from the Hub
Users can be added to and removed from the Hub via the admin panel or REST API. These users will be
added to the whitelist and database. Restarting the Hub will not require manually updating the
whitelist in your config file, as the users will be loaded from the database. This means that
after starting the Hub once, it is not sufficient to remove users from the whitelist in your
config file. You must also remove them from the database, either by discarding the database file,
or via the admin UI.
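For example, a single user can be added over the REST API with an admin API token. This is only a sketch; it assumes the user-creation endpoint from the REST API spec and that `token` holds an admin user's API token (adjust host and port for your deployment):
```python
import requests

api_url = 'http://127.0.0.1:8081/hub/api'
# create (add) the user 'zoe'; `token` must belong to an admin user
r = requests.post(api_url + '/users/zoe',
    headers={'Authorization': 'token %s' % token},
)
r.raise_for_status()
```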
The default `PAMAuthenticator` is one case of a special kind of authenticator, called a
`LocalAuthenticator`, indicating that it manages users on the local system. When you add a user to
the Hub, a `LocalAuthenticator` checks if that user already exists. Normally, there will be an
error telling you that the user doesn't exist. If you set the configuration value
```python
c.LocalAuthenticator.create_system_users = True
```
however, adding a user to the Hub that doesn't already exist on the system will result in the Hub
creating that user via the system `adduser` command line tool. This option is typically used on
hosted deployments of JupyterHub, to avoid the need to manually create all your users before
launching the service. It is not recommended when running JupyterHub in situations where
JupyterHub users map directly onto UNIX users.
## Spawners and single-user notebook servers
Since the single-user server is an instance of `jupyter notebook`, an entire separate
multi-process application, there are many aspects of that server that can be configured, and many
ways to express that configuration.
At the JupyterHub level, you can set some values on the Spawner. The simplest of these is
`Spawner.notebook_dir`, which lets you set the root directory for a user's server. This root
notebook directory is the highest level directory users will be able to access in the notebook
dashboard. In this example, the root notebook directory is set to `~/notebooks`, where `~` is
expanded to the user's home directory.
```python
c.Spawner.notebook_dir = '~/notebooks'
```
You can also specify extra command-line arguments to the notebook server with:
```python
c.Spawner.args = ['--debug', '--profile=PHYS131']
```
This could be used to set the user's default page for the single-user server:
```python
c.Spawner.args = ['--NotebookApp.default_url=/notebooks/Welcome.ipynb']
```
Since the single-user server extends the notebook server application,
it still loads configuration from the `ipython_notebook_config.py` config file.
Each user may have one of these files in `$HOME/.ipython/profile_default/`.
IPython also supports loading system-wide config files from `/etc/ipython/`,
which is the place to put configuration that you want to affect all of your users.
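For example, the default-URL setting shown above via `Spawner.args` could instead be applied system-wide; a sketch, using the config location mentioned above:
```python
# /etc/ipython/ipython_notebook_config.py
c = get_config()

# land every user on a Welcome notebook, as in the Spawner.args example above
c.NotebookApp.default_url = '/notebooks/Welcome.ipynb'
```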
## External services
JupyterHub has a REST API that can be used by external services like the
[cull_idle_servers](https://github.com/jupyterhub/jupyterhub/blob/master/examples/cull-idle/cull_idle_servers.py)
script, which monitors and kills idle single-user servers periodically. In order to run such an
external service, you need to provide it with an API token. In the case of `cull_idle_servers`, the token is
passed via the environment variable `JPY_API_TOKEN`.
Currently there are two ways of registering that token with JupyterHub. The first one is to use
the `jupyterhub` command to generate a token for a specific hub user:
```bash
jupyterhub token <username>
```
As of [version 0.6.0](./changelog.html), the preferred way of doing this is to first generate an API token:
```bash
openssl rand -hex 32
```
and then write it to your JupyterHub configuration file (note that the **key** is the token while the **value** is the username):
```python
c.JupyterHub.api_tokens = {'token' : 'username'}
```
Upon restarting JupyterHub, you should see a message like below in the logs:
```
Adding API token for <username>
```
Now you can run your script, e.g. `cull_idle_servers`, by providing it with the API token, and it will authenticate
through the REST API to interact with the Hub.
[oauth-setup]: https://github.com/jupyterhub/oauthenticator#setup
[oauthenticator]: https://github.com/jupyterhub/oauthenticator
[PAM]: https://en.wikipedia.org/wiki/Pluggable_authentication_module


@@ -1,11 +1,11 @@
# How JupyterHub works
JupyterHub is a multi-user server that manages and proxies multiple instances of the single-user IPython notebook server.
JupyterHub is a multi-user server that manages and proxies multiple instances of the single-user Jupyter notebook server.
There are three basic processes involved:
- multi-user Hub (Python/Tornado)
- configurable http proxy (nodejs)
- [configurable http proxy](https://github.com/jupyterhub/configurable-http-proxy) (node-http-proxy)
- multiple single-user IPython notebook servers (Python/IPython/Tornado)
The proxy is the only process that listens on a public interface.
@@ -51,15 +51,15 @@ Authentication is customizable via the Authenticator class.
Authentication can be replaced by any mechanism,
such as OAuth, Kerberos, etc.
JupyterHub only ships with [PAM](http://en.wikipedia.org/wiki/Pluggable_authentication_module) authentication,
JupyterHub only ships with [PAM](https://en.wikipedia.org/wiki/Pluggable_authentication_module) authentication,
which requires the server to be run as root,
or at least with access to the PAM service,
which regular users typically do not have
(on Ubuntu, this requires being added to the `shadow` group).
[More info on custom Authenticators](authenticators.md).
[More info on custom Authenticators](authenticators.html).
See a list of custom Authenticators [on the wiki](https://github.com/jupyter/jupyterhub/wiki/Authenticators).
See a list of custom Authenticators [on the wiki](https://github.com/jupyterhub/jupyterhub/wiki/Authenticators).
### Spawning
@@ -72,6 +72,6 @@ and needs to be able to take three actions:
2. poll whether the process is still running
3. stop the process
[More info on custom Spawners](spawners.md).
[More info on custom Spawners](spawners.html).
See a list of custom Spawners [on the wiki](https://github.com/jupyter/jupyterhub/wiki/Spawners).
See a list of custom Spawners [on the wiki](https://github.com/jupyterhub/jupyterhub/wiki/Spawners).

Binary file not shown (new image, 59 KiB).
Binary file not shown (new image, 80 KiB).
Binary file not shown (new image, 27 KiB).

116
docs/source/index.rst Normal file

@@ -0,0 +1,116 @@
JupyterHub
==========
With JupyterHub you can create a **multi-user Hub** which spawns, manages,
and proxies multiple instances of the single-user
`Jupyter notebook <https://jupyter-notebook.readthedocs.io/en/latest/>`_ server.
Due to its flexibility and customization options, JupyterHub can be used to
serve notebooks to a class of students, a corporate data science group, or a
scientific research group.
.. image:: images/jhub-parts.png
:alt: JupyterHub subsystems
:width: 40%
:align: right
Three subsystems make up JupyterHub:
* a multi-user **Hub** (tornado process)
* a **configurable http proxy** (node-http-proxy)
* multiple **single-user Jupyter notebook servers** (Python/IPython/tornado)
JupyterHub's basic flow of operations includes:
- The Hub spawns a proxy
- The proxy forwards all requests to the Hub by default
- The Hub handles user login and spawns single-user servers on demand
- The Hub configures the proxy to forward URL prefixes to the single-user notebook servers
For convenient administration of the Hub, its users, and :doc:`services`
(added in version 0.7), JupyterHub also provides a
`REST API <http://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyterhub/jupyterhub/master/docs/rest-api.yml#!/default>`__.
Contents
--------
**User Guide**
* :doc:`quickstart`
* :doc:`getting-started`
* :doc:`howitworks`
* :doc:`websecurity`
* :doc:`rest`
.. toctree::
:maxdepth: 2
:hidden:
:caption: User Guide
quickstart
getting-started
howitworks
websecurity
rest
**Configuration Guide**
* :doc:`authenticators`
* :doc:`spawners`
* :doc:`services`
* :doc:`config-examples`
* :doc:`upgrading`
* :doc:`troubleshooting`
.. toctree::
:maxdepth: 2
:hidden:
:caption: Configuration Guide
authenticators
spawners
services
config-examples
upgrading
troubleshooting
**API Reference**
* :doc:`api/index`
.. toctree::
:maxdepth: 2
:hidden:
:caption: API Reference
api/index
**About JupyterHub**
* :doc:`changelog`
* :doc:`contributor-list`
.. toctree::
:maxdepth: 2
:hidden:
:caption: About JupyterHub
changelog
contributor-list
Indices and tables
------------------
* :ref:`genindex`
* :ref:`modindex`
Questions? Suggestions?
-----------------------
- `Jupyter mailing list <https://groups.google.com/forum/#!forum/jupyter>`_
- `Jupyter website <https://jupyter.org>`_

160
docs/source/quickstart.md Normal file

@@ -0,0 +1,160 @@
# Quickstart - Installation
## Prerequisites
**Before installing JupyterHub**, you will need:
- [Python](https://www.python.org/downloads/) 3.3 or greater
An understanding of using [`pip`](https://pip.pypa.io/en/stable/) or
[`conda`](http://conda.pydata.org/docs/get-started.html) for
installing Python packages is helpful.
- [nodejs/npm](https://www.npmjs.com/)
[Install nodejs/npm](https://docs.npmjs.com/getting-started/installing-node),
using your operating system's package manager. For example, install on Linux
(Debian/Ubuntu) using:
```bash
sudo apt-get install npm nodejs-legacy
```
(The `nodejs-legacy` package installs the `node` executable and is currently
required for npm to work on Debian/Ubuntu.)
- TLS certificate and key for HTTPS communication
- Domain name
**Before running the single-user notebook servers** (which may be on the same
system as the Hub or not):
- [Jupyter Notebook](https://jupyter.readthedocs.io/en/latest/install.html)
version 4 or greater
## Installation
JupyterHub can be installed with `pip` or `conda` and the proxy with `npm`:
**pip, npm:**
```bash
python3 -m pip install jupyterhub
npm install -g configurable-http-proxy
```
**conda** (one command installs jupyterhub and proxy):
```bash
conda install -c conda-forge jupyterhub
```
To test your installation:
```bash
jupyterhub -h
configurable-http-proxy -h
```
If you plan to run notebook servers locally, you will also need to install
Jupyter notebook:
**pip:**
```bash
python3 -m pip install notebook
```
**conda:**
```bash
conda install notebook
```
## Start the Hub server
To start the Hub server, run the command:
```bash
jupyterhub
```
Visit `http://localhost:8000` in your browser, and sign in with your unix
credentials.
To allow multiple users to sign into the Hub server, you must start `jupyterhub` as a *privileged user*, such as root:
```bash
sudo jupyterhub
```
The [wiki](https://github.com/jupyterhub/jupyterhub/wiki/Using-sudo-to-run-JupyterHub-without-root-privileges)
describes how to run the server as a *less privileged user*, which requires
additional configuration of the system.
----
## Basic Configuration
The [getting started document](getting-started.html) contains
detailed information about configuring a JupyterHub deployment.
The JupyterHub **tutorial** provides a video and documentation that explains
and illustrates the fundamental steps for installation and configuration.
[Repo](https://github.com/jupyterhub/jupyterhub-tutorial)
| [Tutorial documentation](http://jupyterhub-tutorial.readthedocs.io/en/latest/)
#### Generate a default configuration file
Generate a default config file:
jupyterhub --generate-config
#### Customize the configuration, authentication, and process spawning
Spawn the server on ``10.0.1.2:443`` with **https**:
jupyterhub --ip 10.0.1.2 --port 443 --ssl-key my_ssl.key --ssl-cert my_ssl.cert
The authentication and process spawning mechanisms can be replaced,
which should allow plugging into a variety of authentication or process
control environments. Some examples, meant as illustration and testing of this
concept, are:
- Using GitHub OAuth instead of PAM with [OAuthenticator](https://github.com/jupyterhub/oauthenticator)
- Spawning single-user servers with Docker, using the [DockerSpawner](https://github.com/jupyterhub/dockerspawner)
----
## Alternate Installation using Docker
A ready to go [docker image for JupyterHub](https://hub.docker.com/r/jupyterhub/jupyterhub/)
gives a straightforward deployment of JupyterHub.
*Note: This `jupyterhub/jupyterhub` docker image is only an image for running
the Hub service itself. It does not provide the other Jupyter components, such
as the Jupyter Notebook itself, which are needed by the single-user servers.
To run the single-user servers, which may be on the same system as the Hub or
not, Jupyter Notebook version 4 or greater must be installed.*
#### Starting JupyterHub with docker
The JupyterHub docker image can be started with the following command:
docker run -d --name jupyterhub jupyterhub/jupyterhub jupyterhub
This command will create a container named `jupyterhub` that you can
**stop and resume** with `docker stop/start`.
The Hub service will be listening on all interfaces at port 8000, which makes
this a good choice for **testing JupyterHub on your desktop or laptop**.
If you want to run docker on a computer that has a public IP then you should
(as in MUST) **secure it with SSL** by adding SSL options to your docker
configuration or by using an SSL-enabled proxy.
[Mounting volumes](https://docs.docker.com/engine/userguide/containers/dockervolumes/)
will allow you to **store data outside the docker image (host system) so it will be persistent**,
even when you start a new image.
The command `docker exec -it jupyterhub bash` will spawn a root shell in your
docker container. You can **use the root shell to create system users in the container**.
These accounts will be used for authentication in JupyterHub's default
configuration.

70
docs/source/rest.md Normal file

@@ -0,0 +1,70 @@
# Using JupyterHub's REST API
Using the [JupyterHub REST API][], you can perform actions on the Hub,
such as:
- checking which users are active
- adding or removing users
- stopping or starting single user notebook servers
- authenticating services
A [REST](https://en.wikipedia.org/wiki/Representational_state_transfer)
API provides a standard way for users to get and send information to the
Hub.
## Creating an API token
To send requests using the JupyterHub API, you must pass an API token with the
request. You can create a token for an individual user using the following
command:
jupyterhub token USERNAME
## Adding tokens to the config file
You may also add a dictionary of API tokens and usernames to the hub's
configuration file, `jupyterhub_config.py`:
```python
c.JupyterHub.api_tokens = {
'secret-token': 'username',
}
```
## Making an API request
To authenticate your requests, pass the API token in the request's
Authorization header.
**Example: List the hub's users**
Using the popular Python requests library, the following code sends an API
request and an API token for authorization:
```python
import requests
api_url = 'http://127.0.0.1:8081/hub/api'
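# `token` is the API token created above (see "Creating an API token")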
r = requests.get(api_url + '/users',
headers={
'Authorization': 'token %s' % token,
}
)
r.raise_for_status()
users = r.json()
```
## Learning more about the API
You can see the full [JupyterHub REST API][] for details.
The same REST API Spec can be viewed in a more interactive style [on swagger's petstore][].
Both resources contain the same information and differ only in their presentation.
Note: The Swagger specification is being renamed the [OpenAPI Initiative][].
[on swagger's petstore]: http://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyterhub/jupyterhub/master/docs/rest-api.yml#!/default
[OpenAPI Initiative]: https://www.openapis.org/
[JupyterHub REST API]: ./api/index.html

357
docs/source/services.md Normal file

@@ -0,0 +1,357 @@
# Services
With version 0.7, JupyterHub adds support for **Services**.
This section provides the following information about Services:
- [Definition of a Service](services.html#definition-of-a-service)
- [Properties of a Service](services.html#properties-of-a-service)
- [Hub-Managed Services](services.html#hub-managed-services)
- [Launching a Hub-Managed Service](services.html#launching-a-hub-managed-service)
- [Externally-Managed Services](services.html#externally-managed-services)
- [Writing your own Services](services.html#writing-your-own-services)
- [Hub Authentication and Services](services.html#hub-authentication-and-services)
## Definition of a Service
When working with JupyterHub, a **Service** is defined as a process that interacts
with the Hub's REST API. A Service may perform a specific
action or task. For example, the following tasks can each be a unique Service:
- shutting down individuals' single user notebook servers that have been idle
for some time
- registering additional web servers which should use the Hub's authentication
and be served behind the Hub's proxy.
Two key features help define a Service:
- Is the Service **managed** by JupyterHub?
- Does the Service have a web server that should be added to the proxy's
table?
Currently, these characteristics distinguish two types of Services:
- A **Hub-Managed Service** which is managed by JupyterHub
- An **Externally-Managed Service** which runs its own web server and
communicates operation instructions via the Hub's API.
## Properties of a Service
A Service may have the following properties:
- `name: str` - the name of the service
- `admin: bool (default - false)` - whether the service should have
administrative privileges
- `url: str (default - None)` - The URL where the service runs its own web server,
  if it has one. If a `url` is specified, the service will be added to the
  proxy at `/services/:name`.
If a service is also to be managed by the Hub, it has a few extra options:
- `command: (str/Popen list)` - Command for JupyterHub to spawn the service.
- Only use this if the service should be a subprocess.
- If command is not specified, the Service is assumed to be managed
externally.
- If a command is specified for launching the Service, the Service will
be started and managed by the Hub.
- `env: dict` - environment variables to add to the current env
- `user: str` - the name of a system user to manage the Service. If
unspecified, run as the same user as the Hub.
## Hub-Managed Services
A **Hub-Managed Service** is started by the Hub, and the Hub is responsible
for the Service's actions. A Hub-Managed Service can only be a local
subprocess of the Hub. The Hub will take care of starting the process and
restarting it if it stops.
While Hub-Managed Services share some similarities with notebook Spawners,
there are no plans for Hub-Managed Services to support the same spawning
abstractions as a notebook Spawner.
If you wish to run a Service in a Docker container or other deployment
environments, the Service can be registered as an
**Externally-Managed Service**, as described below.
## Launching a Hub-Managed Service
A Hub-Managed Service is characterized by its specified `command` for launching
the Service. For example, a 'cull idle' notebook server task configured as a
Hub-Managed Service would include:
- the Service name,
- admin permissions, and
- the `command` to launch the Service which will cull idle servers after a
timeout interval
This example would be configured as follows in `jupyterhub_config.py`:
```python
c.JupyterHub.services = [
{
'name': 'cull-idle',
'admin': True,
'command': ['python', '/path/to/cull-idle.py', '--timeout']
}
]
```
A Hub-Managed Service may also be configured with additional optional
parameters, which describe the environment needed to start the Service process:
- `env: dict` - additional environment variables for the Service.
- `user: str` - name of the user to run the server if different from the Hub.
Requires Hub to be root.
- `cwd: path` - directory in which to run the Service, if different from the
Hub directory.
The Hub will pass the following environment variables to launch the Service:
```bash
JUPYTERHUB_SERVICE_NAME: The name of the service
JUPYTERHUB_API_TOKEN: API token assigned to the service
JUPYTERHUB_API_URL: URL for the JupyterHub API (default, http://127.0.0.1:8080/hub/api)
JUPYTERHUB_BASE_URL: Base URL of the Hub (https://mydomain[:port]/)
JUPYTERHUB_SERVICE_PREFIX: URL path prefix of this service (/services/:service-name/)
JUPYTERHUB_SERVICE_URL: Local URL where the service is expected to be listening.
Only for proxied web services.
```
For the previous 'cull idle' Service example, these environment variables
would be passed to the Service when the Hub starts the 'cull idle' Service:
```bash
JUPYTERHUB_SERVICE_NAME: 'cull-idle'
JUPYTERHUB_API_TOKEN: API token assigned to the service
JUPYTERHUB_API_URL: http://127.0.0.1:8080/hub/api
JUPYTERHUB_BASE_URL: https://mydomain[:port]
JUPYTERHUB_SERVICE_PREFIX: /services/cull-idle/
```
See the JupyterHub GitHub repo for additional information about the
[`cull-idle` example](https://github.com/jupyterhub/jupyterhub/tree/master/examples/cull-idle).
## Externally-Managed Services
You may prefer to use your own service management tools, such as Docker or
systemd, to manage a JupyterHub Service. These **Externally-Managed
Services**, unlike Hub-Managed Services, are not subprocesses of the Hub. You
must tell JupyterHub which API token the Externally-Managed Service is using
to perform its API requests. Each Externally-Managed Service will need a
unique API token, because the Hub authenticates each API request and the API
token is used to identify the originating Service or user.
A configuration example of an Externally-Managed Service with admin access and
running its own web server is:
```python
c.JupyterHub.services = [
{
'name': 'my-web-service',
'url': 'https://10.0.1.1:1984',
'api_token': 'super-secret',
}
]
```
In this case, the `url` field will be passed along to the Service as
`JUPYTERHUB_SERVICE_URL`.
## Writing your own Services
When writing your own services, you have a few decisions to make (in addition
to what your service does!):
1. Does my service need a public URL?
2. Do I want JupyterHub to start/stop the service?
3. Does my service need to authenticate users?
When a Service is managed by JupyterHub, the Hub will pass the necessary
information to the Service via the environment variables described above. A
flexible Service, whether managed by the Hub or not, can make use of these
same environment variables.
When you run a service that has a url, it will be accessible under a
`/services/` prefix, such as `https://myhub.horse/services/my-service/`. For
your service to route proxied requests properly, it must take
`JUPYTERHUB_SERVICE_PREFIX` into account when routing requests. For example, a
web service would normally serve its root handler at `'/'`, but the proxied
service would need to serve `JUPYTERHUB_SERVICE_PREFIX + '/'`.
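As a concrete illustration, here is a small sketch of a service (Hub-managed or not) reading these variables and building prefix-aware paths; the helper name and fallback values are illustrative only:
```python
import os

# values provided by the Hub for managed services; set them yourself otherwise
prefix = os.environ.get('JUPYTERHUB_SERVICE_PREFIX', '/')
api_token = os.environ.get('JUPYTERHUB_API_TOKEN', '')  # required in practice
api_url = os.environ.get('JUPYTERHUB_API_URL', 'http://127.0.0.1:8080/hub/api')

def prefixed(path):
    """Join a handler path onto the service prefix so proxied requests resolve."""
    return prefix.rstrip('/') + '/' + path.lstrip('/')

# e.g. prefixed('status') -> '/services/my-service/status' behind the Hub proxy
```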
## Hub Authentication and Services
JupyterHub 0.7 introduces some utilities for using the Hub's authentication
mechanism to govern access to your service. When a user logs into JupyterHub,
the Hub sets a **cookie (`jupyterhub-services`)**. The service can use this
cookie to authenticate requests.
JupyterHub ships with a reference implementation of Hub authentication that
can be used by services. You may go beyond this reference implementation and
create custom hub-authenticating clients and services. We describe the process
below.
The reference, or base, implementation is the [`HubAuth`][HubAuth] class,
which implements the requests to the Hub.
To use HubAuth, you must set the `.api_token`, either programmatically when constructing the class,
or via the `JUPYTERHUB_API_TOKEN` environment variable.
Most of the logic for authentication implementation is found in the
[`HubAuth.user_for_cookie`](services.auth.html#jupyterhub.services.auth.HubAuth.user_for_cookie)
method, which makes a request of the Hub, and returns:
- None, if no user could be identified, or
- a dict of the following form:
```python
{
"name": "username",
"groups": ["list", "of", "groups"],
"admin": False, # or True
}
```
You are then free to use the returned user information to take appropriate
action.
HubAuth also caches the Hub's response for a number of seconds,
configurable by the `cookie_cache_max_age` setting (default: five minutes).
### Flask Example
For example, suppose you have a Flask service that returns information about a user.
JupyterHub's HubAuth class can be used to authenticate requests to the Flask
service. See the `service-whoami-flask` example in the
[JupyterHub GitHub repo](https://github.com/jupyterhub/jupyterhub/tree/master/examples/service-whoami-flask)
for more details.
```python
from functools import wraps
import json
import os
from urllib.parse import quote
from flask import Flask, redirect, request, Response
from jupyterhub.services.auth import HubAuth
prefix = os.environ.get('JUPYTERHUB_SERVICE_PREFIX', '/')
auth = HubAuth(
api_token=os.environ['JUPYTERHUB_API_TOKEN'],
cookie_cache_max_age=60,
)
app = Flask(__name__)
def authenticated(f):
"""Decorator for authenticating with the Hub"""
@wraps(f)
def decorated(*args, **kwargs):
cookie = request.cookies.get(auth.cookie_name)
if cookie:
user = auth.user_for_cookie(cookie)
else:
user = None
if user:
return f(user, *args, **kwargs)
else:
# redirect to login url on failed auth
return redirect(auth.login_url + '?next=%s' % quote(request.path))
return decorated
@app.route(prefix + '/')
@authenticated
def whoami(user):
return Response(
json.dumps(user, indent=1, sort_keys=True),
mimetype='application/json',
)
```
### Authenticating tornado services with JupyterHub
Since most Jupyter services are written with tornado,
we include a mixin class, [`HubAuthenticated`][HubAuthenticated],
for quickly authenticating your own tornado services with JupyterHub.
Tornado's `@web.authenticated` method calls a Handler's `.get_current_user`
method to identify the user. Mixing in `HubAuthenticated` defines
`get_current_user` to use HubAuth. If you want to configure the HubAuth
instance beyond the default, you'll want to define an `initialize` method,
such as:
```python
class MyHandler(HubAuthenticated, web.RequestHandler):
hub_users = {'inara', 'mal'}
def initialize(self, hub_auth):
self.hub_auth = hub_auth
@web.authenticated
def get(self):
...
```
The HubAuth will automatically load the desired configuration from the Service
environment variables.
If you want to limit user access, you can whitelist users through either the
`.hub_users` attribute or `.hub_groups`. These are sets that check against the
username and user group list, respectively. If a user matches neither the user
list nor the group list, they will not be allowed access. If both are left
undefined, then any user will be allowed.
### Implementing your own Authentication with JupyterHub
If you don't want to use the reference implementation
(e.g. you find the implementation a poor fit for your Flask app),
you can implement authentication via the Hub yourself.
We recommend looking at the [`HubAuth`][HubAuth] class implementation for reference,
and taking note of the following process:
1. retrieve the cookie `jupyterhub-services` from the request.
2. Make an API request `GET /hub/api/authorizations/cookie/jupyterhub-services/cookie-value`,
where cookie-value is the url-encoded value of the `jupyterhub-services` cookie.
This request must be authenticated with a Hub API token in the `Authorization` header.
For example, with [requests][]:
```python
import requests
from urllib.parse import quote

# `api_token` is your Hub API token; `encrypted_cookie` is the value of the
# `jupyterhub-services` cookie taken from the incoming request
r = requests.get(
    '/'.join(["http://127.0.0.1:8081/hub/api",
              "authorizations/cookie/jupyterhub-services",
              quote(encrypted_cookie, safe=''),
    ]),
headers = {
'Authorization' : 'token %s' % api_token,
},
)
r.raise_for_status()
user = r.json()
```
3. On success, the reply will be a JSON model describing the user:
```json
{
"name": "inara",
"groups": ["serenity", "guild"],
}
```
An example of using an Externally-Managed Service and authentication is
[nbviewer](https://github.com/jupyter/nbviewer#securing-the-notebook-viewer),
and an example of its configuration is found [here](https://github.com/jupyter/nbviewer/blob/master/nbviewer/providers/base.py#L94).
nbviewer can also be run as a Hub-Managed Service as described [here](https://github.com/jupyter/nbviewer#securing-the-notebook-viewer).
[requests]: http://docs.python-requests.org/en/master/
[services_auth]: api/services.auth.html
[HubAuth]: api/services.auth.html#jupyterhub.services.auth.HubAuth
[HubAuthenticated]: api/services.auth.html#jupyterhub.services.auth.HubAuthenticated

214
docs/source/spawners.md Normal file

@@ -0,0 +1,214 @@
# Spawners
A [Spawner][] starts each single-user notebook server.
The Spawner represents an abstract interface to a process,
and a custom Spawner needs to be able to take three actions:
- start the process
- poll whether the process is still running
- stop the process
## Examples
Custom Spawners for JupyterHub can be found on the [JupyterHub wiki](https://github.com/jupyterhub/jupyterhub/wiki/Spawners).
Some examples include:
- [DockerSpawner](https://github.com/jupyterhub/dockerspawner) for spawning user servers in Docker containers
* `dockerspawner.DockerSpawner` for spawning identical Docker containers for
each user
* `dockerspawner.SystemUserSpawner` for spawning Docker containers with an
environment and home directory for each user
* both `DockerSpawner` and `SystemUserSpawner` also work with Docker Swarm for
launching containers on remote machines
- [SudoSpawner](https://github.com/jupyterhub/sudospawner) enables JupyterHub to
run without being root, by spawning an intermediate process via `sudo`
- [BatchSpawner](https://github.com/jupyterhub/batchspawner) for spawning remote
servers using batch systems
- [RemoteSpawner](https://github.com/zonca/remotespawner) to spawn notebooks
on a remote server and tunnel the port via SSH
## Spawner control methods
### Spawner.start
`Spawner.start` should start the single-user server for a single user.
Information about the user can be retrieved from `self.user`,
an object encapsulating the user's name, authentication, and server info.
When `Spawner.start` returns, it should have stored the IP and port
of the single-user server in `self.user.server`.
**NOTE:** When writing coroutines, *never* `yield` in between a database change and a commit.
Most `Spawner.start` functions will look similar to this example:
```python
def start(self):
self.user.server.ip = 'localhost' # or other host or IP address, as seen by the Hub
self.user.server.port = 1234 # port selected somehow
self.db.commit() # always commit before yield, if modifying db values
yield self._actually_start_server_somehow()
```
When `Spawner.start` returns, the single-user server process should actually be running,
not just requested. JupyterHub can handle `Spawner.start` being very slow
(such as PBS-style batch queues, or instantiating whole AWS instances)
via relaxing the `Spawner.start_timeout` config value.
### Spawner.poll
`Spawner.poll` should check if the spawner is still running.
It should return `None` if it is still running,
and an integer exit status, otherwise.
For the local process case, `Spawner.poll` uses `os.kill(PID, 0)`
to check if the local process is still running.
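A simplified, synchronous sketch of that check (the real implementation is a coroutine, and would report the actual exit status instead of assuming `0`):
```python
import os

def poll(self):
    """Return None if the process is running, an integer exit status otherwise."""
    if not self.pid:
        return 0
    try:
        os.kill(self.pid, 0)  # signal 0: existence check only, no signal is sent
    except ProcessLookupError:
        return 0  # process is gone; a real Spawner would report its exit status
    return None  # still running
```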
### Spawner.stop
`Spawner.stop` should stop the process. It must be a tornado coroutine, which should return when the process has finished exiting.
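For symmetry with the `start` example above, a stop coroutine might look like the following sketch (`_actually_stop_server_somehow` is a placeholder for your own shutdown logic, not a real JupyterHub method):
```python
from tornado import gen

@gen.coroutine
def stop(self):
    # signal the single-user server to exit and wait until it has
    yield self._actually_stop_server_somehow()
```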
## Spawner state
JupyterHub should be able to stop and restart without tearing down
single-user notebook servers. To do this task, a Spawner may need to persist
some information that can be restored later.
A JSON-able dictionary of state can be used to store persisted information.
Unlike start, stop, and poll methods, the state methods must not be coroutines.
For the single-process case, the Spawner state is only the process ID of the server:
```python
def get_state(self):
"""get the current state"""
state = super().get_state()
if self.pid:
state['pid'] = self.pid
return state
def load_state(self, state):
"""load state from the database"""
super().load_state(state)
if 'pid' in state:
self.pid = state['pid']
def clear_state(self):
"""clear any state (called after shutdown)"""
super().clear_state()
self.pid = 0
```
## Spawner options form
(new in 0.4)
Some deployments may want to offer options to users to influence how their servers are started.
This may include cluster-based deployments, where users specify what resources should be available,
or docker-based deployments where users can select from a list of base images.
This feature is enabled by setting `Spawner.options_form`, which is an HTML form snippet
inserted unmodified into the spawn form.
If the `Spawner.options_form` is defined, when a user tries to start their server, they will be directed to a form page, like this:
![spawn-form](images/spawn-form.png)
If `Spawner.options_form` is undefined, the user's server is spawned directly, and no spawn page is rendered.
See [this example](https://github.com/jupyterhub/jupyterhub/blob/master/examples/spawn-form/jupyterhub_config.py) for a form that allows custom CLI args for the local spawner.
### `Spawner.options_from_form`
Options from this form will always be a dictionary of lists of strings, e.g.:
```python
{
'integer': ['5'],
'text': ['some text'],
'select': ['a', 'b'],
}
```
When `formdata` arrives, it is passed through `Spawner.options_from_form(formdata)`,
which is a method to turn the form data into the correct structure.
This method must return a dictionary, and is meant to interpret the lists-of-strings into the correct types. For example, the `options_from_form` for the above form would look like:
```python
def options_from_form(self, formdata):
    options = {}
    options['integer'] = int(formdata['integer'][0]) # single integer value
    options['text'] = formdata['text'][0]            # single string value
    options['select'] = formdata['select']           # list already correct
    options['notinform'] = 'extra info'              # not in the form at all
    return options
```
which would return:
```python
{
    'integer': 5,
    'text': 'some text',
    'select': ['a', 'b'],
    'notinform': 'extra info',
}
```
When `Spawner.start` is called, this dictionary is accessible as `self.user_options`.
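For example, a custom spawner's start method might consume these options roughly as follows (a sketch; the class name and `_actually_start_server_somehow` are placeholders, and the keys match the example above):
```python
from tornado import gen
from jupyterhub.spawner import Spawner


class MyOptionsSpawner(Spawner):
    @gen.coroutine
    def start(self):
        # values produced by options_from_form above
        integer = self.user_options.get('integer', 1)
        text = self.user_options.get('text', '')
        selections = self.user_options.get('select', [])
        self.log.info("Spawning with integer=%s, text=%r, select=%s",
                      integer, text, selections)
        # ...continue with the normal start logic...
        yield self._actually_start_server_somehow()
```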
[Spawner]: https://github.com/jupyterhub/jupyterhub/blob/master/jupyterhub/spawner.py
## Writing a custom spawner
If you are interested in building a custom spawner, you can read [this tutorial](http://jupyterhub-tutorial.readthedocs.io/en/latest/spawners.html).
## Spawners, resource limits, and guarantees (Optional)
Some spawners of the single-user notebook servers allow setting limits or
guarantees on resources, such as CPU and memory. To provide a consistent
experience for sysadmins and users, we provide a standard way to set and
discover these resource limits and guarantees, such as for memory and CPU. For
the limits and guarantees to be useful, the spawner must implement support for
them.
### Memory Limits & Guarantees
`c.Spawner.mem_limit`: A **limit** specifies the *maximum amount of memory*
that may be allocated, though there is no promise that the maximum amount will
be available. In supported spawners, you can set `c.Spawner.mem_limit` to
limit the total amount of memory that a single-user notebook server can
allocate. Attempting to use more memory than this limit will cause errors. The
single-user notebook server can discover its own memory limit by looking at
the environment variable `MEM_LIMIT`, which is specified in absolute bytes.
`c.Spawner.mem_guarantee`: Sometimes, a **guarantee** of a *minimum amount of
memory* is desirable. In this case, you can set `c.Spawner.mem_guarantee`
to provide a guarantee that at minimum this much memory will always be
available for the single-user notebook server to use. The environment variable
`MEM_GUARANTEE` will also be set in the single-user notebook server.
The spawner's underlying system or cluster is responsible for enforcing these
limits and providing these guarantees. If these values are set to `None`, no
limits or guarantees are provided, and no environment values are set.
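As a concrete sketch, a deployment whose spawner supports these settings might put the following in `jupyterhub_config.py` (the values are arbitrary examples):
```python
# at most 2 GB per single-user server (exposed to the server as MEM_LIMIT, in bytes)
c.Spawner.mem_limit = '2G'
# reserve at least 512 MB per single-user server (exposed as MEM_GUARANTEE, in bytes)
c.Spawner.mem_guarantee = '512M'
```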
### CPU Limits & Guarantees
`c.Spawner.cpu_limit`: In supported spawners, you can set
`c.Spawner.cpu_limit` to limit the total number of cpu-cores that a
single-user notebook server can use. These can be fractional - `0.5` means 50%
of one CPU core, `4.0` is 4 cpu-cores, etc. This value is also set in the
single-user notebook server's environment variable `CPU_LIMIT`. The limit does
not guarantee that you will be able to use all of the CPU up to your limit, as
other higher-priority applications might be taking up CPU.
`c.Spawner.cpu_guarantee`: You can set `c.Spawner.cpu_guarantee` to provide a
guarantee for CPU usage. The environment variable `CPU_GUARANTEE` will be set
in the single-user notebook server when a guarantee is being provided.
The spawner's underlying system or cluster is responsible for enforcing these
limits and providing these guarantees. If these values are set to `None`, no
limits or guarantees are provided, and no environment values are set.
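A corresponding sketch for the CPU settings, again only effective in spawners that implement them:
```python
# at most two CPU cores (exposed to the server as CPU_LIMIT)
c.Spawner.cpu_limit = 2.0
# guarantee at least half a core (exposed as CPU_GUARANTEE)
c.Spawner.cpu_guarantee = 0.5
```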

View File

@@ -0,0 +1,213 @@
admin
Afterwards
alchemyst
alope
api
API
apps
args
asctime
auth
authenticator
Authenticator
authenticators
Authenticators
Autograde
autograde
autogradeapp
autograded
Autograded
autograder
Autograder
autograding
backends
Bitdiddle
bugfix
Bugfixes
bugtracker
Carreau
Changelog
changelog
checksum
checksums
cmd
cogsci
conda
config
coroutine
coroutines
crt
customizable
datefmt
decrypted
dev
DockerSpawner
dockerspawner
dropdown
duedate
Duedate
ellachao
ellisonbg
entrypoint
env
Filenames
filesystem
formatters
formdata
formgrade
formgrader
gif
GitHub
Gradebook
gradebook
Granger
hardcoded
hOlle
Homebrew
html
http
https
hubapi
Indices
IFramed
inline
iopub
ip
ipynb
IPython
ischurov
ivanslapnicar
jdfreder
jhamrick
jklymak
jonathanmorgan
joschu
JUPYTER
Jupyter
jupyter
jupyterhub
Kerberos
kerberos
letsencrypt
lgpage
linkcheck
linux
localhost
logfile
login
logins
logout
lookup
lphk
mandli
Marr
mathjax
matplotlib
metadata
mikebolt
minrk
Mitigations
mixin
Mixin
multi
multiuser
namespace
nbconvert
nbgrader
neuroscience
nginx
np
npm
oauth
OAuth
oauthenticator
ok
olgabot
osx
PAM
phantomjs
Phantomjs
plugin
plugins
Popen
positionally
postgres
pregenerated
prepend
prepopulate
preprocessor
Preprocessor
prev
Programmatically
programmatically
ps
py
Qualys
quickstart
readonly
redSlug
reinstall
resize
rst
runtime
rw
sandboxed
sansary
singleuser
smeylan
spawner
Spawner
spawners
Spawners
spellcheck
SQL
sqlite
startup
statsd
stdin
stdout
stoppped
subclasses
subcommand
subdomain
subdomains
Subdomains
suchow
suprocesses
svurens
sys
SystemUserSpawner
systemwide
tasilb
teardown
threadsafe
timestamp
timestamps
TLD
todo
toolbar
traitlets
travis
tuples
undeletable
unicode
uninstall
UNIX
unix
untracked
untrusted
url
username
usernames
utcnow
utils
vinaykola
virtualenv
whitelist
whitespace
wildcard
Wildcards
willingc
wordlist
Workflow
workflow

View File

@@ -0,0 +1,251 @@
# Troubleshooting
When troubleshooting, you may see unexpected behaviors or receive an error
message. This section provides links for identifying the cause of the
problem and how to resolve it.
[*Behavior*](#behavior)
- JupyterHub proxy fails to start
- sudospawner fails to run
[*Errors*](#errors)
- 500 error after spawning my single-user server
[*How do I...?*](#how-do-i)
- Use a chained SSL certificate
- Install JupyterHub without a network connection
- I want access to the whole filesystem, but still default users to their home directory
- How do I increase the number of pySpark executors on YARN?
- How do I use JupyterLab's prerelease version with JupyterHub?
- How do I set up JupyterHub for a workshop (when users are not known ahead of time)?
[*Troubleshooting commands*](#troubleshooting-commands)
## Behavior
### JupyterHub proxy fails to start
If you have tried to start the JupyterHub proxy and it fails to start:
- check if the JupyterHub IP configuration setting is
``c.JupyterHub.ip = '*'``; if it is, try ``c.JupyterHub.ip = ''``
- Try starting with ``jupyterhub --ip=0.0.0.0``
### sudospawner fails to run
If the sudospawner script is not found in the path, sudospawner will not run.
To avoid this, specify sudospawner's absolute path. For example, start
jupyterhub with:
jupyterhub --SudoSpawner.sudospawner_path='/absolute/path/to/sudospawner'
or add:
c.SudoSpawner.sudospawner_path = '/absolute/path/to/sudospawner'
to the config file, `jupyterhub_config.py`.
## Errors
### 500 error after spawning my single-user server
You receive a 500 error when accessing the URL `/user/<your_name>/...`.
This is often seen when your single-user server cannot verify your user cookie
with the Hub.
There are two likely reasons for this:
1. The single-user server cannot connect to the Hub's API (networking
configuration problems)
2. The single-user server cannot *authenticate* its requests (invalid token)
#### Symptoms
The main symptom is a failure to load *any* page served by the single-user
server, met with a 500 error. This is typically the first page at `/user/<your_name>`
after logging in or clicking "Start my server". When a single-user notebook server
receives a request, the notebook server makes an API request to the Hub to
check if the cookie corresponds to the right user. This request is logged.
If everything is working, the response logged will be similar to this:
```
200 GET /hub/api/authorizations/cookie/jupyter-hub-token-name/[secret] (@10.0.1.4) 6.10ms
```
You should see a similar 200 message, as above, in the Hub log when you first
visit your single-user notebook server. If you don't see this message in the log, it
may mean that your single-user notebook server isn't connecting to your Hub.
If you see 403 (forbidden) like this, it's a token problem:
```
403 GET /hub/api/authorizations/cookie/jupyter-hub-token-name/[secret] (@10.0.1.4) 4.14ms
```
Check the logs of the single-user notebook server, which may have more detailed
information on the cause.
#### Causes and resolutions
##### No authorization request
If you make an API request and it is not received by the server, you likely
have a network configuration issue. Often, this happens when the Hub is only
listening on 127.0.0.1 (default) and the single-user servers are not on the
same 'machine' (can be physically remote, or in a docker container or VM). The
fix for this case is to make sure that `c.JupyterHub.hub_ip` is an address
that all single-user servers can connect to, e.g.:
```python
c.JupyterHub.hub_ip = '10.0.0.1'
```
##### 403 GET /hub/api/authorizations/cookie
If you receive a 403 error, the API token for the single-user server is likely
invalid. Commonly, the 403 error is caused by resetting the JupyterHub
database (either removing jupyterhub.sqlite or some other action) while
leaving single-user servers running. This happens most frequently when using
DockerSpawner, because Docker's default behavior is to stop/start containers
which resets the JupyterHub database, rather than destroying and recreating
the container every time. This means that the same API token is used by the
server for its whole life, until the container is rebuilt.
The fix for this Docker case is to remove any Docker containers seeing this
issue (typically all containers created before a certain point in time):
docker rm -f jupyter-name
After this, when you start your server via JupyterHub, it will build a
new container. If this was the underlying cause of the issue, you should see
your server again.
## How do I...?
### Use a chained SSL certificate
Some certificate providers, e.g. Entrust, may provide you with a chained
certificate that contains multiple files. If you are using a chained
certificate you will need to concatenate the individual files by appending the
chain cert and root cert to your host cert:
cat your_host.crt chain.crt root.crt > your_host-chained.crt
You would then set in your `jupyterhub_config.py` file the `ssl_key` and
`ssl_cert` as follows:
c.JupyterHub.ssl_cert = 'your_host-chained.crt'
c.JupyterHub.ssl_key = 'your_host.key'
#### Example
Your certificate provider gives you the following files: `example_host.crt`,
`Entrust_L1Kroot.txt` and `Entrust_Root.txt`.
Concatenate the files appending the chain cert and root cert to your host cert:
cat example_host.crt Entrust_L1Kroot.txt Entrust_Root.txt > example_host-chained.crt
You would then use the `example_host-chained.crt` as the value for
JupyterHub's `ssl_cert`. You may pass this value as a command line option
when starting JupyterHub or more conveniently set the `ssl_cert` variable in
JupyterHub's configuration file, `jupyterhub_config.py`. In `jupyterhub_config.py`,
set:
c.JupyterHub.ssl_cert = '/path/to/example_host-chained.crt'
c.JupyterHub.ssl_key = '/path/to/example_host.key'
where `ssl_cert` is the path to your chained certificate and `ssl_key` is the path to your private key.
Then restart JupyterHub.
See also [JupyterHub SSL encryption](getting-started.md#ssl-encryption).
### Install JupyterHub without a network connection
Both conda and pip can be used without a network connection. You can make your
own repository (directory) of conda packages and/or wheels, and then install
from there instead of the internet.
For instance, you can install JupyterHub with pip and configurable-http-proxy
with npmbox:
pip wheel jupyterhub
npmbox configurable-http-proxy
### I want access to the whole filesystem, but still default users to their home directory
Setting the following in `jupyterhub_config.py` will configure access to
the entire filesystem and set the default to the user's home directory.
c.Spawner.notebook_dir = '/'
c.Spawner.default_url = '/home/%U' # %U will be replaced with the username
### How do I increase the number of pySpark executors on YARN?
From the command line, pySpark executors can be configured using a command
similar to this one:
pyspark --total-executor-cores 2 --executor-memory 1G
[Cloudera documentation for configuring spark on YARN applications](https://www.cloudera.com/documentation/enterprise/latest/topics/cdh_ig_running_spark_on_yarn.html#spark_on_yarn_config_apps)
provides additional information. The [pySpark configuration documentation](https://spark.apache.org/docs/0.9.0/configuration.html)
is also helpful for programmatic configuration examples.
### How do I use JupyterLab's prerelease version with JupyterHub?
While JupyterLab is still under active development, we have had users
ask about how to try out JupyterLab with JupyterHub.
You need to install and enable the JupyterLab extension system-wide,
then you can change the default URL to `/lab`.
For instance:
pip install jupyterlab
jupyter serverextension enable --py jupyterlab --sys-prefix
The important thing is that jupyterlab is installed and enabled in the
single-user notebook server environment. For system users, this means
system-wide, as indicated above. For Docker containers, it means inside
the single-user docker image, etc.
In `jupyterhub_config.py`, configure the Spawner to tell the single-user
notebook servers to default to JupyterLab:
c.Spawner.default_url = '/lab'
### How do I set up JupyterHub for a workshop (when users are not known ahead of time)?
1. Set up JupyterHub using OAuthenticator for GitHub authentication
2. Configure the whitelist to be an empty list in `jupyterhub_config.py`
3. Configure the admin list to include the workshop leaders, giving them administrator privileges.
Users will need a GitHub account to log in and be authenticated by the Hub.
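A minimal sketch of such a configuration, assuming the separate `oauthenticator` package is installed and a GitHub OAuth application has been registered (the credentials and usernames below are placeholders):
```python
# jupyterhub_config.py
from oauthenticator.github import GitHubOAuthenticator

c.JupyterHub.authenticator_class = GitHubOAuthenticator
c.GitHubOAuthenticator.oauth_callback_url = 'https://hub.example.com/hub/oauth_callback'
c.GitHubOAuthenticator.client_id = 'YOUR_CLIENT_ID'
c.GitHubOAuthenticator.client_secret = 'YOUR_CLIENT_SECRET'

# empty whitelist: any GitHub user who can authenticate may log in
c.Authenticator.whitelist = set()
# workshop leaders get admin rights
c.Authenticator.admin_users = {'leader-one', 'leader-two'}
```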
## Troubleshooting commands
The following commands provide additional detail about installed packages,
versions, and system information that may be helpful when troubleshooting
a JupyterHub deployment. The commands are:
- System and deployment information
```bash
jupyter troubleshooting
```
- Kernel information
```bash
jupyter kernelspec list
```
- Debug logs when running JupyterHub
```bash
jupyterhub --debug
```

106
docs/source/upgrading.md Normal file
View File

@@ -0,0 +1,106 @@
# Upgrading JupyterHub and its database
From time to time, you may wish to upgrade JupyterHub to take advantage
of new releases. Much of this process is automated using scripts,
such as those generated by alembic for database upgrades. Before upgrading a
JupyterHub deployment, it's critical to back up your data and configuration
files before shutting down the JupyterHub process and server.
## Databases: SQLite (default) or RDBMS (PostgreSQL, MySQL)
The default database for JupyterHub is a [SQLite](https://sqlite.org) database.
We have chosen SQLite as JupyterHub's default for its lightweight simplicity
in certain uses such as testing, small deployments and workshops.
When running a long term deployment or a production system, we recommend using
a traditional RDBMS database, such as [PostgreSQL](https://www.postgresql.org)
or [MySQL](https://www.mysql.com), that supports the SQL `ALTER TABLE`
statement.
For production systems, SQLite has some disadvantages when used with JupyterHub:
- `upgrade-db` may not work, and you may need to start with a fresh database
- `downgrade-db` **will not** work if you want to rollback to an earlier
version, so backup the `jupyterhub.sqlite` file before upgrading
The sqlite documentation provides a helpful page about [when to use sqlite and
where traditional RDBMS may be a better choice](https://sqlite.org/whentouse.html).
## The upgrade process
Five fundamental steps are needed when upgrading JupyterHub and its
database:
1. Backup JupyterHub database
2. Backup JupyterHub configuration file
3. Shutdown the Hub
4. Upgrade JupyterHub
5. Upgrade the database by running `jupyterhub upgrade-db`
Let's take a closer look at each step in the upgrade process as well as some
additional information about JupyterHub databases.
### Backup JupyterHub database
To prevent unintended loss of data or configuration information, you should
back up the JupyterHub database (the default SQLite database or an RDBMS
database such as PostgreSQL, MySQL, or others supported by SQLAlchemy):
- If using the default SQLite database, back up the `jupyterhub.sqlite`
database.
- If using an RDBMS database such as PostgreSQL, MySQL, or another supported by
SQLAlchemy, back up the JupyterHub database.
Losing the Hub database is often not a big deal. Information that resides only
in the Hub database includes:
- active login tokens (user cookies, service tokens)
- users added via GitHub UI, instead of config files
- info about running servers
If the following conditions are true, you should be fine clearing the Hub
database and starting over:
- users are specified in the config file
- user servers are stopped during the upgrade
- you don't mind requiring users to log in again after the upgrade
### Backup JupyterHub configuration file
Additionally, back up your configuration file, `jupyterhub_config.py`, to
a secure location.
### Shutdown JupyterHub
Prior to shutting down JupyterHub, you should notify the Hub users of the
scheduled downtime. This gives users the opportunity to finish any outstanding
work in progress.
Next, shut down the JupyterHub service.
### Upgrade JupyterHub
Follow directions that correspond to your package manager, `pip` or `conda`,
for the new JupyterHub release. These directions will guide you to the
specific command. In general, use `pip install -U jupyterhub` or
`conda upgrade jupyterhub`.
### Upgrade JupyterHub databases
To run the upgrade process for JupyterHub databases, enter:
```
jupyterhub upgrade-db
```
## Upgrade checklist
1. Backup JupyterHub database:
- `jupyterhub.sqlite` when using the default sqlite database
- Your JupyterHub database when using an RDBMS
2. Backup JupyterHub configuration file: `jupyterhub_config.py`
3. Shutdown the Hub
4. Upgrade JupyterHub
- `pip install -U jupyterhub` when using `pip`
- `conda upgrade jupyterhub` when using `conda`
5. Upgrade the database by running `jupyterhub upgrade-db`

View File

@@ -0,0 +1,80 @@
# Web Security in JupyterHub
JupyterHub is designed to be a simple multi-user server for modestly sized
groups of semi-trusted users. While the design reflects serving semi-trusted
users, JupyterHub is not necessarily unsuitable for serving untrusted users.
Doing so, however, means more work: much care is required to secure the Hub
against untrusted users, with extra caution on protecting users from each
other.
One aspect of JupyterHub's design simplicity for semi-trusted users is that
the Hub and single-user servers are placed in a single domain, behind a
[proxy][configurable-http-proxy]. As a result, if the Hub is serving untrusted
users, many of the web's cross-site protections are not applied between
single-user servers and the Hub, or between single-user servers and each
other, since browsers see the whole thing (proxy, Hub, and single user
servers) as a single website.
To protect users from each other, a user must never be able to write arbitrary
HTML and serve it to another user on the Hub's domain. JupyterHub's
authentication setup prevents this because only the owner of a given
single-user server is allowed to view user-authored pages served by their
server. To protect all users from each other, JupyterHub administrators must
ensure that:
* A user does not have permission to modify their single-user server:
- A user may not install new packages in the Python environment that runs
their server.
- If the PATH is used to resolve the single-user executable (instead of an
absolute path), a user may not create new files in any PATH directory
that precedes the directory containing jupyterhub-singleuser.
- A user may not modify environment variables (e.g. PATH, PYTHONPATH) for
their single-user server.
* A user may not modify the configuration of the notebook server
(the ~/.jupyter or JUPYTER_CONFIG_DIR directory).
If any additional services are run on the same domain as the Hub, the services
must never display user-authored HTML that is neither sanitized nor sandboxed
(e.g. IFramed) to any user that lacks authentication as the author of a file.
## Mitigations
There are two main configuration options provided by JupyterHub to mitigate
these issues:
### Subdomains
JupyterHub 0.5 adds the ability to run single-user servers on their own
subdomains, which means the cross-origin protections between servers have the
desired effect, and user servers and the Hub are protected from each other. A
user's server will be at `username.jupyter.mydomain.com`, etc. This requires
all user subdomains to point to the same address, which is most easily
accomplished with wildcard DNS. Since this spreads the service across multiple
domains, you will need wildcard SSL, as well. Unfortunately, for many
institutional domains, wildcard DNS and SSL are not available, but if you do
plan to serve untrusted users, enabling subdomains is highly encouraged, as it
resolves all of the cross-site issues.
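In supported versions (0.5 and later), this is enabled by telling the Hub which host it is served on, for example (the domain is a placeholder):
```python
# requires wildcard DNS and a wildcard SSL certificate for *.jupyter.mydomain.com
c.JupyterHub.subdomain_host = 'https://jupyter.mydomain.com'
```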
### Disabling user config
If subdomains are not available or not desirable, 0.5 also adds an option
`Spawner.disable_user_config`, which you can set to prevent the user-owned
configuration files from being loaded. This leaves only package installation
and PATHs as things the admin must enforce.
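Enabling this is a single line in `jupyterhub_config.py`:
```python
c.Spawner.disable_user_config = True
```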
For most Spawners, PATH is not something users can influence, but care should
be taken to ensure that the Spawner does *not* evaluate shell configuration
files prior to launching the server.
Package isolation is most easily handled by running the single-user server in
a virtualenv with disabled system-site-packages.
## Extra notes
It is important to note that the control over the environment only affects the
single-user server, and not the environment(s) in which the user's kernel(s)
may run. Installing additional packages in the kernel environment does not
pose additional risk to the web application's security.
[configurable-http-proxy]: https://github.com/jupyterhub/configurable-http-proxy

View File

@@ -1,89 +0,0 @@
# Writing a custom Spawner
Each single-user server is started by a [Spawner][].
The Spawner represents an abstract interface to a process,
and a custom Spawner needs to be able to take three actions:
1. start the process
2. poll whether the process is still running
3. stop the process
See a list of custom Spawners [on the wiki](https://github.com/jupyter/jupyterhub/wiki/Spawners).
## Spawner.start
`Spawner.start` should start the single-user server for a single user.
Information about the user can be retrieved from `self.user`,
an object encapsulating the user's name, authentication, and server info.
When `Spawner.start` returns, it should have stored the IP and port
of the single-user server in `self.user.server`.
**NOTE:** when writing coroutines, *never* `yield` in between a db change and a commit.
Most `Spawner.start`s should have something looking like:
```python
def start(self):
    self.user.server.ip = 'localhost' # or other host or IP address, as seen by the Hub
    self.user.server.port = 1234 # port selected somehow
    self.db.commit() # always commit before yield, if modifying db values
    yield self._actually_start_server_somehow()
```
When `Spawner.start` returns, the single-user server process should actually be running,
not just requested. JupyterHub can handle `Spawner.start` being very slow
(such as PBS-style batch queues, or instantiating whole AWS instances)
via relaxing the `Spawner.start_timeout` config value.
## Spawner.poll
`Spawner.poll` should check if the spawner is still running.
It should return `None` if it is still running,
and an integer exit status, otherwise.
For the local process case, this uses `os.kill(PID, 0)`
to check if the process is still around.
## Spawner.stop
`Spawner.stop` should stop the process. It must be a tornado coroutine,
and should return when the process has finished exiting.
## Spawner state
JupyterHub should be able to stop and restart without having to tear down
single-user servers. This means that a Spawner may need to persist
some information that can later be restored.
A dictionary of JSON-able state can be used to store this information.
Unlike start/stop/poll, the state methods must not be coroutines.
In the single-process case, this is only the process ID of the server:
```python
def get_state(self):
    """get the current state"""
    state = super().get_state()
    if self.pid:
        state['pid'] = self.pid
    return state

def load_state(self, state):
    """load state from the database"""
    super().load_state(state)
    if 'pid' in state:
        self.pid = state['pid']

def clear_state(self):
    """clear any state (called after shutdown)"""
    super().clear_state()
    self.pid = 0
```
[Spawner]: ../jupyterhub/spawner.py

View File

@@ -0,0 +1,41 @@
# `cull-idle` Example
The `cull_idle_servers.py` file provides a script to cull and shut down idle
single-user notebook servers. This script is used when `cull-idle` is run as
a Service or when it is run manually as a standalone script.
## Configure `cull-idle` to run as a Hub-Managed Service
In `jupyterhub_config.py`, add the following dictionary for the `cull-idle`
Service to the `c.JupyterHub.services` list:
```python
c.JupyterHub.services = [
    {
        'name': 'cull-idle',
        'admin': True,
        'command': 'python cull_idle_servers.py --timeout=3600'.split(),
    }
]
```
where:
- `'admin': True` indicates that the Service has 'admin' permissions, and
- `'command'` indicates that the Service will be managed by the Hub.
## Run `cull-idle` manually as a standalone script
This will run `cull-idle` manually. `cull-idle` can be run as a standalone
script anywhere with access to the Hub, and will periodically check for idle
servers and shut them down via the Hub's REST API. In order to shut down the
servers, the token given to `cull-idle` must have admin privileges.
Generate an API token and store it in the `JUPYTERHUB_API_TOKEN` environment
variable. Run `cull_idle_servers.py` manually.
```bash
export JUPYTERHUB_API_TOKEN=`jupyterhub token`
python cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
```

View File

@@ -9,10 +9,21 @@ so cull timeout should be greater than the sum of:
- single-user websocket ping interval (default: 30s)
- JupyterHub.last_activity_interval (default: 5 minutes)
Generate an API token and store it in `JPY_API_TOKEN`:
You can run this as a service managed by JupyterHub with this in your config::
export JPY_API_TOKEN=`jupyterhub token`
python cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub]
c.JupyterHub.services = [
{
'name': 'cull-idle',
'admin': True,
'command': 'python cull_idle_servers.py --timeout=3600'.split(),
}
]
Or run it manually by generating an API token and storing it in `JUPYTERHUB_API_TOKEN`:
export JUPYTERHUB_API_TOKEN=`jupyterhub token`
python cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
"""
import datetime
@@ -34,7 +45,7 @@ def cull_idle(url, api_token, timeout):
auth_header = {
'Authorization': 'token %s' % api_token
}
req = HTTPRequest(url=url + '/api/users',
req = HTTPRequest(url=url + '/users',
headers=auth_header,
)
now = datetime.datetime.utcnow()
@@ -47,7 +58,7 @@ def cull_idle(url, api_token, timeout):
last_activity = parse_date(user['last_activity'])
if user['server'] and last_activity < cull_limit:
app_log.info("Culling %s (inactive since %s)", user['name'], last_activity)
req = HTTPRequest(url=url + '/api/users/%s/server' % user['name'],
req = HTTPRequest(url=url + '/users/%s/server' % user['name'],
method='DELETE',
headers=auth_header,
)
@@ -60,7 +71,7 @@ def cull_idle(url, api_token, timeout):
app_log.debug("Finished culling %s", name)
if __name__ == '__main__':
define('url', default='http://127.0.0.1:8081/hub', help="The JupyterHub API URL")
define('url', default=os.environ.get('JUPYTERHUB_API_URL'), help="The JupyterHub API URL")
define('timeout', default=600, help="The idle timeout (in seconds)")
define('cull_every', default=0, help="The interval (in seconds) for checking for idle servers to cull")
@@ -68,7 +79,7 @@ if __name__ == '__main__':
if not options.cull_every:
options.cull_every = options.timeout // 2
api_token = os.environ['JPY_API_TOKEN']
api_token = os.environ['JUPYTERHUB_API_TOKEN']
loop = IOLoop.current()
cull = lambda : cull_idle(options.url, api_token, options.timeout)

View File

@@ -0,0 +1,8 @@
# run cull-idle as a service
c.JupyterHub.services = [
    {
        'name': 'cull-idle',
        'admin': True,
        'command': 'python cull_idle_servers.py --timeout=3600'.split(),
    }
]

View File

@@ -1,4 +1,4 @@
FROM jupyter/jupyterhub
FROM jupyter/jupyterhub-onbuild
MAINTAINER Jupyter Project <jupyter@googlegroups.com>

View File

@@ -0,0 +1,33 @@
# Authenticating a flask service with JupyterHub
Uses `jupyterhub.services.HubAuth` to authenticate requests with the Hub in a [flask][] application.
## Run
1. Launch JupyterHub and the `whoami` service with
jupyterhub --ip=127.0.0.1
2. Visit http://127.0.0.1:8000/services/whoami
After logging in with your local-system credentials, you should see a JSON dump of your user info:
```json
{
"admin": false,
"last_activity": "2016-05-27T14:05:18.016372",
"name": "queequeg",
"pending": null,
"server": "/user/queequeg"
}
```
This relies on the Hub starting the whoami service, via config (see [jupyterhub_config.py](./jupyterhub_config.py)).
A similar service could be run externally, by setting the JupyterHub service environment variables:
JUPYTERHUB_API_TOKEN
JUPYTERHUB_SERVICE_PREFIX
[flask]: http://flask.pocoo.org

View File

@@ -0,0 +1,13 @@
import os
import sys
c.JupyterHub.services = [
    {
        'name': 'whoami',
        'url': 'http://127.0.0.1:10101',
        'command': ['flask', 'run', '--port=10101'],
        'environment': {
            'FLASK_APP': 'whoami-flask.py',
        }
    }
]

View File

@@ -0,0 +1,4 @@
export CONFIGPROXY_AUTH_TOKEN=`openssl rand -hex 32`
# start JupyterHub
jupyterhub --ip=127.0.0.1

View File

@@ -0,0 +1,50 @@
#!/usr/bin/env python3
"""
whoami service authentication with the Hub
"""
from functools import wraps
import json
import os
from urllib.parse import quote
from flask import Flask, redirect, request, Response
from jupyterhub.services.auth import HubAuth
prefix = os.environ.get('JUPYTERHUB_SERVICE_PREFIX', '/')
auth = HubAuth(
api_token=os.environ['JUPYTERHUB_API_TOKEN'],
cookie_cache_max_age=60,
)
app = Flask(__name__)
def authenticated(f):
    """Decorator for authenticating with the Hub"""
    @wraps(f)
    def decorated(*args, **kwargs):
        cookie = request.cookies.get(auth.cookie_name)
        if cookie:
            user = auth.user_for_cookie(cookie)
        else:
            user = None
        if user:
            return f(user, *args, **kwargs)
        else:
            # redirect to login url on failed auth
            return redirect(auth.login_url + '?next=%s' % quote(request.path))
    return decorated


@app.route(prefix + '/')
@authenticated
def whoami(user):
    return Response(
        json.dumps(user, indent=1, sort_keys=True),
        mimetype='application/json',
    )

Binary file not shown.


View File

@@ -0,0 +1,42 @@
"""An example service authenticating with the Hub.
This example service serves `/services/whoami/`,
authenticated with the Hub,
showing the user their own info.
"""
from getpass import getuser
import json
import os
from urllib.parse import urlparse
from tornado.ioloop import IOLoop
from tornado.httpserver import HTTPServer
from tornado.web import RequestHandler, Application, authenticated
from jupyterhub.services.auth import HubAuthenticated
class WhoAmIHandler(HubAuthenticated, RequestHandler):
    hub_users = {getuser()}  # the users allowed to access this service

    @authenticated
    def get(self):
        user_model = self.get_current_user()
        self.set_header('content-type', 'application/json')
        self.write(json.dumps(user_model, indent=1, sort_keys=True))


def main():
    app = Application([
        (os.environ['JUPYTERHUB_SERVICE_PREFIX'] + '/?', WhoAmIHandler),
        (r'.*', WhoAmIHandler),
    ], login_url='/hub/login')
    http_server = HTTPServer(app)
    url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL'])
    http_server.listen(url.port, url.hostname)
    IOLoop.current().start()


if __name__ == '__main__':
    main()

View File

@@ -0,0 +1,32 @@
# Authenticating a service with JupyterHub
Uses `jupyterhub.services.HubAuthenticated` to authenticate requests with the Hub.
## Run
1. Launch JupyterHub and the `whoami` service with
jupyterhub --ip=127.0.0.1
2. Visit http://127.0.0.1:8000/services/whoami
After logging in with your local-system credentials, you should see a JSON dump of your user info:
```json
{
"admin": false,
"last_activity": "2016-05-27T14:05:18.016372",
"name": "queequeg",
"pending": null,
"server": "/user/queequeg"
}
```
This relies on the Hub starting the whoami service, via config (see [jupyterhub_config.py](./jupyterhub_config.py)).
A similar service could be run externally, by setting the JupyterHub service environment variables:
JUPYTERHUB_API_TOKEN
JUPYTERHUB_SERVICE_PREFIX
or instantiating and configuring a HubAuth object yourself, and attaching it as `self.hub_auth` in your HubAuthenticated handlers.

View File

@@ -0,0 +1,10 @@
import os
import sys
c.JupyterHub.services = [
    {
        'name': 'whoami',
        'url': 'http://127.0.0.1:10101',
        'command': [sys.executable, './whoami.py'],
    }
]

Binary file not shown.


View File

@@ -0,0 +1,40 @@
"""An example service authenticating with the Hub.
This serves `/services/whoami/`, authenticated with the Hub, showing the user their own info.
"""
from getpass import getuser
import json
import os
from urllib.parse import urlparse
from tornado.ioloop import IOLoop
from tornado.httpserver import HTTPServer
from tornado.web import RequestHandler, Application, authenticated
from jupyterhub.services.auth import HubAuthenticated
class WhoAmIHandler(HubAuthenticated, RequestHandler):
    hub_users = {getuser()}  # the users allowed to access me

    @authenticated
    def get(self):
        user_model = self.get_current_user()
        self.set_header('content-type', 'application/json')
        self.write(json.dumps(user_model, indent=1, sort_keys=True))


def main():
    app = Application([
        (os.environ['JUPYTERHUB_SERVICE_PREFIX'] + '/?', WhoAmIHandler),
        (r'.*', WhoAmIHandler),
    ], login_url='/hub/login')
    http_server = HTTPServer(app)
    url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL'])
    http_server.listen(url.port, url.hostname)
    IOLoop.current().start()


if __name__ == '__main__':
    main()

View File

@@ -0,0 +1,46 @@
"""
Example JupyterHub config allowing users to specify environment variables and notebook-server args
"""
import shlex
from jupyterhub.spawner import LocalProcessSpawner
class DemoFormSpawner(LocalProcessSpawner):
    def _options_form_default(self):
        default_env = "YOURNAME=%s\n" % self.user.name
        return """
        <label for="args">Extra notebook CLI arguments</label>
        <input name="args" placeholder="e.g. --debug"></input>
        <label for="env">Environment variables (one per line)</label>
        <textarea name="env">{env}</textarea>
        """.format(env=default_env)

    def options_from_form(self, formdata):
        options = {}
        options['env'] = env = {}
        env_lines = formdata.get('env', [''])
        for line in env_lines[0].splitlines():
            if line:
                key, value = line.split('=', 1)
                env[key.strip()] = value.strip()
        arg_s = formdata.get('args', [''])[0].strip()
        if arg_s:
            options['argv'] = shlex.split(arg_s)
        return options

    def get_args(self):
        """Return arguments to pass to the notebook server"""
        argv = super().get_args()
        if self.user_options.get('argv'):
            argv.extend(self.user_options['argv'])
        return argv

    def get_env(self):
        env = super().get_env()
        if self.user_options.get('env'):
            env.update(self.user_options['env'])
        return env

c.JupyterHub.spawner_class = DemoFormSpawner

66
jupyterhub/alembic.ini Normal file
View File

@@ -0,0 +1,66 @@
# A generic, single database configuration.
[alembic]
script_location = {alembic_dir}
sqlalchemy.url = {db_url}
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; this defaults
# to jupyterhub/alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat jupyterhub/alembic/versions
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@@ -0,0 +1 @@
This is the alembic configuration for JupyterHub database migrations.

70
jupyterhub/alembic/env.py Normal file
View File

@@ -0,0 +1,70 @@
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()

View File

@@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,24 @@
"""base revision for 0.5
Revision ID: 19c0846f6344
Revises:
Create Date: 2016-04-11 16:05:34.873288
"""
# revision identifiers, used by Alembic.
revision = '19c0846f6344'
down_revision = None
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
pass
def downgrade():
pass

View File

@@ -0,0 +1,25 @@
"""services
Revision ID: af4cbdb2d13c
Revises: eeb276e51423
Create Date: 2016-07-28 16:16:38.245348
"""
# revision identifiers, used by Alembic.
revision = 'af4cbdb2d13c'
down_revision = 'eeb276e51423'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('api_tokens', sa.Column('service_id', sa.Integer))
def downgrade():
# sqlite cannot downgrade because of limited ALTER TABLE support (no DROP COLUMN)
op.drop_column('api_tokens', 'service_id')

View File

@@ -0,0 +1,26 @@
"""auth_state
Adds auth_state column to Users table.
Revision ID: eeb276e51423
Revises: 19c0846f6344
Create Date: 2016-04-11 16:06:49.239831
"""
# revision identifiers, used by Alembic.
revision = 'eeb276e51423'
down_revision = '19c0846f6344'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from jupyterhub.orm import JSONDict
def upgrade():
op.add_column('users', sa.Column('auth_state', JSONDict))
def downgrade():
# sqlite cannot downgrade because of limited ALTER TABLE support (no DROP COLUMN)
op.drop_column('users', 'auth_state')

View File

@@ -1,11 +1,6 @@
from .base import *
from .auth import *
from .hub import *
from .proxy import *
from .users import *
from . import auth, hub, proxy, users
from . import auth, hub, proxy, users, groups, services
default_handlers = []
for mod in (auth, hub, proxy, users):
for mod in (auth, hub, proxy, users, groups, services):
default_handlers.extend(mod.default_handlers)

View File

@@ -6,7 +6,7 @@
import json
from urllib.parse import quote
from tornado import web
from tornado import web, gen
from .. import orm
from ..utils import token_authenticated
from .base import APIHandler
@@ -18,15 +18,27 @@ class TokenAPIHandler(APIHandler):
orm_token = orm.APIToken.find(self.db, token)
if orm_token is None:
raise web.HTTPError(404)
self.write(json.dumps(self.user_model(orm_token.user)))
self.write(json.dumps(self.user_model(self.users[orm_token.user])))
@gen.coroutine
def post(self):
if self.authenticator is not None:
data = self.get_json_body()
username = yield self.authenticator.authenticate(self, data)
if username is None:
raise web.HTTPError(403)
user = self.find_user(username)
api_token = user.new_api_token()
self.write(json.dumps({"Authentication":api_token}))
else:
raise web.HTTPError(404)
class CookieAPIHandler(APIHandler):
@token_authenticated
def get(self, cookie_name, cookie_value=None):
cookie_name = quote(cookie_name, safe='')
if cookie_value is None:
self.log.warn("Cookie values in request body is deprecated, use `/cookie_name/cookie_value`")
self.log.warning("Cookie values in request body is deprecated, use `/cookie_name/cookie_value`")
cookie_value = self.request.body
else:
cookie_value = cookie_value.encode('utf8')
@@ -39,4 +51,5 @@ class CookieAPIHandler(APIHandler):
default_handlers = [
(r"/api/authorizations/cookie/([^/]+)(?:/([^/]+))?", CookieAPIHandler),
(r"/api/authorizations/token/([^/]+)", TokenAPIHandler),
(r"/api/authorizations/token", TokenAPIHandler),
]

View File

@@ -26,25 +26,29 @@ class APIHandler(BaseHandler):
# If no header is provided, assume it comes from a script/curl.
# We are only concerned with cross-site browser stuff here.
if not host:
self.log.warn("Blocking API request with no host")
self.log.warning("Blocking API request with no host")
return False
if not referer:
self.log.warn("Blocking API request with no referer")
self.log.warning("Blocking API request with no referer")
return False
host_path = url_path_join(host, self.hub.server.base_url)
referer_path = referer.split('://', 1)[-1]
if not (referer_path + '/').startswith(host_path):
self.log.warn("Blocking Cross Origin API request. Referer: %s, Host: %s",
self.log.warning("Blocking Cross Origin API request. Referer: %s, Host: %s",
referer, host_path)
return False
return True
def get_current_user_cookie(self):
"""Override get_user_cookie to check Referer header"""
if not self.check_referer():
cookie_user = super().get_current_user_cookie()
# check referer only if there is a cookie user,
# avoiding misleading "Blocking Cross Origin" messages
# when there's no cookie set anyway.
if cookie_user and not self.check_referer():
return None
return super().get_current_user_cookie()
return cookie_user
def get_json_body(self):
"""Return the body of the request as JSON data."""
@@ -83,10 +87,12 @@ class APIHandler(BaseHandler):
}))
def user_model(self, user):
"""Get the JSON model for a User object"""
model = {
'name': user.name,
'admin': user.admin,
'server': user.server.base_url if user.running else None,
'groups': [ g.name for g in user.groups ],
'server': user.url if user.running else None,
'pending': None,
'last_activity': user.last_activity.isoformat(),
}
@@ -95,24 +101,57 @@ class APIHandler(BaseHandler):
elif user.stop_pending:
model['pending'] = 'stop'
return model
_model_types = {
def group_model(self, group):
"""Get the JSON model for a Group object"""
return {
'name': group.name,
'users': [ u.name for u in group.users ]
}
_user_model_types = {
'name': str,
'admin': bool,
'groups': list,
}
def _check_user_model(self, model):
_group_model_types = {
'name': str,
'users': list,
}
def _check_model(self, model, model_types, name):
"""Check a model provided by a REST API request
Args:
model (dict): user-provided model
model_types (dict): dict of key:type used to validate types and keys
name (str): name of the model, used in error messages
"""
if not isinstance(model, dict):
raise web.HTTPError(400, "Invalid JSON data: %r" % model)
if not set(model).issubset(set(self._model_types)):
if not set(model).issubset(set(model_types)):
raise web.HTTPError(400, "Invalid JSON keys: %r" % model)
for key, value in model.items():
if not isinstance(value, self._model_types[key]):
raise web.HTTPError(400, "user.%s must be %s, not: %r" % (
key, self._model_types[key], type(value)
if not isinstance(value, model_types[key]):
raise web.HTTPError(400, "%s.%s must be %s, not: %r" % (
name, key, model_types[key], type(value)
))
def _check_user_model(self, model):
"""Check a request-provided user model from a REST API"""
self._check_model(model, self._user_model_types, 'user')
for username in model.get('users', []):
if not isinstance(username, str):
raise web.HTTPError(400, ("usernames must be str, not %r", type(username)))
def _check_group_model(self, model):
"""Check a request-provided group model from a REST API"""
self._check_model(model, self._group_model_types, 'group')
for groupname in model.get('groups', []):
if not isinstance(groupname, str):
raise web.HTTPError(400, ("group names must be str, not %r", type(groupname)))
def options(self, *args, **kwargs):
self.set_header('Access-Control-Allow-Headers', 'accept, content-type')
self.finish()

View File

@@ -0,0 +1,136 @@
"""Group handlers"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import json
from tornado import gen, web
from .. import orm
from ..utils import admin_only
from .base import APIHandler
class _GroupAPIHandler(APIHandler):
def _usernames_to_users(self, usernames):
"""Turn a list of usernames into user objects"""
users = []
for username in usernames:
username = self.authenticator.normalize_username(username)
user = self.find_user(username)
if user is None:
raise web.HTTPError(400, "No such user: %s" % username)
users.append(user.orm_user)
return users
def find_group(self, name):
"""Find and return a group by name.
Raise 404 if not found.
"""
group = orm.Group.find(self.db, name=name)
if group is None:
raise web.HTTPError(404, "No such group: %s", name)
return group
class GroupListAPIHandler(_GroupAPIHandler):
@admin_only
def get(self):
"""List groups"""
data = [ self.group_model(g) for g in self.db.query(orm.Group) ]
self.write(json.dumps(data))
class GroupAPIHandler(_GroupAPIHandler):
"""View and modify groups by name"""
@admin_only
def get(self, name):
group = self.find_group(name)
self.write(json.dumps(self.group_model(group)))
@admin_only
@gen.coroutine
def post(self, name):
"""POST creates a group by name"""
model = self.get_json_body()
if model is None:
model = {}
else:
self._check_group_model(model)
existing = orm.Group.find(self.db, name=name)
if existing is not None:
raise web.HTTPError(400, "Group %s already exists" % name)
usernames = model.get('users', [])
# check that users exist
users = self._usernames_to_users(usernames)
# create the group
self.log.info("Creating new group %s with %i users",
name, len(users),
)
self.log.debug("Users: %s", usernames)
group = orm.Group(name=name, users=users)
self.db.add(group)
self.db.commit()
self.write(json.dumps(self.group_model(group)))
self.set_status(201)
@admin_only
def delete(self, name):
"""Delete a group by name"""
group = self.find_group(name)
self.log.info("Deleting group %s", name)
self.db.delete(group)
self.db.commit()
self.set_status(204)
class GroupUsersAPIHandler(_GroupAPIHandler):
"""Modify a group's user list"""
@admin_only
def post(self, name):
"""POST adds users to a group"""
group = self.find_group(name)
data = self.get_json_body()
self._check_group_model(data)
if 'users' not in data:
raise web.HTTPError(400, "Must specify users to add")
self.log.info("Adding %i users to group %s", len(data['users']), name)
self.log.debug("Adding: %s", data['users'])
for user in self._usernames_to_users(data['users']):
if user not in group.users:
group.users.append(user)
else:
self.log.warning("User %s already in group %s", user.name, name)
self.db.commit()
self.write(json.dumps(self.group_model(group)))
@gen.coroutine
@admin_only
def delete(self, name):
"""DELETE removes users from a group"""
group = self.find_group(name)
data = self.get_json_body()
self._check_group_model(data)
if 'users' not in data:
raise web.HTTPError(400, "Must specify users to delete")
self.log.info("Removing %i users from group %s", len(data['users']), name)
self.log.debug("Removing: %s", data['users'])
for user in self._usernames_to_users(data['users']):
if user in group.users:
group.users.remove(user)
else:
self.log.warning("User %s already not in group %s", user.name, name)
self.db.commit()
self.write(json.dumps(self.group_model(group)))
default_handlers = [
(r"/api/groups", GroupListAPIHandler),
(r"/api/groups/([^/]+)", GroupAPIHandler),
(r"/api/groups/([^/]+)/users", GroupUsersAPIHandler),
]

View File

@@ -4,12 +4,15 @@
# Distributed under the terms of the Modified BSD License.
import json
import sys
from tornado import web
from tornado.ioloop import IOLoop
from ..utils import admin_only
from .base import APIHandler
from ..version import __version__
class ShutdownAPIHandler(APIHandler):
@@ -49,6 +52,56 @@ class ShutdownAPIHandler(APIHandler):
loop.add_callback(loop.stop)
class RootAPIHandler(APIHandler):
def get(self):
"""GET /api/ returns info about the Hub and its API.
It is not an authenticated endpoint.
For now, it just returns the version of JupyterHub itself.
"""
data = {
'version': __version__,
}
self.finish(json.dumps(data))
class InfoAPIHandler(APIHandler):
@admin_only
def get(self):
"""GET /api/info returns detailed info about the Hub and its API.
It is not an authenticated endpoint.
For now, it just returns the version of JupyterHub itself.
"""
def _class_info(typ):
"""info about a class (Spawner or Authenticator)"""
info = {
'class': '{mod}.{name}'.format(mod=typ.__module__, name=typ.__name__),
}
pkg = typ.__module__.split('.')[0]
try:
version = sys.modules[pkg].__version__
except (KeyError, AttributeError):
version = 'unknown'
info['version'] = version
return info
data = {
'version': __version__,
'python': sys.version,
'sys_executable': sys.executable,
'spawner': _class_info(self.settings['spawner_class']),
'authenticator': _class_info(self.authenticator.__class__),
}
self.finish(json.dumps(data))
default_handlers = [
(r"/api/shutdown", ShutdownAPIHandler),
(r"/api/?", RootAPIHandler),
(r"/api/info", InfoAPIHandler),
]

View File

@@ -28,7 +28,7 @@ class ProxyAPIHandler(APIHandler):
@gen.coroutine
def post(self):
"""POST checks the proxy to ensure"""
yield self.proxy.check_routes()
yield self.proxy.check_routes(self.users, self.services)
@admin_only
@@ -59,7 +59,7 @@ class ProxyAPIHandler(APIHandler):
self.proxy.auth_token = model['auth_token']
self.db.commit()
self.log.info("Updated proxy at %s", server.bind_url)
yield self.proxy.check_routes()
yield self.proxy.check_routes(self.users, self.services)

View File

@@ -0,0 +1,64 @@
"""Service handlers
Currently GET-only, no actions can be taken to modify services.
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import json
from tornado import web
from .. import orm
from ..utils import admin_only
from .base import APIHandler
def service_model(service):
"""Produce the model for a service"""
return {
'name': service.name,
'admin': service.admin,
'url': service.url,
'prefix': service.server.base_url if service.server else '',
'command': service.command,
'pid': service.proc.pid if service.proc else 0,
}
class ServiceListAPIHandler(APIHandler):
@admin_only
def get(self):
data = {name: service_model(service) for name, service in self.services.items()}
self.write(json.dumps(data))
def admin_or_self(method):
"""Decorator for restricting access to either the target service or admin"""
def decorated_method(self, name):
current = self.get_current_user()
if current is None:
raise web.HTTPError(403)
if not current.admin:
# not admin, maybe self
if not isinstance(current, orm.Service):
raise web.HTTPError(403)
if current.name != name:
raise web.HTTPError(403)
# raise 404 if not found
if name not in self.services:
raise web.HTTPError(404)
return method(self, name)
return decorated_method
class ServiceAPIHandler(APIHandler):
@admin_or_self
def get(self, name):
service = self.services[name]
self.write(json.dumps(service_model(service)))
default_handlers = [
(r"/api/services", ServiceListAPIHandler),
(r"/api/services/([^/]+)", ServiceAPIHandler),
]

View File

@@ -15,7 +15,7 @@ from .base import APIHandler
class UserListAPIHandler(APIHandler):
@admin_only
def get(self):
users = self.db.query(orm.User)
users = [ self._user_from_orm(u) for u in self.db.query(orm.User) ]
data = [ self.user_model(u) for u in users ]
self.write(json.dumps(data))
@@ -33,13 +33,25 @@ class UserListAPIHandler(APIHandler):
admin = data.get('admin', False)
to_create = []
invalid_names = []
for name in usernames:
name = self.authenticator.normalize_username(name)
if not self.authenticator.validate_username(name):
invalid_names.append(name)
continue
user = self.find_user(name)
if user is not None:
self.log.warn("User %s already exists" % name)
self.log.warning("User %s already exists" % name)
else:
to_create.append(name)
if invalid_names:
if len(invalid_names) == 1:
msg = "Invalid username: %s" % invalid_names[0]
else:
msg = "Invalid usernames: %s" % ', '.join(invalid_names)
raise web.HTTPError(400, msg)
if not to_create:
raise web.HTTPError(400, "All %i users already exist" % len(usernames))
@@ -51,11 +63,10 @@ class UserListAPIHandler(APIHandler):
self.db.commit()
try:
yield gen.maybe_future(self.authenticator.add_user(user))
except Exception:
except Exception as e:
self.log.error("Failed to create user: %s" % name, exc_info=True)
self.db.delete(user)
self.db.commit()
raise web.HTTPError(400, "Failed to create user: %s" % name)
del self.users[user]
raise web.HTTPError(400, "Failed to create user %s: %s" % (name, str(e)))
else:
created.append(user)
@@ -104,8 +115,8 @@ class UserAPIHandler(APIHandler):
yield gen.maybe_future(self.authenticator.add_user(user))
except Exception:
self.log.error("Failed to create user: %s" % name, exc_info=True)
self.db.delete(user)
self.db.commit()
# remove from registry
del self.users[user]
raise web.HTTPError(400, "Failed to create user: %s" % name)
self.write(json.dumps(self.user_model(user)))
@@ -127,10 +138,8 @@ class UserAPIHandler(APIHandler):
raise web.HTTPError(400, "%s's server is in the process of stopping, please wait." % name)
yield gen.maybe_future(self.authenticator.delete_user(user))
# remove from the db
self.db.delete(user)
self.db.commit()
# remove from registry
del self.users[user]
self.set_status(204)
@@ -152,12 +161,14 @@ class UserServerAPIHandler(APIHandler):
@admin_or_self
def post(self, name):
user = self.find_user(name)
if user.spawner:
state = yield user.spawner.poll()
if user.running:
# include notify, so that a server that died is noticed immediately
state = yield user.spawner.poll_and_notify()
if state is None:
raise web.HTTPError(400, "%s's server is already running" % name)
yield self.spawn_single_user(user)
options = self.get_json_body()
yield self.spawn_single_user(user, options=options)
status = 202 if user.spawn_pending else 201
self.set_status(status)
@@ -170,7 +181,8 @@ class UserServerAPIHandler(APIHandler):
return
if not user.running:
raise web.HTTPError(400, "%s's server is not running" % name)
status = yield user.spawner.poll()
# include notify, so that a server that died is noticed immediately
status = yield user.spawner.poll_and_notify()
if status is not None:
raise web.HTTPError(400, "%s's server is not running" % name)
yield self.stop_single_user(user)
@@ -185,7 +197,7 @@ class UserAdminAccessAPIHandler(APIHandler):
@admin_only
def post(self, name):
current = self.get_current_user()
self.log.warn("Admin user %s has requested access to %s's server",
self.log.warning("Admin user %s has requested access to %s's server",
current.name, name,
)
if not self.settings.get('admin_access', False):
@@ -196,6 +208,7 @@ class UserAdminAccessAPIHandler(APIHandler):
if not user.running:
raise web.HTTPError(400, "%s's server is not running" % name)
self.set_server_cookie(user)
current.other_user_cookies.add(name)
default_handlers = [

File diff suppressed because it is too large


@@ -1,125 +1,358 @@
"""Simple PAM authenticator"""
"""Base Authenticator class and the default PAM Authenticator"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from grp import getgrnam
import pipes
import pwd
from subprocess import check_call, check_output, CalledProcessError
import re
from shutil import which
import sys
from subprocess import Popen, PIPE, STDOUT
from tornado import gen
import simplepam
import pamela
from traitlets.config import LoggingConfigurable
from traitlets import Bool, Set, Unicode, Any
from traitlets import Bool, Set, Unicode, Dict, Any, default, observe
from .handlers.login import LoginHandler
from .utils import url_path_join
from .traitlets import Command
class Authenticator(LoggingConfigurable):
"""A class for authentication.
The API is one method, `authenticate`, a tornado gen.coroutine.
"""
db = Any()
admin_users = Set(config=True,
help="""set of usernames of admin users
"""Base class for implementing an authentication provider for JupyterHub"""
If unspecified, only the user that launches the server will be admin.
db = Any()
admin_users = Set(
help="""
Set of users that will have admin rights on this JupyterHub.
Admin users have extra privileges:
- Use the admin panel to see the list of logged-in users
- Add / remove users in some authenticators
- Restart / halt the hub
- Start / stop users' single-user servers
- Can access each individual user's single-user server (if configured)
Admin access should be treated the same way root access is.
Defaults to an empty set, in which case no user has admin access.
"""
).tag(config=True)
whitelist = Set(
help="""
Whitelist of usernames that are allowed to log in.
Use this with supported authenticators to restrict which users can log in. This is an
additional whitelist that further restricts users, beyond whatever restrictions the
authenticator has in place.
If empty, does not perform any additional restriction.
"""
).tag(config=True)
custom_html = Unicode(
help="""
HTML form to be overridden by authenticators if they want a custom authentication form.
Defaults to an empty string, which shows the default username/password form.
"""
)
whitelist = Set(config=True,
help="""Username whitelist.
Use this to restrict which users can login.
If empty, allow any user to attempt login.
login_service = Unicode(
help="""
Name of the login service that this authenticator uses to authenticate users.
Example: GitHub, MediaWiki, Google, etc.
Setting this value replaces the login form with a "Login with <login_service>" button.
Any authenticator that redirects to an external service (e.g. using OAuth) should set this.
"""
)
custom_html = Unicode('',
help="""HTML login form for custom handlers.
Override in form-based custom authenticators
that don't use username+password,
or need custom branding.
username_pattern = Unicode(
help="""
Regular expression pattern that all valid usernames must match.
If a username does not match the pattern specified here, authentication will not be attempted.
If not set, allow any username.
"""
).tag(config=True)
@observe('username_pattern')
def _username_pattern_changed(self, change):
if not change['new']:
self.username_regex = None
self.username_regex = re.compile(change['new'])
username_regex = Any(
help="""
Compiled regex kept in sync with `username_pattern`
"""
)
login_service = Unicode('',
help="""Name of the login service for external
login services (e.g. 'GitHub').
def validate_username(self, username):
"""Validate a normalized username
Return True if username is valid, False otherwise.
"""
)
if not self.username_regex:
return True
return bool(self.username_regex.match(username))
username_map = Dict(
help="""Dictionary mapping authenticator usernames to JupyterHub users.
Primarily used to normalize OAuth user names to local users.
"""
).tag(config=True)
def normalize_username(self, username):
"""Normalize the given username and return it
Override in subclasses if usernames need different normalization rules.
The default lowercases the username and applies `username_map` if it is set.
"""
username = username.lower()
username = self.username_map.get(username, username)
return username
def check_whitelist(self, username):
"""Check if a username is allowed to authenticate based on whitelist configuration
Return True if username is allowed, False otherwise.
No whitelist means any username is allowed.
Names are normalized *before* being checked against the whitelist.
"""
if not self.whitelist:
# No whitelist means any name is allowed
return True
return username in self.whitelist
@gen.coroutine
def get_authenticated_user(self, handler, data):
"""Authenticate the user who is attempting to log in
Returns normalized username if successful, None otherwise.
This calls `authenticate`, which should be overridden in subclasses,
normalizes the username, and then checks the name against the whitelist.
This is the outer API for authenticating a user.
Subclasses should not need to override this method.
The various stages can be overridden separately:
- `authenticate` turns formdata into a username
- `normalize_username` normalizes the username
- `check_whitelist` checks against the user whitelist
"""
username = yield self.authenticate(handler, data)
if username is None:
return
username = self.normalize_username(username)
if not self.validate_username(username):
self.log.warning("Disallowing invalid username %r.", username)
return
whitelist_pass = yield gen.maybe_future(self.check_whitelist(username))
if whitelist_pass:
return username
else:
self.log.warning("User %r not in whitelist.", username)
return
@gen.coroutine
def authenticate(self, handler, data):
"""Authenticate a user with login form data.
"""Authenticate a user with login form data
This must be a tornado gen.coroutine.
It must return the username on successful authentication,
and return None on failed authentication.
Checking the whitelist is handled separately by the caller.
Args:
handler (tornado.web.RequestHandler): the current request handler
data (dict): The formdata of the login form.
The default form has 'username' and 'password' fields.
Returns:
username (str or None): The username of the authenticated user,
or None if authentication failed
"""
def check_whitelist(self, user):
def pre_spawn_start(self, user, spawner):
"""Hook called before spawning a user's server
Can be used to do auth-related startup, e.g. opening PAM sessions.
"""
Return True if the whitelist is empty or user is in the whitelist.
def post_spawn_stop(self, user, spawner):
"""Hook called after stopping a user container
Can be used to do auth-related cleanup, e.g. closing PAM sessions.
"""
# Parens aren't necessary here, but they make this easier to parse.
return (not self.whitelist) or (user in self.whitelist)
def add_user(self, user):
"""Add a new user
"""Hook called when a user is added to JupyterHub
This is called:
- When a user first authenticates
- When the hub restarts, for all users.
This method may be a coroutine.
By default, this just adds the user to the whitelist.
Subclasses may do more extensive things,
such as adding actual unix users.
Subclasses may do more extensive things, such as adding actual unix users,
but they should call super to ensure the whitelist is updated.
Note that this should be idempotent, since it is called whenever the hub restarts
for all users.
Args:
user (User): The User wrapper object
"""
if not self.validate_username(user.name):
raise ValueError("Invalid username: %s" % user.name)
if self.whitelist:
self.whitelist.add(user.name)
def delete_user(self, user):
"""Triggered when a user is deleted.
"""Hook called when a user is deleted
Removes the user from the whitelist.
Subclasses should call super to ensure the whitelist is updated.
Args:
user (User): The User wrapper object
"""
self.whitelist.discard(user.name)
def login_url(self, base_url):
"""Override to register a custom login handler"""
"""Override this when registering a custom login handler
Generally used by authenticators that do not use simple form based authentication.
The subclass overriding this is responsible for making sure there is a handler
available to handle the URL returned from this method, using the `get_handlers`
method.
Args:
base_url (str): the base URL of the Hub (e.g. /hub/)
Returns:
str: The login URL, e.g. '/hub/login'
"""
return url_path_join(base_url, 'login')
def logout_url(self, base_url):
"""Override to register a custom logout handler"""
"""Override when registering a custom logout handler
The subclass overriding this is responsible for making sure there is a handler
available to handle the URL returned from this method, using the `get_handlers`
method.
Args:
base_url (str): the base URL of the Hub (e.g. /hub/)
Returns:
str: The logout URL, e.g. '/hub/logout'
"""
return url_path_join(base_url, 'logout')
def get_handlers(self, app):
"""Return any custom handlers the authenticator needs to register
(e.g. for OAuth)
Used in conjunction with `login_url` and `logout_url`.
Args:
app (JupyterHub Application):
the application object, in case it needs to be accessed for info.
Returns:
handlers (list):
list of ``('/url', Handler)`` tuples passed to tornado.
The Hub prefix is added to any URLs.
"""
return [
('/login', LoginHandler),
]
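The docstrings above describe the split between `authenticate`, `normalize_username`, and `check_whitelist`. A minimal sketch of a custom authenticator that only overrides `authenticate` as a coroutine; the hard-coded password dict is purely illustrative.

from tornado import gen
from jupyterhub.auth import Authenticator

class DictionaryAuthenticator(Authenticator):
    """Toy authenticator: checks credentials against an in-memory dict."""
    passwords = {'river': 'serenity'}  # demo data only, never do this in production

    @gen.coroutine
    def authenticate(self, handler, data):
        # return the username on success, None on failure;
        # normalization and whitelist checks are handled by the base class
        if self.passwords.get(data['username']) == data['password']:
            return data['username']

Such a class would be selected with c.JupyterHub.authenticator_class in jupyterhub_config.py.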
class LocalAuthenticator(Authenticator):
"""Base class for Authenticators that work with local *ix users
"""Base class for Authenticators that work with local Linux/UNIX users
Checks for local users, and can attempt to create them if they do not exist.
"""
create_system_users = Bool(False, config=True,
help="""If a user is added that doesn't exist on the system,
should I try to create the system user?
create_system_users = Bool(False,
help="""
If set to True, will attempt to create local system users if they do not exist already.
Supports Linux and BSD variants only.
"""
)
).tag(config=True)
add_user_cmd = Command(
help="""
The command to use for creating users as a list of strings
For each element in the list, the string USERNAME will be replaced with
the user's username. The username will also be appended as the final argument.
For Linux, the default value is:
['adduser', '-q', '--gecos', '""', '--disabled-password']
To specify a custom home directory, set this to:
['adduser', '-q', '--gecos', '""', '--home', '/customhome/USERNAME', '--disabled-password']
This will run the command:
adduser -q --gecos "" --home /customhome/river --disabled-password river
when the user 'river' is created.
"""
).tag(config=True)
@default('add_user_cmd')
def _add_user_cmd_default(self):
"""Guess the most likely-to-work adduser command for each platform"""
if sys.platform == 'darwin':
raise ValueError("I don't know how to create users on OS X")
elif which('pw'):
# Probably BSD
return ['pw', 'useradd', '-m']
else:
# This appears to be the Linux non-interactive adduser command:
return ['adduser', '-q', '--gecos', '""', '--disabled-password']
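A sketch of how the add_user_cmd knob documented above might be set in jupyterhub_config.py; the custom home-directory layout is an assumption taken from the help text, not a recommendation.

# jupyterhub_config.py
c.LocalAuthenticator.create_system_users = True
c.LocalAuthenticator.add_user_cmd = [
    'adduser', '-q', '--gecos', '""',
    '--home', '/customhome/USERNAME',   # USERNAME is replaced with the user's name
    '--disabled-password',
]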
group_whitelist = Set(
config=True,
help="Automatically whitelist anyone in this group.",
)
help="""
Whitelist all users from this UNIX group.
def _group_whitelist_changed(self, name, old, new):
This makes the username whitelist ineffective.
"""
).tag(config=True)
@observe('group_whitelist')
def _group_whitelist_changed(self, change):
"""
Log a warning if both group_whitelist and user whitelist are set.
"""
if self.whitelist:
self.log.warn(
self.log.warning(
"Ignoring username whitelist because group whitelist supplied!"
)
@@ -130,6 +363,9 @@ class LocalAuthenticator(Authenticator):
return super().check_whitelist(username)
def check_group_whitelist(self, username):
"""
If group_whitelist is configured, check if the authenticating user is a member of one of the whitelisted groups.
"""
if not self.group_whitelist:
return False
for grnam in self.group_whitelist:
@@ -144,12 +380,9 @@ class LocalAuthenticator(Authenticator):
@gen.coroutine
def add_user(self, user):
"""Add a new user
By default, this just adds the user to the whitelist.
Subclasses may do more extensive things,
such as adding actual unix users.
"""Hook called whenever a new user is added
If self.create_system_users is set, the system user will be created if it does not exist.
"""
user_exists = yield gen.maybe_future(self.system_user_exists(user))
if not user_exists:
@@ -157,9 +390,9 @@ class LocalAuthenticator(Authenticator):
yield gen.maybe_future(self.add_system_user(user))
else:
raise KeyError("User %s does not exist." % user.name)
yield gen.maybe_future(super().add_user(user))
@staticmethod
def system_user_exists(user):
"""Check if the user exists on the system"""
@@ -169,49 +402,85 @@ class LocalAuthenticator(Authenticator):
return False
else:
return True
@staticmethod
def add_system_user(user):
"""Create a new *ix user on the system. Works on FreeBSD and Linux, at least."""
def add_system_user(self, user):
"""Create a new local UNIX user on the system.
Tested to work on FreeBSD and Linux, at least.
"""
name = user.name
for useradd in (
['pw', 'useradd', '-m'],
['useradd', '-m'],
):
try:
check_output(['which', useradd[0]])
except CalledProcessError:
continue
else:
break
else:
raise RuntimeError("I don't know how to add users on this system.")
check_call(useradd + [name])
cmd = [ arg.replace('USERNAME', name) for arg in self.add_user_cmd ] + [name]
self.log.info("Creating user: %s", ' '.join(map(pipes.quote, cmd)))
p = Popen(cmd, stdout=PIPE, stderr=STDOUT)
p.wait()
if p.returncode:
err = p.stdout.read().decode('utf8', 'replace')
raise RuntimeError("Failed to create system user %s: %s" % (name, err))
class PAMAuthenticator(LocalAuthenticator):
"""Authenticate local *ix users with PAM"""
encoding = Unicode('utf8', config=True,
help="""The encoding to use for PAM"""
)
service = Unicode('login', config=True,
help="""The PAM service to use for authentication."""
)
"""Authenticate local UNIX users with PAM"""
encoding = Unicode('utf8',
help="""
The text encoding to use when communicating with PAM
"""
).tag(config=True)
service = Unicode('login',
help="""
The name of the PAM service to use for authentication
"""
).tag(config=True)
open_sessions = Bool(True,
help="""
Whether to open a new PAM session when spawners are started.
This may trigger things like mounting shared filesystems,
loading credentials, etc. depending on system configuration,
but it does not always work.
If any errors are encountered when opening/closing PAM sessions,
this is automatically set to False.
"""
).tag(config=True)
@gen.coroutine
def authenticate(self, handler, data):
"""Authenticate with PAM, and return the username if login is successful.
Return None otherwise.
"""
username = data['username']
if not self.check_whitelist(username):
return
# simplepam wants bytes, not unicode
# see simplepam#3
busername = username.encode(self.encoding)
bpassword = data['password'].encode(self.encoding)
if simplepam.authenticate(busername, bpassword, service=self.service):
try:
pamela.authenticate(username, data['password'], service=self.service)
except pamela.PAMError as e:
if handler is not None:
self.log.warning("PAM Authentication failed (%s@%s): %s", username, handler.request.remote_ip, e)
else:
self.log.warning("PAM Authentication failed: %s", e)
else:
return username
def pre_spawn_start(self, user, spawner):
"""Open PAM session for user if so configured"""
if not self.open_sessions:
return
try:
pamela.open_session(user.name, service=self.service)
except pamela.PAMError as e:
self.log.warning("Failed to open PAM session for %s: %s", user.name, e)
self.log.warning("Disabling PAM sessions from now on.")
self.open_sessions = False
def post_spawn_stop(self, user, spawner):
"""Close PAM session for user if we were configured to opened one"""
if not self.open_sessions:
return
try:
pamela.close_session(user.name, service=self.service)
except pamela.PAMError as e:
self.log.warning("Failed to close PAM session for %s: %s", user.name, e)
self.log.warning("Disabling PAM sessions from now on.")
self.open_sessions = False
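A hedged configuration sketch for the PAMAuthenticator traitlets introduced above; the service name shown is the default and the decision to disable sessions is only an example.

# jupyterhub_config.py
c.JupyterHub.authenticator_class = 'jupyterhub.auth.PAMAuthenticator'
c.PAMAuthenticator.service = 'login'        # PAM service to authenticate against
c.PAMAuthenticator.open_sessions = False    # skip PAM sessions if they misbehave on this host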

jupyterhub/dbutil.py Normal file

@@ -0,0 +1,93 @@
"""Database utilities for JupyterHub"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
# Based on pgcontents.utils.migrate, used under the Apache license.
from contextlib import contextmanager
import os
from subprocess import check_call
import sys
from tempfile import TemporaryDirectory
_here = os.path.abspath(os.path.dirname(__file__))
ALEMBIC_INI_TEMPLATE_PATH = os.path.join(_here, 'alembic.ini')
ALEMBIC_DIR = os.path.join(_here, 'alembic')
def write_alembic_ini(alembic_ini='alembic.ini', db_url='sqlite:///jupyterhub.sqlite'):
"""Write a complete alembic.ini from our template.
Parameters
----------
alembic_ini: str
path to the alembic.ini file that should be written.
db_url: str
The SQLAlchemy database url, e.g. `sqlite:///jupyterhub.sqlite`.
"""
with open(ALEMBIC_INI_TEMPLATE_PATH) as f:
alembic_ini_tpl = f.read()
with open(alembic_ini, 'w') as f:
f.write(
alembic_ini_tpl.format(
alembic_dir=ALEMBIC_DIR,
db_url=db_url,
)
)
@contextmanager
def _temp_alembic_ini(db_url):
"""Context manager for temporary JupyterHub alembic directory
Temporarily write an alembic.ini file for use with alembic migration scripts.
Context manager yields alembic.ini path.
Parameters
----------
db_url: str
The SQLAlchemy database url, e.g. `sqlite:///jupyterhub.sqlite`.
Returns
-------
alembic_ini: str
The path to the temporary alembic.ini that we have created.
This file will be cleaned up on exit from the context manager.
"""
with TemporaryDirectory() as td:
alembic_ini = os.path.join(td, 'alembic.ini')
write_alembic_ini(alembic_ini, db_url)
yield alembic_ini
def upgrade(db_url, revision='head'):
"""Upgrade the given database to revision.
db_url: str
The SQLAlchemy database url, e.g. `sqlite:///jupyterhub.sqlite`.
revision: str [default: head]
The alembic revision to upgrade to.
"""
with _temp_alembic_ini(db_url) as alembic_ini:
check_call(
['alembic', '-c', alembic_ini, 'upgrade', revision]
)
def _alembic(*args):
"""Run an alembic command with a temporary alembic.ini"""
with _temp_alembic_ini('sqlite:///jupyterhub.sqlite') as alembic_ini:
check_call(
['alembic', '-c', alembic_ini] + list(args)
)
if __name__ == '__main__':
import sys
_alembic(*sys.argv[1:])
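A short usage sketch for the helpers above; the sqlite URL is the module's own default and used only as an example.

from jupyterhub import dbutil

# upgrade a Hub database to the latest alembic revision
dbutil.upgrade('sqlite:///jupyterhub.sqlite')

# arbitrary alembic commands can also be passed through the __main__ hook, e.g.
#   python -m jupyterhub.dbutil history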

jupyterhub/emptyclass.py Normal file

@@ -0,0 +1,13 @@
"""
Simple empty class that returns itself for all functions called on it.
This allows us to call any method of any name on it; the call returns the same instance, so further calls can be chained and are silently ignored.
Primarily used to mock out the statsd client when statsd is not being used
"""
class EmptyClass:
def empty_function(self, *args, **kwargs):
return self
def __getattr__(self, attr):
return self.empty_function
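A quick sketch of how this statsd stand-in behaves: every attribute lookup returns a method that returns the same instance, so chained calls are harmless no-ops.

statsd = EmptyClass()
statsd.incr('login.success')                 # does nothing, returns the EmptyClass instance
statsd.timer('login.authenticate').start()   # chained calls work the same way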


@@ -4,8 +4,9 @@
# Distributed under the terms of the Modified BSD License.
import re
from datetime import datetime, timedelta
from datetime import timedelta
from http.client import responses
from urllib.parse import urlparse
from jinja2 import TemplateNotFound
@@ -16,6 +17,7 @@ from tornado.web import RequestHandler
from tornado import gen, web
from .. import orm
from ..user import User
from ..spawner import LocalProcessSpawner
from ..utils import url_path_join
@@ -45,23 +47,42 @@ class BaseHandler(RequestHandler):
@property
def base_url(self):
return self.settings.get('base_url', '/')
@property
def version_hash(self):
return self.settings.get('version_hash', '')
@property
def subdomain_host(self):
return self.settings.get('subdomain_host', '')
@property
def domain(self):
return self.settings['domain']
@property
def db(self):
return self.settings['db']
@property
def users(self):
return self.settings.setdefault('users', {})
@property
def services(self):
return self.settings.setdefault('services', {})
@property
def hub(self):
return self.settings['hub']
@property
def proxy(self):
return self.settings['proxy']
@property
def statsd(self):
return self.settings['statsd']
@property
def authenticator(self):
return self.settings.get('authenticator', None)
@@ -70,28 +91,28 @@ class BaseHandler(RequestHandler):
"""Roll back any uncommitted transactions from the handler."""
self.db.rollback()
super().finish(*args, **kwargs)
#---------------------------------------------------------------
# Security policies
#---------------------------------------------------------------
@property
def csp_report_uri(self):
return self.settings.get('csp_report_uri',
url_path_join(self.hub.server.base_url, 'security/csp-report')
)
@property
def content_security_policy(self):
"""The default Content-Security-Policy header
Can be overridden by defining Content-Security-Policy in settings['headers']
"""
return '; '.join([
"frame-ancestors 'self'",
"report-uri " + self.csp_report_uri,
])
def set_default_headers(self):
"""
Set any headers passed as tornado_settings['headers'].
@@ -100,7 +121,7 @@ class BaseHandler(RequestHandler):
"""
headers = self.settings.get('headers', {})
headers.setdefault("Content-Security-Policy", self.content_security_policy)
for header_name, header_content in headers.items():
self.set_header(header_name, header_content)
@@ -111,7 +132,7 @@ class BaseHandler(RequestHandler):
@property
def admin_users(self):
return self.settings.setdefault('admin_users', set())
@property
def cookie_max_age_days(self):
return self.settings.get('cookie_max_age_days', None)
@@ -127,8 +148,8 @@ class BaseHandler(RequestHandler):
if orm_token is None:
return None
else:
return orm_token.user
return orm_token.user or orm_token.service
def _user_for_cookie(self, cookie_name, cookie_value=None):
"""Get the User for a given cookie, if there is one"""
cookie_id = self.get_secure_cookie(
@@ -138,98 +159,126 @@ class BaseHandler(RequestHandler):
)
def clear():
self.clear_cookie(cookie_name, path=self.hub.server.base_url)
if cookie_id is None:
if self.get_cookie(cookie_name):
self.log.warn("Invalid or expired cookie token")
self.log.warning("Invalid or expired cookie token")
clear()
return
cookie_id = cookie_id.decode('utf8', 'replace')
user = self.db.query(orm.User).filter(orm.User.cookie_id==cookie_id).first()
u = self.db.query(orm.User).filter(orm.User.cookie_id==cookie_id).first()
user = self._user_from_orm(u)
if user is None:
self.log.warn("Invalid cookie token")
self.log.warning("Invalid cookie token")
# have cookie, but it's not valid. Clear it and start over.
clear()
return user
def _user_from_orm(self, orm_user):
"""return User wrapper from orm.User object"""
if orm_user is None:
return
return self.users[orm_user]
def get_current_user_cookie(self):
"""get_current_user from a cookie token"""
return self._user_for_cookie(self.hub.server.cookie_name)
def get_current_user(self):
"""get current username"""
user = self.get_current_user_token()
if user is not None:
return user
return self.get_current_user_cookie()
def find_user(self, name):
"""Get a user by name
return None if no such user
"""
return orm.User.find(self.db, name)
orm_user = orm.User.find(db=self.db, name=name)
return self._user_from_orm(orm_user)
def user_from_username(self, username):
"""Get ORM User for username"""
"""Get User for username, creating if it doesn't exist"""
user = self.find_user(username)
if user is None:
user = orm.User(name=username)
self.db.add(user)
# not found, create and register user
u = orm.User(name=username)
self.db.add(u)
self.db.commit()
user = self._user_from_orm(u)
self.authenticator.add_user(user)
return user
def clear_login_cookie(self):
user = self.get_current_user()
def clear_login_cookie(self, name=None):
if name is None:
user = self.get_current_user()
else:
user = self.find_user(name)
kwargs = {}
if self.subdomain_host:
kwargs['domain'] = self.domain
if user and user.server:
self.clear_cookie(user.server.cookie_name, path=user.server.base_url)
self.clear_cookie(self.hub.server.cookie_name, path=self.hub.server.base_url)
self.clear_cookie(user.server.cookie_name, path=user.server.base_url, **kwargs)
self.clear_cookie(self.hub.server.cookie_name, path=self.hub.server.base_url, **kwargs)
self.clear_cookie('jupyterhub-services', path=url_path_join(self.base_url, 'services'))
def _set_user_cookie(self, user, server):
# tornado <4.2 has a bug that considers secure==True as soon as
# 'secure' kwarg is passed to set_secure_cookie
if self.request.protocol == 'https':
kwargs = {'secure': True}
else:
kwargs = {}
if self.subdomain_host:
kwargs['domain'] = self.domain
self.log.debug("Setting cookie for %s: %s, %s", user.name, server.cookie_name, kwargs)
self.set_secure_cookie(
server.cookie_name,
user.cookie_id,
path=server.base_url,
**kwargs
)
def set_service_cookie(self, user):
"""set the login cookie for services"""
self._set_user_cookie(user, orm.Server(
cookie_name='jupyterhub-services',
base_url=url_path_join(self.base_url, 'services')
))
def set_server_cookie(self, user):
"""set the login cookie for the single-user server"""
# tornado <4.2 have a bug that consider secure==True as soon as
# 'secure' kwarg is passed to set_secure_cookie
if self.request.protocol == 'https':
kwargs = {'secure':True}
else:
kwargs = {}
self.set_secure_cookie(
user.server.cookie_name,
user.cookie_id,
path=user.server.base_url,
**kwargs
)
self._set_user_cookie(user, user.server)
def set_hub_cookie(self, user):
"""set the login cookie for the Hub"""
# tornado <4.2 have a bug that consider secure==True as soon as
# 'secure' kwarg is passed to set_secure_cookie
if self.request.protocol == 'https':
kwargs = {'secure':True}
else:
kwargs = {}
self.set_secure_cookie(
self.hub.server.cookie_name,
user.cookie_id,
path=self.hub.server.base_url,
**kwargs
)
self._set_user_cookie(user, self.hub.server)
def set_login_cookie(self, user):
"""Set login cookies for the Hub and single-user server."""
if self.subdomain_host and not self.request.host.startswith(self.domain):
self.log.warning(
"Possibly setting cookie on wrong domain: %s != %s",
self.request.host, self.domain)
# create and set a new cookie token for the single-user server
if user.server:
self.set_server_cookie(user)
# set single cookie for services
if self.db.query(orm.Service).filter(orm.Service.server != None).first():
self.set_service_cookie(user)
# create and set a new cookie token for the hub
if not self.get_current_user_cookie():
self.set_hub_cookie(user)
@gen.coroutine
def authenticate(self, data):
auth = self.authenticator
if auth is not None:
result = yield auth.authenticate(self, data)
result = yield auth.get_authenticated_user(self, data)
return result
else:
self.log.error("No authentication function, login is impossible!")
@@ -252,21 +301,17 @@ class BaseHandler(RequestHandler):
return self.settings.get('spawner_class', LocalProcessSpawner)
@gen.coroutine
def spawn_single_user(self, user):
def spawn_single_user(self, user, options=None):
if user.spawn_pending:
raise RuntimeError("Spawn already pending for: %s" % user.name)
tic = IOLoop.current().time()
f = user.spawn(
spawner_class=self.spawner_class,
base_url=self.base_url,
hub=self.hub,
config=self.config,
)
f = user.spawn(options)
@gen.coroutine
def finish_user_spawn(f=None):
"""Finish the user spawn by registering listeners and notifying the proxy.
If the spawner is slow to start, this is passed as an async callback,
otherwise it is called immediately.
"""
@@ -275,34 +320,53 @@ class BaseHandler(RequestHandler):
return
toc = IOLoop.current().time()
self.log.info("User %s server took %.3f seconds to start", user.name, toc-tic)
self.statsd.timing('spawner.success', (toc - tic) * 1000)
yield self.proxy.add_user(user)
user.spawner.add_poll_callback(self.user_stopped, user)
try:
yield gen.with_timeout(timedelta(seconds=self.slow_spawn_timeout), f)
except gen.TimeoutError:
if user.spawn_pending:
# hit timeout, but spawn is still pending
self.log.warn("User %s server is slow to start", user.name)
# waiting_for_response indicates server process has started,
# but is yet to become responsive.
if not user.waiting_for_response:
# still in Spawner.start, which is taking a long time
# we shouldn't poll while spawn is incomplete.
self.log.warning("User %s's server is slow to start (timeout=%s)",
user.name, self.slow_spawn_timeout)
# schedule finish for when the user finishes spawning
IOLoop.current().add_future(f, finish_user_spawn)
else:
raise
# start has finished, but the server hasn't come up
# check if the server died while we were waiting
status = yield user.spawner.poll()
if status is None:
# hit timeout, but server's running. Hope that it'll show up soon enough,
# though it's possible that it started at the wrong URL
self.log.warning("User %s's server is slow to become responsive (timeout=%s)",
user.name, self.slow_spawn_timeout)
self.log.debug("Expecting server for %s at: %s", user.name, user.server.url)
# schedule finish for when the user finishes spawning
IOLoop.current().add_future(f, finish_user_spawn)
else:
toc = IOLoop.current().time()
self.statsd.timing('spawner.failure', (toc - tic) * 1000)
raise web.HTTPError(500, "Spawner failed to start [status=%s]" % status)
else:
yield finish_user_spawn()
@gen.coroutine
def user_stopped(self, user):
"""Callback that fires when the spawner has stopped"""
status = yield user.spawner.poll()
if status is None:
status = 'unknown'
self.log.warn("User %s server stopped, with exit code: %s",
self.log.warning("User %s server stopped, with exit code: %s",
user.name, status,
)
yield self.proxy.delete_user(user)
yield user.stop()
@gen.coroutine
def stop_single_user(self, user):
if user.stop_pending:
@@ -313,7 +377,7 @@ class BaseHandler(RequestHandler):
@gen.coroutine
def finish_stop(f=None):
"""Finish the stop action by noticing that the user is stopped.
If the spawner is slow to stop, this is passed as an async callback,
otherwise it is called immediately.
"""
@@ -322,13 +386,13 @@ class BaseHandler(RequestHandler):
return
toc = IOLoop.current().time()
self.log.info("User %s server took %.3f seconds to stop", user.name, toc-tic)
try:
yield gen.with_timeout(timedelta(seconds=self.slow_stop_timeout), f)
except gen.TimeoutError:
if user.stop_pending:
# hit timeout, but stop is still pending
self.log.warn("User %s server is slow to stop", user.name)
self.log.warning("User %s server is slow to stop", user.name)
# schedule finish for when the server finishes stopping
IOLoop.current().add_future(f, finish_stop)
else:
@@ -367,6 +431,7 @@ class BaseHandler(RequestHandler):
"""render custom error pages"""
exc_info = kwargs.get('exc_info')
message = ''
exception = None
status_message = responses.get(status_code, 'Unknown HTTP Error')
if exc_info:
exception = exc_info[1]
@@ -408,62 +473,122 @@ class Template404(BaseHandler):
class PrefixRedirectHandler(BaseHandler):
"""Redirect anything outside a prefix inside.
Redirects /foo to /prefix/foo, etc.
"""
def get(self):
path = self.request.uri[len(self.base_url):]
uri = self.request.uri
if uri.startswith(self.base_url):
path = self.request.uri[len(self.base_url):]
else:
path = self.request.path
self.redirect(url_path_join(
self.hub.server.base_url, path,
), permanent=False)
class UserSpawnHandler(BaseHandler):
"""Requests to /user/name handled by the Hub
should result in spawning the single-user server and
being redirected to the original.
"""Redirect requests to /user/name/* handled by the Hub.
If logged in, spawn a single-user server and redirect request.
If a user, alice, requests /user/bob/notebooks/mynotebook.ipynb,
she will be redirected to /hub/user/bob/notebooks/mynotebook.ipynb,
which will be handled by this handler,
which will in turn send her to /user/alice/notebooks/mynotebook.ipynb.
"""
@gen.coroutine
def get(self, name):
def get(self, name, user_path):
current_user = self.get_current_user()
if current_user and current_user.name == name:
# logged in, spawn the server
# If people visit /user/:name directly on the Hub,
# the redirects will just loop, because the proxy is bypassed.
# Try to check for that and warn,
# though the user-facing behavior is unchanged
host_info = urlparse(self.request.full_url())
port = host_info.port
if not port:
port = 443 if host_info.scheme == 'https' else 80
if port != self.proxy.public_server.port and port == self.hub.server.port:
self.log.warning("""
Detected possible direct connection to Hub's private ip: %s, bypassing proxy.
This will result in a redirect loop.
Make sure to connect to the proxied public URL %s
""", self.request.full_url(), self.proxy.public_server.url)
# logged in as correct user, spawn the server
if current_user.spawner:
if current_user.spawn_pending:
# spawn has started, but not finished
self.statsd.incr('redirects.user_spawn_pending', 1)
html = self.render_template("spawn_pending.html", user=current_user)
self.finish(html)
return
# spawn has supposedly finished, check on the status
status = yield current_user.spawner.poll()
if status is not None:
yield self.spawn_single_user(current_user)
else:
yield self.spawn_single_user(current_user)
if current_user.spawner.options_form:
self.redirect(url_path_join(self.hub.server.base_url, 'spawn'))
return
else:
yield self.spawn_single_user(current_user)
# set login cookie anew
self.set_login_cookie(current_user)
without_prefix = self.request.uri[len(self.hub.server.base_url):]
target = url_path_join(self.base_url, without_prefix)
if self.subdomain_host:
target = current_user.host + target
self.redirect(target)
self.statsd.incr('redirects.user_after_login')
elif current_user:
# logged in as a different user, redirect
self.statsd.incr('redirects.user_to_user', 1)
target = url_path_join(current_user.url, user_path or '')
self.redirect(target)
else:
# not logged in to the right user,
# clear any cookies and reload (will redirect to login)
# not logged in, clear any cookies and reload
self.statsd.incr('redirects.user_to_login', 1)
self.clear_login_cookie()
self.redirect(url_concat(
self.settings['login_url'],
{'next': self.request.uri,
}))
{'next': self.request.uri},
))
class UserRedirectHandler(BaseHandler):
"""Redirect requests to user servers.
Allows public linking to "this file on your server".
/user-redirect/path/to/foo will redirect to /user/:name/path/to/foo
If the user is not logged in, send to login URL, redirecting back here.
.. versionadded:: 0.7
"""
@web.authenticated
def get(self, path):
user = self.get_current_user()
url = url_path_join(user.url, path)
self.redirect(url)
class CSPReportHandler(BaseHandler):
'''Accepts a content security policy violation report'''
@web.authenticated
def post(self):
'''Log a content security policy violation report'''
self.log.warn("Content security violation: %s",
self.request.body.decode('utf8', 'replace'))
self.log.warning(
"Content security violation: %s",
self.request.body.decode('utf8', 'replace')
)
# Report it to statsd as well
self.statsd.incr('csp_report')
default_handlers = [
(r'/user/([^/]+)/?.*', UserSpawnHandler),
(r'/user/([^/]+)(/.*)?', UserSpawnHandler),
(r'/user-redirect/(.*)?', UserRedirectHandler),
(r'/security/csp-report', CSPReportHandler),
]


@@ -3,6 +3,8 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from urllib.parse import urlparse
from tornado.escape import url_escape
from tornado import gen
@@ -15,7 +17,11 @@ class LogoutHandler(BaseHandler):
user = self.get_current_user()
if user:
self.log.info("User logged out: %s", user.name)
self.clear_login_cookie()
self.clear_login_cookie()
for name in user.other_user_cookies:
self.clear_login_cookie(name)
user.other_user_cookies = set([])
self.statsd.incr('logout')
self.redirect(self.hub.server.base_url, permanent=False)
@@ -27,19 +33,24 @@ class LoginHandler(BaseHandler):
next=url_escape(self.get_argument('next', default='')),
username=username,
login_error=login_error,
custom_login_form=self.authenticator.custom_html,
custom_html=self.authenticator.custom_html,
login_url=self.settings['login_url'],
)
def get(self):
self.statsd.incr('login.request')
next_url = self.get_argument('next', '')
if not next_url.startswith('/'):
# disallow non-absolute next URLs (e.g. full URLs)
if (next_url + '/').startswith('%s://%s/' % (self.request.protocol, self.request.host)):
# treat absolute URLs for our host as absolute paths:
next_url = urlparse(next_url).path
elif not next_url.startswith('/'):
# disallow non-absolute next URLs (e.g. full URLs to other hosts)
next_url = ''
user = self.get_current_user()
if user:
if not next_url:
if user.running:
next_url = user.server.base_url
next_url = user.url
else:
next_url = self.hub.server.base_url
# set new login cookie
@@ -57,15 +68,19 @@ class LoginHandler(BaseHandler):
for arg in self.request.arguments:
data[arg] = self.get_argument(arg)
username = data['username']
authorized = yield self.authenticate(data)
if authorized:
auth_timer = self.statsd.timer('login.authenticate').start()
username = yield self.authenticate(data)
auth_timer.stop(send=False)
if username:
self.statsd.incr('login.success')
self.statsd.timing('login.authenticate.success', auth_timer.ms)
user = self.user_from_username(username)
already_running = False
if user.spawner:
status = yield user.spawner.poll()
already_running = (status == None)
if not already_running:
if not already_running and not user.spawner.options_form:
yield self.spawn_single_user(user)
self.set_login_cookie(user)
next_url = self.get_argument('next', default='')
@@ -75,7 +90,9 @@ class LoginHandler(BaseHandler):
self.redirect(next_url)
self.log.info("User logged in: %s", username)
else:
self.log.debug("Failed login for %s", username)
self.statsd.incr('login.failure')
self.statsd.timing('login.authenticate.failure', auth_timer.ms)
self.log.debug("Failed login for %s", data.get('username', 'unknown user'))
html = self._render(
login_error='Invalid username or password',
username=username,
@@ -83,7 +100,10 @@ class LoginHandler(BaseHandler):
self.finish(html)
# Only logout is a default handler.
# /login renders the login page or the "Login with..." link,
# so it should always be registered.
# /logout clears cookies.
default_handlers = [
(r"/login", LoginHandler),
(r"/logout", LogoutHandler),
]


@@ -3,7 +3,10 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web
from http.client import responses
from jinja2 import TemplateNotFound
from tornado import web, gen
from .. import orm
from ..utils import admin_only, url_path_join
@@ -12,40 +15,119 @@ from .base import BaseHandler
class RootHandler(BaseHandler):
"""Render the Hub root page.
If next argument is passed by single-user server,
redirect to base_url + single-user page.
If logged in, redirects to:
- single-user server if running
- hub home, otherwise
Otherwise, renders login page.
"""
def get(self):
next_url = self.get_argument('next', '')
if not next_url.startswith('/'):
self.log.warning("Disallowing redirect outside JupyterHub: %r", next_url)
next_url = ''
if next_url and next_url.startswith(url_path_join(self.base_url, 'user/')):
# add /hub/ prefix, to ensure we redirect to the right user's server.
# The next request will be handled by UserSpawnHandler,
# ultimately redirecting to the logged-in user's server.
without_prefix = next_url[len(self.base_url):]
next_url = url_path_join(self.hub.server.base_url, without_prefix)
self.log.warning("Redirecting %s to %s. For sharing public links, use /user-redirect/",
self.request.uri, next_url,
)
self.redirect(next_url)
return
user = self.get_current_user()
if user:
if user.running:
url = user.server.base_url
url = user.url
self.log.debug("User is running: %s", url)
self.set_login_cookie(user) # set cookie
else:
url = url_path_join(self.hub.server.base_url, 'home')
self.redirect(url, permanent=False)
return
html = self.render_template('login.html',
login_url=self.settings['login_url'],
custom_html=self.authenticator.custom_html,
)
self.finish(html)
self.log.debug("User is not running: %s", url)
else:
url = self.authenticator.login_url(self.base_url)
self.redirect(url)
class HomeHandler(BaseHandler):
"""Render the user's home page."""
@web.authenticated
@gen.coroutine
def get(self):
user = self.get_current_user()
if user.running:
# trigger poll_and_notify event in case of a server that died
yield user.spawner.poll_and_notify()
html = self.render_template('home.html',
user=self.get_current_user(),
user=user,
)
self.finish(html)
class SpawnHandler(BaseHandler):
"""Handle spawning of single-user servers via form.
GET renders the form, POST handles form submission.
Only enabled when Spawner.options_form is defined.
"""
def _render_form(self, message=''):
user = self.get_current_user()
return self.render_template('spawn.html',
user=user,
spawner_options_form=user.spawner.options_form,
error_message=message,
)
@web.authenticated
def get(self):
"""GET renders form for spawning with user-specified options"""
user = self.get_current_user()
if user.running:
url = user.url
self.log.debug("User is running: %s", url)
self.redirect(url)
return
if user.spawner.options_form:
self.finish(self._render_form())
else:
# not running, no form. Trigger spawn.
self.redirect(user.url)
@web.authenticated
@gen.coroutine
def post(self):
"""POST spawns with user-specified options"""
user = self.get_current_user()
if user.running:
url = user.url
self.log.warning("User is already running: %s", url)
self.redirect(url)
return
form_options = {}
for key, byte_list in self.request.body_arguments.items():
form_options[key] = [ bs.decode('utf8') for bs in byte_list ]
for key, byte_list in self.request.files.items():
form_options["%s_file"%key] = byte_list
try:
options = user.spawner.options_from_form(form_options)
yield self.spawn_single_user(user, options=options)
except Exception as e:
self.log.error("Failed to spawn single-user server with form", exc_info=True)
self.finish(self._render_form(str(e)))
return
self.set_login_cookie(user)
url = user.url
self.redirect(url)
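For context, a hedged sketch of the Spawner hooks this handler drives: options_form supplies the HTML rendered by GET, and options_from_form turns the decoded form data above into the options dict passed to spawn_single_user. The 'env' field and the LocalProcessSpawner base are illustrative assumptions.

from traitlets import default
from jupyterhub.spawner import LocalProcessSpawner

class FormSpawner(LocalProcessSpawner):

    @default('options_form')
    def _options_form_default(self):
        return """
        <label for="env">Extra environment (KEY=VALUE per line)</label>
        <textarea name="env"></textarea>
        """

    def options_from_form(self, formdata):
        # formdata values arrive as lists of utf8-decoded strings (see the handler above)
        options = {'env': {}}
        for line in formdata.get('env', [''])[0].splitlines():
            if '=' in line:
                key, _, value = line.partition('=')
                options['env'][key.strip()] = value.strip()
        return options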
class AdminHandler(BaseHandler):
"""Render the admin page."""
@@ -64,14 +146,14 @@ class AdminHandler(BaseHandler):
}
sorts = self.get_arguments('sort') or default_sort
orders = self.get_arguments('order')
for bad in set(sorts).difference(available):
self.log.warn("ignoring invalid sort: %r", bad)
self.log.warning("ignoring invalid sort: %r", bad)
sorts.remove(bad)
for bad in set(orders).difference({'asc', 'desc'}):
self.log.warn("ignoring invalid order: %r", bad)
self.log.warning("ignoring invalid order: %r", bad)
orders.remove(bad)
# add default sort as secondary
for s in default_sort:
if s not in sorts:
@@ -81,16 +163,17 @@ class AdminHandler(BaseHandler):
orders.append(default_order[col])
else:
orders = orders[:len(sorts)]
# this could be one incomprehensible nested list comprehension
# get User columns
cols = [ getattr(orm.User, mapping.get(c, c)) for c in sorts ]
# get User.col.desc() order objects
ordered = [ getattr(c, o)() for c, o in zip(cols, orders) ]
users = self.db.query(orm.User).order_by(*ordered)
running = users.filter(orm.User.server != None)
users = [ self._user_from_orm(u) for u in users ]
running = [ u for u in users if u.running ]
html = self.render_template('admin.html',
user=self.get_current_user(),
admin_access=self.settings.get('admin_access', False),
@@ -101,8 +184,43 @@ class AdminHandler(BaseHandler):
self.finish(html)
class ProxyErrorHandler(BaseHandler):
"""Handler for rendering proxy error pages"""
def get(self, status_code_s):
status_code = int(status_code_s)
status_message = responses.get(status_code, 'Unknown HTTP Error')
# build template namespace
hub_home = url_path_join(self.hub.server.base_url, 'home')
message_html = ''
if status_code == 503:
message_html = ' '.join([
"Your server appears to be down.",
"Try restarting it <a href='%s'>from the hub</a>" % hub_home
])
ns = dict(
status_code=status_code,
status_message=status_message,
message_html=message_html,
logo_url=hub_home,
)
self.set_header('Content-Type', 'text/html')
# render the template
try:
html = self.render_template('%s.html' % status_code, **ns)
except TemplateNotFound:
self.log.debug("No template for %d", status_code)
html = self.render_template('error.html', **ns)
self.write(html)
default_handlers = [
(r'/', RootHandler),
(r'/home', HomeHandler),
(r'/admin', AdminHandler),
(r'/spawn', SpawnHandler),
(r'/error/(\d+)', ProxyErrorHandler),
]


@@ -1,6 +1,7 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import os
from tornado.web import StaticFileHandler
class CacheControlStaticFilesHandler(StaticFileHandler):
@@ -14,4 +15,14 @@ class CacheControlStaticFilesHandler(StaticFileHandler):
def set_extra_headers(self, path):
if "v" not in self.request.arguments:
self.add_header("Cache-Control", "no-cache")
class LogoHandler(StaticFileHandler):
"""A singular handler for serving the logo."""
def get(self):
return super().get('')
@classmethod
def get_absolute_path(cls, root, path):
"""We only serve one file, ignore relative path"""
return os.path.abspath(root)


@@ -3,17 +3,14 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from datetime import datetime, timedelta
import errno
from datetime import datetime
import json
import socket
from urllib.parse import quote
from tornado import gen
from tornado.log import app_log
from tornado.httpclient import HTTPRequest, AsyncHTTPClient, HTTPError
from tornado.httpclient import HTTPRequest, AsyncHTTPClient
from sqlalchemy.types import TypeDecorator, VARCHAR
from sqlalchemy.types import TypeDecorator, TEXT
from sqlalchemy import (
inspect,
Column, Integer, ForeignKey, Unicode, Boolean,
@@ -23,11 +20,11 @@ from sqlalchemy.ext.declarative import declarative_base, declared_attr
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy.pool import StaticPool
from sqlalchemy.sql.expression import bindparam
from sqlalchemy import create_engine
from sqlalchemy import create_engine, Table
from .utils import (
random_port, url_path_join, wait_for_server, wait_for_http_server,
new_token, hash_token, compare_token,
new_token, hash_token, compare_token, can_connect,
)
@@ -40,7 +37,7 @@ class JSONDict(TypeDecorator):
"""
impl = VARCHAR
impl = TEXT
def process_bind_param(self, value, dialect):
if value is not None:
@@ -60,26 +57,26 @@ Base.log = app_log
class Server(Base):
"""The basic state of a server
connection and cookie info
"""
__tablename__ = 'servers'
id = Column(Integer, primary_key=True)
proto = Column(Unicode, default='http')
ip = Column(Unicode, default='')
proto = Column(Unicode(15), default='http')
ip = Column(Unicode(255), default='') # could also be a DNS name
port = Column(Integer, default=random_port)
base_url = Column(Unicode, default='/')
cookie_name = Column(Unicode, default='cookie')
base_url = Column(Unicode(255), default='/')
cookie_name = Column(Unicode(255), default='cookie')
def __repr__(self):
return "<Server(%s:%s)>" % (self.ip, self.port)
@property
def host(self):
ip = self.ip
if ip in {'', '0.0.0.0'}:
# when listening on all interfaces, connect to localhost
ip = 'localhost'
ip = '127.0.0.1'
return "{proto}://{ip}:{port}".format(
proto=self.proto,
ip=ip,
@@ -92,42 +89,34 @@ class Server(Base):
host=self.host,
uri=self.base_url,
)
@property
def bind_url(self):
"""representation of URL used for binding
Never used in APIs, only in logging, since it can be a non-connectable value such as '', meaning all interfaces.
"""
if self.ip in {'', '0.0.0.0'}:
return self.url.replace('localhost', self.ip or '*', 1)
return self.url.replace('127.0.0.1', self.ip or '*', 1)
return self.url
@gen.coroutine
def wait_up(self, timeout=10, http=False):
"""Wait for this server to come up"""
if http:
yield wait_for_http_server(self.url, timeout=timeout)
else:
yield wait_for_server(self.ip or 'localhost', self.port, timeout=timeout)
yield wait_for_server(self.ip or '127.0.0.1', self.port, timeout=timeout)
def is_up(self):
"""Is the server accepting connections?"""
try:
socket.create_connection((self.ip or 'localhost', self.port))
except socket.error as e:
if e.errno == errno.ECONNREFUSED:
return False
else:
raise
else:
return True
return can_connect(self.ip or '127.0.0.1', self.port)
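A small illustration of the connect URL vs. bind URL distinction encoded above; the values follow directly from the properties and the port is an arbitrary example.

s = Server(ip='', port=8000, proto='http', base_url='/')
s.url       # 'http://127.0.0.1:8000/'  -> connect via loopback when bound to all interfaces
s.bind_url  # 'http://*:8000/'          -> logging-only representation of the bind address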
class Proxy(Base):
"""A configurable-http-proxy instance.
A proxy consists of the API server info and the public-facing server info,
plus an auth token for configuring the proxy table.
"""
@@ -138,7 +127,7 @@ class Proxy(Base):
public_server = relationship(Server, primaryjoin=_public_server_id == Server.id)
_api_server_id = Column(Integer, ForeignKey('servers.id'))
api_server = relationship(Server, primaryjoin=_api_server_id == Server.id)
def __repr__(self):
if self.public_server:
return "<%s %s:%s>" % (
@@ -146,7 +135,7 @@ class Proxy(Base):
)
else:
return "<%s [unconfigured]>" % self.__class__.__name__
def api_request(self, path, method='GET', body=None, client=None):
"""Make an authenticated API request of the proxy"""
client = client or AsyncHTTPClient()
@@ -163,14 +152,47 @@ class Proxy(Base):
return client.fetch(req)
@gen.coroutine
def add_service(self, service, client=None):
"""Add a service's server to the proxy table."""
if not service.server:
raise RuntimeError(
"Service %s does not have an http endpoint to add to the proxy.", service.name)
self.log.info("Adding service %s to proxy %s => %s",
service.name, service.proxy_path, service.server.host,
)
yield self.api_request(service.proxy_path,
method='POST',
body=dict(
target=service.server.host,
service=service.name,
),
client=client,
)
@gen.coroutine
def delete_service(self, service, client=None):
"""Remove a service's server from the proxy table."""
self.log.info("Removing service %s from proxy", service.name)
yield self.api_request(service.proxy_path,
method='DELETE',
client=client,
)
@gen.coroutine
def add_user(self, user, client=None):
"""Add a user's server to the proxy table."""
self.log.info("Adding user %s to proxy %s => %s",
user.name, user.server.base_url, user.server.host,
user.name, user.proxy_path, user.server.host,
)
yield self.api_request(user.server.base_url,
if user.spawn_pending:
raise RuntimeError(
"User %s's spawn is pending, shouldn't be added to the proxy yet!", user.name)
yield self.api_request(user.proxy_path,
method='POST',
body=dict(
target=user.server.host,
@@ -178,26 +200,43 @@ class Proxy(Base):
),
client=client,
)
@gen.coroutine
def delete_user(self, user, client=None):
"""Remove a user's server to the proxy table."""
"""Remove a user's server from the proxy table."""
self.log.info("Removing user %s from proxy", user.name)
yield self.api_request(user.server.base_url,
yield self.api_request(user.proxy_path,
method='DELETE',
client=client,
)
@gen.coroutine
def add_all_users(self):
def add_all_services(self, service_dict):
"""Update the proxy table from the database.
Used when loading up a new proxy.
"""
db = inspect(self).session
futures = []
for user in db.query(User):
if (user.server):
for orm_service in db.query(Service):
service = service_dict[orm_service.name]
if service.server:
futures.append(self.add_service(service))
# wait after submitting them all
for f in futures:
yield f
@gen.coroutine
def add_all_users(self, user_dict):
"""Update the proxy table from the database.
Used when loading up a new proxy.
"""
db = inspect(self).session
futures = []
for orm_user in db.query(User):
user = user_dict[orm_user]
if user.running:
futures.append(self.add_user(user))
# wait after submitting them all
for f in futures:
@@ -210,18 +249,38 @@ class Proxy(Base):
return json.loads(resp.body.decode('utf8', 'replace'))
@gen.coroutine
def check_routes(self, routes=None):
"""Check that all users are properly"""
def check_routes(self, user_dict, service_dict, routes=None):
"""Check that all users are properly routed on the proxy"""
if not routes:
routes = yield self.get_routes()
have_routes = { r['user'] for r in routes.values() if 'user' in r }
user_routes = { r['user'] for r in routes.values() if 'user' in r }
futures = []
db = inspect(self).session
for user in db.query(User).filter(User.server != None):
if user.name not in have_routes:
self.log.warn("Adding missing route for %s", user.name)
futures.append(self.add_user(user))
for orm_user in db.query(User):
user = user_dict[orm_user]
if user.running:
if user.name not in user_routes:
self.log.warning("Adding missing route for %s (%s)", user.name, user.server)
futures.append(self.add_user(user))
else:
# User not running, make sure it's not in the table
if user.name in user_routes:
self.log.warning("Removing route for not running %s", user.name)
futures.append(self.delete_user(user))
# check service routes
service_routes = { r['service'] for r in routes.values() if 'service' in r }
for orm_service in db.query(Service).filter(Service.server != None):
service = service_dict[orm_service.name]
if service.server is None:
# This should never be True, but it seems to happen on rare occasions.
# catch filter bug, either in sqlalchemy or my understanding of its behavior
self.log.error("Service %s has no server, but wasn't filtered out.", service)
continue
if service.name not in service_routes:
self.log.warning("Adding missing route for %s (%s)", service.name, service.server)
futures.append(self.add_service(service))
for f in futures:
yield f
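A hedged sketch of the shape of the routes dict that check_routes consumes, based on the keys it reads ('user', 'service'); the concrete targets and paths are invented for illustration.

routes = {
    '/user/river':         {'user': 'river', 'target': 'http://127.0.0.1:54321'},
    '/services/cull-idle': {'service': 'cull-idle', 'target': 'http://127.0.0.1:9999'},
    '/':                   {'target': 'http://127.0.0.1:8081'},  # default route back to the Hub
}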
@@ -229,9 +288,9 @@ class Proxy(Base):
class Hub(Base):
"""Bring it all together at the hub.
The Hub is a server, plus its API path suffix;
the api_url is the full URL plus the api_path suffix on the end
of the server base_url.
"""
@@ -239,12 +298,13 @@ class Hub(Base):
id = Column(Integer, primary_key=True)
_server_id = Column(Integer, ForeignKey('servers.id'))
server = relationship(Server, primaryjoin=_server_id == Server.id)
host = ''
@property
def api_url(self):
"""return the full API url (with proto://host...)"""
return url_path_join(self.server.url, 'api')
def __repr__(self):
if self.server:
return "<%s %s:%s>" % (
@@ -254,38 +314,69 @@ class Hub(Base):
return "<%s [unconfigured]>" % self.__class__.__name__
# user:group many:many mapping table
user_group_map = Table('user_group_map', Base.metadata,
Column('user_id', ForeignKey('users.id'), primary_key=True),
Column('group_id', ForeignKey('groups.id'), primary_key=True),
)
class Group(Base):
"""User Groups"""
__tablename__ = 'groups'
id = Column(Integer, primary_key=True, autoincrement=True)
name = Column(Unicode(1023), unique=True)
users = relationship('User', secondary='user_group_map', back_populates='groups')
def __repr__(self):
return "<%s %s (%i users)>" % (
self.__class__.__name__, self.name, len(self.users)
)
@classmethod
def find(cls, db, name):
"""Find a group by name.
Returns None if not found.
"""
return db.query(cls).filter(cls.name==name).first()
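A minimal sketch of using the Group/User many-to-many mapping defined above; db is assumed to be an existing SQLAlchemy session bound to these tables, and the user 'river' is assumed to exist.

group = Group(name='researchers')
group.users.append(User.find(db, 'river'))   # assumes the user already exists
db.add(group)
db.commit()

Group.find(db, 'researchers').users          # -> [<User(river@...)>]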
class User(Base):
"""The User table
Each user has a single server,
and multiple tokens used for authorization.
API tokens grant access to the Hub's REST API.
These are used by single-user servers to authenticate requests,
and external services to manipulate the Hub.
Cookies are set with a single ID.
Resetting the Cookie ID invalidates all cookies, forcing user to login again.
A `state` column contains a JSON dict,
used for restoring state of a Spawner.
"""
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(Unicode)
id = Column(Integer, primary_key=True, autoincrement=True)
name = Column(Unicode(1023), unique=True)
# should we allow multiple servers per user?
_server_id = Column(Integer, ForeignKey('servers.id'))
_server_id = Column(Integer, ForeignKey('servers.id', ondelete="SET NULL"))
server = relationship(Server, primaryjoin=_server_id == Server.id)
admin = Column(Boolean, default=False)
last_activity = Column(DateTime, default=datetime.utcnow)
api_tokens = relationship("APIToken", backref="user")
cookie_id = Column(Unicode, default=new_token)
cookie_id = Column(Unicode(1023), default=new_token)
# User.state is actually Spawner state
# We will need to figure something else out if/when we have multiple spawners per user
state = Column(JSONDict)
spawner = None
spawn_pending = False
stop_pending = False
# Authenticators can store their state here:
auth_state = Column(JSONDict)
# group mapping
groups = relationship('Group', secondary='user_group_map', back_populates='users')
other_user_cookies = set([])
def __repr__(self):
if self.server:
return "<{cls}({name}@{ip}:{port})>".format(
@@ -299,32 +390,14 @@ class User(Base):
cls=self.__class__.__name__,
name=self.name,
)
@property
def escaped_name(self):
"""My name, escaped for use in URLs, cookies, etc."""
return quote(self.name, safe='@')
@property
def running(self):
"""property for whether a user has a running server"""
if self.spawner is None:
return False
if self.server is None:
return False
return True
def new_api_token(self):
"""Create a new API token"""
assert self.id is not None
db = inspect(self).session
token = new_token()
orm_token = APIToken(user_id=self.id)
orm_token.token = token
db.add(orm_token)
db.commit()
return token
def new_api_token(self, token=None):
"""Create a new API token
If `token` is given, load that token.
"""
return APIToken.new(token=token, user=self)
@classmethod
def find(cls, db, name):
"""Find a user by name.
@@ -332,172 +405,164 @@ class User(Base):
Returns None if not found.
"""
return db.query(cls).filter(cls.name==name).first()
class Service(Base):
"""A service run with JupyterHub
A service is similar to a User without a Spawner.
A service can have API tokens for accessing the Hub's API.
It has:
- name
- admin
- api tokens
- server (if proxied http endpoint)
In addition to what it has in common with users, a Service has extra info:
- pid: the process id (if managed)
"""
__tablename__ = 'services'
id = Column(Integer, primary_key=True, autoincrement=True)
@gen.coroutine
def spawn(self, spawner_class, base_url='/', hub=None, config=None):
"""Start the user's spawner"""
db = inspect(self).session
if hub is None:
hub = db.query(Hub).first()
self.server = Server(
cookie_name='%s-%s' % (hub.server.cookie_name, quote(self.name, safe='')),
base_url=url_path_join(base_url, 'user', self.escaped_name),
)
db.add(self.server)
db.commit()
api_token = self.new_api_token()
db.commit()
spawner = self.spawner = spawner_class(
config=config,
user=self,
hub=hub,
db=db,
)
# we are starting a new server, make sure it doesn't restore state
spawner.clear_state()
spawner.api_token = api_token
self.spawn_pending = True
# wait for spawner.start to return
try:
f = spawner.start()
yield gen.with_timeout(timedelta(seconds=spawner.start_timeout), f)
except Exception as e:
if isinstance(e, gen.TimeoutError):
self.log.warn("{user}'s server failed to start in {s} seconds, giving up".format(
user=self.name, s=spawner.start_timeout,
))
e.reason = 'timeout'
else:
self.log.error("Unhandled error starting {user}'s server: {error}".format(
user=self.name, error=e,
))
e.reason = 'error'
try:
yield self.stop()
except Exception:
self.log.error("Failed to cleanup {user}'s server that failed to start".format(
user=self.name,
), exc_info=True)
# raise original exception
raise e
spawner.start_polling()
# common user interface:
name = Column(Unicode(1023), unique=True)
admin = Column(Boolean, default=False)
# store state
self.state = spawner.get_state()
self.last_activity = datetime.utcnow()
db.commit()
try:
yield self.server.wait_up(http=True, timeout=spawner.http_timeout)
except Exception as e:
if isinstance(e, TimeoutError):
self.log.warn(
"{user}'s server never showed up at {url} "
"after {http_timeout} seconds. Giving up".format(
user=self.name,
url=self.server.url,
http_timeout=spawner.http_timeout,
)
)
e.reason = 'timeout'
else:
e.reason = 'error'
self.log.error("Unhandled error waiting for {user}'s server to show up at {url}: {error}".format(
user=self.name, url=self.server.url, error=e,
))
try:
yield self.stop()
except Exception:
self.log.error("Failed to cleanup {user}'s server that failed to start".format(
user=self.name,
), exc_info=True)
# raise original TimeoutError
raise e
self.spawn_pending = False
return self
api_tokens = relationship("APIToken", backref="service")
@gen.coroutine
def stop(self):
"""Stop the user's spawner
and cleanup after it.
# service-specific interface
_server_id = Column(Integer, ForeignKey('servers.id'))
server = relationship(Server, primaryjoin=_server_id == Server.id)
pid = Column(Integer)
def new_api_token(self, token=None):
"""Create a new API token
If `token` is given, load that token.
"""
self.spawn_pending = False
if self.spawner is None:
return
self.spawner.stop_polling()
self.stop_pending = True
try:
status = yield self.spawner.poll()
if status is None:
yield self.spawner.stop()
self.spawner.clear_state()
self.state = self.spawner.get_state()
self.server = None
inspect(self).session.commit()
finally:
self.stop_pending = False
return APIToken.new(token=token, service=self)
@classmethod
def find(cls, db, name):
"""Find a service by name.
Returns None if not found.
"""
return db.query(cls).filter(cls.name==name).first()
class APIToken(Base):
"""An API token"""
__tablename__ = 'api_tokens'
# _constraint = ForeignKeyConstraint(['user_id', 'server_id'], ['users.id', 'services.id'])
@declared_attr
def user_id(cls):
return Column(Integer, ForeignKey('users.id'))
return Column(Integer, ForeignKey('users.id', ondelete="CASCADE"), nullable=True)
@declared_attr
def service_id(cls):
return Column(Integer, ForeignKey('services.id', ondelete="CASCADE"), nullable=True)
id = Column(Integer, primary_key=True)
hashed = Column(Unicode)
prefix = Column(Unicode)
hashed = Column(Unicode(1023))
prefix = Column(Unicode(1023))
prefix_length = 4
algorithm = "sha512"
rounds = 16384
salt_bytes = 8
@property
def token(self):
raise AttributeError("token is write-only")
@token.setter
def token(self, token):
"""Store the hashed value and prefix for a token"""
self.prefix = token[:self.prefix_length]
self.hashed = hash_token(token, rounds=self.rounds, salt=self.salt_bytes, algorithm=self.algorithm)
def __repr__(self):
return "<{cls}('{pre}...', user='{u}')>".format(
if self.user is not None:
kind = 'user'
name = self.user.name
elif self.service is not None:
kind = 'service'
name = self.service.name
else:
# this shouldn't happen
kind = 'owner'
name = 'unknown'
return "<{cls}('{pre}...', {kind}='{name}')>".format(
cls=self.__class__.__name__,
pre=self.prefix,
u=self.user.name,
kind=kind,
name=name,
)
@classmethod
def find(cls, db, token):
def find(cls, db, token, *, kind=None):
"""Find a token object by value.
Returns None if not found.
`kind='user'` only returns API tokens for users
`kind='service'` only returns API tokens for services
"""
prefix = token[:cls.prefix_length]
# since we can't filter on hashed values, filter on prefix
# so we aren't comparing with all tokens
prefix_match = db.query(cls).filter(bindparam('prefix', prefix).startswith(cls.prefix))
if kind == 'user':
prefix_match = prefix_match.filter(cls.user_id != None)
elif kind == 'service':
prefix_match = prefix_match.filter(cls.service_id != None)
elif kind is not None:
raise ValueError("kind must be 'user', 'service', or None, not %r" % kind)
for orm_token in prefix_match:
if orm_token.match(token):
return orm_token
def match(self, token):
"""Is this my token?"""
return compare_token(self.hashed, token)
@classmethod
def new(cls, token=None, user=None, service=None):
"""Generate a new API token for a user or service"""
assert user or service
assert not (user and service)
db = inspect(user or service).session
if token is None:
token = new_token()
else:
if len(token) < 8:
raise ValueError("Tokens must be at least 8 characters, got %r" % token)
found = APIToken.find(db, token)
if found:
raise ValueError("Collision on token: %s..." % token[:4])
orm_token = APIToken(token=token)
if user:
assert user.id is not None
orm_token.user_id = user.id
else:
assert service.id is not None
orm_token.service_id = service.id
db.add(orm_token)
db.commit()
return token
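# Illustrative sketch, not part of the diff: how the token helpers above fit together.
# Assumes an in-memory session from new_session_factory (defined just below); the user
# name is made up.
from jupyterhub import orm

db = orm.new_session_factory('sqlite:///:memory:')()
user = orm.User(name='rhea')
db.add(user)
db.commit()

token = user.new_api_token()      # delegates to APIToken.new(token=None, user=user)
assert orm.APIToken.find(db, token, kind='user') is not None
assert orm.APIToken.find(db, token, kind='service') is None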
def new_session_factory(url="sqlite:///:memory:", reset=False, **kwargs):
"""Create a new session at url"""
if url.startswith('sqlite'):
kwargs.setdefault('connect_args', {'check_same_thread': False})
elif url.startswith('mysql'):
kwargs.setdefault('pool_recycle', 60)
if url.endswith(':memory:'):
# If we're using an in-memory database, ensure that only one connection

jupyterhub/services/auth.py (new file, 301 lines)
@@ -0,0 +1,301 @@
"""Authenticating services with JupyterHub
Cookies are sent to the Hub for verification; the Hub replies with a JSON model describing the authenticated user.
HubAuth can be used in any application, even outside tornado.
HubAuthenticated is a mixin class for tornado handlers that should authenticate with the Hub.
"""
import os
import socket
import time
from urllib.parse import quote
import requests
from tornado.log import app_log
from tornado.web import HTTPError
from traitlets.config import Configurable
from traitlets import Unicode, Integer, Instance, default
from ..utils import url_path_join
class _ExpiringDict(dict):
"""Dict-like cache for Hub API requests
Values will expire after max_age seconds.
A monotonic timer is used (time.monotonic).
A max_age of 0 means cache forever.
"""
max_age = 0
def __init__(self, max_age=0):
self.max_age = max_age
self.timestamps = {}
self.values = {}
def __setitem__(self, key, value):
"""Store key and record timestamp"""
self.timestamps[key] = time.monotonic()
self.values[key] = value
def _check_age(self, key):
"""Check timestamp for a key"""
if key not in self.values:
# not registered, nothing to do
return
now = time.monotonic()
timestamp = self.timestamps[key]
if self.max_age > 0 and timestamp + self.max_age < now:
self.values.pop(key)
self.timestamps.pop(key)
def __contains__(self, key):
"""dict check for `key in dict`"""
self._check_age(key)
return key in self.values
def __getitem__(self, key):
"""Check age before returning value"""
self._check_age(key)
return self.values[key]
def get(self, key, default=None):
"""dict-like get:"""
try:
return self[key]
except KeyError:
return default
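# Quick illustration (not part of the diff) of the cache semantics implemented above.
cache = _ExpiringDict(max_age=2)
cache['cookie-abc'] = {'name': 'rhea'}              # stored with a monotonic timestamp
assert cache.get('cookie-abc') == {'name': 'rhea'}  # fresh entries are returned
time.sleep(2.1)                                     # once max_age seconds have passed...
assert cache.get('cookie-abc') is None              # ...the entry is evicted on access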
class HubAuth(Configurable):
"""A class for authenticating with JupyterHub
This can be used by any application.
If using tornado, use via :class:`HubAuthenticated` mixin.
If using manually, use the ``.user_for_cookie(cookie_value)`` method
to identify the user corresponding to a given cookie value.
The following config must be set:
- api_token (token for authenticating with JupyterHub API),
fetched from the JUPYTERHUB_API_TOKEN env by default.
The following config MAY be set:
- api_url: the base URL of the Hub's internal API,
fetched from JUPYTERHUB_API_URL by default.
- cookie_cache_max_age: the number of seconds responses
from the Hub should be cached.
- login_url (the *public* ``/hub/login`` URL of the Hub).
- cookie_name: the name of the cookie I should be using,
if different from the default (unlikely).
"""
# where is the hub
api_url = Unicode(os.environ.get('JUPYTERHUB_API_URL') or 'http://127.0.0.1:8081/hub/api',
help="""The base API URL of the Hub.
Typically http://hub-ip:hub-port/hub/api
"""
).tag(config=True)
login_url = Unicode('/hub/login',
help="""The login URL of the Hub
Typically /hub/login
"""
).tag(config=True)
api_token = Unicode(os.environ.get('JUPYTERHUB_API_TOKEN', ''),
help="""API key for accessing Hub API.
Generate with `jupyterhub token [username]` or add to JupyterHub.services config.
"""
).tag(config=True)
cookie_name = Unicode('jupyterhub-services',
help="""The name of the cookie I should be looking for"""
).tag(config=True)
cookie_cache_max_age = Integer(300,
help="""The maximum time (in seconds) to cache the Hub's response for cookie authentication.
A larger value reduces load on the Hub and occasional response lag.
A smaller value reduces propagation time of changes on the Hub (rare).
Default: 300 (five minutes)
"""
).tag(config=True)
cookie_cache = Instance(_ExpiringDict, allow_none=False)
@default('cookie_cache')
def _cookie_cache(self):
return _ExpiringDict(self.cookie_cache_max_age)
def user_for_cookie(self, encrypted_cookie, use_cache=True):
"""Ask the Hub to identify the user for a given cookie.
Args:
encrypted_cookie (str): the cookie value (not decrypted, the Hub will do that)
use_cache (bool): Specify use_cache=False to skip cached cookie values (default: True)
Returns:
user_model (dict): The user model, if a user is identified, None if authentication fails.
The 'name' field contains the user's name.
"""
if use_cache:
cached = self.cookie_cache.get(encrypted_cookie)
if cached is not None:
return cached
try:
r = requests.get(
url_path_join(self.api_url,
"authorizations/cookie",
self.cookie_name,
quote(encrypted_cookie, safe=''),
),
headers = {
'Authorization' : 'token %s' % self.api_token,
},
)
except requests.ConnectionError:
msg = "Failed to connect to Hub API at %r." % self.api_url
msg += " Is the Hub accessible at this URL (from host: %s)?" % socket.gethostname()
if '127.0.0.1' in self.api_url:
msg += " Make sure to set c.JupyterHub.hub_ip to an IP accessible to" + \
" single-user servers if the servers are not on the same host as the Hub."
raise HTTPError(500, msg)
if r.status_code == 404:
data = None
elif r.status_code == 403:
app_log.error("I don't have permission to verify cookies, my auth token may have expired: [%i] %s", r.status_code, r.reason)
raise HTTPError(500, "Permission failure checking authorization, I may need a new token")
elif r.status_code >= 500:
app_log.error("Upstream failure verifying auth token: [%i] %s", r.status_code, r.reason)
raise HTTPError(502, "Failed to check authorization (upstream problem)")
elif r.status_code >= 400:
app_log.warning("Failed to check authorization: [%i] %s", r.status_code, r.reason)
raise HTTPError(500, "Failed to check authorization")
else:
data = r.json()
self.cookie_cache[encrypted_cookie] = data
return data
def get_user(self, handler):
"""Get the Hub user for a given tornado handler.
Checks cookie with the Hub to identify the current user.
Args:
handler (tornado.web.RequestHandler): the current request handler
Returns:
user_model (dict): The user model, if a user is identified, None if authentication fails.
The 'name' field contains the user's name.
"""
# only allow this to be called once per handler
# avoids issues if an error is raised,
# since this may be called again when trying to render the error page
if hasattr(handler, '_cached_hub_user'):
return handler._cached_hub_user
handler._cached_hub_user = None
encrypted_cookie = handler.get_cookie(self.cookie_name)
if encrypted_cookie:
user_model = self.user_for_cookie(encrypted_cookie)
handler._cached_hub_user = user_model
return user_model
else:
app_log.debug("No token cookie")
return None
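# A rough sketch (not part of the diff) of using HubAuth outside tornado, as the class
# docstring suggests. Assumes JUPYTERHUB_API_TOKEN and JUPYTERHUB_API_URL are set in the
# environment (they are for managed services); `raw_cookie` stands in for the value of
# the jupyterhub-services cookie taken from an incoming request.
auth = HubAuth()
user_model = auth.user_for_cookie(raw_cookie)
if user_model is None:
    pass  # not authenticated: send the client to auth.login_url
else:
    app_log.info("Authenticated Hub user: %s", user_model['name'])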
class HubAuthenticated(object):
"""Mixin for tornado handlers that are authenticated with JupyterHub
A handler that mixes this in must have the following attributes/properties:
- .hub_auth: A HubAuth instance
- .hub_users: A set of usernames to allow.
If left unspecified or None, username will not be checked.
- .hub_groups: A set of group names to allow.
If left unspecified or None, groups will not be checked.
Examples::
class MyHandler(HubAuthenticated, web.RequestHandler):
hub_users = {'inara', 'mal'}
def initialize(self, hub_auth):
self.hub_auth = hub_auth
@web.authenticated
def get(self):
...
"""
hub_users = None # set of allowed users
hub_groups = None # set of allowed groups
# self.hub_auth must be a HubAuth instance.
# If nothing specified, use default config,
# which will be configured with defaults
# based on JupyterHub environment variables for services.
_hub_auth = None
@property
def hub_auth(self):
if self._hub_auth is None:
self._hub_auth = HubAuth()
return self._hub_auth
@hub_auth.setter
def hub_auth(self, auth):
self._hub_auth = auth
def check_hub_user(self, user_model):
"""Check whether Hub-authenticated user should be allowed.
Returns the input if the user should be allowed, None otherwise.
Override if you want to check anything other than the username's presence in hub_users list.
Args:
user_model (dict): the user model returned from :class:`HubAuth`
Returns:
user_model (dict): The user model if the user should be allowed, None otherwise.
"""
if self.hub_users is None and self.hub_groups is None:
# no whitelist specified, allow any authenticated Hub user
return user_model
name = user_model['name']
if self.hub_users and name in self.hub_users:
# user in whitelist
return user_model
elif self.hub_groups and set(user_model['groups']).intersection(self.hub_groups):
# group in whitelist
return user_model
else:
app_log.warning("Not allowing Hub user %s" % name)
return None
def get_current_user(self):
"""Tornado's authentication method
Returns:
user_model (dict): The user model, if a user is identified, None if authentication fails.
"""
user_model = self.hub_auth.get_user(self)
if not user_model:
return
return self.check_hub_user(user_model)
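# Minimal illustrative wiring of the mixin above into a tornado service (not part of
# the diff). It relies on the default hub_auth, configured from the service environment
# variables; the handler, whitelist, and port are made up.
from tornado import ioloop, web

class WhoAmIHandler(HubAuthenticated, web.RequestHandler):
    hub_users = {'inara', 'mal'}  # optional; leave as None to allow any Hub user

    @web.authenticated
    def get(self):
        self.write(self.get_current_user())

application = web.Application([(r'/whoami', WhoAmIHandler)])
application.listen(10101)
ioloop.IOLoop.current().start()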


@@ -0,0 +1,258 @@
"""A service is a process that talks to JupyterHub
Cases:
Managed:
- managed by JupyterHub (always subprocess, no custom Spawners)
- always a long-running process
- managed services are restarted automatically if they exit unexpectedly
Unmanaged:
- managed by external service (docker, systemd, etc.)
- do not need to be long-running processes, or processes at all
URL: needs a route added to the proxy.
- Public route will always be /services/service-name
- url specified in config
- if port is 0, Hub will select a port
API access:
- admin: tokens will have admin-access to the API
- not admin: tokens will only have non-admin access
(not much they can do other than defer to Hub for auth)
An externally managed service running on a URL::
{
'name': 'my-service',
'url': 'https://host:8888',
'admin': True,
'token': 'super-secret',
}
A hub-managed service with no URL::
{
'name': 'cull-idle',
'command': ['python', '/path/to/cull-idle'],
'admin': True,
}
"""
from getpass import getuser
import pipes
import shutil
from subprocess import Popen
from urllib.parse import urlparse
from tornado import gen
from traitlets import (
HasTraits,
Any, Bool, Dict, Unicode, Instance,
default, observe,
)
from traitlets.config import LoggingConfigurable
from .. import orm
from ..traitlets import Command
from ..spawner import LocalProcessSpawner
from ..utils import url_path_join
class _MockUser(HasTraits):
name = Unicode()
server = Instance(orm.Server, allow_none=True)
state = Dict()
service = Instance(__module__ + '.Service')
# We probably shouldn't use a Spawner here,
# but there are too many concepts to share.
class _ServiceSpawner(LocalProcessSpawner):
"""Subclass of LocalProcessSpawner
Removes notebook-specific-ness from LocalProcessSpawner.
"""
cwd = Unicode()
cmd = Command(minlen=0)
def make_preexec_fn(self, name):
if not name or name == getuser():
# no setuid if no name
return
return super().make_preexec_fn(name)
def start(self):
"""Start the process"""
env = self.get_env()
cmd = self.cmd
self.log.info("Spawning %s", ' '.join(pipes.quote(s) for s in cmd))
try:
self.proc = Popen(self.cmd, env=env,
preexec_fn=self.make_preexec_fn(self.user.name),
start_new_session=True, # don't forward signals
cwd=self.cwd or None,
)
except PermissionError:
# use which to get abspath
script = shutil.which(cmd[0]) or cmd[0]
self.log.error("Permission denied trying to run %r. Does %s have access to this file?",
script, self.user.name,
)
raise
self.pid = self.proc.pid
class Service(LoggingConfigurable):
"""An object wrapping a service specification for Hub API consumers.
A service has inputs:
- name: str
the name of the service
- admin: bool(False)
whether the service should have administrative privileges
- url: str (None)
The URL where the service is/should be.
If specified, the service will be added to the proxy at /services/:name
If a service is to be managed by the Hub, it has a few extra options:
- command: (str/Popen list)
Command for JupyterHub to spawn the service.
Only use this if the service should be a subprocess.
If command is not specified, it is assumed to be managed
by an external service.
- env: dict
environment variables to add to the current env
- user: str
The name of a system user to become.
If unspecified, run as the same user as the Hub.
"""
# inputs:
name = Unicode(
help="""The name of the service.
If the service has an http endpoint, it will be available
via the proxy at /services/:name.
"""
).tag(input=True)
admin = Bool(False,
help="Does the service need admin-access to the Hub API?"
).tag(input=True)
url = Unicode(
help="""URL of the service.
Only specify if the service runs an HTTP(s) endpoint that
should be added to the proxy.
If managed, will be passed as JUPYTERHUB_SERVICE_URL env.
"""
).tag(input=True)
api_token = Unicode(
help="""The API token to use for the service.
If unspecified, an API token will be generated for managed services.
"""
).tag(input=True)
# Managed service API:
@property
def managed(self):
"""Am I managed by the Hub?"""
return bool(self.command)
command = Command(minlen=0,
help="Command to spawn this service, if managed."
).tag(input=True)
cwd = Unicode(
help="""The working directory in which to run the service."""
).tag(input=True)
environment = Dict(
help="""Environment variables to pass to the service.
Only used if the Hub is spawning the service.
"""
).tag(input=True)
user = Unicode(getuser(),
help="""The user to become when launching the service.
If unspecified, run the service as the same user as the Hub.
"""
).tag(input=True)
domain = Unicode()
host = Unicode()
proc = Any()
# handles on globals:
proxy = Any()
base_url = Unicode()
db = Any()
orm = Any()
@property
def server(self):
return self.orm.server
@property
def prefix(self):
return url_path_join(self.base_url, 'services', self.name)
@property
def proxy_path(self):
if not self.server:
return ''
if self.domain:
return url_path_join('/' + self.domain, self.server.base_url)
else:
return self.server.base_url
def __repr__(self):
return "<{cls}(name={name}{managed})>".format(
cls=self.__class__.__name__,
name=self.name,
managed=' managed' if self.managed else '',
)
def start(self):
"""Start a managed service"""
if not self.managed:
raise RuntimeError("Cannot start unmanaged service %s" % self)
self.log.info("Starting service %r: %r", self.name, self.command)
env = {}
env.update(self.environment)
env['JUPYTERHUB_SERVICE_NAME'] = self.name
env['JUPYTERHUB_API_TOKEN'] = self.api_token
env['JUPYTERHUB_API_URL'] = self.hub_api_url
env['JUPYTERHUB_BASE_URL'] = self.base_url
if self.url:
env['JUPYTERHUB_SERVICE_URL'] = self.url
env['JUPYTERHUB_SERVICE_PREFIX'] = self.server.base_url
self.spawner = _ServiceSpawner(
cmd=self.command,
environment=env,
api_token=self.api_token,
cwd=self.cwd,
user=_MockUser(
name=self.user,
service=self,
server=self.orm.server,
),
)
self.spawner.start()
self.proc = self.spawner.proc
self.spawner.add_poll_callback(self._proc_stopped)
self.spawner.start_polling()
def _proc_stopped(self):
"""Called when the service process unexpectedly exits"""
self.log.error("Service %s exited with status %i", self.name, self.proc.returncode)
self.start()
def stop(self):
"""Stop a managed service"""
if not self.managed:
raise RuntimeError("Cannot stop unmanaged service %s" % self)
self.spawner.stop_polling()
return self.spawner.stop()
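# For reference (not part of the diff): per start() above, a managed service is launched
# with at least JUPYTERHUB_SERVICE_NAME, JUPYTERHUB_API_TOKEN, JUPYTERHUB_API_URL and
# JUPYTERHUB_BASE_URL in its environment, plus JUPYTERHUB_SERVICE_URL and
# JUPYTERHUB_SERVICE_PREFIX when a url is configured, on top of anything given in
# `environment`.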


@@ -1,123 +1,95 @@
#!/usr/bin/env python3
#!/usr/bin/env python
"""Extend regular notebook server to be aware of multiuser things."""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import os
try:
from urllib.parse import quote
except ImportError:
# PY2 Compat
from urllib import quote
import requests
from jinja2 import ChoiceLoader, FunctionLoader
from tornado import ioloop
from tornado.web import HTTPError
from textwrap import dedent
try:
import notebook
except ImportError:
raise ImportError("JupyterHub single-user server requires notebook >= 4.0")
from IPython.utils.traitlets import (
Integer,
from traitlets import (
Bool,
Unicode,
CUnicode,
default,
validate,
TraitError,
)
from IPython.html.notebookapp import NotebookApp, aliases as notebook_aliases
from IPython.html.auth.login import LoginHandler
from IPython.html.auth.logout import LogoutHandler
from notebook.notebookapp import (
NotebookApp,
aliases as notebook_aliases,
flags as notebook_flags,
)
from notebook.auth.login import LoginHandler
from notebook.auth.logout import LogoutHandler
from IPython.html.utils import url_path_join
from jupyterhub import __version__
from .services.auth import HubAuth, HubAuthenticated
from .utils import url_path_join
# Authenticate requests with the Hub
from distutils.version import LooseVersion as V
class HubAuthenticatedHandler(HubAuthenticated):
"""Class we are going to patch-in for authentication with the Hub"""
@property
def hub_auth(self):
return self.settings['hub_auth']
@property
def hub_users(self):
return { self.settings['user'] }
import IPython
if V(IPython.__version__) < V('3.0'):
raise ImportError("JupyterHub Requires IPython >= 3.0, found %s" % IPython.__version__)
# Define two methods to attach to AuthenticatedHandler,
# which authenticate via the central auth server.
class JupyterHubLoginHandler(LoginHandler):
"""LoginHandler that hooks up Hub authentication"""
@staticmethod
def login_available(settings):
return True
@staticmethod
def verify_token(self, cookie_name, encrypted_cookie):
"""method for token verification"""
cookie_cache = self.settings['cookie_cache']
if encrypted_cookie in cookie_cache:
# we've seen this token before, don't ask upstream again
return cookie_cache[encrypted_cookie]
hub_api_url = self.settings['hub_api_url']
hub_api_key = self.settings['hub_api_key']
r = requests.get(url_path_join(
hub_api_url, "authorizations/cookie", cookie_name, quote(encrypted_cookie, safe=''),
),
headers = {'Authorization' : 'token %s' % hub_api_key},
)
if r.status_code == 404:
data = None
elif r.status_code == 403:
self.log.error("I don't have permission to verify cookies, my auth token may have expired: [%i] %s", r.status_code, r.reason)
raise HTTPError(500, "Permission failure checking authorization, I may need to be restarted")
elif r.status_code >= 500:
self.log.error("Upstream failure verifying auth token: [%i] %s", r.status_code, r.reason)
raise HTTPError(502, "Failed to check authorization (upstream problem)")
elif r.status_code >= 400:
self.log.warn("Failed to check authorization: [%i] %s", r.status_code, r.reason)
raise HTTPError(500, "Failed to check authorization")
else:
data = r.json()
cookie_cache[encrypted_cookie] = data
return data
@staticmethod
def get_user(self):
"""alternative get_current_user to query the central server"""
# only allow this to be called once per handler
# avoids issues if an error is raised,
# since this may be called again when trying to render the error page
if hasattr(self, '_cached_user'):
return self._cached_user
self._cached_user = None
my_user = self.settings['user']
encrypted_cookie = self.get_cookie(self.cookie_name)
if encrypted_cookie:
auth_data = JupyterHubLoginHandler.verify_token(self, self.cookie_name, encrypted_cookie)
if not auth_data:
# treat invalid token the same as no token
return None
user = auth_data['name']
if user == my_user:
self._cached_user = user
return user
else:
return None
else:
self.log.debug("No token cookie")
return None
def get_user(handler):
"""alternative get_current_user to query the Hub"""
# patch in HubAuthenticated class for querying the Hub for cookie authentication
name = 'NowHubAuthenticated'
if handler.__class__.__name__ != name:
handler.__class__ = type(name, (HubAuthenticatedHandler, handler.__class__), {})
return handler.get_current_user()
class JupyterHubLogoutHandler(LogoutHandler):
def get(self):
self.redirect(url_path_join(self.settings['hub_prefix'], 'logout'))
self.redirect(
self.settings['hub_host'] +
url_path_join(self.settings['hub_prefix'], 'logout'))
# register new hub related command-line aliases
aliases = dict(notebook_aliases)
aliases.update({
'user' : 'SingleUserNotebookApp.user',
'cookie-name': 'SingleUserNotebookApp.cookie_name',
'cookie-name': 'HubAuth.cookie_name',
'hub-prefix': 'SingleUserNotebookApp.hub_prefix',
'hub-host': 'SingleUserNotebookApp.hub_host',
'hub-api-url': 'SingleUserNotebookApp.hub_api_url',
'base-url': 'SingleUserNotebookApp.base_url',
})
flags = dict(notebook_flags)
flags.update({
'disable-user-config': ({
'SingleUserNotebookApp': {
'disable_user_config': True
}
}, "Disable user-controlled configuration of the notebook server.")
})
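# Illustrative only: given the aliases and flags above, the single-user server accepts
# invocations along these lines (values are made up; normally a Spawner builds the argv).
argv = [
    '--user=rhea',
    '--base-url=/user/rhea/',
    '--hub-prefix=/hub/',
    '--hub-api-url=http://127.0.0.1:8081/hub/api',
    '--disable-user-config',
]
# main(argv)  # would also require JPY_API_TOKEN etc. in the environment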
page_template = """
{% extends "templates/page.html" %}
@@ -131,36 +103,81 @@ page_template = """
>
Control Panel</a>
{% endblock %}
{% block logo %}
<img src='{{logo_url}}' alt='Jupyter Notebook'/>
{% endblock logo %}
"""
def _exclude_home(path_list):
"""Filter out any entries in a path list that are in my home directory.
Used to disable per-user configuration.
"""
home = os.path.expanduser('~')
for p in path_list:
if not p.startswith(home):
yield p
class SingleUserNotebookApp(NotebookApp):
"""A Subclass of the regular NotebookApp that is aware of the parent multiuser context."""
description = dedent("""
Single-user server for JupyterHub. Extends the Jupyter Notebook server.
Meant to be invoked by JupyterHub Spawners, and not directly.
""")
examples = ""
subcommands = {}
version = __version__
classes = NotebookApp.classes + [HubAuth]
user = CUnicode(config=True)
def _user_changed(self, name, old, new):
self.log.name = new
cookie_name = Unicode(config=True)
hub_prefix = Unicode(config=True)
hub_api_url = Unicode(config=True)
hub_prefix = Unicode().tag(config=True)
hub_host = Unicode().tag(config=True)
hub_api_url = Unicode().tag(config=True)
aliases = aliases
flags = flags
# disable some single-user configurables
token = ''
open_browser = False
trust_xheaders = True
login_handler_class = JupyterHubLoginHandler
logout_handler_class = JupyterHubLogoutHandler
port_retries = 0 # disable port-retries, since the Spawner will tell us what port to use
cookie_cache_lifetime = Integer(
config=True,
default_value=300,
allow_none=True,
help="""
Time, in seconds, that we cache a validated cookie before requiring
revalidation with the hub.
""",
)
disable_user_config = Bool(False,
help="""Disable user configuration of single-user server.
Prevents user-writable files that normally configure the single-user server
from being loaded, ensuring admins have full control of configuration.
"""
).tag(config=True)
@validate('notebook_dir')
def _notebook_dir_validate(self, proposal):
value = os.path.expanduser(proposal['value'])
# Strip any trailing slashes
# *except* if it's root
_, path = os.path.splitdrive(value)
if path == os.sep:
return value
value = value.rstrip(os.sep)
if not os.path.isabs(value):
# If we receive a non-absolute path, make it absolute.
value = os.path.abspath(value)
if not os.path.isdir(value):
raise TraitError("No such notebook dir: %r" % value)
return value
@default('log_datefmt')
def _log_datefmt_default(self):
"""Exclude date from default date format"""
return "%Y-%m-%d %H:%M:%S"
@default('log_format')
def _log_format_default(self):
"""override default log format to include time"""
return "%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s %(module)s:%(lineno)d]%(end_color)s %(message)s"
@@ -169,49 +186,78 @@ class SingleUserNotebookApp(NotebookApp):
# disable the exit confirmation for background notebook processes
ioloop.IOLoop.instance().stop()
def _clear_cookie_cache(self):
self.log.debug("Clearing cookie cache")
self.tornado_settings['cookie_cache'].clear()
def migrate_config(self):
if self.disable_user_config:
# disable config-migration when user config is disabled
return
else:
super(SingleUserNotebookApp, self).migrate_config()
@property
def config_file_paths(self):
path = super(SingleUserNotebookApp, self).config_file_paths
if self.disable_user_config:
# filter out user-writable config dirs if user config is disabled
path = list(_exclude_home(path))
return path
@property
def nbextensions_path(self):
path = super(SingleUserNotebookApp, self).nbextensions_path
if self.disable_user_config:
path = list(_exclude_home(path))
return path
@validate('static_custom_path')
def _validate_static_custom_path(self, proposal):
path = proposal['value']
if self.disable_user_config:
path = list(_exclude_home(path))
return path
def start(self):
# Start a PeriodicCallback to clear cached cookies. This forces us to
# revalidate our user with the Hub at least every
# `cookie_cache_lifetime` seconds.
if self.cookie_cache_lifetime:
ioloop.PeriodicCallback(
self._clear_cookie_cache,
self.cookie_cache_lifetime * 1e3,
).start()
super(SingleUserNotebookApp, self).start()
def init_hub_auth(self):
if not os.environ.get('JPY_API_TOKEN'):
self.exit("JPY_API_TOKEN env is required to run jupyterhub-singleuser. Did you launch it manually?")
self.hub_auth = HubAuth(
parent=self,
api_token=os.environ.pop('JPY_API_TOKEN'),
api_url=self.hub_api_url,
)
def init_webapp(self):
# load the hub related settings into the tornado settings dict
env = os.environ
self.init_hub_auth()
s = self.tornado_settings
s['cookie_cache'] = {}
s['user'] = self.user
s['hub_api_key'] = env.pop('JPY_API_TOKEN')
s['hub_prefix'] = self.hub_prefix
s['cookie_name'] = self.cookie_name
s['login_url'] = self.hub_prefix
s['hub_api_url'] = self.hub_api_url
s['csp_report_uri'] = url_path_join(self.hub_prefix, 'security/csp-report')
s['hub_host'] = self.hub_host
s['hub_auth'] = self.hub_auth
s['login_url'] = self.hub_host + self.hub_prefix
s['csp_report_uri'] = self.hub_host + url_path_join(self.hub_prefix, 'security/csp-report')
super(SingleUserNotebookApp, self).init_webapp()
self.patch_templates()
def patch_templates(self):
"""Patch page templates to add Hub-related buttons"""
self.jinja_template_vars['logo_url'] = self.hub_host + url_path_join(self.hub_prefix, 'logo')
self.jinja_template_vars['hub_host'] = self.hub_host
self.jinja_template_vars['hub_prefix'] = self.hub_prefix
env = self.web_app.settings['jinja2_env']
env.globals['hub_control_panel_url'] = \
url_path_join(self.hub_prefix, 'home')
self.hub_host + url_path_join(self.hub_prefix, 'home')
# patch jinja env loading to modify page template
def get_page(name):
if name == 'page.html':
return page_template
orig_loader = env.loader
env.loader = ChoiceLoader([
FunctionLoader(get_page),
@@ -219,8 +265,8 @@ class SingleUserNotebookApp(NotebookApp):
])
def main():
return SingleUserNotebookApp.launch_instance()
def main(argv=None):
return SingleUserNotebookApp.launch_instance(argv)
if __name__ == "__main__":

File diff suppressed because it is too large

@@ -5,14 +5,19 @@
import logging
from getpass import getuser
from pytest import fixture
from subprocess import TimeoutExpired
import time
from unittest import mock
from pytest import fixture, yield_fixture, raises
from tornado import ioloop
from .. import orm
from ..utils import random_port
from .mocking import MockHub
from .test_services import mockservice_cmd
import jupyterhub.services.service
# global db session object
_db = None
@@ -53,3 +58,58 @@ def app(request):
app.stop()
request.addfinalizer(fin)
return app
# mock services for testing.
# Shorter intervals, etc.
class MockServiceSpawner(jupyterhub.services.service._ServiceSpawner):
poll_interval = 1
def _mockservice(request, app, url=False):
name = 'mock-service'
spec = {
'name': name,
'command': mockservice_cmd,
'admin': True,
}
if url:
spec['url'] = 'http://127.0.0.1:%i' % random_port()
with mock.patch.object(jupyterhub.services.service, '_ServiceSpawner', MockServiceSpawner):
app.services = [{
'name': name,
'command': mockservice_cmd,
'url': 'http://127.0.0.1:%i' % random_port(),
'admin': True,
}]
app.init_services()
app.io_loop.add_callback(app.proxy.add_all_services, app._service_map)
assert name in app._service_map
service = app._service_map[name]
app.io_loop.add_callback(service.start)
request.addfinalizer(service.stop)
for i in range(20):
if not getattr(service, 'proc', False):
time.sleep(0.2)
# ensure process finishes starting
with raises(TimeoutExpired):
service.proc.wait(1)
return service
@yield_fixture
def mockservice(request, app):
yield _mockservice(request, app, url=False)
@yield_fixture
def mockservice_url(request, app):
yield _mockservice(request, app, url=True)
@yield_fixture
def no_patience(app):
"""Set slow-spawning timeouts to zero"""
with mock.patch.dict(app.tornado_application.settings,
{'slow_spawn_timeout': 0,
'slow_stop_timeout': 0}):
yield


@@ -1,7 +1,7 @@
"""mock utilities for testing"""
import os
import sys
from datetime import timedelta
from tempfile import NamedTemporaryFile
import threading
@@ -13,25 +13,35 @@ from tornado import gen
from tornado.concurrent import Future
from tornado.ioloop import IOLoop
from ..spawner import LocalProcessSpawner
from traitlets import default
from ..app import JupyterHub
from ..auth import PAMAuthenticator
from .. import orm
from ..spawner import LocalProcessSpawner
from ..singleuser import SingleUserNotebookApp
from ..utils import random_port
from pamela import PAMError
def mock_authenticate(username, password, service='login'):
# mimic simplepam's failure to handle unicode
if isinstance(username, str):
return False
if isinstance(password, str):
return False
# just use equality for testing
if password == username:
return True
else:
raise PAMError("Fake")
def mock_open_session(username, service):
pass
class MockSpawner(LocalProcessSpawner):
"""Base mock spawner
- disables the user-switching that would require root permissions
- spawns jupyterhub.tests.mocksu instead of a full single-user server
"""
def make_preexec_fn(self, *a, **kw):
# skip the setuid stuff
return
@@ -41,7 +51,8 @@ class MockSpawner(LocalProcessSpawner):
def user_env(self, env):
return env
@default('cmd')
def _cmd_default(self):
return [sys.executable, '-m', 'jupyterhub.tests.mocksu']
@@ -51,18 +62,20 @@ class SlowSpawner(MockSpawner):
@gen.coroutine
def start(self):
yield gen.Task(IOLoop.current().add_timeout, timedelta(seconds=2))
yield super().start()
(ip, port) = yield super().start()
yield gen.sleep(2)
return ip, port
@gen.coroutine
def stop(self):
yield gen.Task(IOLoop.current().add_timeout, timedelta(seconds=2))
yield gen.sleep(2)
yield super().stop()
class NeverSpawner(MockSpawner):
"""A spawner that will never start"""
@default('start_timeout')
def _start_timeout_default(self):
return 1
@@ -71,7 +84,24 @@ class NeverSpawner(MockSpawner):
return Future()
class FormSpawner(MockSpawner):
"""A spawner that has an options form defined"""
options_form = "IMAFORM"
def options_from_form(self, form_data):
options = {}
options['notspecified'] = 5
if 'bounds' in form_data:
options['bounds'] = [int(i) for i in form_data['bounds']]
if 'energy' in form_data:
options['energy'] = form_data['energy'][0]
if 'hello_file' in form_data:
options['hello'] = form_data['hello_file'][0]
return options
class MockPAMAuthenticator(PAMAuthenticator):
@default('admin_users')
def _admin_users_default(self):
return {'admin'}
@@ -80,20 +110,36 @@ class MockPAMAuthenticator(PAMAuthenticator):
return not user.name.startswith('dne')
def authenticate(self, *args, **kwargs):
with mock.patch('simplepam.authenticate', mock_authenticate):
with mock.patch.multiple('pamela',
authenticate=mock_authenticate,
open_session=mock_open_session,
close_session=mock_open_session,
):
return super(MockPAMAuthenticator, self).authenticate(*args, **kwargs)
class MockHub(JupyterHub):
"""Hub with various mock bits"""
db_file = None
def _ip_default(self):
return 'localhost'
last_activity_interval = 2
base_url = '/@/space%20word/'
@default('subdomain_host')
def _subdomain_host_default(self):
return os.environ.get('JUPYTERHUB_TEST_SUBDOMAIN_HOST', '')
@default('ip')
def _ip_default(self):
return '127.0.0.1'
@default('authenticator_class')
def _authenticator_class_default(self):
return MockPAMAuthenticator
@default('spawner_class')
def _spawner_class_default(self):
return MockSpawner
@@ -102,7 +148,8 @@ class MockHub(JupyterHub):
def start(self, argv=None):
self.db_file = NamedTemporaryFile()
self.db_url = 'sqlite:///' + self.db_file.name
self.pid_file = NamedTemporaryFile(delete=False).name
self.db_url = self.db_file.name
evt = threading.Event()
@@ -139,13 +186,90 @@ class MockHub(JupyterHub):
self.db_file.close()
def login_user(self, name):
r = requests.post(self.proxy.public_server.url + 'hub/login',
"""Login a user by name, returning her cookies."""
base_url = public_url(self)
r = requests.post(base_url + 'hub/login',
data={
'username': name,
'password': name,
},
allow_redirects=False,
)
r.raise_for_status()
assert r.cookies
return r.cookies
def public_host(app):
"""Return the public *host* (no URL prefix) of the given JupyterHub instance."""
if app.subdomain_host:
return app.subdomain_host
else:
return app.proxy.public_server.host
def public_url(app, user_or_service=None):
"""Return the full, public base URL (including prefix) of the given JupyterHub instance."""
if user_or_service:
if app.subdomain_host:
host = user_or_service.host
else:
host = public_host(app)
return host + user_or_service.server.base_url
else:
return public_host(app) + app.proxy.public_server.base_url
# single-user-server mocking:
class MockSingleUserServer(SingleUserNotebookApp):
"""Mock-out problematic parts of single-user server when run in a thread
Currently:
- disable signal handler
"""
def init_signal(self):
pass
class TestSingleUserSpawner(MockSpawner):
"""Spawner that starts a MockSingleUserServer in a thread."""
_thread = None
@gen.coroutine
def start(self):
self.user.server.port = random_port()
env = self.get_env()
args = self.get_args()
evt = threading.Event()
print(args, env)
def _run():
io_loop = IOLoop()
io_loop.make_current()
io_loop.add_callback(lambda : evt.set())
with mock.patch.dict(os.environ, env):
app = self._app = MockSingleUserServer()
app.initialize(args)
app.start()
self._thread = threading.Thread(target=_run)
self._thread.start()
ready = evt.wait(timeout=3)
assert ready
@gen.coroutine
def stop(self):
self._app.stop()
self._thread.join()
@gen.coroutine
def poll(self):
if self._thread is None:
return 0
if self._thread.is_alive():
return None
else:
return 0


@@ -0,0 +1,67 @@
"""Mock service for testing
basic HTTP server that echoes URLs back,
and allows retrieval of sys.argv.
"""
import argparse
import json
import os
import sys
from urllib.parse import urlparse
import requests
from tornado import web, httpserver, ioloop
from jupyterhub.services.auth import HubAuthenticated
class EchoHandler(web.RequestHandler):
def get(self):
self.write(self.request.path)
class EnvHandler(web.RequestHandler):
def get(self):
self.set_header('Content-Type', 'application/json')
self.write(json.dumps(dict(os.environ)))
class APIHandler(web.RequestHandler):
def get(self, path):
api_token = os.environ['JUPYTERHUB_API_TOKEN']
api_url = os.environ['JUPYTERHUB_API_URL']
r = requests.get(api_url + path, headers={
'Authorization': 'token %s' % api_token
})
r.raise_for_status()
self.set_header('Content-Type', 'application/json')
self.write(r.text)
class WhoAmIHandler(HubAuthenticated, web.RequestHandler):
@web.authenticated
def get(self):
self.write(self.get_current_user())
def main():
if os.environ['JUPYTERHUB_SERVICE_URL']:
url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL'])
app = web.Application([
(r'.*/env', EnvHandler),
(r'.*/api/(.*)', APIHandler),
(r'.*/whoami/?', WhoAmIHandler),
(r'.*', EchoHandler),
])
server = httpserver.HTTPServer(app)
server.listen(url.port, url.hostname)
try:
ioloop.IOLoop.instance().start()
except KeyboardInterrupt:
print('\nInterrupted')
if __name__ == '__main__':
from tornado.options import parse_command_line
parse_command_line()
main()

Binary file not shown.

@@ -2,17 +2,22 @@
import json
import time
from datetime import timedelta
from queue import Queue
from urllib.parse import urlparse
import sys
from unittest import mock
from urllib.parse import urlparse, quote
from pytest import mark, yield_fixture
import requests
from tornado import gen
from ..utils import url_path_join as ujoin
import jupyterhub
from .. import orm
from ..user import User
from ..utils import url_path_join as ujoin
from . import mocking
from .mocking import public_host, public_url
def check_db_locks(func):
@@ -20,14 +25,13 @@ def check_db_locks(func):
Decorator for test functions that verifies no locks are held on the
application's database upon exit by creating and dropping a dummy table.
Relies on an instance of JupyterhubApp being the first argument to the
Relies on an instance of JupyterHubApp being the first argument to the
decorated function.
"""
def new_func(*args, **kwargs):
retval = func(*args, **kwargs)
def new_func(app, *args, **kwargs):
retval = func(app, *args, **kwargs)
app = args[0]
temp_session = app.session_factory()
temp_session.execute('CREATE TABLE dummy (foo INT)')
temp_session.execute('DROP TABLE dummy')
@@ -40,12 +44,21 @@ def check_db_locks(func):
def find_user(db, name):
return db.query(orm.User).filter(orm.User.name==name).first()
def add_user(db, **kwargs):
user = orm.User(**kwargs)
db.add(user)
def add_user(db, app=None, **kwargs):
orm_user = find_user(db, name=kwargs.get('name'))
if orm_user is None:
orm_user = orm.User(**kwargs)
db.add(orm_user)
else:
for attr, value in kwargs.items():
setattr(orm_user, attr, value)
db.commit()
return user
if app:
user = app.users[orm_user.id] = User(orm_user, app.tornado_settings)
return user
else:
return orm_user
def auth_header(db, name):
user = find_user(db, name)
@@ -76,17 +89,17 @@ def test_auth_api(app):
db = app.db
r = api_request(app, 'authorizations', 'gobbledygook')
assert r.status_code == 404
# make a new cookie token
user = db.query(orm.User).first()
api_token = user.new_api_token()
# check success:
r = api_request(app, 'authorizations/token', api_token)
assert r.status_code == 200
reply = r.json()
assert reply['name'] == user.name
# check fail
r = api_request(app, 'authorizations/token', api_token,
headers={'Authorization': 'no sir'},
@@ -100,7 +113,7 @@ def test_auth_api(app):
def test_referer_check(app, io_loop):
url = app.hub.server.url
url = ujoin(public_host(app), app.hub.server.base_url)
host = urlparse(url).netloc
user = find_user(app.db, 'admin')
if user is None:
@@ -110,7 +123,7 @@ def test_referer_check(app, io_loop):
# stop the admin's server so we don't mess up future tests
io_loop.run_sync(lambda : app.proxy.delete_user(app_user))
io_loop.run_sync(app_user.stop)
r = api_request(app, 'users',
headers={
'Authorization': '',
@@ -142,24 +155,28 @@ def test_referer_check(app, io_loop):
)
assert r.status_code == 200
# user API tests
@mark.user
def test_get_users(app):
db = app.db
r = api_request(app, 'users')
assert r.status_code == 200
users = sorted(r.json(), key=lambda d: d['name'])
for u in users:
u.pop('last_activity')
assert users == [
{
'name': 'admin',
'groups': [],
'admin': True,
'server': None,
'pending': None,
},
{
'name': 'user',
'groups': [],
'admin': False,
'server': None,
'pending': None,
@@ -171,6 +188,8 @@ def test_get_users(app):
)
assert r.status_code == 403
@mark.user
def test_add_user(app):
db = app.db
name = 'newuser'
@@ -182,6 +201,7 @@ def test_add_user(app):
assert not user.admin
@mark.user
def test_get_user(app):
name = 'user'
r = api_request(app, 'users', name)
@@ -190,12 +210,14 @@ def test_get_user(app):
user.pop('last_activity')
assert user == {
'name': name,
'groups': [],
'admin': False,
'server': None,
'pending': None,
}
@mark.user
def test_add_multi_user_bad(app):
r = api_request(app, 'users', method='post')
assert r.status_code == 400
@@ -204,6 +226,19 @@ def test_add_multi_user_bad(app):
r = api_request(app, 'users', method='post', data='[]')
assert r.status_code == 400
@mark.user
def test_add_multi_user_invalid(app):
app.authenticator.username_pattern = r'w.*'
r = api_request(app, 'users', method='post',
data=json.dumps({'usernames': ['Willow', 'Andrew', 'Tara']})
)
app.authenticator.username_pattern = ''
assert r.status_code == 400
assert r.json()['message'] == 'Invalid usernames: andrew, tara'
@mark.user
def test_add_multi_user(app):
db = app.db
names = ['a', 'b']
@@ -214,21 +249,21 @@ def test_add_multi_user(app):
reply = r.json()
r_names = [ user['name'] for user in reply ]
assert names == r_names
for name in names:
user = find_user(db, name)
assert user is not None
assert user.name == name
assert not user.admin
# try to create the same users again
r = api_request(app, 'users', method='post',
data=json.dumps({'usernames': names}),
)
assert r.status_code == 400
names = ['a', 'b', 'ab']
# try to create the same users again
r = api_request(app, 'users', method='post',
data=json.dumps({'usernames': names}),
@@ -239,6 +274,7 @@ def test_add_multi_user(app):
assert r_names == ['ab']
@mark.user
def test_add_multi_user_admin(app):
db = app.db
names = ['c', 'd']
@@ -249,7 +285,7 @@ def test_add_multi_user_admin(app):
reply = r.json()
r_names = [ user['name'] for user in reply ]
assert names == r_names
for name in names:
user = find_user(db, name)
assert user is not None
@@ -257,6 +293,7 @@ def test_add_multi_user_admin(app):
assert user.admin
@mark.user
def test_add_user_bad(app):
db = app.db
name = 'dne_newuser'
@@ -265,6 +302,8 @@ def test_add_user_bad(app):
user = find_user(db, name)
assert user is None
@mark.user
def test_add_admin(app):
db = app.db
name = 'newadmin'
@@ -277,13 +316,16 @@ def test_add_admin(app):
assert user.name == name
assert user.admin
@mark.user
def test_delete_user(app):
db = app.db
mal = add_user(db, name='mal')
r = api_request(app, 'users', 'mal', method='delete')
assert r.status_code == 204
@mark.user
def test_make_admin(app):
db = app.db
name = 'admin2'
@@ -303,58 +345,78 @@ def test_make_admin(app):
assert user.name == name
assert user.admin
def get_app_user(app, name):
"""Get the User object from the main thread
Needed for access to the Spawner.
No ORM methods should be called on the result.
"""
q = Queue()
def get_user():
def get_user_id():
user = find_user(app.db, name)
q.put(user)
app.io_loop.add_callback(get_user)
return q.get(timeout=2)
q.put(user.id)
app.io_loop.add_callback(get_user_id)
user_id = q.get(timeout=2)
return app.users[user_id]
def test_spawn(app, io_loop):
db = app.db
name = 'wash'
user = add_user(db, name=name)
r = api_request(app, 'users', name, 'server', method='post')
user = add_user(db, app=app, name=name)
options = {
's': ['value'],
'i': 5,
}
before_servers = sorted(db.query(orm.Server), key=lambda s: s.url)
r = api_request(app, 'users', name, 'server', method='post', data=json.dumps(options))
assert r.status_code == 201
assert 'pid' in user.state
app_user = get_app_user(app, name)
assert app_user.spawner is not None
assert app_user.spawner.user_options == options
assert not app_user.spawn_pending
status = io_loop.run_sync(app_user.spawner.poll)
assert status is None
assert user.server.base_url == '/user/%s' % name
r = requests.get(ujoin(app.proxy.public_server.url, user.server.base_url))
assert user.server.base_url == ujoin(app.base_url, 'user/%s' % name)
url = public_url(app, user)
print(url)
r = requests.get(url)
assert r.status_code == 200
assert r.text == user.server.base_url
r = requests.get(ujoin(app.proxy.public_server.url, user.server.base_url, 'args'))
r = requests.get(ujoin(url, 'args'))
assert r.status_code == 200
argv = r.json()
for expected in ['--user=%s' % name, '--base-url=%s' % user.server.base_url]:
for expected in ['--user="%s"' % name, '--base-url="%s"' % user.server.base_url]:
assert expected in argv
if app.subdomain_host:
assert '--hub-host="%s"' % app.subdomain_host in argv
r = api_request(app, 'users', name, 'server', method='delete')
assert r.status_code == 204
assert 'pid' not in user.state
status = io_loop.run_sync(app_user.spawner.poll)
assert status == 0
def test_slow_spawn(app, io_loop):
app.tornado_application.settings['spawner_class'] = mocking.SlowSpawner
app.tornado_application.settings['slow_spawn_timeout'] = 0
app.tornado_application.settings['slow_stop_timeout'] = 0
# check that we cleaned up after ourselves
assert user.server is None
after_servers = sorted(db.query(orm.Server), key=lambda s: s.url)
assert before_servers == after_servers
tokens = list(db.query(orm.APIToken).filter(orm.APIToken.user_id==user.id))
assert tokens == []
def test_slow_spawn(app, io_loop, no_patience, request):
patch = mock.patch.dict(app.tornado_settings, {'spawner_class': mocking.SlowSpawner})
patch.start()
request.addfinalizer(patch.stop)
db = app.db
name = 'zoe'
user = add_user(db, name=name)
user = add_user(db, app=app, name=name)
r = api_request(app, 'users', name, 'server', method='post')
r.raise_for_status()
assert r.status_code == 202
@@ -362,13 +424,12 @@ def test_slow_spawn(app, io_loop):
assert app_user.spawner is not None
assert app_user.spawn_pending
assert not app_user.stop_pending
dt = timedelta(seconds=0.1)
@gen.coroutine
def wait_spawn():
while app_user.spawn_pending:
yield gen.Task(io_loop.add_timeout, dt)
yield gen.sleep(0.1)
io_loop.run_sync(wait_spawn)
assert not app_user.spawn_pending
status = io_loop.run_sync(app_user.spawner.poll)
@@ -377,7 +438,7 @@ def test_slow_spawn(app, io_loop):
@gen.coroutine
def wait_stop():
while app_user.stop_pending:
yield gen.Task(io_loop.add_timeout, dt)
yield gen.sleep(0.1)
r = api_request(app, 'users', name, 'server', method='delete')
r.raise_for_status()
@@ -390,32 +451,32 @@ def test_slow_spawn(app, io_loop):
assert r.status_code == 202
assert app_user.spawner is not None
assert app_user.stop_pending
io_loop.run_sync(wait_stop)
assert not app_user.stop_pending
assert app_user.spawner is not None
r = api_request(app, 'users', name, 'server', method='delete')
assert r.status_code == 400
def test_never_spawn(app, io_loop):
app.tornado_application.settings['spawner_class'] = mocking.NeverSpawner
app.tornado_application.settings['slow_spawn_timeout'] = 0
def test_never_spawn(app, io_loop, no_patience, request):
patch = mock.patch.dict(app.tornado_settings, {'spawner_class': mocking.NeverSpawner})
patch.start()
request.addfinalizer(patch.stop)
db = app.db
name = 'badger'
user = add_user(db, name=name)
user = add_user(db, app=app, name=name)
r = api_request(app, 'users', name, 'server', method='post')
app_user = get_app_user(app, name)
assert app_user.spawner is not None
assert app_user.spawn_pending
dt = timedelta(seconds=0.1)
@gen.coroutine
def wait_pending():
while app_user.spawn_pending:
yield gen.Task(io_loop.add_timeout, dt)
yield gen.sleep(0.1)
io_loop.run_sync(wait_pending)
assert not app_user.spawn_pending
status = io_loop.run_sync(app_user.spawner.poll)
@@ -429,6 +490,295 @@ def test_get_proxy(app, io_loop):
assert list(reply.keys()) == ['/']
def test_cookie(app):
db = app.db
name = 'patience'
user = add_user(db, app=app, name=name)
r = api_request(app, 'users', name, 'server', method='post')
assert r.status_code == 201
assert 'pid' in user.state
app_user = get_app_user(app, name)
cookies = app.login_user(name)
# cookie jar gives '"cookie-value"', we want 'cookie-value'
cookie = cookies[user.server.cookie_name][1:-1]
r = api_request(app, 'authorizations/cookie', user.server.cookie_name, "nothintoseehere")
assert r.status_code == 404
r = api_request(app, 'authorizations/cookie', user.server.cookie_name, quote(cookie, safe=''))
r.raise_for_status()
reply = r.json()
assert reply['name'] == name
# deprecated cookie in body:
r = api_request(app, 'authorizations/cookie', user.server.cookie_name, data=cookie)
r.raise_for_status()
reply = r.json()
assert reply['name'] == name
def test_token(app):
name = 'book'
user = add_user(app.db, app=app, name=name)
token = user.new_api_token()
r = api_request(app, 'authorizations/token', token)
r.raise_for_status()
user_model = r.json()
assert user_model['name'] == name
r = api_request(app, 'authorizations/token', 'notauthorized')
assert r.status_code == 404
def test_get_token(app):
name = 'user'
user = add_user(app.db, app=app, name=name)
r = api_request(app, 'authorizations/token', method='post', data=json.dumps({
'username': name,
'password': name,
}))
assert r.status_code == 200
data = r.content.decode("utf-8")
token = json.loads(data)
assert token['Authentication'] is not None
def test_bad_get_token(app):
name = 'user'
password = 'fake'
user = add_user(app.db, app=app, name=name)
r = api_request(app, 'authorizations/token', method='post', data=json.dumps({
'username': name,
'password': password,
}))
assert r.status_code == 403
# group API tests
@mark.group
def test_groups_list(app):
r = api_request(app, 'groups')
r.raise_for_status()
reply = r.json()
assert reply == []
# create a group
group = orm.Group(name='alphaflight')
app.db.add(group)
app.db.commit()
r = api_request(app, 'groups')
r.raise_for_status()
reply = r.json()
assert reply == [{
'name': 'alphaflight',
'users': []
}]
@mark.group
def test_group_get(app):
group = orm.Group.find(app.db, name='alphaflight')
user = add_user(app.db, app=app, name='sasquatch')
group.users.append(user)
app.db.commit()
r = api_request(app, 'groups/runaways')
assert r.status_code == 404
r = api_request(app, 'groups/alphaflight')
r.raise_for_status()
reply = r.json()
assert reply == {
'name': 'alphaflight',
'users': ['sasquatch']
}
@mark.group
def test_group_create_delete(app):
db = app.db
r = api_request(app, 'groups/runaways', method='delete')
assert r.status_code == 404
r = api_request(app, 'groups/new', method='post', data=json.dumps({
'users': ['doesntexist']
}))
assert r.status_code == 400
assert orm.Group.find(db, name='new') is None
r = api_request(app, 'groups/omegaflight', method='post', data=json.dumps({
'users': ['sasquatch']
}))
r.raise_for_status()
omegaflight = orm.Group.find(db, name='omegaflight')
sasquatch = find_user(db, name='sasquatch')
assert omegaflight in sasquatch.groups
assert sasquatch in omegaflight.users
# create duplicate raises 400
r = api_request(app, 'groups/omegaflight', method='post')
assert r.status_code == 400
r = api_request(app, 'groups/omegaflight', method='delete')
assert r.status_code == 204
assert omegaflight not in sasquatch.groups
assert orm.Group.find(db, name='omegaflight') is None
# delete nonexistent gives 404
r = api_request(app, 'groups/omegaflight', method='delete')
assert r.status_code == 404
@mark.group
def test_group_add_users(app):
db = app.db
# must specify users
r = api_request(app, 'groups/alphaflight/users', method='post', data='{}')
assert r.status_code == 400
names = ['aurora', 'guardian', 'northstar', 'sasquatch', 'shaman', 'snowbird']
users = [ find_user(db, name=name) or add_user(db, app=app, name=name) for name in names ]
r = api_request(app, 'groups/alphaflight/users', method='post', data=json.dumps({
'users': names,
}))
r.raise_for_status()
for user in users:
print(user.name)
assert [ g.name for g in user.groups ] == ['alphaflight']
group = orm.Group.find(db, name='alphaflight')
assert sorted([ u.name for u in group.users ]) == sorted(names)
@mark.group
def test_group_delete_users(app):
db = app.db
# must specify users
r = api_request(app, 'groups/alphaflight/users', method='delete', data='{}')
assert r.status_code == 400
names = ['aurora', 'guardian', 'northstar', 'sasquatch', 'shaman', 'snowbird']
users = [ find_user(db, name=name) for name in names ]
r = api_request(app, 'groups/alphaflight/users', method='delete', data=json.dumps({
'users': names[:2],
}))
r.raise_for_status()
for user in users[:2]:
assert user.groups == []
for user in users[2:]:
assert [ g.name for g in user.groups ] == ['alphaflight']
group = orm.Group.find(db, name='alphaflight')
assert sorted([ u.name for u in group.users ]) == sorted(names[2:])
# service API
@mark.services
def test_get_services(app, mockservice):
db = app.db
r = api_request(app, 'services')
r.raise_for_status()
assert r.status_code == 200
services = r.json()
assert services == {
'mock-service': {
'name': 'mock-service',
'admin': True,
'command': mockservice.command,
'pid': mockservice.proc.pid,
'prefix': mockservice.server.base_url,
'url': mockservice.url,
}
}
r = api_request(app, 'services',
headers=auth_header(db, 'user'),
)
assert r.status_code == 403
@mark.services
def test_get_service(app, mockservice):
db = app.db
r = api_request(app, 'services/%s' % mockservice.name)
r.raise_for_status()
assert r.status_code == 200
service = r.json()
assert service == {
'name': 'mock-service',
'admin': True,
'command': mockservice.command,
'pid': mockservice.proc.pid,
'prefix': mockservice.server.base_url,
'url': mockservice.url,
}
r = api_request(app, 'services/%s' % mockservice.name,
headers={
'Authorization': 'token %s' % mockservice.api_token
}
)
r.raise_for_status()
r = api_request(app, 'services/%s' % mockservice.name,
headers=auth_header(db, 'user'),
)
assert r.status_code == 403
def test_root_api(app):
base_url = app.hub.server.url
url = ujoin(base_url, 'api')
r = requests.get(url)
r.raise_for_status()
expected = {
'version': jupyterhub.__version__
}
assert r.json() == expected
def test_info(app):
r = api_request(app, 'info')
r.raise_for_status()
data = r.json()
assert data['version'] == jupyterhub.__version__
assert sorted(data) == [
'authenticator',
'python',
'spawner',
'sys_executable',
'version',
]
assert data['python'] == sys.version
assert data['sys_executable'] == sys.executable
assert data['authenticator'] == {
'class': 'jupyterhub.tests.mocking.MockPAMAuthenticator',
'version': jupyterhub.__version__,
}
assert data['spawner'] == {
'class': 'jupyterhub.tests.mocking.MockSpawner',
'version': jupyterhub.__version__,
}
# general API tests
def test_options(app):
r = api_request(app, 'users', method='options')
r.raise_for_status()
assert 'Access-Control-Allow-Headers' in r.headers
def test_bad_json_body(app):
r = api_request(app, 'users', method='post', data='notjson')
assert r.status_code == 400
# shutdown must be last
def test_shutdown(app):
r = api_request(app, 'shutdown', method='post', data=json.dumps({
'servers': True,
@@ -442,3 +792,4 @@ def test_shutdown(app):
else:
break
assert not app.io_loop._running


@@ -1,11 +1,17 @@
"""Test the JupyterHub entry point"""
import binascii
import os
import re
import sys
from getpass import getuser
from subprocess import check_output
from subprocess import check_output, Popen, PIPE
from tempfile import NamedTemporaryFile, TemporaryDirectory
from unittest.mock import patch
import pytest
from .mocking import MockHub
from .. import orm
def test_help_all():
out = check_output([sys.executable, '-m', 'jupyterhub', '--help-all']).decode('utf8', 'replace')
@@ -16,16 +22,31 @@ def test_token_app():
cmd = [sys.executable, '-m', 'jupyterhub', 'token']
out = check_output(cmd + ['--help-all']).decode('utf8', 'replace')
with TemporaryDirectory() as td:
out = check_output(cmd + [getuser()], cwd=td).decode('utf8', 'replace').strip()
with open(os.path.join(td, 'jupyterhub_config.py'), 'w') as f:
f.write("c.Authenticator.admin_users={'user'}")
out = check_output(cmd + ['user'], cwd=td).decode('utf8', 'replace').strip()
assert re.match(r'^[a-z0-9]+$', out)
def test_generate_config():
with NamedTemporaryFile(prefix='jupyterhub_config', suffix='.py') as tf:
cfg_file = tf.name
out = check_output([sys.executable, '-m', 'jupyterhub',
'--generate-config', '-f', cfg_file]
).decode('utf8', 'replace')
with open(cfg_file, 'w') as f:
f.write("c.A = 5")
p = Popen([sys.executable, '-m', 'jupyterhub',
'--generate-config', '-f', cfg_file],
stdout=PIPE, stdin=PIPE)
out, _ = p.communicate(b'n')
out = out.decode('utf8', 'replace')
assert os.path.exists(cfg_file)
with open(cfg_file) as f:
cfg_text = f.read()
assert cfg_text == 'c.A = 5'
p = Popen([sys.executable, '-m', 'jupyterhub',
'--generate-config', '-f', cfg_file],
stdout=PIPE, stdin=PIPE)
out, _ = p.communicate(b'x\ny')
out = out.decode('utf8', 'replace')
assert os.path.exists(cfg_file)
with open(cfg_file) as f:
cfg_text = f.read()
@@ -33,3 +54,105 @@ def test_generate_config():
assert cfg_file in out
assert 'Spawner.cmd' in cfg_text
assert 'Authenticator.whitelist' in cfg_text
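# api_tokens from config should be loaded into the database and mapped to their users at startup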
def test_init_tokens(io_loop):
with TemporaryDirectory() as td:
db_file = os.path.join(td, 'jupyterhub.sqlite')
tokens = {
'super-secret-token': 'alyx',
'also-super-secret': 'gordon',
'boagasdfasdf': 'chell',
}
app = MockHub(db_url=db_file, api_tokens=tokens)
io_loop.run_sync(lambda : app.initialize([]))
db = app.db
for token, username in tokens.items():
api_token = orm.APIToken.find(db, token)
assert api_token is not None
user = api_token.user
assert user.name == username
# simulate second startup, reloading same tokens:
app = MockHub(db_url=db_file, api_tokens=tokens)
io_loop.run_sync(lambda : app.initialize([]))
db = app.db
for token, username in tokens.items():
api_token = orm.APIToken.find(db, token)
assert api_token is not None
user = api_token.user
assert user.name == username
# don't allow failed token insertion to create users:
tokens['short'] = 'gman'
app = MockHub(db_url=db_file, api_tokens=tokens)
with pytest.raises(ValueError):
io_loop.run_sync(lambda : app.initialize([]))
assert orm.User.find(app.db, 'gman') is None
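# a freshly generated cookie secret file should be written with owner-only permissions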
def test_write_cookie_secret(tmpdir):
secret_path = str(tmpdir.join('cookie_secret'))
hub = MockHub(cookie_secret_file=secret_path)
hub.init_secrets()
assert os.path.exists(secret_path)
assert os.stat(secret_path).st_mode & 0o600
assert not os.stat(secret_path).st_mode & 0o177
def test_cookie_secret_permissions(tmpdir):
secret_file = tmpdir.join('cookie_secret')
secret_path = str(secret_file)
secret = os.urandom(1024)
secret_file.write(binascii.b2a_base64(secret))
hub = MockHub(cookie_secret_file=secret_path)
# raise with public secret file
os.chmod(secret_path, 0o664)
with pytest.raises(SystemExit):
hub.init_secrets()
# ok with same file, proper permissions
os.chmod(secret_path, 0o660)
hub.init_secrets()
assert hub.cookie_secret == secret
def test_cookie_secret_content(tmpdir):
secret_file = tmpdir.join('cookie_secret')
secret_file.write('not base 64: uñiço∂e')
secret_path = str(secret_file)
os.chmod(secret_path, 0o660)
hub = MockHub(cookie_secret_file=secret_path)
with pytest.raises(SystemExit):
hub.init_secrets()
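# JPY_COOKIE_SECRET from the environment must be hex and is used without writing the secret file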
def test_cookie_secret_env(tmpdir):
hub = MockHub(cookie_secret_file=str(tmpdir.join('cookie_secret')))
with patch.dict(os.environ, {'JPY_COOKIE_SECRET': 'not hex'}):
with pytest.raises(ValueError):
hub.init_secrets()
with patch.dict(os.environ, {'JPY_COOKIE_SECRET': 'abc123'}):
hub.init_secrets()
assert hub.cookie_secret == binascii.a2b_hex('abc123')
assert not os.path.exists(hub.cookie_secret_file)
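# load_groups config should create the groups and their memberships during init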
def test_load_groups(io_loop):
to_load = {
'blue': ['cyclops', 'rogue', 'wolverine'],
'gold': ['storm', 'jean-grey', 'colossus'],
}
hub = MockHub(load_groups=to_load)
hub.init_db()
io_loop.run_sync(hub.init_users)
hub.init_groups()
db = hub.db
blue = orm.Group.find(db, name='blue')
assert blue is not None
assert sorted([ u.name for u in blue.users ]) == sorted(to_load['blue'])
gold = orm.Group.find(db, name='gold')
assert gold is not None
assert sorted([ u.name for u in gold.users ]) == sorted(to_load['gold'])


@@ -3,7 +3,6 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from subprocess import CalledProcessError
from unittest import mock
import pytest
@@ -13,13 +12,13 @@ from jupyterhub import auth, orm
def test_pam_auth(io_loop):
authenticator = MockPAMAuthenticator()
authorized = io_loop.run_sync(lambda : authenticator.authenticate(None, {
authorized = io_loop.run_sync(lambda : authenticator.get_authenticated_user(None, {
'username': 'match',
'password': 'match',
}))
assert authorized == 'match'
authorized = io_loop.run_sync(lambda : authenticator.authenticate(None, {
authorized = io_loop.run_sync(lambda : authenticator.get_authenticated_user(None, {
'username': 'match',
'password': 'nomatch',
}))
@@ -27,19 +26,19 @@ def test_pam_auth(io_loop):
def test_pam_auth_whitelist(io_loop):
authenticator = MockPAMAuthenticator(whitelist={'wash', 'kaylee'})
authorized = io_loop.run_sync(lambda : authenticator.authenticate(None, {
authorized = io_loop.run_sync(lambda : authenticator.get_authenticated_user(None, {
'username': 'kaylee',
'password': 'kaylee',
}))
assert authorized == 'kaylee'
authorized = io_loop.run_sync(lambda : authenticator.authenticate(None, {
authorized = io_loop.run_sync(lambda : authenticator.get_authenticated_user(None, {
'username': 'wash',
'password': 'nomatch',
}))
assert authorized is None
authorized = io_loop.run_sync(lambda : authenticator.authenticate(None, {
authorized = io_loop.run_sync(lambda : authenticator.get_authenticated_user(None, {
'username': 'mal',
'password': 'mal',
}))
@@ -59,14 +58,14 @@ def test_pam_auth_group_whitelist(io_loop):
authenticator = MockPAMAuthenticator(group_whitelist={'group'})
with mock.patch.object(auth, 'getgrnam', getgrnam):
authorized = io_loop.run_sync(lambda : authenticator.authenticate(None, {
authorized = io_loop.run_sync(lambda : authenticator.get_authenticated_user(None, {
'username': 'kaylee',
'password': 'kaylee',
}))
assert authorized == 'kaylee'
with mock.patch.object(auth, 'getgrnam', getgrnam):
authorized = io_loop.run_sync(lambda : authenticator.authenticate(None, {
authorized = io_loop.run_sync(lambda : authenticator.get_authenticated_user(None, {
'username': 'mal',
'password': 'mal',
}))
@@ -75,7 +74,7 @@ def test_pam_auth_group_whitelist(io_loop):
def test_pam_auth_no_such_group(io_loop):
authenticator = MockPAMAuthenticator(group_whitelist={'nosuchcrazygroup'})
authorized = io_loop.run_sync(lambda : authenticator.authenticate(None, {
authorized = io_loop.run_sync(lambda : authenticator.get_authenticated_user(None, {
'username': 'kaylee',
'password': 'kaylee',
}))
@@ -93,33 +92,47 @@ def test_wont_add_system_user(io_loop):
def test_cant_add_system_user(io_loop):
user = orm.User(name='lioness4321')
authenticator = auth.PAMAuthenticator(whitelist={'mal'})
authenticator.add_user_cmd = ['jupyterhub-fake-command']
authenticator.create_system_users = True
def check_output(cmd, *a, **kw):
raise CalledProcessError(1, cmd)
class DummyFile:
def read(self):
return b'dummy error'
with mock.patch.object(auth, 'check_output', check_output):
with pytest.raises(RuntimeError):
class DummyPopen:
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
self.returncode = 1
self.stdout = DummyFile()
def wait(self):
return
with mock.patch.object(auth, 'Popen', DummyPopen):
with pytest.raises(RuntimeError) as exc:
io_loop.run_sync(lambda : authenticator.add_user(user))
assert str(exc.value) == 'Failed to create system user lioness4321: dummy error'
def test_add_system_user(io_loop):
user = orm.User(name='lioness4321')
authenticator = auth.PAMAuthenticator(whitelist={'mal'})
authenticator.create_system_users = True
def check_output(*a, **kw):
return
authenticator.add_user_cmd = ['echo', '/home/USERNAME']
record = {}
def check_call(cmd, *a, **kw):
record['cmd'] = cmd
class DummyPopen:
def __init__(self, cmd, *args, **kwargs):
record['cmd'] = cmd
self.returncode = 0
def wait(self):
return
with mock.patch.object(auth, 'check_output', check_output), \
mock.patch.object(auth, 'check_call', check_call):
with mock.patch.object(auth, 'Popen', DummyPopen):
io_loop.run_sync(lambda : authenticator.add_user(user))
assert user.name in record['cmd']
assert record['cmd'] == ['echo', '/home/lioness4321', 'lioness4321']
def test_delete_user(io_loop):
@@ -147,3 +160,57 @@ def test_handlers(app):
assert handlers[0][0] == '/login'
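# usernames should be normalized to lowercase by default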
def test_normalize_names(io_loop):
a = MockPAMAuthenticator()
authorized = io_loop.run_sync(lambda : a.get_authenticated_user(None, {
'username': 'ZOE',
'password': 'ZOE',
}))
assert authorized == 'zoe'
authorized = io_loop.run_sync(lambda: a.get_authenticated_user(None, {
'username': 'Glenn',
'password': 'Glenn',
}))
assert authorized == 'glenn'
authorized = io_loop.run_sync(lambda: a.get_authenticated_user(None, {
'username': 'hExi',
'password': 'hExi',
}))
assert authorized == 'hexi'
authorized = io_loop.run_sync(lambda: a.get_authenticated_user(None, {
'username': 'Test',
'password': 'Test',
}))
assert authorized == 'test'
def test_username_map(io_loop):
a = MockPAMAuthenticator(username_map={'wash': 'alpha'})
authorized = io_loop.run_sync(lambda : a.get_authenticated_user(None, {
'username': 'WASH',
'password': 'WASH',
}))
assert authorized == 'alpha'
authorized = io_loop.run_sync(lambda : a.get_authenticated_user(None, {
'username': 'Inara',
'password': 'Inara',
}))
assert authorized == 'inara'
def test_validate_names(io_loop):
a = auth.PAMAuthenticator()
assert a.validate_username('willow')
assert a.validate_username('giles')
assert a.validate_username('Test')
assert a.validate_username('hExi')
assert a.validate_username('Glenn#Smith!')
a = auth.PAMAuthenticator(username_pattern='w.*')
assert not a.validate_username('xander')
assert a.validate_username('willow')


@@ -0,0 +1,48 @@
from glob import glob
import os
import shutil
from sqlalchemy.exc import OperationalError
from pytest import raises
from ..dbutil import upgrade
from ..app import NewToken, UpgradeDB, JupyterHub
here = os.path.dirname(__file__)
old_db = os.path.join(here, 'old-jupyterhub.sqlite')
def generate_old_db(path):
db_path = os.path.join(path, "jupyterhub.sqlite")
print(old_db, db_path)
shutil.copy(old_db, db_path)
return 'sqlite:///%s' % db_path
def test_upgrade(tmpdir):
print(tmpdir)
db_url = generate_old_db(str(tmpdir))
print(db_url)
upgrade(db_url)
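# the UpgradeDB app should back up the old sqlite file and migrate it so the token app works again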
def test_upgrade_entrypoint(tmpdir, io_loop):
generate_old_db(str(tmpdir))
tmpdir.chdir()
tokenapp = NewToken()
tokenapp.initialize(['kaylee'])
with raises(OperationalError):
tokenapp.start()
sqlite_files = glob(os.path.join(str(tmpdir), 'jupyterhub.sqlite*'))
assert len(sqlite_files) == 1
upgradeapp = UpgradeDB()
io_loop.run_sync(lambda : upgradeapp.initialize([]))
upgradeapp.start()
# check that backup was created:
sqlite_files = glob(os.path.join(str(tmpdir), 'jupyterhub.sqlite*'))
assert len(sqlite_files) == 2
# run tokenapp again, it should work
tokenapp.start()


@@ -7,6 +7,7 @@ import pytest
from tornado import gen
from .. import orm
from ..user import User
from .mocking import MockSpawner
@@ -19,7 +20,7 @@ def test_server(db):
assert server.proto == 'http'
assert isinstance(server.port, int)
assert isinstance(server.cookie_name, str)
assert server.host == 'http://localhost:%i' % server.port
assert server.host == 'http://127.0.0.1:%i' % server.port
assert server.url == server.host + '/'
assert server.bind_url == 'http://*:%i/' % server.port
server.ip = '127.0.0.1'
@@ -89,13 +90,86 @@ def test_tokens(db):
assert len(user.api_tokens) == 2
found = orm.APIToken.find(db, token=token)
assert found.match(token)
assert found.user is user
assert found.service is None
found = orm.APIToken.find(db, 'something else')
assert found is None
secret = 'super-secret-preload-token'
token = user.new_api_token(secret)
assert token == secret
assert len(user.api_tokens) == 3
# raise ValueError on collision
with pytest.raises(ValueError):
user.new_api_token(token)
assert len(user.api_tokens) == 3
def test_service_tokens(db):
service = orm.Service(name='secret')
db.add(service)
db.commit()
token = service.new_api_token()
assert any(t.match(token) for t in service.api_tokens)
service.new_api_token()
assert len(service.api_tokens) == 2
found = orm.APIToken.find(db, token=token)
assert found.match(token)
assert found.user is None
assert found.service is service
service2 = orm.Service(name='secret')
db.add(service)
db.commit()
assert service2.id != service.id
def test_service_server(db):
service = orm.Service(name='has_servers')
db.add(service)
db.commit()
assert service.server is None
server = service.server = orm.Server()
assert service
assert server.id is None
db.commit()
assert isinstance(server.id, int)
def test_token_find(db):
service = db.query(orm.Service).first()
user = db.query(orm.User).first()
service_token = service.new_api_token()
user_token = user.new_api_token()
with pytest.raises(ValueError):
orm.APIToken.find(db, 'irrelevant', kind='richard')
# no kind, find anything
found = orm.APIToken.find(db, token=user_token)
assert found
assert found.match(user_token)
found = orm.APIToken.find(db, token=service_token)
assert found
assert found.match(service_token)
# kind=user, only find user tokens
found = orm.APIToken.find(db, token=user_token, kind='user')
assert found
assert found.match(user_token)
found = orm.APIToken.find(db, token=service_token, kind='user')
assert found is None
# kind=service, only find service tokens
found = orm.APIToken.find(db, token=service_token, kind='service')
assert found
assert found.match(service_token)
found = orm.APIToken.find(db, token=user_token, kind='service')
assert found is None
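# a Spawner whose start() raises should leave the user stopped, with no server registered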
def test_spawn_fails(db, io_loop):
user = orm.User(name='aeofel')
db.add(user)
orm_user = orm.User(name='aeofel')
db.add(orm_user)
db.commit()
class BadSpawner(MockSpawner):
@@ -103,8 +177,27 @@ def test_spawn_fails(db, io_loop):
def start(self):
raise RuntimeError("Split the party")
user = User(orm_user, {
'spawner_class': BadSpawner,
'config': None,
})
with pytest.raises(Exception) as exc:
io_loop.run_sync(lambda : user.spawn(BadSpawner))
io_loop.run_sync(user.spawn)
assert user.server is None
assert not user.running
def test_groups(db):
user = orm.User.find(db, name='aeofel')
db.add(user)
group = orm.Group(name='lives')
db.add(group)
db.commit()
assert group.users == []
assert user.groups == []
group.users.append(user)
db.commit()
assert group.users == [user]
assert user.groups == [group]


@@ -1,13 +1,22 @@
"""Tests for HTML pages"""
from urllib.parse import urlencode, urlparse
import requests
from ..utils import url_path_join as ujoin
from .. import orm
import mock
from .mocking import FormSpawner, public_url, public_host
from .test_api import api_request
def get_page(path, app, **kw):
base_url = ujoin(app.proxy.public_server.host, app.hub.server.base_url)
def get_page(path, app, hub=True, **kw):
if hub:
prefix = app.hub.server.base_url
else:
prefix = app.base_url
base_url = ujoin(public_host(app), prefix)
print(base_url)
return requests.get(ujoin(base_url, path), **kw)
@@ -16,15 +25,27 @@ def test_root_no_auth(app, io_loop):
routes = io_loop.run_sync(app.proxy.get_routes)
print(routes)
print(app.hub.server)
r = requests.get(app.proxy.public_server.host)
url = ujoin(public_host(app), app.hub.server.base_url)
print(url)
r = requests.get(url)
r.raise_for_status()
assert r.url == ujoin(app.proxy.public_server.host, app.hub.server.base_url)
assert r.url == ujoin(url, 'login')
def test_root_auth(app):
cookies = app.login_user('river')
r = requests.get(app.proxy.public_server.host, cookies=cookies)
r = requests.get(public_url(app), cookies=cookies)
r.raise_for_status()
assert r.url == ujoin(app.proxy.public_server.host, '/user/river')
assert r.url == public_url(app, app.users['river'])
def test_root_redirect(app):
name = 'wash'
cookies = app.login_user(name)
next_url = ujoin(app.base_url, 'user/other/test.ipynb')
url = '/?' + urlencode({'next': next_url})
r = get_page(url, app, cookies=cookies)
r.raise_for_status()
path = urlparse(r.url).path
assert path == ujoin(app.base_url, 'user/%s/test.ipynb' % name)
def test_home_no_auth(app):
r = get_page('home', app, allow_redirects=False)
@@ -56,3 +77,205 @@ def test_admin(app):
r.raise_for_status()
assert r.url.endswith('/admin')
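# visiting /hub/spawn should start the user's server if needed and redirect to /user/<name>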
def test_spawn_redirect(app, io_loop):
name = 'wash'
cookies = app.login_user(name)
u = app.users[orm.User.find(app.db, name)]
# ensure wash's server isn't running:
r = api_request(app, 'users', name, 'server', method='delete', cookies=cookies)
r.raise_for_status()
status = io_loop.run_sync(u.spawner.poll)
assert status is not None
# test spawn page when no server is running
r = get_page('spawn', app, cookies=cookies)
r.raise_for_status()
print(urlparse(r.url))
path = urlparse(r.url).path
assert path == ujoin(app.base_url, 'user/%s' % name)
# should have started server
status = io_loop.run_sync(u.spawner.poll)
assert status is None
# test spawn page when server is already running (just redirect)
r = get_page('spawn', app, cookies=cookies)
r.raise_for_status()
print(urlparse(r.url))
path = urlparse(r.url).path
assert path == ujoin(app.base_url, '/user/%s' % name)
def test_spawn_page(app):
with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}):
cookies = app.login_user('jones')
r = get_page('spawn', app, cookies=cookies)
assert r.url.endswith('/spawn')
assert FormSpawner.options_form in r.text
def test_spawn_form(app, io_loop):
with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}):
base_url = ujoin(public_host(app), app.hub.server.base_url)
cookies = app.login_user('jones')
orm_u = orm.User.find(app.db, 'jones')
u = app.users[orm_u]
io_loop.run_sync(u.stop)
r = requests.post(ujoin(base_url, 'spawn'), cookies=cookies, data={
'bounds': ['-1', '1'],
'energy': '511keV',
})
r.raise_for_status()
print(u.spawner)
print(u.spawner.user_options)
assert u.spawner.user_options == {
'energy': '511keV',
'bounds': [-1, 1],
'notspecified': 5,
}
def test_spawn_form_with_file(app, io_loop):
with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}):
base_url = ujoin(public_host(app), app.hub.server.base_url)
cookies = app.login_user('jones')
orm_u = orm.User.find(app.db, 'jones')
u = app.users[orm_u]
io_loop.run_sync(u.stop)
r = requests.post(ujoin(base_url, 'spawn'),
cookies=cookies,
data={
'bounds': ['-1', '1'],
'energy': '511keV',
},
files={'hello': ('hello.txt', b'hello world\n')}
)
r.raise_for_status()
assert u.spawner.user_options == {
'energy': '511keV',
'bounds': [-1, 1],
'notspecified': 5,
'hello': {'filename': 'hello.txt',
'body': b'hello world\n',
'content_type': 'application/unknown'},
}
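# /user-redirect/<path> should send anonymous visitors to login (with ?next=...) and logged-in users to their own server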
def test_user_redirect(app):
name = 'wash'
cookies = app.login_user(name)
r = get_page('/user-redirect/tree/top/', app)
r.raise_for_status()
print(urlparse(r.url))
path = urlparse(r.url).path
assert path == ujoin(app.base_url, '/hub/login')
query = urlparse(r.url).query
assert query == urlencode({
'next': ujoin(app.hub.server.base_url, '/user-redirect/tree/top/')
})
r = get_page('/user-redirect/notebooks/test.ipynb', app, cookies=cookies)
r.raise_for_status()
print(urlparse(r.url))
path = urlparse(r.url).path
assert path == ujoin(app.base_url, '/user/%s/notebooks/test.ipynb' % name)
def test_user_redirect_deprecated(app):
"""redirecting from /user/someonelse/ URLs (deprecated)"""
name = 'wash'
cookies = app.login_user(name)
r = get_page('/user/baduser', app, cookies=cookies, hub=False)
r.raise_for_status()
print(urlparse(r.url))
path = urlparse(r.url).path
assert path == ujoin(app.base_url, '/user/%s' % name)
r = get_page('/user/baduser/test.ipynb', app, cookies=cookies, hub=False)
r.raise_for_status()
print(urlparse(r.url))
path = urlparse(r.url).path
assert path == ujoin(app.base_url, '/user/%s/test.ipynb' % name)
r = get_page('/user/baduser/test.ipynb', app, hub=False)
r.raise_for_status()
print(urlparse(r.url))
path = urlparse(r.url).path
assert path == ujoin(app.base_url, '/hub/login')
query = urlparse(r.url).query
assert query == urlencode({
'next': ujoin(app.base_url, '/hub/user/baduser/test.ipynb')
})
def test_login_fail(app):
name = 'wash'
base_url = public_url(app)
r = requests.post(base_url + 'hub/login',
data={
'username': name,
'password': 'wrong',
},
allow_redirects=False,
)
assert not r.cookies
def test_login_redirect(app, io_loop):
cookies = app.login_user('river')
user = app.users['river']
# no next_url, server running
io_loop.run_sync(user.spawn)
r = get_page('login', app, cookies=cookies, allow_redirects=False)
r.raise_for_status()
assert r.status_code == 302
assert '/user/river' in r.headers['Location']
# no next_url, server not running
io_loop.run_sync(user.stop)
r = get_page('login', app, cookies=cookies, allow_redirects=False)
r.raise_for_status()
assert r.status_code == 302
assert '/hub/' in r.headers['Location']
# next URL given, use it
r = get_page('login?next=/hub/admin', app, cookies=cookies, allow_redirects=False)
r.raise_for_status()
assert r.status_code == 302
assert r.headers['Location'].endswith('/hub/admin')
def test_logout(app):
name = 'wash'
cookies = app.login_user(name)
r = requests.get(public_host(app) + app.tornado_settings['logout_url'], cookies=cookies)
r.raise_for_status()
login_url = public_host(app) + app.tornado_settings['login_url']
assert r.url == login_url
assert r.cookies == {}
def test_login_no_whitelist_adds_user(app):
auth = app.authenticator
mock_add_user = mock.Mock()
with mock.patch.object(auth, 'add_user', mock_add_user):
cookies = app.login_user('jubal')
user = app.users['jubal']
assert mock_add_user.mock_calls == [mock.call(user)]
def test_static_files(app):
base_url = ujoin(public_host(app), app.hub.server.base_url)
r = requests.get(ujoin(base_url, 'logo'))
r.raise_for_status()
assert r.headers['content-type'] == 'image/png'
r = requests.get(ujoin(base_url, 'static', 'images', 'jupyter.png'))
r.raise_for_status()
assert r.headers['content-type'] == 'image/png'
r = requests.get(ujoin(base_url, 'static', 'css', 'style.min.css'))
r.raise_for_status()
assert r.headers['content-type'] == 'text/css'


@@ -4,11 +4,12 @@ import json
import os
from queue import Queue
from subprocess import Popen
from urllib.parse import urlparse, unquote
from .. import orm
from .mocking import MockHub
from .test_api import api_request
from ..utils import wait_for_http_server
from ..utils import wait_for_http_server, url_path_join as ujoin
def test_external_proxy(request, io_loop):
"""Test a proxy started before the Hub"""
@@ -34,6 +35,8 @@ def test_external_proxy(request, io_loop):
'--api-port', str(proxy_port),
'--default-target', 'http://%s:%i' % (app.hub_ip, app.hub_port),
]
if app.subdomain_host:
cmd.append('--host-routing')
proxy = Popen(cmd, env=env)
def _cleanup_proxy():
if proxy.poll() is None:
@@ -60,7 +63,11 @@ def test_external_proxy(request, io_loop):
r.raise_for_status()
routes = io_loop.run_sync(app.proxy.get_routes)
assert sorted(routes.keys()) == ['/', '/user/river']
user_path = unquote(ujoin(app.base_url, 'user/river'))
if app.subdomain_host:
domain = urlparse(app.subdomain_host).hostname
user_path = '/%s.%s' % (name, domain) + user_path
assert sorted(routes.keys()) == ['/', user_path]
# teardown the proxy and start a new one in the same place
proxy.terminate()
@@ -76,7 +83,7 @@ def test_external_proxy(request, io_loop):
# check that the routes are correct
routes = io_loop.run_sync(app.proxy.get_routes)
assert sorted(routes.keys()) == ['/', '/user/river']
assert sorted(routes.keys()) == ['/', user_path]
# teardown the proxy again, and start a new one with different auth and port
proxy.terminate()
@@ -90,13 +97,16 @@ def test_external_proxy(request, io_loop):
'--api-port', str(proxy_port),
'--default-target', 'http://%s:%i' % (app.hub_ip, app.hub_port),
]
if app.subdomain_host:
cmd.append('--host-routing')
proxy = Popen(cmd, env=env)
wait_for_proxy()
# tell the hub where the new proxy is
r = api_request(app, 'proxy', method='patch', data=json.dumps({
'port': proxy_port,
'protocol': 'http',
'ip': app.ip,
'auth_token': new_auth_token,
}))
r.raise_for_status()
@@ -113,7 +123,8 @@ def test_external_proxy(request, io_loop):
# check that the routes are correct
routes = io_loop.run_sync(app.proxy.get_routes)
assert sorted(routes.keys()) == ['/', '/user/river']
assert sorted(routes.keys()) == ['/', user_path]
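# check_routes should restore a user route that has gone missing from the proxy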
def test_check_routes(app, io_loop):
proxy = app.proxy
@@ -123,13 +134,24 @@ def test_check_routes(app, io_loop):
r.raise_for_status()
zoe = orm.User.find(app.db, 'zoe')
assert zoe is not None
zoe = app.users[zoe]
before = sorted(io_loop.run_sync(app.proxy.get_routes))
assert '/user/zoe' in before
io_loop.run_sync(app.proxy.check_routes)
assert unquote(zoe.proxy_path) in before
io_loop.run_sync(lambda : app.proxy.check_routes(app.users, app._service_map))
io_loop.run_sync(lambda : proxy.delete_user(zoe))
during = sorted(io_loop.run_sync(app.proxy.get_routes))
assert '/user/zoe' not in during
io_loop.run_sync(app.proxy.check_routes)
assert unquote(zoe.proxy_path) not in during
io_loop.run_sync(lambda : app.proxy.check_routes(app.users, app._service_map))
after = sorted(io_loop.run_sync(app.proxy.get_routes))
assert '/user/zoe' in after
assert unquote(zoe.proxy_path) in after
assert before == after
def test_patch_proxy_bad_req(app):
r = api_request(app, 'proxy', method='patch')
assert r.status_code == 400
r = api_request(app, 'proxy', method='patch', data='notjson')
assert r.status_code == 400
r = api_request(app, 'proxy', method='patch', data=json.dumps([]))
assert r.status_code == 400


@@ -0,0 +1,104 @@
"""Tests for services"""
from binascii import hexlify
from contextlib import contextmanager
import os
from subprocess import Popen
import sys
from threading import Event
import time
import requests
from tornado import gen
from tornado.ioloop import IOLoop
from .mocking import public_url
from ..utils import url_path_join, wait_for_http_server
here = os.path.dirname(os.path.abspath(__file__))
mockservice_py = os.path.join(here, 'mockservice.py')
mockservice_cmd = [sys.executable, mockservice_py]
from ..utils import random_port
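# run mockservice.py as an externally-managed service and wait until it answers HTTP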
@contextmanager
def external_service(app, name='mockservice'):
env = {
'JUPYTERHUB_API_TOKEN': hexlify(os.urandom(5)),
'JUPYTERHUB_SERVICE_NAME': name,
'JUPYTERHUB_API_URL': url_path_join(app.hub.server.url, 'api/'),
'JUPYTERHUB_SERVICE_URL': 'http://127.0.0.1:%i' % random_port(),
}
p = Popen(mockservice_cmd, env=env)
IOLoop().run_sync(lambda : wait_for_http_server(env['JUPYTERHUB_SERVICE_URL']))
try:
yield env
finally:
p.terminate()
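# the Hub should notice when a managed service dies and restart it with a new pid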
def test_managed_service(app, mockservice):
service = mockservice
proc = service.proc
first_pid = proc.pid
assert proc.poll() is None
# shut it down:
proc.terminate()
proc.wait(10)
assert proc.poll() is not None
# ensure Hub notices and brings it back up:
for i in range(20):
if service.proc is not proc:
break
else:
time.sleep(0.2)
assert service.proc.pid != first_pid
assert service.proc.poll() is None
def test_proxy_service(app, mockservice_url, io_loop):
service = mockservice_url
name = service.name
routes = io_loop.run_sync(app.proxy.get_routes)
url = public_url(app, service) + '/foo'
r = requests.get(url, allow_redirects=False)
path = '/services/{}/foo'.format(name)
r.raise_for_status()
assert r.status_code == 200
assert r.text.endswith(path)
def test_external_service(app, io_loop):
name = 'external'
with external_service(app, name=name) as env:
app.services = [{
'name': name,
'admin': True,
'url': env['JUPYTERHUB_SERVICE_URL'],
'api_token': env['JUPYTERHUB_API_TOKEN'],
}]
app.init_services()
app.init_api_tokens()
evt = Event()
@gen.coroutine
def add_services():
yield app.proxy.add_all_services(app._service_map)
evt.set()
app.io_loop.add_callback(add_services)
assert evt.wait(10)
service = app._service_map[name]
url = public_url(app, service) + '/api/users'
path = '/services/{}/api/users'.format(name)
r = requests.get(url, allow_redirects=False)
r.raise_for_status()
assert r.status_code == 200
resp = r.json()
assert isinstance(resp, list)
assert len(resp) >= 1
assert isinstance(resp[0], dict)
assert 'name' in resp[0]


@@ -0,0 +1,209 @@
import json
from queue import Queue
import sys
from threading import Thread
import time
from unittest import mock
from pytest import raises
import requests
import requests_mock
from tornado.ioloop import IOLoop
from tornado.httpserver import HTTPServer
from tornado.web import RequestHandler, Application, authenticated, HTTPError
from ..services.auth import _ExpiringDict, HubAuth, HubAuthenticated
from ..utils import url_path_join
from .mocking import public_url
# mock for sending monotonic counter way into the future
monotonic_future = mock.patch('time.monotonic', lambda : sys.maxsize)
def test_expiring_dict():
cache = _ExpiringDict(max_age=30)
cache['key'] = 'cached value'
assert 'key' in cache
assert cache['key'] == 'cached value'
with raises(KeyError):
cache['nokey']
with monotonic_future:
assert 'key' not in cache
cache['key'] = 'cached value'
assert 'key' in cache
with monotonic_future:
assert 'key' not in cache
cache['key'] = 'cached value'
assert 'key' in cache
with monotonic_future:
with raises(KeyError):
cache['key']
cache['key'] = 'cached value'
assert 'key' in cache
with monotonic_future:
assert cache.get('key', 'default') == 'default'
cache.max_age = 0
cache['key'] = 'cached value'
assert 'key' in cache
with monotonic_future:
assert cache.get('key', 'default') == 'cached value'
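# HubAuth.user_for_cookie should ask the Hub API for the user and cache the result; Hub errors surface as HTTPErrors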
def test_hub_auth():
start = time.monotonic()
auth = HubAuth(cookie_name='foo')
mock_model = {
'name': 'onyxia'
}
url = url_path_join(auth.api_url, "authorizations/cookie/foo/bar")
with requests_mock.Mocker() as m:
m.get(url, text=json.dumps(mock_model))
user_model = auth.user_for_cookie('bar')
assert user_model == mock_model
# check cache
user_model = auth.user_for_cookie('bar')
assert user_model == mock_model
with requests_mock.Mocker() as m:
m.get(url, status_code=404)
user_model = auth.user_for_cookie('bar', use_cache=False)
assert user_model is None
# invalidate cache with timer
mock_model = {
'name': 'willow'
}
with monotonic_future, requests_mock.Mocker() as m:
m.get(url, text=json.dumps(mock_model))
user_model = auth.user_for_cookie('bar')
assert user_model == mock_model
with requests_mock.Mocker() as m:
m.get(url, status_code=500)
with raises(HTTPError) as exc_info:
user_model = auth.user_for_cookie('bar', use_cache=False)
assert exc_info.value.status_code == 502
with requests_mock.Mocker() as m:
m.get(url, status_code=400)
with raises(HTTPError) as exc_info:
user_model = auth.user_for_cookie('bar', use_cache=False)
assert exc_info.value.status_code == 500
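# a HubAuthenticated handler should redirect unauthenticated requests to the Hub login page and enforce hub_users as a whitelist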
def test_hub_authenticated(request):
auth = HubAuth(cookie_name='jubal')
mock_model = {
'name': 'jubalearly'
}
cookie_url = url_path_join(auth.api_url, "authorizations/cookie", auth.cookie_name)
good_url = url_path_join(cookie_url, "early")
bad_url = url_path_join(cookie_url, "late")
class TestHandler(HubAuthenticated, RequestHandler):
hub_auth = auth
@authenticated
def get(self):
self.finish(self.get_current_user())
# start hub-authenticated service in a thread:
port = 50505
q = Queue()
def run():
app = Application([
('/*', TestHandler),
], login_url=auth.login_url)
http_server = HTTPServer(app)
http_server.listen(port)
loop = IOLoop.current()
loop.add_callback(lambda : q.put(loop))
loop.start()
t = Thread(target=run)
t.start()
def finish_thread():
loop.stop()
t.join()
request.addfinalizer(finish_thread)
# wait for thread to start
loop = q.get(timeout=10)
with requests_mock.Mocker(real_http=True) as m:
# no cookie
r = requests.get('http://127.0.0.1:%i' % port,
allow_redirects=False,
)
r.raise_for_status()
assert r.status_code == 302
assert auth.login_url in r.headers['Location']
# wrong cookie
m.get(bad_url, status_code=404)
r = requests.get('http://127.0.0.1:%i' % port,
cookies={'jubal': 'late'},
allow_redirects=False,
)
r.raise_for_status()
assert r.status_code == 302
assert auth.login_url in r.headers['Location']
# upstream 403
m.get(bad_url, status_code=403)
r = requests.get('http://127.0.0.1:%i' % port,
cookies={'jubal': 'late'},
allow_redirects=False,
)
assert r.status_code == 500
m.get(good_url, text=json.dumps(mock_model))
# no whitelist
r = requests.get('http://127.0.0.1:%i' % port,
cookies={'jubal': 'early'},
allow_redirects=False,
)
r.raise_for_status()
assert r.status_code == 200
# pass whitelist
TestHandler.hub_users = {'jubalearly'}
r = requests.get('http://127.0.0.1:%i' % port,
cookies={'jubal': 'early'},
allow_redirects=False,
)
r.raise_for_status()
assert r.status_code == 200
# fail whitelist
TestHandler.hub_users = {'kaylee'}
r = requests.get('http://127.0.0.1:%i' % port,
cookies={'jubal': 'early'},
allow_redirects=False,
)
r.raise_for_status()
assert r.status_code == 302
assert auth.login_url in r.headers['Location']
def test_service_cookie_auth(app, mockservice_url):
cookies = app.login_user('badger')
r = requests.get(public_url(app, mockservice_url) + '/whoami/', cookies=cookies)
r.raise_for_status()
print(r.text)
reply = r.json()
sub_reply = { key: reply.get(key, 'missing') for key in ['name', 'admin']}
assert sub_reply == {
'name': 'badger',
'admin': False,
}

Some files were not shown because too many files have changed in this diff.