mirror of
https://github.com/jupyterhub/jupyterhub.git
synced 2025-10-07 18:14:10 +00:00
Compare commits
773 Commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
b1111363fd | ||
![]() |
6c99b807c2 | ||
![]() |
8d650f594e | ||
![]() |
04a0a3a2e5 | ||
![]() |
9cebfd6367 | ||
![]() |
587cd70221 | ||
![]() |
e94f5e043a | ||
![]() |
5456fb6356 | ||
![]() |
fb75b9a392 | ||
![]() |
90d341e6f7 | ||
![]() |
a0354de3c1 | ||
![]() |
2e4e1ce82f | ||
![]() |
06f646099f | ||
![]() |
3360817cb6 | ||
![]() |
e042ad0b4a | ||
![]() |
246f9f9044 | ||
![]() |
bc08f4de34 | ||
![]() |
12904ecc32 | ||
![]() |
601d371796 | ||
![]() |
30d9e09390 | ||
![]() |
7850a5d478 | ||
![]() |
f5a3b1bc5a | ||
![]() |
b2fe8e5691 | ||
![]() |
9d4c410996 | ||
![]() |
dcae92ce4a | ||
![]() |
29957b8cd8 | ||
![]() |
6299e0368c | ||
![]() |
c862b6062d | ||
![]() |
146587ffff | ||
![]() |
077d8dec9a | ||
![]() |
af8d6086fc | ||
![]() |
18f8661d73 | ||
![]() |
bd70f66c70 | ||
![]() |
ac213fc4b5 | ||
![]() |
db33549173 | ||
![]() |
e985e2b84c | ||
![]() |
1d9abf7528 | ||
![]() |
935baa8bc6 | ||
![]() |
9b77732319 | ||
![]() |
85aac0fa2d | ||
![]() |
abd6f35638 | ||
![]() |
ba4700b3f3 | ||
![]() |
05b11bd47a | ||
![]() |
71cb628563 | ||
![]() |
0d664355f0 | ||
![]() |
dd6261d031 | ||
![]() |
f3f5b69e49 | ||
![]() |
9ea4ca3646 | ||
![]() |
8ee9869ca0 | ||
![]() |
6cedd73d2a | ||
![]() |
59145ca0f7 | ||
![]() |
ab02f9c568 | ||
![]() |
a2f003ed31 | ||
![]() |
7b6dd9f5cf | ||
![]() |
0fa5c20f89 | ||
![]() |
204399ee2c | ||
![]() |
5e68dce02f | ||
![]() |
952bbea039 | ||
![]() |
630e85bfec | ||
![]() |
26f7bb51bd | ||
![]() |
a1c2a50810 | ||
![]() |
906abcc2f3 | ||
![]() |
5269370e4a | ||
![]() |
897f5f62d5 | ||
![]() |
727356870a | ||
![]() |
39aed3a5a0 | ||
![]() |
ed26578717 | ||
![]() |
22863f765f | ||
![]() |
b500bd002b | ||
![]() |
aca40b24c3 | ||
![]() |
b5fe5a80c6 | ||
![]() |
ad073dd5dd | ||
![]() |
7b815558c6 | ||
![]() |
55f58b3ba7 | ||
![]() |
e1f93a4721 | ||
![]() |
2e95f3c039 | ||
![]() |
b0ba51f209 | ||
![]() |
89e6c2110e | ||
![]() |
7dfdc23b4e | ||
![]() |
4c7df53a8a | ||
![]() |
678afd3783 | ||
![]() |
0185a08f32 | ||
![]() |
f3787dd2c8 | ||
![]() |
30f19cfc8c | ||
![]() |
a84fa38c6b | ||
![]() |
867ce4c213 | ||
![]() |
005118e09d | ||
![]() |
04ce67ee71 | ||
![]() |
31807929cb | ||
![]() |
cb4105b53e | ||
![]() |
151887dd56 | ||
![]() |
5f97487184 | ||
![]() |
4d2d677777 | ||
![]() |
6a3b3807c9 | ||
![]() |
02a52a0289 | ||
![]() |
7bd1e387df | ||
![]() |
edc0d7901f | ||
![]() |
8e561f1c12 | ||
![]() |
24d87c882f | ||
![]() |
1e333e2f29 | ||
![]() |
a507fa1c8a | ||
![]() |
90cc03b3ec | ||
![]() |
6f15113e2a | ||
![]() |
f3f08c9caa | ||
![]() |
c495c4731a | ||
![]() |
e08a50ef66 | ||
![]() |
fbcd792062 | ||
![]() |
bb81ce0160 | ||
![]() |
315087d67c | ||
![]() |
31e6a15a85 | ||
![]() |
aed99d8d19 | ||
![]() |
ec83708892 | ||
![]() |
bedac5f148 | ||
![]() |
376aa13981 | ||
![]() |
4bc8b48763 | ||
![]() |
21496890f6 | ||
![]() |
70dcd50e44 | ||
![]() |
24094567e5 | ||
![]() |
6bd0febbe1 | ||
![]() |
57075aba52 | ||
![]() |
f0260aae52 | ||
![]() |
edd8e21f71 | ||
![]() |
681d3ce2d8 | ||
![]() |
97e792ccde | ||
![]() |
a5a0543b2a | ||
![]() |
5a810ccba3 | ||
![]() |
0a6b2cdadc | ||
![]() |
08903e7af8 | ||
![]() |
78439329c0 | ||
![]() |
4dfd6bc4b9 | ||
![]() |
574cc39b5f | ||
![]() |
6fb43a8241 | ||
![]() |
84c82fe382 | ||
![]() |
5e45e76f5b | ||
![]() |
92fd819cd6 | ||
![]() |
cb5ef0c302 | ||
![]() |
34fab033fe | ||
![]() |
37f4c4429e | ||
![]() |
293410ec94 | ||
![]() |
ed6ee27dcd | ||
![]() |
ca16ddb7ad | ||
![]() |
2102c1fd1c | ||
![]() |
aa9676ec5e | ||
![]() |
5e93c7de4c | ||
![]() |
d22626906b | ||
![]() |
5f91ed044e | ||
![]() |
5c3c7493c1 | ||
![]() |
1b7965092e | ||
![]() |
ef60be5a99 | ||
![]() |
f78d652cd6 | ||
![]() |
3650575797 | ||
![]() |
0f000f6d41 | ||
![]() |
643729ac0c | ||
![]() |
91a67bf580 | ||
![]() |
c75eddb730 | ||
![]() |
0f5888ad6c | ||
![]() |
8c48f3b856 | ||
![]() |
6e7e18bc3c | ||
![]() |
3dfd7e5a84 | ||
![]() |
19ecbf3734 | ||
![]() |
eac3e8ba90 | ||
![]() |
a7a6829b69 | ||
![]() |
61299113c8 | ||
![]() |
21a57dfa0b | ||
![]() |
a7226a8231 | ||
![]() |
6e3dd21f60 | ||
![]() |
cf049730d4 | ||
![]() |
cb9ce4d3af | ||
![]() |
925ee1dfb2 | ||
![]() |
5d9122b26c | ||
![]() |
6821ad0c59 | ||
![]() |
ff7851ee2e | ||
![]() |
6940ed85b1 | ||
![]() |
3d497a7f43 | ||
![]() |
cc6968e225 | ||
![]() |
a6c517c344 | ||
![]() |
a3e08b7f52 | ||
![]() |
14c8d7dc46 | ||
![]() |
ac2590c679 | ||
![]() |
ead13c6a11 | ||
![]() |
5002ab2990 | ||
![]() |
ab3e7293a4 | ||
![]() |
062af5e5cb | ||
![]() |
92088570ea | ||
![]() |
604ccf515d | ||
![]() |
ec9b244990 | ||
![]() |
09acdc23b5 | ||
![]() |
e7808b50af | ||
![]() |
9c27095744 | ||
![]() |
690b07982e | ||
![]() |
784e5aa4ee | ||
![]() |
29187cab3a | ||
![]() |
43a72807c6 | ||
![]() |
1d1f6f1870 | ||
![]() |
505a6eb4e3 | ||
![]() |
cc49df8147 | ||
![]() |
98d60402b5 | ||
![]() |
319e8a1062 | ||
![]() |
0c5d564830 | ||
![]() |
c0404cf9d9 | ||
![]() |
f364661363 | ||
![]() |
f92d77b06d | ||
![]() |
2cf00e6aae | ||
![]() |
dfdb0cff2b | ||
![]() |
d0dad84ffa | ||
![]() |
1745937f1a | ||
![]() |
e7eb674a89 | ||
![]() |
b232633100 | ||
![]() |
6abd19c149 | ||
![]() |
0aa0ff8db7 | ||
![]() |
a907429fd4 | ||
![]() |
598b550a67 | ||
![]() |
92bb442494 | ||
![]() |
2d41f6223e | ||
![]() |
791dd5fb9f | ||
![]() |
9a0ccf4c98 | ||
![]() |
ad2abc5771 | ||
![]() |
2d99b3943f | ||
![]() |
a358132f95 | ||
![]() |
09cd37feee | ||
![]() |
0f3610e81d | ||
![]() |
3f97c438e2 | ||
![]() |
42351201d2 | ||
![]() |
907bbb8e9d | ||
![]() |
63f3d8b621 | ||
![]() |
47d6e841fd | ||
![]() |
e3bb09fabe | ||
![]() |
d4e0c01189 | ||
![]() |
50370d42b0 | ||
![]() |
aa190a80b7 | ||
![]() |
e48bae77aa | ||
![]() |
96cf0f99ed | ||
![]() |
f380968049 | ||
![]() |
02468f4625 | ||
![]() |
24611f94cf | ||
![]() |
dc75a9a4b7 | ||
![]() |
33f459a23a | ||
![]() |
bdcc251002 | ||
![]() |
86052ba7b4 | ||
![]() |
62ebcf55c9 | ||
![]() |
80ac2475a0 | ||
![]() |
5179d922f5 | ||
![]() |
26f085a8ed | ||
![]() |
b7d302cc72 | ||
![]() |
f2941e3631 | ||
![]() |
26a6401af4 | ||
![]() |
5c8ce338a1 | ||
![]() |
5addc7bbaf | ||
![]() |
da095170bf | ||
![]() |
1aab0a69bd | ||
![]() |
fc8e04b62f | ||
![]() |
c6c53b4e10 | ||
![]() |
9b0219a2d8 | ||
![]() |
6e212fa476 | ||
![]() |
58f9237b12 | ||
![]() |
74fd925219 | ||
![]() |
2696bb97d2 | ||
![]() |
9cefb27704 | ||
![]() |
5e75357b06 | ||
![]() |
79bebb4bc9 | ||
![]() |
0ed88f212b | ||
![]() |
a8c1cab5fe | ||
![]() |
e1a6b1a70f | ||
![]() |
c95ed16786 | ||
![]() |
ec784803b4 | ||
![]() |
302d7a22d3 | ||
![]() |
eccd5a460b | ||
![]() |
80437229a1 | ||
![]() |
237ffba641 | ||
![]() |
2695c5e49f | ||
![]() |
b7a608fdfd | ||
![]() |
c3413bad78 | ||
![]() |
dceb244e5b | ||
![]() |
cb31a0b162 | ||
![]() |
7ced657d79 | ||
![]() |
8dd9168077 | ||
![]() |
7c6591aefe | ||
![]() |
58c91e3fd4 | ||
![]() |
db4cf7ae62 | ||
![]() |
a17f5e4f1b | ||
![]() |
6cf7f2b0a7 | ||
![]() |
7e21ea9a48 | ||
![]() |
3f29198bae | ||
![]() |
d4293650ff | ||
![]() |
d65dd16881 | ||
![]() |
f36e163581 | ||
![]() |
f215adcfa2 | ||
![]() |
1549af6f56 | ||
![]() |
c553f82580 | ||
![]() |
196b4ebc9f | ||
![]() |
8710ce1687 | ||
![]() |
f65e8d7369 | ||
![]() |
dc5d9f02c7 | ||
![]() |
2f3f8d7826 | ||
![]() |
297da070fc | ||
![]() |
10ea92dcea | ||
![]() |
2e5f01f232 | ||
![]() |
1a080c4261 | ||
![]() |
0e08963355 | ||
![]() |
cd9e39bf54 | ||
![]() |
580e840165 | ||
![]() |
09a8fd5254 | ||
![]() |
8898faa141 | ||
![]() |
fdbb1dad79 | ||
![]() |
c39244168b | ||
![]() |
9591fd88c5 | ||
![]() |
3558ce958e | ||
![]() |
804a9b7be8 | ||
![]() |
3cae550b13 | ||
![]() |
138bad5913 | ||
![]() |
09011815af | ||
![]() |
7b0c845c3a | ||
![]() |
6a47123ec9 | ||
![]() |
19fab6bbf8 | ||
![]() |
90e6b63e59 | ||
![]() |
bd78217cf3 | ||
![]() |
b0833985e6 | ||
![]() |
a6f73b035f | ||
![]() |
251440ec64 | ||
![]() |
22a1df6fa0 | ||
![]() |
6389751c22 | ||
![]() |
8498691763 | ||
![]() |
1750ff0324 | ||
![]() |
2ce4c46afd | ||
![]() |
a20f5e44d1 | ||
![]() |
cd746d72d4 | ||
![]() |
f7eaff0828 | ||
![]() |
849f119a47 | ||
![]() |
52b68381f6 | ||
![]() |
46d495e1e2 | ||
![]() |
acc6c22355 | ||
![]() |
8143182971 | ||
![]() |
04a22cd482 | ||
![]() |
4376224084 | ||
![]() |
a9fe88c343 | ||
![]() |
6eb95e1c66 | ||
![]() |
a46287c4a6 | ||
![]() |
bc86ee1c31 | ||
![]() |
a73e6f0bf8 | ||
![]() |
10a6c5144d | ||
![]() |
4e5f43aeae | ||
![]() |
ff56db0c8b | ||
![]() |
95a9b97649 | ||
![]() |
a5b5208823 | ||
![]() |
783295fabd | ||
![]() |
1c942ec97c | ||
![]() |
3b6d2655ab | ||
![]() |
8a18d0daab | ||
![]() |
e9f7ccbd25 | ||
![]() |
68d9f35c0b | ||
![]() |
28d78134c1 | ||
![]() |
fd92ac852d | ||
![]() |
8399f5288e | ||
![]() |
f99b7cb7eb | ||
![]() |
bb5166077f | ||
![]() |
b72e4b66ca | ||
![]() |
ed85cd25d6 | ||
![]() |
3f90697e18 | ||
![]() |
73271a3e55 | ||
![]() |
6f9ea712de | ||
![]() |
6ee244e7cb | ||
![]() |
d66a4af79b | ||
![]() |
ea7b1caa4e | ||
![]() |
9cd880fb35 | ||
![]() |
658c152707 | ||
![]() |
6f1ba77608 | ||
![]() |
2344d696ca | ||
![]() |
bd816310cb | ||
![]() |
2bcf759a9f | ||
![]() |
82a04f7032 | ||
![]() |
4281babee4 | ||
![]() |
d89f2965cf | ||
![]() |
e2a2a9903a | ||
![]() |
4401cdc16a | ||
![]() |
e8d3fb2920 | ||
![]() |
f7ccc137ea | ||
![]() |
07bbb4ea02 | ||
![]() |
b189e70c9b | ||
![]() |
de4c9c1463 | ||
![]() |
8bdb73ced4 | ||
![]() |
dee9050939 | ||
![]() |
ae3c214708 | ||
![]() |
d6e81867bf | ||
![]() |
d30a5ee0a5 | ||
![]() |
88bb80be0f | ||
![]() |
bba1ba1678 | ||
![]() |
b50daf20d0 | ||
![]() |
5c6c7cdff5 | ||
![]() |
3f9b2a0c28 | ||
![]() |
453e119808 | ||
![]() |
a021f910c8 | ||
![]() |
e6c2afc4db | ||
![]() |
e6c7b28057 | ||
![]() |
b1840e8be7 | ||
![]() |
15e4b1ad8b | ||
![]() |
2517afcee0 | ||
![]() |
15c7ba3078 | ||
![]() |
f2cb24781a | ||
![]() |
e1d346b8c3 | ||
![]() |
97bdf4811c | ||
![]() |
45c871d779 | ||
![]() |
976fa9c907 | ||
![]() |
771c60ca37 | ||
![]() |
e15eeccd35 | ||
![]() |
ce535b55bc | ||
![]() |
33cb62c2ee | ||
![]() |
32fe3cf61d | ||
![]() |
73a05498ce | ||
![]() |
034147f604 | ||
![]() |
b629e520a9 | ||
![]() |
30280cc6a4 | ||
![]() |
f7f0b72776 | ||
![]() |
251289fc05 | ||
![]() |
6437093a67 | ||
![]() |
be5a878da5 | ||
![]() |
8dc73a852d | ||
![]() |
e37d82951e | ||
![]() |
acc311830e | ||
![]() |
6b1046697a | ||
![]() |
c5befc5b2a | ||
![]() |
e743a5733b | ||
![]() |
5f98801c99 | ||
![]() |
9858a3db9d | ||
![]() |
65c1a525b9 | ||
![]() |
8bd055d4bd | ||
![]() |
5ee14db1f9 | ||
![]() |
58069d015b | ||
![]() |
f2684b59ec | ||
![]() |
e0c0d03c5f | ||
![]() |
1ac47d2bb0 | ||
![]() |
bc75c71ca3 | ||
![]() |
c49fc14528 | ||
![]() |
078bd8c627 | ||
![]() |
33ba9fb5cf | ||
![]() |
4e7e586cb9 | ||
![]() |
62fa795052 | ||
![]() |
b6d9f89518 | ||
![]() |
afbf867169 | ||
![]() |
dace6ac156 | ||
![]() |
cbf2b8cb78 | ||
![]() |
96c5de63d8 | ||
![]() |
b8b57843a6 | ||
![]() |
e3fd4ad77d | ||
![]() |
c08148266a | ||
![]() |
a6a2d04c46 | ||
![]() |
8f7061fb9b | ||
![]() |
7b5235138f | ||
![]() |
7e3fa8c38d | ||
![]() |
151acd5bec | ||
![]() |
23ca2039f6 | ||
![]() |
b291103592 | ||
![]() |
e962c9993b | ||
![]() |
955b769d3f | ||
![]() |
9b914e8f01 | ||
![]() |
307ad636dc | ||
![]() |
2952f62726 | ||
![]() |
6d6e48f434 | ||
![]() |
a189196855 | ||
![]() |
d30e62a205 | ||
![]() |
e56d416210 | ||
![]() |
c0f37c48a1 | ||
![]() |
a3ed387455 | ||
![]() |
beedc94179 | ||
![]() |
5229604782 | ||
![]() |
cf665517dd | ||
![]() |
4663edd8a7 | ||
![]() |
312e7974d9 | ||
![]() |
ca8aa53b32 | ||
![]() |
7122ca1c24 | ||
![]() |
97cdb1a5d8 | ||
![]() |
31d3f7a20b | ||
![]() |
6f8a34127b | ||
![]() |
ee1a86d192 | ||
![]() |
707b300bd6 | ||
![]() |
c9e12182a2 | ||
![]() |
9b7186e9b8 | ||
![]() |
4eb07f9d48 | ||
![]() |
4f78cbbd1b | ||
![]() |
d962e8bcbc | ||
![]() |
ba695a0230 | ||
![]() |
dfed2437a8 | ||
![]() |
ecfcb4ec64 | ||
![]() |
b9335311de | ||
![]() |
354468db0a | ||
![]() |
340a736722 | ||
![]() |
7bf93cb7e6 | ||
![]() |
4fa9535fd4 | ||
![]() |
1abd3217aa | ||
![]() |
d0360d5c98 | ||
![]() |
74365ad05e | ||
![]() |
9dc24c0995 | ||
![]() |
fd40e27be4 | ||
![]() |
05b2bf4c96 | ||
![]() |
a0fcbcbc7d | ||
![]() |
3117ea9d34 | ||
![]() |
8973dea33e | ||
![]() |
3e7d0dbd23 | ||
![]() |
b26b1bc038 | ||
![]() |
74b1102dea | ||
![]() |
a89226279f | ||
![]() |
8b490c8ef0 | ||
![]() |
77a98e7875 | ||
![]() |
c02592d5ba | ||
![]() |
52d7dacbaa | ||
![]() |
9a8457deff | ||
![]() |
5039b3ac6f | ||
![]() |
00705223b6 | ||
![]() |
9f6ab4c419 | ||
![]() |
9012c7310d | ||
![]() |
a3edebcad9 | ||
![]() |
f2abb6a73f | ||
![]() |
e96e5b740a | ||
![]() |
ee067ad97a | ||
![]() |
d01b3a88b6 | ||
![]() |
5a22c978cf | ||
![]() |
f8a0e7d1be | ||
![]() |
25a65564b1 | ||
![]() |
c858023c88 | ||
![]() |
c3e470db26 | ||
![]() |
5908c4da7a | ||
![]() |
b08dbbd106 | ||
![]() |
3b320c75e9 | ||
![]() |
1aa6dc6686 | ||
![]() |
fdc4385e62 | ||
![]() |
5094448762 | ||
![]() |
98c7fa919f | ||
![]() |
5b9f51417f | ||
![]() |
7a91f89474 | ||
![]() |
bf7afa16e5 | ||
![]() |
0d57baae82 | ||
![]() |
446d197cf7 | ||
![]() |
2582f0bbe6 | ||
![]() |
1ee993c664 | ||
![]() |
542c20065f | ||
![]() |
39f663d03c | ||
![]() |
6474a55302 | ||
![]() |
8566d4c5ab | ||
![]() |
e374e93cfb | ||
![]() |
7bd4f6490c | ||
![]() |
25373f510d | ||
![]() |
82cab39e1c | ||
![]() |
22507cc1cd | ||
![]() |
2bded65c7e | ||
![]() |
a3a0c60804 | ||
![]() |
704b172887 | ||
![]() |
135717f8cb | ||
![]() |
1d87ba8534 | ||
![]() |
97cd27775b | ||
![]() |
fe2e9c282e | ||
![]() |
fab125975b | ||
![]() |
cefd7e3b1b | ||
![]() |
344a3e7b24 | ||
![]() |
a0ee237ada | ||
![]() |
e81eb9a5f8 | ||
![]() |
98d3b538af | ||
![]() |
3614a0e368 | ||
![]() |
0421497b1e | ||
![]() |
8b3c2fa12f | ||
![]() |
a58bea6d93 | ||
![]() |
c7c41cd761 | ||
![]() |
b282ec73c7 | ||
![]() |
dad26be2c6 | ||
![]() |
58d602e549 | ||
![]() |
5e14904205 | ||
![]() |
97293ab7ce | ||
![]() |
b6f634368c | ||
![]() |
7b4de150cc | ||
![]() |
7a268c94b0 | ||
![]() |
7a1fa78632 | ||
![]() |
19f02da64d | ||
![]() |
5bf1aac9cb | ||
![]() |
0ae034083c | ||
![]() |
5010af941b | ||
![]() |
015df7e060 | ||
![]() |
e025d58f6e | ||
![]() |
b151d333d3 | ||
![]() |
304c005a85 | ||
![]() |
e2591e8e36 | ||
![]() |
f3c22cb6d0 | ||
![]() |
b2527984bc | ||
![]() |
b8d2271191 | ||
![]() |
b8978b0235 | ||
![]() |
63ef6419cd | ||
![]() |
25dc429455 | ||
![]() |
7550e63fd0 | ||
![]() |
0561968fac | ||
![]() |
7811bf518b | ||
![]() |
bc7116ad94 | ||
![]() |
70eec33d06 | ||
![]() |
773973825f | ||
![]() |
a184d372f4 | ||
![]() |
ca1606a021 | ||
![]() |
5c6d7eb309 | ||
![]() |
4de6b39788 | ||
![]() |
f0494cc7d6 | ||
![]() |
9d98d1ee63 | ||
![]() |
f1238e17b1 | ||
![]() |
4201c8a6f3 | ||
![]() |
53396ed454 | ||
![]() |
8695823165 | ||
![]() |
ec8d008678 | ||
![]() |
a949ad14f8 | ||
![]() |
48e7bd4f10 | ||
![]() |
4b11f8f26b | ||
![]() |
b056444863 | ||
![]() |
872f021ddc | ||
![]() |
079b0c1b91 | ||
![]() |
2664b50a18 | ||
![]() |
6970df4dda | ||
![]() |
22c3064ec4 | ||
![]() |
d6ab65a2e7 | ||
![]() |
aa23b01a57 | ||
![]() |
d82de98001 | ||
![]() |
7df8597484 | ||
![]() |
1b99b1275c | ||
![]() |
d16461052b | ||
![]() |
9640364713 | ||
![]() |
18e0600727 | ||
![]() |
17fffda74e | ||
![]() |
3ac4f48f82 | ||
![]() |
6f8ae98ed0 | ||
![]() |
47b2ce6180 | ||
![]() |
d18d84e187 | ||
![]() |
c1dcdf49e5 | ||
![]() |
079005eab1 | ||
![]() |
dc8cea3a3e | ||
![]() |
efca88cf8b | ||
![]() |
c05a6b96b7 | ||
![]() |
a831ff3b61 | ||
![]() |
b814a09fe6 | ||
![]() |
fb48c8626a | ||
![]() |
fbdeb4c386 | ||
![]() |
4cf9ecc819 | ||
![]() |
e9573b6e24 | ||
![]() |
d5f0137052 | ||
![]() |
d9f5adb1fb | ||
![]() |
0c6aa064ac | ||
![]() |
646c853cf4 | ||
![]() |
fb3bc95623 | ||
![]() |
c8b4cab022 | ||
![]() |
06fb94b4ea | ||
![]() |
9f6cef4fb4 | ||
![]() |
0315dd5612 | ||
![]() |
e4e5bebc1a | ||
![]() |
c688e9ebad | ||
![]() |
6d6041a3c1 | ||
![]() |
dde7b5ea68 | ||
![]() |
9bf533b340 | ||
![]() |
f1a105abec | ||
![]() |
e6587b5dc8 | ||
![]() |
b2ad045a2d | ||
![]() |
89734d8c5f | ||
![]() |
53736099ba | ||
![]() |
2fcfa136c1 | ||
![]() |
9f85209a1b | ||
![]() |
cea1b2fd4d | ||
![]() |
312252b670 | ||
![]() |
4d6b30c17b | ||
![]() |
0beb9c2670 | ||
![]() |
a0289af59f | ||
![]() |
40363834c8 | ||
![]() |
0c9e5fd10b | ||
![]() |
3d90e5cdf6 | ||
![]() |
8e3f1f0955 | ||
![]() |
7c64415096 | ||
![]() |
e3fd1dba0e | ||
![]() |
9866a0fadc | ||
![]() |
f87f24d9e5 | ||
![]() |
4729ae4769 | ||
![]() |
691c4c158f | ||
![]() |
3c597339ba | ||
![]() |
e5fe174e03 | ||
![]() |
1c25a9d026 | ||
![]() |
2db378e9c1 | ||
![]() |
a4067ee681 | ||
![]() |
edb0831028 | ||
![]() |
dac3b0a6f5 | ||
![]() |
9a180cc8ad | ||
![]() |
e81764610e | ||
![]() |
e4e2b627fe | ||
![]() |
ec55f56725 | ||
![]() |
1e4f871bcc | ||
![]() |
69f72919bd | ||
![]() |
dc0336fa45 | ||
![]() |
8c341d262e | ||
![]() |
2b15464e12 | ||
![]() |
a686235ffb | ||
![]() |
29171a4d05 | ||
![]() |
e9123f55e0 | ||
![]() |
ee004486bd | ||
![]() |
498e234c37 | ||
![]() |
b29f19e206 | ||
![]() |
1e00343262 | ||
![]() |
3cd526c019 | ||
![]() |
ea99c58da5 | ||
![]() |
c64f23a64a | ||
![]() |
2099cd37fa | ||
![]() |
2559632079 | ||
![]() |
352df39454 | ||
![]() |
ce3a940b11 | ||
![]() |
6594e88390 | ||
![]() |
339758ec42 | ||
![]() |
0b4c7defd4 | ||
![]() |
6d71e9065b | ||
![]() |
631ab4d4eb | ||
![]() |
589ff47ae6 | ||
![]() |
877034d012 | ||
![]() |
3d440bf8f5 | ||
![]() |
138b2be010 | ||
![]() |
b729944480 | ||
![]() |
870afd9fac | ||
![]() |
e808814725 | ||
![]() |
122cf2250d | ||
![]() |
fa1d962507 | ||
![]() |
6504692c5c | ||
![]() |
bd36962643 | ||
![]() |
f5ccfc3f8a | ||
![]() |
c1a7e0513b | ||
![]() |
af71e79371 | ||
![]() |
bf911cf3a5 | ||
![]() |
6059a1c444 | ||
![]() |
c4966a4bf2 | ||
![]() |
cb9f356a69 | ||
![]() |
9d02f6a408 | ||
![]() |
ee76772e1b | ||
![]() |
f0a030a86d | ||
![]() |
1a31e56f33 | ||
![]() |
04e9e0e687 | ||
![]() |
cec917c2a2 | ||
![]() |
08989a8797 | ||
![]() |
b734c331e4 | ||
![]() |
fe477a6809 | ||
![]() |
6391a4a7f7 | ||
![]() |
e68220d4b3 | ||
![]() |
b873149f9b | ||
![]() |
86aebbcaea | ||
![]() |
fd260cf32f | ||
![]() |
69101a5b14 | ||
![]() |
151d6cbc48 | ||
![]() |
04675e5fcb | ||
![]() |
b38c6fe06a | ||
![]() |
089a12bdc9 | ||
![]() |
d9a0a2003f | ||
![]() |
ad704d9925 | ||
![]() |
0cca79eeee | ||
![]() |
457bea7c34 | ||
![]() |
2479679eeb | ||
![]() |
937405d2d8 | ||
![]() |
d1bed1b9cc | ||
![]() |
acc60bce57 | ||
![]() |
43807ff06b | ||
![]() |
b8a63bcc0c | ||
![]() |
66c1815a78 | ||
![]() |
4e5cfa2077 | ||
![]() |
ebaf5d31b7 | ||
![]() |
760a640c6a | ||
![]() |
4fc06e9504 | ||
![]() |
c283ccb122 | ||
![]() |
80df842b2b | ||
![]() |
f1a8a72a9f | ||
![]() |
0296e16232 | ||
![]() |
f6f7081483 | ||
![]() |
7f7cd0a314 | ||
![]() |
5ffb5763a5 | ||
![]() |
4382037110 | ||
![]() |
963cd88440 | ||
![]() |
885f99ac08 | ||
![]() |
7c3919980a | ||
![]() |
d8860d6f24 | ||
![]() |
6b992e37e3 | ||
![]() |
a3424355fa | ||
![]() |
569a91296d | ||
![]() |
9e2663491e |
21
.circleci/config.yml
Normal file
21
.circleci/config.yml
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
# Python CircleCI 2.0 configuration file
|
||||||
|
# Updating CircleCI configuration from v1 to v2
|
||||||
|
# Check https://circleci.com/docs/2.0/language-python/ for more details
|
||||||
|
#
|
||||||
|
version: 2
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
machine: true
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
- run:
|
||||||
|
name: build images
|
||||||
|
command: |
|
||||||
|
docker build -t jupyterhub/jupyterhub .
|
||||||
|
docker build -t jupyterhub/jupyterhub-onbuild onbuild
|
||||||
|
docker build -t jupyterhub/jupyterhub:alpine -f dockerfiles/Dockerfile.alpine .
|
||||||
|
docker build -t jupyterhub/singleuser singleuser
|
||||||
|
- run:
|
||||||
|
name: smoke test jupyterhub
|
||||||
|
command: |
|
||||||
|
docker run --rm -it jupyterhub/jupyterhub jupyterhub --help
|
37
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
37
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
---
|
||||||
|
name: Bug report
|
||||||
|
about: Create a report to help us improve
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
Hi! Thanks for using JupyterHub.
|
||||||
|
|
||||||
|
If you are reporting an issue with JupyterHub, please use the [GitHub issue](https://github.com/jupyterhub/jupyterhub/issues) search feature to check if your issue has been asked already. If it has, please add your comments to the existing issue.
|
||||||
|
|
||||||
|
**Describe the bug**
|
||||||
|
A clear and concise description of what the bug is.
|
||||||
|
|
||||||
|
**To Reproduce**
|
||||||
|
Steps to reproduce the behavior:
|
||||||
|
1. Go to '...'
|
||||||
|
2. Click on '....'
|
||||||
|
3. Scroll down to '....'
|
||||||
|
4. See error
|
||||||
|
|
||||||
|
**Expected behavior**
|
||||||
|
A clear and concise description of what you expected to happen.
|
||||||
|
|
||||||
|
**Screenshots**
|
||||||
|
If applicable, add screenshots to help explain your problem.
|
||||||
|
|
||||||
|
**Desktop (please complete the following information):**
|
||||||
|
- OS: [e.g. iOS]
|
||||||
|
- Browser [e.g. chrome, safari]
|
||||||
|
- Version [e.g. 22]
|
||||||
|
|
||||||
|
**Additional context**
|
||||||
|
Add any other context about the problem here.
|
||||||
|
|
||||||
|
- Running `jupyter troubleshoot` from the command line, if possible, and posting
|
||||||
|
its output would also be helpful.
|
||||||
|
- Running in `--debug` mode can also be helpful for troubleshooting.
|
7
.github/ISSUE_TEMPLATE/installation-and-configuration-issues.md
vendored
Normal file
7
.github/ISSUE_TEMPLATE/installation-and-configuration-issues.md
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
---
|
||||||
|
name: Installation and configuration issues
|
||||||
|
about: Installation and configuration assistance
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
If you are having issues with installation or configuration, you may ask for help on the JupyterHub gitter channel or file an issue here.
|
0
.github/PULL_REQUEST_TEMPLATE/.keep
vendored
Normal file
0
.github/PULL_REQUEST_TEMPLATE/.keep
vendored
Normal file
29
.github/issue_template.md
vendored
29
.github/issue_template.md
vendored
@@ -1,29 +0,0 @@
|
|||||||
Hi! Thanks for using JupyterHub.
|
|
||||||
|
|
||||||
If you are reporting an issue with JupyterHub:
|
|
||||||
|
|
||||||
- Please use the [GitHub issue](https://github.com/jupyterhub/jupyterhub/issues)
|
|
||||||
search feature to check if your issue has been asked already. If it has,
|
|
||||||
please add your comments to the existing issue.
|
|
||||||
|
|
||||||
- Where applicable, please fill out the details below to help us troubleshoot
|
|
||||||
the issue that you are facing. Please be as thorough as you are able to
|
|
||||||
provide details on the issue.
|
|
||||||
|
|
||||||
**How to reproduce the issue**
|
|
||||||
|
|
||||||
**What you expected to happen**
|
|
||||||
|
|
||||||
**What actually happens**
|
|
||||||
|
|
||||||
**Share what version of JupyterHub you are using**
|
|
||||||
|
|
||||||
Running `jupyter troubleshoot` from the command line, if possible, and posting
|
|
||||||
its output would also be helpful.
|
|
||||||
|
|
||||||
```
|
|
||||||
|
|
||||||
Insert jupyter troubleshoot output here
|
|
||||||
|
|
||||||
|
|
||||||
```
|
|
10
.gitignore
vendored
10
.gitignore
vendored
@@ -14,11 +14,13 @@ docs/source/_static/rest-api
|
|||||||
/jupyterhub_config.py
|
/jupyterhub_config.py
|
||||||
jupyterhub_cookie_secret
|
jupyterhub_cookie_secret
|
||||||
jupyterhub.sqlite
|
jupyterhub.sqlite
|
||||||
share/jupyter/hub/static/components
|
package-lock.json
|
||||||
share/jupyter/hub/static/css/style.min.css
|
share/jupyterhub/static/components
|
||||||
share/jupyter/hub/static/css/style.min.css.map
|
share/jupyterhub/static/css/style.min.css
|
||||||
|
share/jupyterhub/static/css/style.min.css.map
|
||||||
*.egg-info
|
*.egg-info
|
||||||
MANIFEST
|
MANIFEST
|
||||||
.coverage
|
.coverage
|
||||||
htmlcov
|
htmlcov
|
||||||
|
.idea/
|
||||||
|
.pytest_cache
|
||||||
|
37
.travis.yml
37
.travis.yml
@@ -3,13 +3,14 @@ sudo: false
|
|||||||
cache:
|
cache:
|
||||||
- pip
|
- pip
|
||||||
python:
|
python:
|
||||||
- nightly
|
|
||||||
- 3.6
|
- 3.6
|
||||||
- 3.5
|
- 3.5
|
||||||
- 3.4
|
- nightly
|
||||||
env:
|
env:
|
||||||
global:
|
global:
|
||||||
- ASYNC_TEST_TIMEOUT=15
|
- ASYNC_TEST_TIMEOUT=15
|
||||||
|
- MYSQL_HOST=127.0.0.1
|
||||||
|
- MYSQL_TCP_PORT=13306
|
||||||
services:
|
services:
|
||||||
- postgres
|
- postgres
|
||||||
- docker
|
- docker
|
||||||
@@ -20,6 +21,7 @@ before_install:
|
|||||||
- npm install
|
- npm install
|
||||||
- npm install -g configurable-http-proxy
|
- npm install -g configurable-http-proxy
|
||||||
- |
|
- |
|
||||||
|
# setup database
|
||||||
if [[ $JUPYTERHUB_TEST_DB_URL == mysql* ]]; then
|
if [[ $JUPYTERHUB_TEST_DB_URL == mysql* ]]; then
|
||||||
unset MYSQL_UNIX_PORT
|
unset MYSQL_UNIX_PORT
|
||||||
DB=mysql bash ci/docker-db.sh
|
DB=mysql bash ci/docker-db.sh
|
||||||
@@ -27,30 +29,25 @@ before_install:
|
|||||||
pip install 'mysql-connector<2.2'
|
pip install 'mysql-connector<2.2'
|
||||||
elif [[ $JUPYTERHUB_TEST_DB_URL == postgresql* ]]; then
|
elif [[ $JUPYTERHUB_TEST_DB_URL == postgresql* ]]; then
|
||||||
DB=postgres bash ci/init-db.sh
|
DB=postgres bash ci/init-db.sh
|
||||||
pip install psycopg2
|
pip install psycopg2-binary
|
||||||
fi
|
fi
|
||||||
install:
|
install:
|
||||||
- pip install -U pip
|
- pip install --upgrade pip
|
||||||
- pip install --pre -r dev-requirements.txt .
|
- pip install --pre -r dev-requirements.txt .
|
||||||
- pip freeze
|
- pip freeze
|
||||||
|
|
||||||
# running tests
|
# running tests
|
||||||
script:
|
script:
|
||||||
- |
|
- |
|
||||||
if [[ ! -z "$JUPYTERHUB_TEST_DB_URL" ]]; then
|
# run tests
|
||||||
# if testing upgrade-db, run `jupyterhub token` with 0.7
|
set -e
|
||||||
# to initialize an old db. Used in upgrade-tests
|
pytest -v --maxfail=2 --cov=jupyterhub jupyterhub/tests
|
||||||
export JUPYTERHUB_TEST_UPGRADE_DB_URL=${JUPYTERHUB_TEST_DB_URL}_upgrade
|
- |
|
||||||
# use virtualenv instead of venv because venv doesn't work here
|
# build docs
|
||||||
python -m pip install virtualenv
|
pushd docs
|
||||||
python -m virtualenv old-hub-env
|
pip install -r requirements.txt
|
||||||
./old-hub-env/bin/python -m pip install jupyterhub==0.7.2 psycopg2 'mysql-connector<2.2'
|
make html
|
||||||
./old-hub-env/bin/jupyterhub token kaylee \
|
popd
|
||||||
--JupyterHub.db_url=$JUPYTERHUB_TEST_UPGRADE_DB_URL \
|
|
||||||
--Authenticator.whitelist="{'kaylee'}" \
|
|
||||||
--JupyterHub.authenticator_class=jupyterhub.auth.Authenticator
|
|
||||||
fi
|
|
||||||
- pytest -v --maxfail=2 --cov=jupyterhub jupyterhub/tests
|
|
||||||
after_success:
|
after_success:
|
||||||
- codecov
|
- codecov
|
||||||
|
|
||||||
@@ -61,11 +58,11 @@ matrix:
|
|||||||
env: JUPYTERHUB_TEST_SUBDOMAIN_HOST=http://localhost.jovyan.org:8000
|
env: JUPYTERHUB_TEST_SUBDOMAIN_HOST=http://localhost.jovyan.org:8000
|
||||||
- python: 3.6
|
- python: 3.6
|
||||||
env:
|
env:
|
||||||
- MYSQL_HOST=127.0.0.1
|
|
||||||
- MYSQL_TCP_PORT=13306
|
|
||||||
- JUPYTERHUB_TEST_DB_URL=mysql+mysqlconnector://root@127.0.0.1:$MYSQL_TCP_PORT/jupyterhub
|
- JUPYTERHUB_TEST_DB_URL=mysql+mysqlconnector://root@127.0.0.1:$MYSQL_TCP_PORT/jupyterhub
|
||||||
- python: 3.6
|
- python: 3.6
|
||||||
env:
|
env:
|
||||||
- JUPYTERHUB_TEST_DB_URL=postgresql://postgres@127.0.0.1/jupyterhub
|
- JUPYTERHUB_TEST_DB_URL=postgresql://postgres@127.0.0.1/jupyterhub
|
||||||
|
- python: 3.7
|
||||||
|
dist: xenial
|
||||||
allow_failures:
|
allow_failures:
|
||||||
- python: nightly
|
- python: nightly
|
||||||
|
1
CODE_OF_CONDUCT.md
Normal file
1
CODE_OF_CONDUCT.md
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Please refer to [Project Jupyter's Code of Conduct](https://github.com/jupyter/governance/blob/master/conduct/code_of_conduct.md).
|
@@ -1,3 +1,98 @@
|
|||||||
# Contributing
|
# Contributing
|
||||||
|
|
||||||
Welcome! As a [Jupyter](https://jupyter.org) project, we follow the [Jupyter contributor guide](https://jupyter.readthedocs.io/en/latest/contributor/content-contributor.html).
|
Welcome! As a [Jupyter](https://jupyter.org) project, we follow the [Jupyter contributor guide](https://jupyter.readthedocs.io/en/latest/contributor/content-contributor.html).
|
||||||
|
|
||||||
|
|
||||||
|
## Set up your development system
|
||||||
|
|
||||||
|
For a development install, clone the [repository](https://github.com/jupyterhub/jupyterhub)
|
||||||
|
and then install from source:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git clone https://github.com/jupyterhub/jupyterhub
|
||||||
|
cd jupyterhub
|
||||||
|
npm install -g configurable-http-proxy
|
||||||
|
pip3 install -r dev-requirements.txt -e .
|
||||||
|
```
|
||||||
|
|
||||||
|
### Troubleshooting a development install
|
||||||
|
|
||||||
|
If the `pip3 install` command fails and complains about `lessc` being
|
||||||
|
unavailable, you may need to explicitly install some additional JavaScript
|
||||||
|
dependencies:
|
||||||
|
|
||||||
|
npm install
|
||||||
|
|
||||||
|
This will fetch client-side JavaScript dependencies necessary to compile CSS.
|
||||||
|
|
||||||
|
You may also need to manually update JavaScript and CSS after some development
|
||||||
|
updates, with:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
python3 setup.py js # fetch updated client-side js
|
||||||
|
python3 setup.py css # recompile CSS from LESS sources
|
||||||
|
```
|
||||||
|
|
||||||
|
## Running the test suite
|
||||||
|
|
||||||
|
We use [pytest](http://doc.pytest.org/en/latest/) for running tests.
|
||||||
|
|
||||||
|
1. Set up a development install as described above.
|
||||||
|
|
||||||
|
2. Set environment variable for `ASYNC_TEST_TIMEOUT` to 15 seconds:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export ASYNC_TEST_TIMEOUT=15
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Run tests.
|
||||||
|
|
||||||
|
To run all the tests:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pytest -v jupyterhub/tests
|
||||||
|
```
|
||||||
|
|
||||||
|
To run an individual test file (i.e. `test_api.py`):
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pytest -v jupyterhub/tests/test_api.py
|
||||||
|
```
|
||||||
|
|
||||||
|
### Troubleshooting tests
|
||||||
|
|
||||||
|
If you see test failures because of timeouts, you may wish to increase the
|
||||||
|
`ASYNC_TEST_TIMEOUT` used by the
|
||||||
|
[pytest-tornado-plugin](https://github.com/eugeniy/pytest-tornado/blob/c79f68de2222eb7cf84edcfe28650ebf309a4d0c/README.rst#markers)
|
||||||
|
from the default of 5 seconds:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export ASYNC_TEST_TIMEOUT=15
|
||||||
|
```
|
||||||
|
|
||||||
|
If you see many test errors and failures, double check that you have installed
|
||||||
|
`configurable-http-proxy`.
|
||||||
|
|
||||||
|
## Building the Docs locally
|
||||||
|
|
||||||
|
1. Install the development system as described above.
|
||||||
|
|
||||||
|
2. Install the dependencies for documentation:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
python3 -m pip install -r docs/requirements.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Build the docs:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd docs
|
||||||
|
make clean
|
||||||
|
make html
|
||||||
|
```
|
||||||
|
|
||||||
|
4. View the docs:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
open build/html/index.html
|
||||||
|
```
|
||||||
|
23
Dockerfile
23
Dockerfile
@@ -21,29 +21,25 @@
|
|||||||
# your jupyterhub_config.py will be added automatically
|
# your jupyterhub_config.py will be added automatically
|
||||||
# from your docker directory.
|
# from your docker directory.
|
||||||
|
|
||||||
FROM debian:jessie
|
FROM ubuntu:18.04
|
||||||
MAINTAINER Jupyter Project <jupyter@googlegroups.com>
|
LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"
|
||||||
|
|
||||||
# install nodejs, utf8 locale, set CDN because default httpredir is unreliable
|
# install nodejs, utf8 locale, set CDN because default httpredir is unreliable
|
||||||
ENV DEBIAN_FRONTEND noninteractive
|
ENV DEBIAN_FRONTEND noninteractive
|
||||||
RUN REPO=http://cdn-fastly.deb.debian.org && \
|
RUN apt-get -y update && \
|
||||||
echo "deb $REPO/debian jessie main\ndeb $REPO/debian-security jessie/updates main" > /etc/apt/sources.list && \
|
|
||||||
apt-get -y update && \
|
|
||||||
apt-get -y upgrade && \
|
apt-get -y upgrade && \
|
||||||
apt-get -y install wget locales git bzip2 &&\
|
apt-get -y install wget git bzip2 && \
|
||||||
/usr/sbin/update-locale LANG=C.UTF-8 && \
|
apt-get purge && \
|
||||||
locale-gen C.UTF-8 && \
|
|
||||||
apt-get remove -y locales && \
|
|
||||||
apt-get clean && \
|
apt-get clean && \
|
||||||
rm -rf /var/lib/apt/lists/*
|
rm -rf /var/lib/apt/lists/*
|
||||||
ENV LANG C.UTF-8
|
ENV LANG C.UTF-8
|
||||||
|
|
||||||
# install Python + NodeJS with conda
|
# install Python + NodeJS with conda
|
||||||
RUN wget -q https://repo.continuum.io/miniconda/Miniconda3-4.2.12-Linux-x86_64.sh -O /tmp/miniconda.sh && \
|
RUN wget -q https://repo.continuum.io/miniconda/Miniconda3-4.5.1-Linux-x86_64.sh -O /tmp/miniconda.sh && \
|
||||||
echo 'd0c7c71cc5659e54ab51f2005a8d96f3 */tmp/miniconda.sh' | md5sum -c - && \
|
echo '0c28787e3126238df24c5d4858bd0744 */tmp/miniconda.sh' | md5sum -c - && \
|
||||||
bash /tmp/miniconda.sh -f -b -p /opt/conda && \
|
bash /tmp/miniconda.sh -f -b -p /opt/conda && \
|
||||||
/opt/conda/bin/conda install --yes -c conda-forge \
|
/opt/conda/bin/conda install --yes -c conda-forge \
|
||||||
python=3.5 sqlalchemy tornado jinja2 traitlets requests pip pycurl \
|
python=3.6 sqlalchemy tornado jinja2 traitlets requests pip pycurl \
|
||||||
nodejs configurable-http-proxy && \
|
nodejs configurable-http-proxy && \
|
||||||
/opt/conda/bin/pip install --upgrade pip && \
|
/opt/conda/bin/pip install --upgrade pip && \
|
||||||
rm /tmp/miniconda.sh
|
rm /tmp/miniconda.sh
|
||||||
@@ -52,8 +48,7 @@ ENV PATH=/opt/conda/bin:$PATH
|
|||||||
ADD . /src/jupyterhub
|
ADD . /src/jupyterhub
|
||||||
WORKDIR /src/jupyterhub
|
WORKDIR /src/jupyterhub
|
||||||
|
|
||||||
RUN npm install --unsafe-perm && \
|
RUN pip install . && \
|
||||||
pip install . && \
|
|
||||||
rm -rf $PWD ~/.cache ~/.npm
|
rm -rf $PWD ~/.cache ~/.npm
|
||||||
|
|
||||||
RUN mkdir -p /srv/jupyterhub/
|
RUN mkdir -p /srv/jupyterhub/
|
||||||
|
19
MANIFEST.in
19
MANIFEST.in
@@ -3,6 +3,7 @@ include COPYING.md
|
|||||||
include setupegg.py
|
include setupegg.py
|
||||||
include bower-lite
|
include bower-lite
|
||||||
include package.json
|
include package.json
|
||||||
|
include package-lock.json
|
||||||
include *requirements.txt
|
include *requirements.txt
|
||||||
include Dockerfile
|
include Dockerfile
|
||||||
|
|
||||||
@@ -18,15 +19,15 @@ graft docs
|
|||||||
prune docs/node_modules
|
prune docs/node_modules
|
||||||
|
|
||||||
# prune some large unused files from components
|
# prune some large unused files from components
|
||||||
prune share/jupyter/hub/static/components/bootstrap/dist/css
|
prune share/jupyterhub/static/components/bootstrap/dist/css
|
||||||
exclude share/jupyter/hub/static/components/bootstrap/dist/fonts/*.svg
|
exclude share/jupyterhub/static/components/bootstrap/dist/fonts/*.svg
|
||||||
prune share/jupyter/hub/static/components/font-awesome/css
|
prune share/jupyterhub/static/components/font-awesome/css
|
||||||
prune share/jupyter/hub/static/components/font-awesome/scss
|
prune share/jupyterhub/static/components/font-awesome/scss
|
||||||
exclude share/jupyter/hub/static/components/font-awesome/fonts/*.svg
|
exclude share/jupyterhub/static/components/font-awesome/fonts/*.svg
|
||||||
prune share/jupyter/hub/static/components/jquery/external
|
prune share/jupyterhub/static/components/jquery/external
|
||||||
prune share/jupyter/hub/static/components/jquery/src
|
prune share/jupyterhub/static/components/jquery/src
|
||||||
prune share/jupyter/hub/static/components/moment/lang
|
prune share/jupyterhub/static/components/moment/lang
|
||||||
prune share/jupyter/hub/static/components/moment/min
|
prune share/jupyterhub/static/components/moment/min
|
||||||
|
|
||||||
# Patterns to exclude from any directory
|
# Patterns to exclude from any directory
|
||||||
global-exclude *~
|
global-exclude *~
|
||||||
|
1
PULL_REQUEST_TEMPLATE.md
Normal file
1
PULL_REQUEST_TEMPLATE.md
Normal file
@@ -0,0 +1 @@
|
|||||||
|
|
98
README.md
98
README.md
@@ -11,8 +11,8 @@
|
|||||||
|
|
||||||
|
|
||||||
[](https://pypi.python.org/pypi/jupyterhub)
|
[](https://pypi.python.org/pypi/jupyterhub)
|
||||||
[](http://jupyterhub.readthedocs.org/en/latest/?badge=latest)
|
[](https://jupyterhub.readthedocs.org/en/latest/?badge=latest)
|
||||||
[](http://jupyterhub.readthedocs.io/en/0.7.2/?badge=0.7.2)
|
[](https://jupyterhub.readthedocs.io/en/0.7.2/?badge=0.7.2)
|
||||||
[](https://travis-ci.org/jupyterhub/jupyterhub)
|
[](https://travis-ci.org/jupyterhub/jupyterhub)
|
||||||
[](https://circleci.com/gh/jupyterhub/jupyterhub)
|
[](https://circleci.com/gh/jupyterhub/jupyterhub)
|
||||||
[](https://codecov.io/github/jupyterhub/jupyterhub?branch=master)
|
[](https://codecov.io/github/jupyterhub/jupyterhub?branch=master)
|
||||||
@@ -20,7 +20,7 @@
|
|||||||
|
|
||||||
With [JupyterHub](https://jupyterhub.readthedocs.io) you can create a
|
With [JupyterHub](https://jupyterhub.readthedocs.io) you can create a
|
||||||
**multi-user Hub** which spawns, manages, and proxies multiple instances of the
|
**multi-user Hub** which spawns, manages, and proxies multiple instances of the
|
||||||
single-user [Jupyter notebook (IPython notebook)](https://jupyter-notebook.readthedocs.io)
|
single-user [Jupyter notebook](https://jupyter-notebook.readthedocs.io)
|
||||||
server.
|
server.
|
||||||
|
|
||||||
[Project Jupyter](https://jupyter.org) created JupyterHub to support many
|
[Project Jupyter](https://jupyter.org) created JupyterHub to support many
|
||||||
@@ -34,11 +34,11 @@ Three main actors make up JupyterHub:
|
|||||||
|
|
||||||
- multi-user **Hub** (tornado process)
|
- multi-user **Hub** (tornado process)
|
||||||
- configurable http **proxy** (node-http-proxy)
|
- configurable http **proxy** (node-http-proxy)
|
||||||
- multiple **single-user Jupyter notebook servers** (Python/IPython/tornado)
|
- multiple **single-user Jupyter notebook servers** (Python/Jupyter/tornado)
|
||||||
|
|
||||||
Basic principles for operation are:
|
Basic principles for operation are:
|
||||||
|
|
||||||
- Hub spawns a proxy.
|
- Hub launches a proxy.
|
||||||
- Proxy forwards all requests to Hub by default.
|
- Proxy forwards all requests to Hub by default.
|
||||||
- Hub handles login, and spawns single-user servers on demand.
|
- Hub handles login, and spawns single-user servers on demand.
|
||||||
- Hub configures proxy to forward url prefixes to the single-user notebook
|
- Hub configures proxy to forward url prefixes to the single-user notebook
|
||||||
@@ -50,37 +50,62 @@ for administration of the Hub and its users.
|
|||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
|
|
||||||
### Check prerequisites
|
### Check prerequisites
|
||||||
|
|
||||||
A Linux/Unix based system with the following:
|
- A Linux/Unix based system
|
||||||
|
- [Python](https://www.python.org/downloads/) 3.5 or greater
|
||||||
|
- [nodejs/npm](https://www.npmjs.com/)
|
||||||
|
|
||||||
- [Python](https://www.python.org/downloads/) 3.4 or greater
|
* If you are using **`conda`**, the nodejs and npm dependencies will be installed for
|
||||||
- [nodejs/npm](https://www.npmjs.com/) Install a recent version of
|
you by conda.
|
||||||
[nodejs/npm](https://docs.npmjs.com/getting-started/installing-node)
|
|
||||||
For example, install it on Linux (Debian/Ubuntu) using:
|
|
||||||
|
|
||||||
sudo apt-get install npm nodejs-legacy
|
* If you are using **`pip`**, install a recent version of
|
||||||
|
[nodejs/npm](https://docs.npmjs.com/getting-started/installing-node).
|
||||||
|
For example, install it on Linux (Debian/Ubuntu) using:
|
||||||
|
|
||||||
The `nodejs-legacy` package installs the `node` executable and is currently
|
```
|
||||||
required for npm to work on Debian/Ubuntu.
|
sudo apt-get install npm nodejs-legacy
|
||||||
|
```
|
||||||
|
|
||||||
|
The `nodejs-legacy` package installs the `node` executable and is currently
|
||||||
|
required for npm to work on Debian/Ubuntu.
|
||||||
|
|
||||||
- TLS certificate and key for HTTPS communication
|
- TLS certificate and key for HTTPS communication
|
||||||
- Domain name
|
- Domain name
|
||||||
|
|
||||||
### Install packages
|
### Install packages
|
||||||
|
|
||||||
|
#### Using `conda`
|
||||||
|
|
||||||
|
To install JupyterHub along with its dependencies including nodejs/npm:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
conda install -c conda-forge jupyterhub
|
||||||
|
```
|
||||||
|
|
||||||
|
If you plan to run notebook servers locally, install the Jupyter notebook
|
||||||
|
or JupyterLab:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
conda install notebook
|
||||||
|
conda install jupyterlab
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Using `pip`
|
||||||
|
|
||||||
JupyterHub can be installed with `pip`, and the proxy with `npm`:
|
JupyterHub can be installed with `pip`, and the proxy with `npm`:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
npm install -g configurable-http-proxy
|
npm install -g configurable-http-proxy
|
||||||
pip3 install jupyterhub
|
python3 -m pip install jupyterhub
|
||||||
```
|
```
|
||||||
|
|
||||||
If you plan to run notebook servers locally, you will need to install the
|
If you plan to run notebook servers locally, you will need to install the
|
||||||
[Jupyter notebook](https://jupyter.readthedocs.io/en/latest/install.html)
|
[Jupyter notebook](https://jupyter.readthedocs.io/en/latest/install.html)
|
||||||
package:
|
package:
|
||||||
|
|
||||||
pip3 install --upgrade notebook
|
python3 -m pip install --upgrade notebook
|
||||||
|
|
||||||
### Run the Hub server
|
### Run the Hub server
|
||||||
|
|
||||||
@@ -99,7 +124,7 @@ more configuration of the system.
|
|||||||
|
|
||||||
## Configuration
|
## Configuration
|
||||||
|
|
||||||
The [Getting Started](http://jupyterhub.readthedocs.io/en/latest/getting-started/index.html) section of the
|
The [Getting Started](https://jupyterhub.readthedocs.io/en/latest/getting-started/index.html) section of the
|
||||||
documentation explains the common steps in setting up JupyterHub.
|
documentation explains the common steps in setting up JupyterHub.
|
||||||
|
|
||||||
The [**JupyterHub tutorial**](https://github.com/jupyterhub/jupyterhub-tutorial)
|
The [**JupyterHub tutorial**](https://github.com/jupyterhub/jupyterhub-tutorial)
|
||||||
@@ -151,7 +176,7 @@ not, Jupyter Notebook version 4 or greater must be installed.
|
|||||||
|
|
||||||
The JupyterHub docker image can be started with the following command:
|
The JupyterHub docker image can be started with the following command:
|
||||||
|
|
||||||
docker run -d --name jupyterhub jupyterhub/jupyterhub jupyterhub
|
docker run -p 8000:8000 -d --name jupyterhub jupyterhub/jupyterhub jupyterhub
|
||||||
|
|
||||||
This command will create a container named `jupyterhub` that you can
|
This command will create a container named `jupyterhub` that you can
|
||||||
**stop and resume** with `docker stop/start`.
|
**stop and resume** with `docker stop/start`.
|
||||||
@@ -163,7 +188,7 @@ If you want to run docker on a computer that has a public IP then you should
|
|||||||
(as in MUST) **secure it with ssl** by adding ssl options to your docker
|
(as in MUST) **secure it with ssl** by adding ssl options to your docker
|
||||||
configuration or by using a ssl enabled proxy.
|
configuration or by using a ssl enabled proxy.
|
||||||
|
|
||||||
[Mounting volumes](https://docs.docker.com/engine/userguide/containers/dockervolumes/) will
|
[Mounting volumes](https://docs.docker.com/engine/admin/volumes/volumes/) will
|
||||||
allow you to **store data outside the docker image (host system) so it will be persistent**, even when you start
|
allow you to **store data outside the docker image (host system) so it will be persistent**, even when you start
|
||||||
a new image.
|
a new image.
|
||||||
|
|
||||||
@@ -175,38 +200,9 @@ These accounts will be used for authentication in JupyterHub's default configura
|
|||||||
|
|
||||||
If you would like to contribute to the project, please read our
|
If you would like to contribute to the project, please read our
|
||||||
[contributor documentation](http://jupyter.readthedocs.io/en/latest/contributor/content-contributor.html)
|
[contributor documentation](http://jupyter.readthedocs.io/en/latest/contributor/content-contributor.html)
|
||||||
and the [`CONTRIBUTING.md`](CONTRIBUTING.md).
|
and the [`CONTRIBUTING.md`](CONTRIBUTING.md). The `CONTRIBUTING.md` file
|
||||||
|
explains how to set up a development installation, how to run the test suite,
|
||||||
For a **development install**, clone the [repository](https://github.com/jupyterhub/jupyterhub)
|
and how to contribute to documentation.
|
||||||
and then install from source:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
git clone https://github.com/jupyterhub/jupyterhub
|
|
||||||
cd jupyterhub
|
|
||||||
pip3 install -r dev-requirements.txt -e .
|
|
||||||
```
|
|
||||||
|
|
||||||
If the `pip3 install` command fails and complains about `lessc` being
|
|
||||||
unavailable, you may need to explicitly install some additional JavaScript
|
|
||||||
dependencies:
|
|
||||||
|
|
||||||
npm install
|
|
||||||
|
|
||||||
This will fetch client-side JavaScript dependencies necessary to compile CSS.
|
|
||||||
|
|
||||||
You may also need to manually update JavaScript and CSS after some development
|
|
||||||
updates, with:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
python3 setup.py js # fetch updated client-side js
|
|
||||||
python3 setup.py css # recompile CSS from LESS sources
|
|
||||||
```
|
|
||||||
|
|
||||||
We use [pytest](http://doc.pytest.org/en/latest/) for **running tests**:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
pytest jupyterhub/tests
|
|
||||||
```
|
|
||||||
|
|
||||||
### A note about platform support
|
### A note about platform support
|
||||||
|
|
||||||
@@ -237,7 +233,7 @@ our JupyterHub [Gitter](https://gitter.im/jupyterhub/jupyterhub) channel.
|
|||||||
|
|
||||||
- [Reporting Issues](https://github.com/jupyterhub/jupyterhub/issues)
|
- [Reporting Issues](https://github.com/jupyterhub/jupyterhub/issues)
|
||||||
- [JupyterHub tutorial](https://github.com/jupyterhub/jupyterhub-tutorial)
|
- [JupyterHub tutorial](https://github.com/jupyterhub/jupyterhub-tutorial)
|
||||||
- [Documentation for JupyterHub](http://jupyterhub.readthedocs.io/en/latest/) | [PDF (latest)](https://media.readthedocs.org/pdf/jupyterhub/latest/jupyterhub.pdf) | [PDF (stable)](https://media.readthedocs.org/pdf/jupyterhub/stable/jupyterhub.pdf)
|
- [Documentation for JupyterHub](https://jupyterhub.readthedocs.io/en/latest/) | [PDF (latest)](https://media.readthedocs.org/pdf/jupyterhub/latest/jupyterhub.pdf) | [PDF (stable)](https://media.readthedocs.org/pdf/jupyterhub/stable/jupyterhub.pdf)
|
||||||
- [Documentation for JupyterHub's REST API](http://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyter/jupyterhub/master/docs/rest-api.yml#/default)
|
- [Documentation for JupyterHub's REST API](http://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyter/jupyterhub/master/docs/rest-api.yml#/default)
|
||||||
- [Documentation for Project Jupyter](http://jupyter.readthedocs.io/en/latest/index.html) | [PDF](https://media.readthedocs.org/pdf/jupyter/latest/jupyter.pdf)
|
- [Documentation for Project Jupyter](http://jupyter.readthedocs.io/en/latest/index.html) | [PDF](https://media.readthedocs.org/pdf/jupyter/latest/jupyter.pdf)
|
||||||
- [Project Jupyter website](https://jupyter.org)
|
- [Project Jupyter website](https://jupyter.org)
|
||||||
|
@@ -18,7 +18,7 @@ import shutil
|
|||||||
HERE = os.path.abspath(os.path.dirname(__file__))
|
HERE = os.path.abspath(os.path.dirname(__file__))
|
||||||
|
|
||||||
|
|
||||||
components = join(HERE, "share", "jupyter", "hub", "static", "components")
|
components = join(HERE, "share", "jupyterhub", "static", "components")
|
||||||
node_modules = join(HERE, "node_modules")
|
node_modules = join(HERE, "node_modules")
|
||||||
|
|
||||||
if os.path.exists(components):
|
if os.path.exists(components):
|
||||||
|
2
ci/docker-db.sh
Normal file → Executable file
2
ci/docker-db.sh
Normal file → Executable file
@@ -8,7 +8,7 @@ export MYSQL_HOST=127.0.0.1
|
|||||||
export MYSQL_TCP_PORT=${MYSQL_TCP_PORT:-13306}
|
export MYSQL_TCP_PORT=${MYSQL_TCP_PORT:-13306}
|
||||||
export PGHOST=127.0.0.1
|
export PGHOST=127.0.0.1
|
||||||
NAME="hub-test-$DB"
|
NAME="hub-test-$DB"
|
||||||
DOCKER_RUN="docker run --rm -d --name $NAME"
|
DOCKER_RUN="docker run -d --name $NAME"
|
||||||
|
|
||||||
docker rm -f "$NAME" 2>/dev/null || true
|
docker rm -f "$NAME" 2>/dev/null || true
|
||||||
|
|
||||||
|
8
ci/init-db.sh
Normal file → Executable file
8
ci/init-db.sh
Normal file → Executable file
@@ -21,7 +21,7 @@ esac
|
|||||||
|
|
||||||
set -x
|
set -x
|
||||||
|
|
||||||
$SQL 'DROP DATABASE jupyterhub;' 2>/dev/null || true
|
for SUFFIX in '' _upgrade_072 _upgrade_081; do
|
||||||
$SQL "CREATE DATABASE jupyterhub ${EXTRA_CREATE};"
|
$SQL "DROP DATABASE jupyterhub${SUFFIX};" 2>/dev/null || true
|
||||||
$SQL 'DROP DATABASE jupyterhub_upgrade;' 2>/dev/null || true
|
$SQL "CREATE DATABASE jupyterhub${SUFFIX} ${EXTRA_CREATE};"
|
||||||
$SQL "CREATE DATABASE jupyterhub_upgrade ${EXTRA_CREATE};"
|
done
|
||||||
|
24
circle.yml
24
circle.yml
@@ -1,24 +0,0 @@
|
|||||||
machine:
|
|
||||||
services:
|
|
||||||
- docker
|
|
||||||
|
|
||||||
dependencies:
|
|
||||||
override:
|
|
||||||
- ls
|
|
||||||
|
|
||||||
test:
|
|
||||||
override:
|
|
||||||
- docker build -t jupyterhub/jupyterhub .
|
|
||||||
- docker build -t jupyterhub/jupyterhub-onbuild:${CIRCLE_TAG:-latest} onbuild
|
|
||||||
|
|
||||||
deployment:
|
|
||||||
hub:
|
|
||||||
branch: master
|
|
||||||
commands:
|
|
||||||
- docker login -u $DOCKER_USER -p $DOCKER_PASS -e unused@example.com
|
|
||||||
- docker push jupyterhub/jupyterhub-onbuild
|
|
||||||
release:
|
|
||||||
tag: /.*/
|
|
||||||
commands:
|
|
||||||
- docker login -u $DOCKER_USER -p $DOCKER_PASS -e unused@example.com
|
|
||||||
- docker push jupyterhub/jupyterhub-onbuild:$CIRCLE_TAG
|
|
@@ -1,9 +1,14 @@
|
|||||||
-r requirements.txt
|
-r requirements.txt
|
||||||
mock
|
mock
|
||||||
|
beautifulsoup4
|
||||||
codecov
|
codecov
|
||||||
cryptography
|
cryptography
|
||||||
pytest-cov
|
pytest-cov
|
||||||
pytest-tornado
|
pytest-tornado
|
||||||
pytest>=2.8
|
pytest>=3.3
|
||||||
notebook
|
notebook
|
||||||
requests-mock
|
requests-mock
|
||||||
|
virtualenv
|
||||||
|
# temporary pin of attrs for jsonschema 0.3.0a1
|
||||||
|
# seems to be a pip bug
|
||||||
|
attrs>=17.4.0
|
||||||
|
11
dockerfiles/Dockerfile.alpine
Normal file
11
dockerfiles/Dockerfile.alpine
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
FROM python:3.6.3-alpine3.6
|
||||||
|
|
||||||
|
ARG JUPYTERHUB_VERSION=0.8.1
|
||||||
|
|
||||||
|
RUN pip3 install --no-cache jupyterhub==${JUPYTERHUB_VERSION}
|
||||||
|
ENV LANG=en_US.UTF-8
|
||||||
|
|
||||||
|
USER nobody
|
||||||
|
CMD ["jupyterhub"]
|
||||||
|
|
||||||
|
|
21
dockerfiles/README.md
Normal file
21
dockerfiles/README.md
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
## What is Dockerfile.alpine
|
||||||
|
Dockerfile.alpine contains base image for jupyterhub. It does not work independently, but only as part of a full jupyterhub cluster
|
||||||
|
|
||||||
|
## How to use it?
|
||||||
|
|
||||||
|
1. A running configurable-http-proxy, whose API is accessible.
|
||||||
|
2. A jupyterhub_config file.
|
||||||
|
3. Authentication and other libraries required by the specific jupyterhub_config file.
|
||||||
|
|
||||||
|
|
||||||
|
## Steps to test it outside a cluster
|
||||||
|
|
||||||
|
* start configurable-http-proxy in another container
|
||||||
|
* specify CONFIGPROXY_AUTH_TOKEN env in both containers
|
||||||
|
* put both containers on the same network (e.g. docker create network jupyterhub; docker run ... --net jupyterhub)
|
||||||
|
* tell jupyterhub where CHP is (e.g. c.ConfigurableHTTPProxy.api_url = 'http://chp:8001')
|
||||||
|
* tell jupyterhub not to start the proxy itself (c.ConfigurableHTTPProxy.should_start = False)
|
||||||
|
* Use dummy authenticator for ease of testing. Update following in jupyterhub_config file
|
||||||
|
- c.JupyterHub.authenticator_class = 'dummyauthenticator.DummyAuthenticator'
|
||||||
|
- c.DummyAuthenticator.password = "your strong password"
|
||||||
|
|
@@ -2,7 +2,7 @@
|
|||||||
#
|
#
|
||||||
|
|
||||||
# You can set these variables from the command line.
|
# You can set these variables from the command line.
|
||||||
SPHINXOPTS =
|
SPHINXOPTS = "-W"
|
||||||
SPHINXBUILD = sphinx-build
|
SPHINXBUILD = sphinx-build
|
||||||
PAPER =
|
PAPER =
|
||||||
BUILDDIR = build
|
BUILDDIR = build
|
||||||
|
@@ -1,19 +1,22 @@
|
|||||||
|
# ReadTheDocs uses the `environment.yaml` so make sure to update that as well
|
||||||
|
# if you change the dependencies of JupyterHub in the various `requirements.txt`
|
||||||
name: jhub_docs
|
name: jhub_docs
|
||||||
channels:
|
channels:
|
||||||
- conda-forge
|
- conda-forge
|
||||||
dependencies:
|
dependencies:
|
||||||
- nodejs
|
- nodejs
|
||||||
- python=3.5
|
- python=3.6
|
||||||
- alembic
|
- alembic
|
||||||
- jinja2
|
- jinja2
|
||||||
- pamela
|
- pamela
|
||||||
- requests
|
- requests
|
||||||
- sqlalchemy>=1
|
- sqlalchemy>=1
|
||||||
- tornado>=4.1
|
- tornado>=5.0
|
||||||
- traitlets>=4.1
|
- traitlets>=4.1
|
||||||
- sphinx>=1.4, !=1.5.4
|
- sphinx>=1.7
|
||||||
- sphinx_rtd_theme
|
|
||||||
- pip:
|
- pip:
|
||||||
- jupyter_alabaster_theme
|
|
||||||
- python-oauth2
|
- python-oauth2
|
||||||
- recommonmark==0.4.0
|
- recommonmark==0.4.0
|
||||||
|
- async_generator
|
||||||
|
- prometheus_client
|
||||||
|
- attrs>=17.4.0
|
||||||
|
@@ -1,3 +1,5 @@
|
|||||||
|
# ReadTheDocs uses the `environment.yaml` so make sure to update that as well
|
||||||
|
# if you change this file
|
||||||
-r ../requirements.txt
|
-r ../requirements.txt
|
||||||
sphinx>=1.4
|
sphinx>=1.7
|
||||||
recommonmark==0.4.0
|
recommonmark==0.4.0
|
||||||
|
@@ -3,7 +3,7 @@ swagger: '2.0'
|
|||||||
info:
|
info:
|
||||||
title: JupyterHub
|
title: JupyterHub
|
||||||
description: The REST API for JupyterHub
|
description: The REST API for JupyterHub
|
||||||
version: 0.8.0dev
|
version: 0.9.4
|
||||||
license:
|
license:
|
||||||
name: BSD-3-Clause
|
name: BSD-3-Clause
|
||||||
schemes:
|
schemes:
|
||||||
@@ -240,18 +240,47 @@ paths:
|
|||||||
description: The user's notebook named-server has stopped
|
description: The user's notebook named-server has stopped
|
||||||
'202':
|
'202':
|
||||||
description: The user's notebook named-server has not yet stopped as it is taking a while to stop
|
description: The user's notebook named-server has not yet stopped as it is taking a while to stop
|
||||||
/users/{name}/admin-access:
|
/users/{name}/tokens:
|
||||||
post:
|
get:
|
||||||
summary: Grant admin access to this user's notebook server
|
summary: List tokens for the user
|
||||||
parameters:
|
|
||||||
- name: name
|
|
||||||
description: username
|
|
||||||
in: path
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
responses:
|
responses:
|
||||||
'200':
|
'200':
|
||||||
description: Sets a cookie granting the requesting administrator access to the user's notebook server
|
description: The list of tokens
|
||||||
|
schema:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: '#/definitions/Token'
|
||||||
|
post:
|
||||||
|
summary: Create a new token for the user
|
||||||
|
parameters:
|
||||||
|
- name: expires_in
|
||||||
|
type: number
|
||||||
|
required: false
|
||||||
|
in: body
|
||||||
|
description: lifetime (in seconds) after which the requested token will expire.
|
||||||
|
- name: note
|
||||||
|
type: string
|
||||||
|
required: false
|
||||||
|
in: body
|
||||||
|
description: A note attached to the token for future bookkeeping
|
||||||
|
responses:
|
||||||
|
'201':
|
||||||
|
description: The newly created token
|
||||||
|
schema:
|
||||||
|
$ref: '#/definitions/Token'
|
||||||
|
/users/{name}/tokens/{token_id}:
|
||||||
|
get:
|
||||||
|
summary: Get the model for a token by id
|
||||||
|
responses:
|
||||||
|
'200':
|
||||||
|
description: The info for the new token
|
||||||
|
schema:
|
||||||
|
$ref: '#/definitions/Token'
|
||||||
|
delete:
|
||||||
|
summary: Delete (revoke) a token by id
|
||||||
|
responses:
|
||||||
|
'204':
|
||||||
|
description: The token has been deleted
|
||||||
/user:
|
/user:
|
||||||
summary: Return authenticated user's model
|
summary: Return authenticated user's model
|
||||||
description:
|
description:
|
||||||
@@ -588,12 +617,55 @@ definitions:
|
|||||||
description: The user's notebook server's base URL, if running; null if not.
|
description: The user's notebook server's base URL, if running; null if not.
|
||||||
pending:
|
pending:
|
||||||
type: string
|
type: string
|
||||||
enum: ["spawn", "stop"]
|
enum: ["spawn", "stop", null]
|
||||||
description: The currently pending action, if any
|
description: The currently pending action, if any
|
||||||
last_activity:
|
last_activity:
|
||||||
type: string
|
type: string
|
||||||
format: date-time
|
format: date-time
|
||||||
description: Timestamp of last-seen activity from the user
|
description: Timestamp of last-seen activity from the user
|
||||||
|
servers:
|
||||||
|
type: object
|
||||||
|
description: The active servers for this user.
|
||||||
|
items:
|
||||||
|
schema:
|
||||||
|
$ref: '#/definitions/Server'
|
||||||
|
Server:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
description: The server's name. The user's default server has an empty name ('')
|
||||||
|
ready:
|
||||||
|
type: boolean
|
||||||
|
description: |
|
||||||
|
Whether the server is ready for traffic.
|
||||||
|
Will always be false when any transition is pending.
|
||||||
|
pending:
|
||||||
|
type: string
|
||||||
|
enum: ["spawn", "stop", null]
|
||||||
|
description: |
|
||||||
|
The currently pending action, if any.
|
||||||
|
A server is not ready if an action is pending.
|
||||||
|
url:
|
||||||
|
type: string
|
||||||
|
description: |
|
||||||
|
The URL where the server can be accessed
|
||||||
|
(typically /user/:name/:server.name/).
|
||||||
|
progress_url:
|
||||||
|
type: string
|
||||||
|
description: |
|
||||||
|
The URL for an event-stream to retrieve events during a spawn.
|
||||||
|
started:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
description: UTC timestamp when the server was last started.
|
||||||
|
last_activity:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
description: UTC timestamp last-seen activity on this server.
|
||||||
|
state:
|
||||||
|
type: object
|
||||||
|
description: Arbitrary internal state from this server's spawner. Only available on the hub's users list or get-user-by-name method, and only if a hub admin. None otherwise.
|
||||||
Group:
|
Group:
|
||||||
type: object
|
type: object
|
||||||
properties:
|
properties:
|
||||||
@@ -628,3 +700,40 @@ definitions:
|
|||||||
description: The command used to start the service (if managed)
|
description: The command used to start the service (if managed)
|
||||||
items:
|
items:
|
||||||
type: string
|
type: string
|
||||||
|
info:
|
||||||
|
type: object
|
||||||
|
description: |
|
||||||
|
Additional information a deployment can attach to a service.
|
||||||
|
JupyterHub does not use this field.
|
||||||
|
Token:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
token:
|
||||||
|
type: string
|
||||||
|
description: The token itself. Only present in responses to requests for a new token.
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
description: The id of the API token. Used for modifying or deleting the token.
|
||||||
|
user:
|
||||||
|
type: string
|
||||||
|
description: The user that owns a token (undefined if owned by a service)
|
||||||
|
service:
|
||||||
|
type: string
|
||||||
|
description: The service that owns the token (undefined of owned by a user)
|
||||||
|
note:
|
||||||
|
type: string
|
||||||
|
description: A note about the token, typically describing what it was created for.
|
||||||
|
created:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
description: Timestamp when this token was created
|
||||||
|
expires_at:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
description: Timestamp when this token expires. Null if there is no expiry.
|
||||||
|
last_activity:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
description: |
|
||||||
|
Timestamp of last-seen activity using this token.
|
||||||
|
Can be null if token has never been used.
|
||||||
|
106
docs/source/_static/custom.css
Normal file
106
docs/source/_static/custom.css
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
div#helm-chart-schema h2,
|
||||||
|
div#helm-chart-schema h3,
|
||||||
|
div#helm-chart-schema h4,
|
||||||
|
div#helm-chart-schema h5,
|
||||||
|
div#helm-chart-schema h6 {
|
||||||
|
font-family: courier new;
|
||||||
|
}
|
||||||
|
|
||||||
|
h3, h3 ~ * {
|
||||||
|
margin-left: 3% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
h4, h4 ~ * {
|
||||||
|
margin-left: 6% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
h5, h5 ~ * {
|
||||||
|
margin-left: 9% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
h6, h6 ~ * {
|
||||||
|
margin-left: 12% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
h7, h7 ~ * {
|
||||||
|
margin-left: 15% !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
img.logo {
|
||||||
|
width:100%
|
||||||
|
}
|
||||||
|
|
||||||
|
.right-next {
|
||||||
|
float: right;
|
||||||
|
max-width: 45%;
|
||||||
|
overflow: auto;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.right-next::after{
|
||||||
|
content: ' »';
|
||||||
|
}
|
||||||
|
|
||||||
|
.left-prev {
|
||||||
|
float: left;
|
||||||
|
max-width: 45%;
|
||||||
|
overflow: auto;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.left-prev::before{
|
||||||
|
content: '« ';
|
||||||
|
}
|
||||||
|
|
||||||
|
.prev-next-bottom {
|
||||||
|
margin-top: 3em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.prev-next-top {
|
||||||
|
margin-bottom: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Sidebar TOC and headers */
|
||||||
|
|
||||||
|
div.sphinxsidebarwrapper div {
|
||||||
|
margin-bottom: .8em;
|
||||||
|
}
|
||||||
|
div.sphinxsidebar h3 {
|
||||||
|
font-size: 1.3em;
|
||||||
|
padding-top: 0px;
|
||||||
|
font-weight: 800;
|
||||||
|
margin-left: 0px !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar p.caption {
|
||||||
|
font-size: 1.2em;
|
||||||
|
margin-bottom: 0px;
|
||||||
|
margin-left: 0px !important;
|
||||||
|
font-weight: 900;
|
||||||
|
color: #767676;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.sphinxsidebar ul {
|
||||||
|
font-size: .8em;
|
||||||
|
margin-top: 0px;
|
||||||
|
padding-left: 3%;
|
||||||
|
margin-left: 0px !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.relations ul {
|
||||||
|
font-size: 1em;
|
||||||
|
margin-left: 0px !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
div#searchbox form {
|
||||||
|
margin-left: 0px !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* body elements */
|
||||||
|
.toctree-wrapper span.caption-text {
|
||||||
|
color: #767676;
|
||||||
|
font-style: italic;
|
||||||
|
font-weight: 300;
|
||||||
|
}
|
BIN
docs/source/_static/images/logo/favicon.ico
Normal file
BIN
docs/source/_static/images/logo/favicon.ico
Normal file
Binary file not shown.
After Width: | Height: | Size: 4.4 KiB |
BIN
docs/source/_static/images/logo/logo.png
Normal file
BIN
docs/source/_static/images/logo/logo.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 38 KiB |
16
docs/source/_templates/navigation.html
Normal file
16
docs/source/_templates/navigation.html
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
{# Custom template for navigation.html
|
||||||
|
|
||||||
|
alabaster theme does not provide blocks for titles to
|
||||||
|
be overridden so this custom theme handles title and
|
||||||
|
toctree for sidebar
|
||||||
|
#}
|
||||||
|
<h3>{{ _('Table of Contents') }}</h3>
|
||||||
|
{{ toctree(includehidden=theme_sidebar_includehidden, collapse=theme_sidebar_collapse) }}
|
||||||
|
{% if theme_extra_nav_links %}
|
||||||
|
<hr />
|
||||||
|
<ul>
|
||||||
|
{% for text, uri in theme_extra_nav_links.items() %}
|
||||||
|
<li class="toctree-l1"><a href="{{ uri }}">{{ text }}</a></li>
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
|
{% endif %}
|
30
docs/source/_templates/page.html
Normal file
30
docs/source/_templates/page.html
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
{% extends '!page.html' %}
|
||||||
|
|
||||||
|
{# Custom template for page.html
|
||||||
|
|
||||||
|
Alabaster theme does not provide blocks for prev/next at bottom of each page.
|
||||||
|
This is _in addition_ to the prev/next in the sidebar. The "Prev/Next" text
|
||||||
|
or symbols are handled by CSS classes in _static/custom.css
|
||||||
|
#}
|
||||||
|
|
||||||
|
{% macro prev_next(prev, next, prev_title='', next_title='') %}
|
||||||
|
{%- if prev %}
|
||||||
|
<a class='left-prev' href="{{ prev.link|e }}" title="{{ _('previous chapter')}}">{{ prev_title or prev.title }}</a>
|
||||||
|
{%- endif %}
|
||||||
|
{%- if next %}
|
||||||
|
<a class='right-next' href="{{ next.link|e }}" title="{{ _('next chapter')}}">{{ next_title or next.title }}</a>
|
||||||
|
{%- endif %}
|
||||||
|
<div style='clear:both;'></div>
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
|
||||||
|
{% block body %}
|
||||||
|
<div class='prev-next-top'>
|
||||||
|
{{ prev_next(prev, next, 'Previous', 'Next') }}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{{super()}}
|
||||||
|
<div class='prev-next-bottom'>
|
||||||
|
{{ prev_next(prev, next) }}
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
17
docs/source/_templates/relations.html
Normal file
17
docs/source/_templates/relations.html
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
{# Custom template for relations.html
|
||||||
|
|
||||||
|
alabaster theme does not provide previous/next page by default
|
||||||
|
#}
|
||||||
|
<div class="relations">
|
||||||
|
<h3>Navigation</h3>
|
||||||
|
<ul>
|
||||||
|
<li><a href="{{ pathto(master_doc) }}">Documentation Home</a><ul>
|
||||||
|
{%- if prev %}
|
||||||
|
<li><a href="{{ prev.link|e }}" title="Previous">Previous topic</a></li>
|
||||||
|
{%- endif %}
|
||||||
|
{%- if next %}
|
||||||
|
<li><a href="{{ next.link|e }}" title="Next">Next topic</a></li>
|
||||||
|
{%- endif %}
|
||||||
|
</ul>
|
||||||
|
</ul>
|
||||||
|
</div>
|
@@ -7,6 +7,172 @@ command line for details.
|
|||||||
|
|
||||||
## [Unreleased]
|
## [Unreleased]
|
||||||
|
|
||||||
|
## 0.9
|
||||||
|
|
||||||
|
### [0.9.4] 2018-09-24
|
||||||
|
|
||||||
|
JupyterHub 0.9.4 is a small bugfix release.
|
||||||
|
|
||||||
|
- Fixes an issue that required all running user servers to be restarted
|
||||||
|
when performing an upgrade from 0.8 to 0.9.
|
||||||
|
- Fixes content-type for API endpoints back to `application/json`.
|
||||||
|
It was `text/html` in 0.9.0-0.9.3.
|
||||||
|
|
||||||
|
### [0.9.3] 2018-09-12
|
||||||
|
|
||||||
|
JupyterHub 0.9.3 contains small bugfixes and improvements
|
||||||
|
|
||||||
|
- Fix token page and model handling of `expires_at`.
|
||||||
|
This field was missing from the REST API model for tokens
|
||||||
|
and could cause the token page to not render
|
||||||
|
- Add keep-alive to progress event stream to avoid proxies dropping
|
||||||
|
the connection due to inactivity
|
||||||
|
- Documentation and example improvements
|
||||||
|
- Disable quit button when using notebook 5.6
|
||||||
|
- Prototype new feature (may change prior to 1.0):
|
||||||
|
pass requesting Handler to Spawners during start,
|
||||||
|
accessible as `self.handler`
|
||||||
|
|
||||||
|
### [0.9.2] 2018-08-10
|
||||||
|
|
||||||
|
JupyterHub 0.9.2 contains small bugfixes and improvements.
|
||||||
|
|
||||||
|
- Documentation and example improvements
|
||||||
|
- Add `Spawner.consecutive_failure_limit` config for aborting the Hub if too many spawns fail in a row.
|
||||||
|
- Fix for handling SIGTERM when run with asyncio (tornado 5)
|
||||||
|
- Windows compatibility fixes
|
||||||
|
|
||||||
|
|
||||||
|
### [0.9.1] 2018-07-04
|
||||||
|
|
||||||
|
JupyterHub 0.9.1 contains a number of small bugfixes on top of 0.9.
|
||||||
|
|
||||||
|
- Use a PID file for the proxy to decrease the likelihood that a leftover proxy process will prevent JupyterHub from restarting
|
||||||
|
- `c.LocalProcessSpawner.shell_cmd` is now configurable
|
||||||
|
- API requests to stopped servers (requests to the hub for `/user/:name/api/...`) fail with 404 rather than triggering a restart of the server
|
||||||
|
- Compatibility fix for notebook 5.6.0 which will introduce further
|
||||||
|
security checks for local connections
|
||||||
|
- Managed services always use localhost to talk to the Hub if the Hub listening on all interfaces
|
||||||
|
- When using a URL prefix, the Hub route will be `JupyterHub.base_url` instead of unconditionally `/`
|
||||||
|
- additional fixes and improvements
|
||||||
|
|
||||||
|
### [0.9.0] 2018-06-15
|
||||||
|
|
||||||
|
JupyterHub 0.9 is a major upgrade of JupyterHub.
|
||||||
|
There are several changes to the database schema,
|
||||||
|
so make sure to backup your database and run:
|
||||||
|
|
||||||
|
jupyterhub upgrade-db
|
||||||
|
|
||||||
|
after upgrading jupyterhub.
|
||||||
|
|
||||||
|
The biggest change for 0.9 is the switch to asyncio coroutines everywhere
|
||||||
|
instead of tornado coroutines. Custom Spawners and Authenticators are still
|
||||||
|
free to use tornado coroutines for async methods, as they will continue to
|
||||||
|
work. As part of this upgrade, JupyterHub 0.9 drops support for Python < 3.5
|
||||||
|
and tornado < 5.0.
|
||||||
|
|
||||||
|
|
||||||
|
#### Changed
|
||||||
|
|
||||||
|
- Require Python >= 3.5
|
||||||
|
- Require tornado >= 5.0
|
||||||
|
- Use asyncio coroutines throughout
|
||||||
|
- Set status 409 for conflicting actions instead of 400,
|
||||||
|
e.g. creating users or groups that already exist.
|
||||||
|
- timestamps in REST API continue to be UTC, but now include 'Z' suffix
|
||||||
|
to identify them as such.
|
||||||
|
- REST API User model always includes `servers` dict,
|
||||||
|
not just when named servers are enabled.
|
||||||
|
- `server` info is no longer available to oauth identification endpoints,
|
||||||
|
only user info and group membership.
|
||||||
|
- `User.last_activity` may be None if a user has not been seen,
|
||||||
|
rather than starting with the user creation time
|
||||||
|
which is now separately stored as `User.created`.
|
||||||
|
- static resources are now found in `$PREFIX/share/jupyterhub` instead of `share/jupyter/hub` for improved consistency.
|
||||||
|
- Deprecate `.extra_log_file` config. Use pipe redirection instead:
|
||||||
|
|
||||||
|
jupyterhub &>> /var/log/jupyterhub.log
|
||||||
|
|
||||||
|
- Add `JupyterHub.bind_url` config for setting the full bind URL of the proxy.
|
||||||
|
Sets ip, port, base_url all at once.
|
||||||
|
- Add `JupyterHub.hub_bind_url` for setting the full host+port of the Hub.
|
||||||
|
`hub_bind_url` supports unix domain sockets, e.g.
|
||||||
|
`unix+http://%2Fsrv%2Fjupyterhub.sock`
|
||||||
|
- Deprecate `JupyterHub.hub_connect_port` config in favor of `JupyterHub.hub_connect_url`. `hub_connect_ip` is not deprecated
|
||||||
|
and can still be used in the common case where only the ip address of the hub differs from the bind ip.
|
||||||
|
|
||||||
|
#### Added
|
||||||
|
|
||||||
|
- Spawners can define a `.progress` method which should be an async generator.
|
||||||
|
The generator should yield events of the form:
|
||||||
|
```python
|
||||||
|
{
|
||||||
|
"message": "some-state-message",
|
||||||
|
"progress": 50,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
These messages will be shown with a progress bar on the spawn-pending page.
|
||||||
|
The `async_generator` package can be used to make async generators
|
||||||
|
compatible with Python 3.5.
|
||||||
|
- track activity of individual API tokens
|
||||||
|
- new REST API for managing API tokens at `/hub/api/user/tokens[/token-id]`
|
||||||
|
- allow viewing/revoking tokens via token page
|
||||||
|
- User creation time is available in the REST API as `User.created`
|
||||||
|
- Server start time is stored as `Server.started`
|
||||||
|
- `Spawner.start` may return a URL for connecting to a notebook instead of `(ip, port)`. This enables Spawners to launch servers that setup their own HTTPS.
|
||||||
|
- Optimize database performance by disabling sqlalchemy expire_on_commit by default.
|
||||||
|
- Add `python -m jupyterhub.dbutil shell` entrypoint for quickly
|
||||||
|
launching an IPython session connected to your JupyterHub database.
|
||||||
|
- Include `User.auth_state` in user model on single-user REST endpoints for admins only.
|
||||||
|
- Include `Server.state` in server model on REST endpoints for admins only.
|
||||||
|
- Add `Authenticator.blacklist` for blacklisting users instead of whitelisting.
|
||||||
|
- Pass `c.JupyterHub.tornado_settings['cookie_options']` down to Spawners
|
||||||
|
so that cookie options (e.g. `expires_days`) can be set globally for the whole application.
|
||||||
|
- SIGINFO (`ctrl-t`) handler showing the current status of all running threads,
|
||||||
|
coroutines, and CPU/memory/FD consumption.
|
||||||
|
- Add async `Spawner.get_options_form` alternative to `.options_form`, so it can be a coroutine.
|
||||||
|
- Add `JupyterHub.redirect_to_server` config to govern whether
|
||||||
|
users should be sent to their server on login or the JuptyerHub home page.
|
||||||
|
- html page templates can be more easily customized and extended.
|
||||||
|
- Allow registering external OAuth clients for using the Hub as an OAuth provider.
|
||||||
|
- Add basic prometheus metrics at `/hub/metrics` endpoint.
|
||||||
|
- Add session-id cookie, enabling immediate revocation of login tokens.
|
||||||
|
- Authenticators may specify that users are admins by specifying the `admin` key when return the user model as a dict.
|
||||||
|
- Added "Start All" button to admin page for launching all user servers at once.
|
||||||
|
- Services have an `info` field which is a dictionary.
|
||||||
|
This is accessible via the REST API.
|
||||||
|
- `JupyterHub.extra_handlers` allows defining additional tornado RequestHandlers attached to the Hub.
|
||||||
|
- API tokens may now expire.
|
||||||
|
Expiry is available in the REST model as `expires_at`,
|
||||||
|
and settable when creating API tokens by specifying `expires_in`.
|
||||||
|
|
||||||
|
|
||||||
|
#### Fixed
|
||||||
|
|
||||||
|
- Remove green from theme to improve accessibility
|
||||||
|
- Fix error when proxy deletion fails due to route already being deleted
|
||||||
|
- clear `?redirects` from URL on successful launch
|
||||||
|
- disable send2trash by default, which is rarely desirable for jupyterhub
|
||||||
|
- Put PAM calls in a thread so they don't block the main application
|
||||||
|
in cases where PAM is slow (e.g. LDAP).
|
||||||
|
- Remove implicit spawn from login handler,
|
||||||
|
instead relying on subsequent request for `/user/:name` to trigger spawn.
|
||||||
|
- Fixed several inconsistencies for initial redirects,
|
||||||
|
depending on whether server is running or not and whether the user is logged in or not.
|
||||||
|
- Admin requests for `/user/:name` (when admin-access is enabled) launch the right server if it's not running instead of redirecting to their own.
|
||||||
|
- Major performance improvement starting up JupyterHub with many users,
|
||||||
|
especially when most are inactive.
|
||||||
|
- Various fixes in race conditions and performance improvements with the default proxy.
|
||||||
|
- Fixes for CORS headers
|
||||||
|
- Stop setting `.form-control` on spawner form inputs unconditionally.
|
||||||
|
- Better recovery from database errors and database connection issues
|
||||||
|
without having to restart the Hub.
|
||||||
|
- Fix handling of `~` character in usernames.
|
||||||
|
- Fix jupyterhub startup when `getpass.getuser()` would fail,
|
||||||
|
e.g. due to missing entry in passwd file in containers.
|
||||||
|
|
||||||
|
|
||||||
## 0.8
|
## 0.8
|
||||||
|
|
||||||
### [0.8.1] 2017-11-07
|
### [0.8.1] 2017-11-07
|
||||||
@@ -63,7 +229,7 @@ in your Dockerfile is sufficient.
|
|||||||
|
|
||||||
```python
|
```python
|
||||||
{
|
{
|
||||||
'username': 'name'
|
'username': 'name',
|
||||||
'state': {}
|
'state': {}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
@@ -260,7 +426,12 @@ Fix removal of `/login` page in 0.4.0, breaking some OAuth providers.
|
|||||||
First preview release
|
First preview release
|
||||||
|
|
||||||
|
|
||||||
[Unreleased]: https://github.com/jupyterhub/jupyterhub/compare/0.8.1...HEAD
|
[Unreleased]: https://github.com/jupyterhub/jupyterhub/compare/0.9.4...HEAD
|
||||||
|
[0.9.4]: https://github.com/jupyterhub/jupyterhub/compare/0.9.3...0.9.4
|
||||||
|
[0.9.3]: https://github.com/jupyterhub/jupyterhub/compare/0.9.2...0.9.3
|
||||||
|
[0.9.2]: https://github.com/jupyterhub/jupyterhub/compare/0.9.1...0.9.2
|
||||||
|
[0.9.1]: https://github.com/jupyterhub/jupyterhub/compare/0.9.0...0.9.1
|
||||||
|
[0.9.0]: https://github.com/jupyterhub/jupyterhub/compare/0.8.1...0.9.0
|
||||||
[0.8.1]: https://github.com/jupyterhub/jupyterhub/compare/0.8.0...0.8.1
|
[0.8.1]: https://github.com/jupyterhub/jupyterhub/compare/0.8.0...0.8.1
|
||||||
[0.8.0]: https://github.com/jupyterhub/jupyterhub/compare/0.7.2...0.8.0
|
[0.8.0]: https://github.com/jupyterhub/jupyterhub/compare/0.7.2...0.8.0
|
||||||
[0.7.2]: https://github.com/jupyterhub/jupyterhub/compare/0.7.1...0.7.2
|
[0.7.2]: https://github.com/jupyterhub/jupyterhub/compare/0.7.1...0.7.2
|
||||||
|
@@ -21,7 +21,6 @@ extensions = [
|
|||||||
'sphinx.ext.intersphinx',
|
'sphinx.ext.intersphinx',
|
||||||
'sphinx.ext.napoleon',
|
'sphinx.ext.napoleon',
|
||||||
'autodoc_traits',
|
'autodoc_traits',
|
||||||
'jupyter_alabaster_theme',
|
|
||||||
]
|
]
|
||||||
|
|
||||||
templates_path = ['_templates']
|
templates_path = ['_templates']
|
||||||
@@ -36,12 +35,14 @@ author = u'Project Jupyter team'
|
|||||||
|
|
||||||
# Autopopulate version
|
# Autopopulate version
|
||||||
from os.path import dirname
|
from os.path import dirname
|
||||||
|
|
||||||
docs = dirname(dirname(__file__))
|
docs = dirname(dirname(__file__))
|
||||||
root = dirname(docs)
|
root = dirname(docs)
|
||||||
sys.path.insert(0, root)
|
sys.path.insert(0, root)
|
||||||
sys.path.insert(0, os.path.join(docs, 'sphinxext'))
|
sys.path.insert(0, os.path.join(docs, 'sphinxext'))
|
||||||
|
|
||||||
import jupyterhub
|
import jupyterhub
|
||||||
|
|
||||||
# The short X.Y version.
|
# The short X.Y version.
|
||||||
version = '%i.%i' % jupyterhub.version_info[:2]
|
version = '%i.%i' % jupyterhub.version_info[:2]
|
||||||
# The full version, including alpha/beta/rc tags.
|
# The full version, including alpha/beta/rc tags.
|
||||||
@@ -57,81 +58,86 @@ default_role = 'literal'
|
|||||||
|
|
||||||
# -- Source -------------------------------------------------------------
|
# -- Source -------------------------------------------------------------
|
||||||
|
|
||||||
source_parsers = {
|
source_parsers = {'.md': 'recommonmark.parser.CommonMarkParser'}
|
||||||
'.md': 'recommonmark.parser.CommonMarkParser',
|
|
||||||
}
|
|
||||||
|
|
||||||
source_suffix = ['.rst', '.md']
|
source_suffix = ['.rst', '.md']
|
||||||
#source_encoding = 'utf-8-sig'
|
# source_encoding = 'utf-8-sig'
|
||||||
|
|
||||||
# -- Options for HTML output ----------------------------------------------
|
# -- Options for HTML output ----------------------------------------------
|
||||||
|
|
||||||
# The theme to use for HTML and HTML Help pages.
|
# The theme to use for HTML and HTML Help pages.
|
||||||
html_theme = 'jupyter_alabaster_theme'
|
html_theme = 'alabaster'
|
||||||
|
|
||||||
#html_theme_options = {}
|
html_logo = '_static/images/logo/logo.png'
|
||||||
#html_theme_path = []
|
html_favicon = '_static/images/logo/favicon.ico'
|
||||||
#html_title = None
|
|
||||||
#html_short_title = None
|
|
||||||
#html_logo = None
|
|
||||||
#html_favicon = None
|
|
||||||
|
|
||||||
# Paths that contain custom static files (such as style sheets)
|
# Paths that contain custom static files (such as style sheets)
|
||||||
html_static_path = ['_static']
|
html_static_path = ['_static']
|
||||||
|
|
||||||
#html_extra_path = []
|
html_theme_options = {
|
||||||
#html_last_updated_fmt = '%b %d, %Y'
|
'show_related': True,
|
||||||
#html_use_smartypants = True
|
'description': 'Documentation for JupyterHub',
|
||||||
#html_sidebars = {}
|
'github_user': 'jupyterhub',
|
||||||
#html_additional_pages = {}
|
'github_repo': 'jupyterhub',
|
||||||
#html_domain_indices = True
|
'github_banner': False,
|
||||||
#html_use_index = True
|
'github_button': True,
|
||||||
#html_split_index = False
|
'github_type': 'star',
|
||||||
#html_show_sourcelink = True
|
'show_powered_by': False,
|
||||||
#html_show_sphinx = True
|
'extra_nav_links': {
|
||||||
#html_show_copyright = True
|
'GitHub Repo': 'http://github.com/jupyterhub/jupyterhub',
|
||||||
#html_use_opensearch = ''
|
'Issue Tracker': 'http://github.com/jupyterhub/jupyterhub/issues',
|
||||||
#html_file_suffix = None
|
},
|
||||||
#html_search_language = 'en'
|
}
|
||||||
#html_search_options = {'type': 'default'}
|
|
||||||
#html_search_scorer = 'scorer.js'
|
html_sidebars = {
|
||||||
|
'**': [
|
||||||
|
'about.html',
|
||||||
|
'searchbox.html',
|
||||||
|
'navigation.html',
|
||||||
|
'relations.html',
|
||||||
|
'sourcelink.html',
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
htmlhelp_basename = 'JupyterHubdoc'
|
htmlhelp_basename = 'JupyterHubdoc'
|
||||||
|
|
||||||
# -- Options for LaTeX output ---------------------------------------------
|
# -- Options for LaTeX output ---------------------------------------------
|
||||||
|
|
||||||
latex_elements = {
|
latex_elements = {
|
||||||
#'papersize': 'letterpaper',
|
# 'papersize': 'letterpaper',
|
||||||
#'pointsize': '10pt',
|
# 'pointsize': '10pt',
|
||||||
#'preamble': '',
|
# 'preamble': '',
|
||||||
#'figure_align': 'htbp',
|
# 'figure_align': 'htbp',
|
||||||
}
|
}
|
||||||
|
|
||||||
# Grouping the document tree into LaTeX files. List of tuples
|
# Grouping the document tree into LaTeX files. List of tuples
|
||||||
# (source start file, target name, title,
|
# (source start file, target name, title,
|
||||||
# author, documentclass [howto, manual, or own class]).
|
# author, documentclass [howto, manual, or own class]).
|
||||||
latex_documents = [
|
latex_documents = [
|
||||||
(master_doc, 'JupyterHub.tex', u'JupyterHub Documentation',
|
(
|
||||||
u'Project Jupyter team', 'manual'),
|
master_doc,
|
||||||
|
'JupyterHub.tex',
|
||||||
|
u'JupyterHub Documentation',
|
||||||
|
u'Project Jupyter team',
|
||||||
|
'manual',
|
||||||
|
)
|
||||||
]
|
]
|
||||||
|
|
||||||
#latex_logo = None
|
# latex_logo = None
|
||||||
#latex_use_parts = False
|
# latex_use_parts = False
|
||||||
#latex_show_pagerefs = False
|
# latex_show_pagerefs = False
|
||||||
#latex_show_urls = False
|
# latex_show_urls = False
|
||||||
#latex_appendices = []
|
# latex_appendices = []
|
||||||
#latex_domain_indices = True
|
# latex_domain_indices = True
|
||||||
|
|
||||||
|
|
||||||
# -- manual page output -------------------------------------------------
|
# -- manual page output -------------------------------------------------
|
||||||
|
|
||||||
# One entry per manual page. List of tuples
|
# One entry per manual page. List of tuples
|
||||||
# (source start file, name, description, authors, manual section).
|
# (source start file, name, description, authors, manual section).
|
||||||
man_pages = [
|
man_pages = [(master_doc, 'jupyterhub', u'JupyterHub Documentation', [author], 1)]
|
||||||
(master_doc, 'jupyterhub', u'JupyterHub Documentation',
|
|
||||||
[author], 1)
|
|
||||||
]
|
|
||||||
|
|
||||||
#man_show_urls = False
|
# man_show_urls = False
|
||||||
|
|
||||||
|
|
||||||
# -- Texinfo output -----------------------------------------------------
|
# -- Texinfo output -----------------------------------------------------
|
||||||
@@ -140,15 +146,21 @@ man_pages = [
|
|||||||
# (source start file, target name, title, author,
|
# (source start file, target name, title, author,
|
||||||
# dir menu entry, description, category)
|
# dir menu entry, description, category)
|
||||||
texinfo_documents = [
|
texinfo_documents = [
|
||||||
(master_doc, 'JupyterHub', u'JupyterHub Documentation',
|
(
|
||||||
author, 'JupyterHub', 'One line description of project.',
|
master_doc,
|
||||||
'Miscellaneous'),
|
'JupyterHub',
|
||||||
|
u'JupyterHub Documentation',
|
||||||
|
author,
|
||||||
|
'JupyterHub',
|
||||||
|
'One line description of project.',
|
||||||
|
'Miscellaneous',
|
||||||
|
)
|
||||||
]
|
]
|
||||||
|
|
||||||
#texinfo_appendices = []
|
# texinfo_appendices = []
|
||||||
#texinfo_domain_indices = True
|
# texinfo_domain_indices = True
|
||||||
#texinfo_show_urls = 'footnote'
|
# texinfo_show_urls = 'footnote'
|
||||||
#texinfo_no_detailmenu = False
|
# texinfo_no_detailmenu = False
|
||||||
|
|
||||||
|
|
||||||
# -- Epub output --------------------------------------------------------
|
# -- Epub output --------------------------------------------------------
|
||||||
@@ -170,13 +182,12 @@ intersphinx_mapping = {'https://docs.python.org/3/': None}
|
|||||||
|
|
||||||
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
|
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
|
||||||
if not on_rtd:
|
if not on_rtd:
|
||||||
import jupyter_alabaster_theme
|
html_theme = 'alabaster'
|
||||||
html_theme = 'jupyter_alabaster_theme'
|
|
||||||
html_theme_path = [jupyter_alabaster_theme.get_path()]
|
|
||||||
else:
|
else:
|
||||||
# readthedocs.org uses their theme by default, so no need to specify it
|
# readthedocs.org uses their theme by default, so no need to specify it
|
||||||
# build rest-api, since RTD doesn't run make
|
# build rest-api, since RTD doesn't run make
|
||||||
from subprocess import check_call as sh
|
from subprocess import check_call as sh
|
||||||
|
|
||||||
sh(['make', 'rest-api'], cwd=docs)
|
sh(['make', 'rest-api'], cwd=docs)
|
||||||
|
|
||||||
# -- Spell checking -------------------------------------------------------
|
# -- Spell checking -------------------------------------------------------
|
||||||
@@ -188,4 +199,4 @@ except ImportError:
|
|||||||
else:
|
else:
|
||||||
extensions.append("sphinxcontrib.spelling")
|
extensions.append("sphinxcontrib.spelling")
|
||||||
|
|
||||||
spelling_word_list_filename='spelling_wordlist.txt'
|
spelling_word_list_filename = 'spelling_wordlist.txt'
|
||||||
|
@@ -3,38 +3,65 @@
|
|||||||
Project Jupyter thanks the following people for their help and
|
Project Jupyter thanks the following people for their help and
|
||||||
contribution on JupyterHub:
|
contribution on JupyterHub:
|
||||||
|
|
||||||
|
- adelcast
|
||||||
- Analect
|
- Analect
|
||||||
- anderbubble
|
- anderbubble
|
||||||
|
- anikitml
|
||||||
|
- ankitksharma
|
||||||
- apetresc
|
- apetresc
|
||||||
|
- athornton
|
||||||
- barrachri
|
- barrachri
|
||||||
|
- BerserkerTroll
|
||||||
- betatim
|
- betatim
|
||||||
- Carreau
|
- Carreau
|
||||||
|
- cfournie
|
||||||
- charnpreetsingh
|
- charnpreetsingh
|
||||||
|
- chicovenancio
|
||||||
|
- cikao
|
||||||
- ckald
|
- ckald
|
||||||
|
- cmoscardi
|
||||||
|
- consideRatio
|
||||||
|
- cqzlxl
|
||||||
- CRegenschein
|
- CRegenschein
|
||||||
- cwaldbieser
|
- cwaldbieser
|
||||||
- danielballen
|
- danielballen
|
||||||
- danoventa
|
- danoventa
|
||||||
- daradib
|
- daradib
|
||||||
|
- darky2004
|
||||||
- datapolitan
|
- datapolitan
|
||||||
- dblockow-d2dcrc
|
- dblockow-d2dcrc
|
||||||
- DeepHorizons
|
- DeepHorizons
|
||||||
|
- DerekHeldtWerle
|
||||||
- dhirschfeld
|
- dhirschfeld
|
||||||
- dietmarw
|
- dietmarw
|
||||||
|
- dingc3
|
||||||
- dmartzol
|
- dmartzol
|
||||||
- DominicFollettSmith
|
- DominicFollettSmith
|
||||||
- dsblank
|
- dsblank
|
||||||
|
- dtaniwaki
|
||||||
|
- echarles
|
||||||
- ellisonbg
|
- ellisonbg
|
||||||
|
- emmanuel
|
||||||
- evanlinde
|
- evanlinde
|
||||||
- Fokko
|
- Fokko
|
||||||
- fperez
|
- fperez
|
||||||
|
- franga2000
|
||||||
|
- GladysNalvarte
|
||||||
|
- glenak1911
|
||||||
|
- gweis
|
||||||
- iamed18
|
- iamed18
|
||||||
|
- jamescurtin
|
||||||
- JamiesHQ
|
- JamiesHQ
|
||||||
|
- JasonJWilliamsNY
|
||||||
- jbweston
|
- jbweston
|
||||||
- jdavidheiser
|
- jdavidheiser
|
||||||
- jencabral
|
- jencabral
|
||||||
- jhamrick
|
- jhamrick
|
||||||
|
- jkinkead
|
||||||
|
- johnkpark
|
||||||
- josephtate
|
- josephtate
|
||||||
|
- jzf2101
|
||||||
|
- karfai
|
||||||
- kinuax
|
- kinuax
|
||||||
- KrishnaPG
|
- KrishnaPG
|
||||||
- kroq-gar78
|
- kroq-gar78
|
||||||
@@ -44,27 +71,44 @@ contribution on JupyterHub:
|
|||||||
- minrk
|
- minrk
|
||||||
- mistercrunch
|
- mistercrunch
|
||||||
- Mistobaan
|
- Mistobaan
|
||||||
|
- mpacer
|
||||||
- mwmarkland
|
- mwmarkland
|
||||||
|
- ndly
|
||||||
- nthiery
|
- nthiery
|
||||||
|
- nxg
|
||||||
- ObiWahn
|
- ObiWahn
|
||||||
- ozancaglayan
|
- ozancaglayan
|
||||||
|
- paccorsi
|
||||||
- parente
|
- parente
|
||||||
- PeterDaveHello
|
- PeterDaveHello
|
||||||
- peterruppel
|
- peterruppel
|
||||||
|
- phill84
|
||||||
- pjamason
|
- pjamason
|
||||||
- prasadkatti
|
- prasadkatti
|
||||||
- rafael-ladislau
|
- rafael-ladislau
|
||||||
|
- rcthomas
|
||||||
- rgbkrk
|
- rgbkrk
|
||||||
|
- rkdarst
|
||||||
- robnagler
|
- robnagler
|
||||||
|
- rschroll
|
||||||
- ryanlovett
|
- ryanlovett
|
||||||
|
- sangramga
|
||||||
- Scrypy
|
- Scrypy
|
||||||
|
- schon
|
||||||
- shreddd
|
- shreddd
|
||||||
|
- Siecje
|
||||||
|
- smiller5678
|
||||||
- spoorthyv
|
- spoorthyv
|
||||||
- ssanderson
|
- ssanderson
|
||||||
|
- summerswallow
|
||||||
|
- syutbai
|
||||||
- takluyver
|
- takluyver
|
||||||
- temogen
|
- temogen
|
||||||
- ThomasMChen
|
- ThomasMChen
|
||||||
|
- Thoralf Gutierrez
|
||||||
|
- timfreund
|
||||||
- TimShawver
|
- TimShawver
|
||||||
|
- tklever
|
||||||
- Todd-Z-Li
|
- Todd-Z-Li
|
||||||
- toobaz
|
- toobaz
|
||||||
- tsaeger
|
- tsaeger
|
||||||
|
@@ -151,9 +151,9 @@ easy to do with RStudio too.
|
|||||||
- https://getcarina.com/blog/learning-how-to-whale/
|
- https://getcarina.com/blog/learning-how-to-whale/
|
||||||
- http://carolynvanslyck.com/talk/carina/jupyterhub/#/
|
- http://carolynvanslyck.com/talk/carina/jupyterhub/#/
|
||||||
|
|
||||||
### Red Hat
|
### jcloud.io
|
||||||
|
- Open to public JupyterHub server
|
||||||
|
- https://jcloud.io
|
||||||
## Miscellaneous
|
## Miscellaneous
|
||||||
|
|
||||||
- https://medium.com/@ybarraud/setting-up-jupyterhub-with-sudospawner-and-anaconda-844628c0dbee#.rm3yt87e1
|
- https://medium.com/@ybarraud/setting-up-jupyterhub-with-sudospawner-and-anaconda-844628c0dbee#.rm3yt87e1
|
||||||
|
@@ -35,6 +35,10 @@ Configuring only the main IP and port of JupyterHub should be sufficient for
|
|||||||
most deployments of JupyterHub. However, more customized scenarios may need
|
most deployments of JupyterHub. However, more customized scenarios may need
|
||||||
additional networking details to be configured.
|
additional networking details to be configured.
|
||||||
|
|
||||||
|
Note that `c.JupyterHub.ip` and `c.JupyterHub.port` are single values,
|
||||||
|
not tuples or lists – JupyterHub listens to only a single IP address and
|
||||||
|
port.
|
||||||
|
|
||||||
## Set the Proxy's REST API communication URL (optional)
|
## Set the Proxy's REST API communication URL (optional)
|
||||||
|
|
||||||
By default, this REST API listens on port 8081 of `localhost` only.
|
By default, this REST API listens on port 8081 of `localhost` only.
|
||||||
@@ -86,3 +90,12 @@ configuration for, e.g. docker, is:
|
|||||||
c.JupyterHub.hub_ip = '0.0.0.0' # listen on all interfaces
|
c.JupyterHub.hub_ip = '0.0.0.0' # listen on all interfaces
|
||||||
c.JupyterHub.hub_connect_ip = '10.0.1.4' # ip as seen on the docker network. Can also be a hostname.
|
c.JupyterHub.hub_connect_ip = '10.0.1.4' # ip as seen on the docker network. Can also be a hostname.
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Adjusting the hub's URL
|
||||||
|
|
||||||
|
The hub will most commonly be running on a hostname of its own. If it
|
||||||
|
is not – for example, if the hub is being reverse-proxied and being
|
||||||
|
exposed at a URL such as `https://proxy.example.org/jupyter/` – then
|
||||||
|
you will need to tell JupyterHub the base URL of the service. In such
|
||||||
|
a case, it is both necessary and sufficient to set
|
||||||
|
`c.JupyterHub.base_url = '/jupyter/'` in the configuration.
|
||||||
|
@@ -45,7 +45,7 @@ is important that these files be put in a secure location on your server, where
|
|||||||
they are not readable by regular users.
|
they are not readable by regular users.
|
||||||
|
|
||||||
If you are using a **chain certificate**, see also chained certificate for SSL
|
If you are using a **chain certificate**, see also chained certificate for SSL
|
||||||
in the JupyterHub `troubleshooting FAQ <troubleshooting>`_.
|
in the JupyterHub `Troubleshooting FAQ <../troubleshooting.html>`_.
|
||||||
|
|
||||||
Using letsencrypt
|
Using letsencrypt
|
||||||
~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~
|
||||||
@@ -72,8 +72,13 @@ would be the needed configuration:
|
|||||||
If SSL termination happens outside of the Hub
|
If SSL termination happens outside of the Hub
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
In certain cases, e.g. behind `SSL termination in NGINX <https://www.nginx.com/resources/admin-guide/nginx-ssl-termination/>`_,
|
In certain cases, for example if the hub is running behind a reverse proxy, and
|
||||||
allowing no SSL running on the hub may be the desired configuration option.
|
`SSL termination is being provided by NGINX <https://www.nginx.com/resources/admin-guide/nginx-ssl-termination/>`_,
|
||||||
|
it is reasonable to run the hub without SSL.
|
||||||
|
|
||||||
|
To achieve this, simply omit the configuration settings
|
||||||
|
``c.JupyterHub.ssl_key`` and ``c.JupyterHub.ssl_cert``
|
||||||
|
(setting them to ``None`` does not have the same effect, and is an error).
|
||||||
|
|
||||||
.. _cookie-secret:
|
.. _cookie-secret:
|
||||||
|
|
||||||
|
@@ -88,7 +88,7 @@ c.JupyterHub.services = [
|
|||||||
{
|
{
|
||||||
'name': 'cull-idle',
|
'name': 'cull-idle',
|
||||||
'admin': True,
|
'admin': True,
|
||||||
'command': 'python cull_idle_servers.py --timeout=3600'.split(),
|
'command': 'python3 cull_idle_servers.py --timeout=3600'.split(),
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
```
|
```
|
||||||
@@ -115,7 +115,7 @@ variable. Run `cull_idle_servers.py` manually.
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
export JUPYTERHUB_API_TOKEN='token'
|
export JUPYTERHUB_API_TOKEN='token'
|
||||||
python cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
|
python3 cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
|
||||||
```
|
```
|
||||||
|
|
||||||
[cull_idle_servers]: https://github.com/jupyterhub/jupyterhub/blob/master/examples/cull-idle/cull_idle_servers.py
|
[cull_idle_servers]: https://github.com/jupyterhub/jupyterhub/blob/master/examples/cull-idle/cull_idle_servers.py
|
||||||
|
@@ -19,7 +19,7 @@ Three subsystems make up JupyterHub:
|
|||||||
|
|
||||||
JupyterHub performs the following functions:
|
JupyterHub performs the following functions:
|
||||||
|
|
||||||
- The Hub spawns a proxy
|
- The Hub launches a proxy
|
||||||
- The proxy forwards all requests to the Hub by default
|
- The proxy forwards all requests to the Hub by default
|
||||||
- The Hub handles user login and spawns single-user servers on demand
|
- The Hub handles user login and spawns single-user servers on demand
|
||||||
- The Hub configures the proxy to forward URL prefixes to the single-user
|
- The Hub configures the proxy to forward URL prefixes to the single-user
|
||||||
@@ -58,7 +58,12 @@ Contents
|
|||||||
* :doc:`reference/services`
|
* :doc:`reference/services`
|
||||||
* :doc:`reference/rest`
|
* :doc:`reference/rest`
|
||||||
* :doc:`reference/upgrading`
|
* :doc:`reference/upgrading`
|
||||||
|
* :doc:`reference/templates`
|
||||||
|
* :doc:`reference/config-user-env`
|
||||||
* :doc:`reference/config-examples`
|
* :doc:`reference/config-examples`
|
||||||
|
* :doc:`reference/config-ghoauth`
|
||||||
|
* :doc:`reference/config-proxy`
|
||||||
|
* :doc:`reference/config-sudo`
|
||||||
|
|
||||||
**API Reference**
|
**API Reference**
|
||||||
|
|
||||||
|
@@ -37,7 +37,7 @@ If you want to run docker on a computer that has a public IP then you should
|
|||||||
(as in MUST) **secure it with ssl** by adding ssl options to your docker
|
(as in MUST) **secure it with ssl** by adding ssl options to your docker
|
||||||
configuration or using a ssl enabled proxy.
|
configuration or using a ssl enabled proxy.
|
||||||
|
|
||||||
`Mounting volumes <https://docs.docker.com/engine/userguide/containers/dockervolumes/>`_
|
`Mounting volumes <https://docs.docker.com/engine/admin/volumes/volumes/>`_
|
||||||
will allow you to store data outside the docker image (host system) so it will
|
will allow you to store data outside the docker image (host system) so it will
|
||||||
be persistent, even when you start a new image.
|
be persistent, even when you start a new image.
|
||||||
|
|
||||||
|
@@ -5,20 +5,27 @@
|
|||||||
Before installing JupyterHub, you will need:
|
Before installing JupyterHub, you will need:
|
||||||
|
|
||||||
- a Linux/Unix based system
|
- a Linux/Unix based system
|
||||||
- [Python](https://www.python.org/downloads/) 3.4 or greater. An understanding
|
- [Python](https://www.python.org/downloads/) 3.5 or greater. An understanding
|
||||||
of using [`pip`](https://pip.pypa.io/en/stable/) or
|
of using [`pip`](https://pip.pypa.io/en/stable/) or
|
||||||
[`conda`](https://conda.io/docs/get-started.html) for
|
[`conda`](https://conda.io/docs/get-started.html) for
|
||||||
installing Python packages is helpful.
|
installing Python packages is helpful.
|
||||||
- [nodejs/npm](https://www.npmjs.com/). [Install nodejs/npm](https://docs.npmjs.com/getting-started/installing-node),
|
- [nodejs/npm](https://www.npmjs.com/). [Install nodejs/npm](https://docs.npmjs.com/getting-started/installing-node),
|
||||||
using your operating system's package manager. For example, install on Linux
|
using your operating system's package manager.
|
||||||
Debian/Ubuntu using:
|
|
||||||
|
|
||||||
```bash
|
* If you are using **`conda`**, the nodejs and npm dependencies will be installed for
|
||||||
sudo apt-get install npm nodejs-legacy
|
you by conda.
|
||||||
```
|
|
||||||
|
* If you are using **`pip`**, install a recent version of
|
||||||
|
[nodejs/npm](https://docs.npmjs.com/getting-started/installing-node).
|
||||||
|
For example, install it on Linux (Debian/Ubuntu) using:
|
||||||
|
|
||||||
|
```
|
||||||
|
sudo apt-get install npm nodejs-legacy
|
||||||
|
```
|
||||||
|
|
||||||
|
The `nodejs-legacy` package installs the `node` executable and is currently
|
||||||
|
required for npm to work on Debian/Ubuntu.
|
||||||
|
|
||||||
The `nodejs-legacy` package installs the `node` executable and is currently
|
|
||||||
required for `npm` to work on Debian/Ubuntu.
|
|
||||||
- TLS certificate and key for HTTPS communication
|
- TLS certificate and key for HTTPS communication
|
||||||
- Domain name
|
- Domain name
|
||||||
|
|
||||||
|
@@ -38,6 +38,8 @@ with any provider, is also available.
|
|||||||
|
|
||||||
- ldapauthenticator for LDAP
|
- ldapauthenticator for LDAP
|
||||||
- tmpauthenticator for temporary accounts
|
- tmpauthenticator for temporary accounts
|
||||||
|
- For Shibboleth, [jhub_shibboleth_auth](https://github.com/gesiscss/jhub_shibboleth_auth)
|
||||||
|
and [jhub_remote_user_authenticator](https://github.com/cwaldbieser/jhub_remote_user_authenticator)
|
||||||
|
|
||||||
## Technical Overview of Authentication
|
## Technical Overview of Authentication
|
||||||
|
|
||||||
@@ -145,7 +147,7 @@ If such state should be persisted, `.authenticate()` should return a dictionary
|
|||||||
|
|
||||||
```python
|
```python
|
||||||
{
|
{
|
||||||
'username': 'name',
|
'name': username,
|
||||||
'auth_state': {
|
'auth_state': {
|
||||||
'key': 'value',
|
'key': 'value',
|
||||||
}
|
}
|
||||||
@@ -206,7 +208,13 @@ class MyAuthenticator(Authenticator):
|
|||||||
spawner.environment['UPSTREAM_TOKEN'] = auth_state['upstream_token']
|
spawner.environment['UPSTREAM_TOKEN'] = auth_state['upstream_token']
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## pre_spawn_start and post_spawn_stop hooks
|
||||||
|
|
||||||
|
Authenticators uses two hooks, [pre_spawn_start(user, spawner)][] and
|
||||||
|
[post_spawn_stop(user, spawner)][] to add pass additional state information
|
||||||
|
between the authenticator and a spawner. These hooks are typically used auth-related
|
||||||
|
startup, i.e. opening a PAM session, and auth-related cleanup, i.e. closing a
|
||||||
|
PAM session.
|
||||||
|
|
||||||
## JupyterHub as an OAuth provider
|
## JupyterHub as an OAuth provider
|
||||||
|
|
||||||
@@ -218,5 +226,5 @@ Beginning with version 0.8, JupyterHub is an OAuth provider.
|
|||||||
[OAuth]: https://en.wikipedia.org/wiki/OAuth
|
[OAuth]: https://en.wikipedia.org/wiki/OAuth
|
||||||
[GitHub OAuth]: https://developer.github.com/v3/oauth/
|
[GitHub OAuth]: https://developer.github.com/v3/oauth/
|
||||||
[OAuthenticator]: https://github.com/jupyterhub/oauthenticator
|
[OAuthenticator]: https://github.com/jupyterhub/oauthenticator
|
||||||
[pre_spawn_start(user, spawner)]: http://jupyterhub.readthedocs.io/en/latest/api/auth.html#jupyterhub.auth.Authenticator.pre_spawn_start
|
[pre_spawn_start(user, spawner)]: https://jupyterhub.readthedocs.io/en/latest/api/auth.html#jupyterhub.auth.Authenticator.pre_spawn_start
|
||||||
[post_spawn_stop(user, spawner)]: http://jupyterhub.readthedocs.io/en/latest/api/auth.html#jupyterhub.auth.Authenticator.post_spawn_stop
|
[post_spawn_stop(user, spawner)]: https://jupyterhub.readthedocs.io/en/latest/api/auth.html#jupyterhub.auth.Authenticator.post_spawn_stop
|
||||||
|
@@ -1,272 +1,8 @@
|
|||||||
# Configuration examples
|
# Configuration examples
|
||||||
|
|
||||||
This section provides examples, including configuration files and tips, for the
|
The following sections provide examples, including configuration files and tips, for the
|
||||||
following configurations:
|
following:
|
||||||
|
|
||||||
- Using GitHub OAuth
|
- Configuring GitHub OAuth
|
||||||
- Using nginx reverse proxy
|
- Using reverse proxy (nginx and Apache)
|
||||||
|
- Run JupyterHub without root privileges using `sudo`
|
||||||
## Using GitHub OAuth
|
|
||||||
|
|
||||||
In this example, we show a configuration file for a fairly standard JupyterHub
|
|
||||||
deployment with the following assumptions:
|
|
||||||
|
|
||||||
* Running JupyterHub on a single cloud server
|
|
||||||
* Using SSL on the standard HTTPS port 443
|
|
||||||
* Using GitHub OAuth (using oauthenticator) for login
|
|
||||||
* Users exist locally on the server
|
|
||||||
* Users' notebooks to be served from `~/assignments` to allow users to browse
|
|
||||||
for notebooks within other users' home directories
|
|
||||||
* You want the landing page for each user to be a `Welcome.ipynb` notebook in
|
|
||||||
their assignments directory.
|
|
||||||
* All runtime files are put into `/srv/jupyterhub` and log files in `/var/log`.
|
|
||||||
|
|
||||||
The `jupyterhub_config.py` file would have these settings:
|
|
||||||
|
|
||||||
```python
|
|
||||||
# jupyterhub_config.py file
|
|
||||||
c = get_config()
|
|
||||||
|
|
||||||
import os
|
|
||||||
pjoin = os.path.join
|
|
||||||
|
|
||||||
runtime_dir = os.path.join('/srv/jupyterhub')
|
|
||||||
ssl_dir = pjoin(runtime_dir, 'ssl')
|
|
||||||
if not os.path.exists(ssl_dir):
|
|
||||||
os.makedirs(ssl_dir)
|
|
||||||
|
|
||||||
# Allows multiple single-server per user
|
|
||||||
c.JupyterHub.allow_named_servers = True
|
|
||||||
|
|
||||||
# https on :443
|
|
||||||
c.JupyterHub.port = 443
|
|
||||||
c.JupyterHub.ssl_key = pjoin(ssl_dir, 'ssl.key')
|
|
||||||
c.JupyterHub.ssl_cert = pjoin(ssl_dir, 'ssl.cert')
|
|
||||||
|
|
||||||
# put the JupyterHub cookie secret and state db
|
|
||||||
# in /var/run/jupyterhub
|
|
||||||
c.JupyterHub.cookie_secret_file = pjoin(runtime_dir, 'cookie_secret')
|
|
||||||
c.JupyterHub.db_url = pjoin(runtime_dir, 'jupyterhub.sqlite')
|
|
||||||
# or `--db=/path/to/jupyterhub.sqlite` on the command-line
|
|
||||||
|
|
||||||
# use GitHub OAuthenticator for local users
|
|
||||||
c.JupyterHub.authenticator_class = 'oauthenticator.LocalGitHubOAuthenticator'
|
|
||||||
c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL']
|
|
||||||
|
|
||||||
# create system users that don't exist yet
|
|
||||||
c.LocalAuthenticator.create_system_users = True
|
|
||||||
|
|
||||||
# specify users and admin
|
|
||||||
c.Authenticator.whitelist = {'rgbkrk', 'minrk', 'jhamrick'}
|
|
||||||
c.Authenticator.admin_users = {'jhamrick', 'rgbkrk'}
|
|
||||||
|
|
||||||
# start single-user notebook servers in ~/assignments,
|
|
||||||
# with ~/assignments/Welcome.ipynb as the default landing page
|
|
||||||
# this config could also be put in
|
|
||||||
# /etc/jupyter/jupyter_notebook_config.py
|
|
||||||
c.Spawner.notebook_dir = '~/assignments'
|
|
||||||
c.Spawner.args = ['--NotebookApp.default_url=/notebooks/Welcome.ipynb']
|
|
||||||
```
|
|
||||||
|
|
||||||
Using the GitHub Authenticator requires a few additional
|
|
||||||
environment variable to be set prior to launching JupyterHub:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
export GITHUB_CLIENT_ID=github_id
|
|
||||||
export GITHUB_CLIENT_SECRET=github_secret
|
|
||||||
export OAUTH_CALLBACK_URL=https://example.com/hub/oauth_callback
|
|
||||||
export CONFIGPROXY_AUTH_TOKEN=super-secret
|
|
||||||
# append log output to log file /var/log/jupyterhub.log
|
|
||||||
jupyterhub -f /etc/jupyterhub/jupyterhub_config.py &>> /var/log/jupyterhub.log
|
|
||||||
```
|
|
||||||
|
|
||||||
## Using a reverse proxy
|
|
||||||
|
|
||||||
In the following example, we show configuration files for a JupyterHub server
|
|
||||||
running locally on port `8000` but accessible from the outside on the standard
|
|
||||||
SSL port `443`. This could be useful if the JupyterHub server machine is also
|
|
||||||
hosting other domains or content on `443`. The goal in this example is to
|
|
||||||
satisfy the following:
|
|
||||||
|
|
||||||
* JupyterHub is running on a server, accessed *only* via `HUB.DOMAIN.TLD:443`
|
|
||||||
* On the same machine, `NO_HUB.DOMAIN.TLD` strictly serves different content,
|
|
||||||
also on port `443`
|
|
||||||
* `nginx` or `apache` is used as the public access point (which means that
|
|
||||||
only nginx/apache will bind to `443`)
|
|
||||||
* After testing, the server in question should be able to score at least an A on the
|
|
||||||
Qualys SSL Labs [SSL Server Test](https://www.ssllabs.com/ssltest/)
|
|
||||||
|
|
||||||
Let's start out with needed JupyterHub configuration in `jupyterhub_config.py`:
|
|
||||||
|
|
||||||
```python
|
|
||||||
# Force the proxy to only listen to connections to 127.0.0.1
|
|
||||||
c.JupyterHub.ip = '127.0.0.1'
|
|
||||||
```
|
|
||||||
|
|
||||||
For high-quality SSL configuration, we also generate Diffie-Helman parameters.
|
|
||||||
This can take a few minutes:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
openssl dhparam -out /etc/ssl/certs/dhparam.pem 4096
|
|
||||||
```
|
|
||||||
|
|
||||||
### nginx
|
|
||||||
|
|
||||||
The **`nginx` server config file** is fairly standard fare except for the two
|
|
||||||
`location` blocks within the `HUB.DOMAIN.TLD` config file:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# top-level http config for websocket headers
|
|
||||||
# If Upgrade is defined, Connection = upgrade
|
|
||||||
# If Upgrade is empty, Connection = close
|
|
||||||
map $http_upgrade $connection_upgrade {
|
|
||||||
default upgrade;
|
|
||||||
'' close;
|
|
||||||
}
|
|
||||||
|
|
||||||
# HTTP server to redirect all 80 traffic to SSL/HTTPS
|
|
||||||
server {
|
|
||||||
listen 80;
|
|
||||||
server_name HUB.DOMAIN.TLD;
|
|
||||||
|
|
||||||
# Tell all requests to port 80 to be 302 redirected to HTTPS
|
|
||||||
return 302 https://$host$request_uri;
|
|
||||||
}
|
|
||||||
|
|
||||||
# HTTPS server to handle JupyterHub
|
|
||||||
server {
|
|
||||||
listen 443;
|
|
||||||
ssl on;
|
|
||||||
|
|
||||||
server_name HUB.DOMAIN.TLD;
|
|
||||||
|
|
||||||
ssl_certificate /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem;
|
|
||||||
ssl_certificate_key /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem;
|
|
||||||
|
|
||||||
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
|
|
||||||
ssl_prefer_server_ciphers on;
|
|
||||||
ssl_dhparam /etc/ssl/certs/dhparam.pem;
|
|
||||||
ssl_ciphers 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA';
|
|
||||||
ssl_session_timeout 1d;
|
|
||||||
ssl_session_cache shared:SSL:50m;
|
|
||||||
ssl_stapling on;
|
|
||||||
ssl_stapling_verify on;
|
|
||||||
add_header Strict-Transport-Security max-age=15768000;
|
|
||||||
|
|
||||||
# Managing literal requests to the JupyterHub front end
|
|
||||||
location / {
|
|
||||||
proxy_pass http://127.0.0.1:8000;
|
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
|
||||||
proxy_set_header Host $host;
|
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
|
||||||
|
|
||||||
# websocket headers
|
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
|
||||||
proxy_set_header Connection $connection_upgrade;
|
|
||||||
}
|
|
||||||
|
|
||||||
# Managing requests to verify letsencrypt host
|
|
||||||
location ~ /.well-known {
|
|
||||||
allow all;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
If `nginx` is not running on port 443, substitute `$http_host` for `$host` on
|
|
||||||
the lines setting the `Host` header.
|
|
||||||
|
|
||||||
`nginx` will now be the front facing element of JupyterHub on `443` which means
|
|
||||||
it is also free to bind other servers, like `NO_HUB.DOMAIN.TLD` to the same port
|
|
||||||
on the same machine and network interface. In fact, one can simply use the same
|
|
||||||
server blocks as above for `NO_HUB` and simply add line for the root directory
|
|
||||||
of the site as well as the applicable location call:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
server {
|
|
||||||
listen 80;
|
|
||||||
server_name NO_HUB.DOMAIN.TLD;
|
|
||||||
|
|
||||||
# Tell all requests to port 80 to be 302 redirected to HTTPS
|
|
||||||
return 302 https://$host$request_uri;
|
|
||||||
}
|
|
||||||
|
|
||||||
server {
|
|
||||||
listen 443;
|
|
||||||
ssl on;
|
|
||||||
|
|
||||||
# INSERT OTHER SSL PARAMETERS HERE AS ABOVE
|
|
||||||
# SSL cert may differ
|
|
||||||
|
|
||||||
# Set the appropriate root directory
|
|
||||||
root /var/www/html
|
|
||||||
|
|
||||||
# Set URI handling
|
|
||||||
location / {
|
|
||||||
try_files $uri $uri/ =404;
|
|
||||||
}
|
|
||||||
|
|
||||||
# Managing requests to verify letsencrypt host
|
|
||||||
location ~ /.well-known {
|
|
||||||
allow all;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Now restart `nginx`, restart the JupyterHub, and enjoy accessing
|
|
||||||
`https://HUB.DOMAIN.TLD` while serving other content securely on
|
|
||||||
`https://NO_HUB.DOMAIN.TLD`.
|
|
||||||
|
|
||||||
|
|
||||||
### Apache
|
|
||||||
|
|
||||||
As with nginx above, you can use [Apache](https://httpd.apache.org) as the reverse proxy.
|
|
||||||
First, we will need to enable the apache modules that we are going to need:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
a2enmod ssl rewrite proxy proxy_http proxy_wstunnel
|
|
||||||
```
|
|
||||||
|
|
||||||
Our Apache configuration is equivalent to the nginx configuration above:
|
|
||||||
|
|
||||||
- Redirect HTTP to HTTPS
|
|
||||||
- Good SSL Configuration
|
|
||||||
- Support for websockets on any proxied URL
|
|
||||||
- JupyterHub is running locally at http://127.0.0.1:8000
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# redirect HTTP to HTTPS
|
|
||||||
Listen 80
|
|
||||||
<VirtualHost HUB.DOMAIN.TLD:80>
|
|
||||||
ServerName HUB.DOMAIN.TLD
|
|
||||||
Redirect / https://HUB.DOMAIN.TLD/
|
|
||||||
</VirtualHost>
|
|
||||||
|
|
||||||
Listen 443
|
|
||||||
<VirtualHost HUB.DOMAIN.TLD:443>
|
|
||||||
|
|
||||||
ServerName HUB.DOMAIN.TLD
|
|
||||||
|
|
||||||
# configure SSL
|
|
||||||
SSLEngine on
|
|
||||||
SSLCertificateFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem
|
|
||||||
SSLCertificateKeyFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem
|
|
||||||
SSLProtocol All -SSLv2 -SSLv3
|
|
||||||
SSLOpenSSLConfCmd DHParameters /etc/ssl/certs/dhparam.pem
|
|
||||||
SSLCipherSuite EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH
|
|
||||||
|
|
||||||
# Use RewriteEngine to handle websocket connection upgrades
|
|
||||||
RewriteEngine On
|
|
||||||
RewriteCond %{HTTP:Connection} Upgrade [NC]
|
|
||||||
RewriteCond %{HTTP:Upgrade} websocket [NC]
|
|
||||||
RewriteRule /(.*) ws://127.0.0.1:8000/$1 [P,L]
|
|
||||||
|
|
||||||
<Location "/">
|
|
||||||
# preserve Host header to avoid cross-origin problems
|
|
||||||
ProxyPreserveHost on
|
|
||||||
# proxy to JupyterHub
|
|
||||||
ProxyPass http://127.0.0.1:8000/
|
|
||||||
ProxyPassReverse http://127.0.0.1:8000/
|
|
||||||
</Location>
|
|
||||||
</VirtualHost>
|
|
||||||
```
|
|
||||||
|
82
docs/source/reference/config-ghoauth.md
Normal file
82
docs/source/reference/config-ghoauth.md
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
# Configure GitHub OAuth
|
||||||
|
|
||||||
|
In this example, we show a configuration file for a fairly standard JupyterHub
|
||||||
|
deployment with the following assumptions:
|
||||||
|
|
||||||
|
* Running JupyterHub on a single cloud server
|
||||||
|
* Using SSL on the standard HTTPS port 443
|
||||||
|
* Using GitHub OAuth (using oauthenticator) for login
|
||||||
|
* Using the default spawner (to configure other spawners, uncomment and edit
|
||||||
|
`spawner_class` as well as follow the instructions for your desired spawner)
|
||||||
|
* Users exist locally on the server
|
||||||
|
* Users' notebooks to be served from `~/assignments` to allow users to browse
|
||||||
|
for notebooks within other users' home directories
|
||||||
|
* You want the landing page for each user to be a `Welcome.ipynb` notebook in
|
||||||
|
their assignments directory.
|
||||||
|
* All runtime files are put into `/srv/jupyterhub` and log files in `/var/log`.
|
||||||
|
|
||||||
|
|
||||||
|
The `jupyterhub_config.py` file would have these settings:
|
||||||
|
|
||||||
|
```python
|
||||||
|
# jupyterhub_config.py file
|
||||||
|
c = get_config()
|
||||||
|
|
||||||
|
import os
|
||||||
|
pjoin = os.path.join
|
||||||
|
|
||||||
|
runtime_dir = os.path.join('/srv/jupyterhub')
|
||||||
|
ssl_dir = pjoin(runtime_dir, 'ssl')
|
||||||
|
if not os.path.exists(ssl_dir):
|
||||||
|
os.makedirs(ssl_dir)
|
||||||
|
|
||||||
|
# Allows multiple single-server per user
|
||||||
|
c.JupyterHub.allow_named_servers = True
|
||||||
|
|
||||||
|
# https on :443
|
||||||
|
c.JupyterHub.port = 443
|
||||||
|
c.JupyterHub.ssl_key = pjoin(ssl_dir, 'ssl.key')
|
||||||
|
c.JupyterHub.ssl_cert = pjoin(ssl_dir, 'ssl.cert')
|
||||||
|
|
||||||
|
# put the JupyterHub cookie secret and state db
|
||||||
|
# in /var/run/jupyterhub
|
||||||
|
c.JupyterHub.cookie_secret_file = pjoin(runtime_dir, 'cookie_secret')
|
||||||
|
c.JupyterHub.db_url = pjoin(runtime_dir, 'jupyterhub.sqlite')
|
||||||
|
# or `--db=/path/to/jupyterhub.sqlite` on the command-line
|
||||||
|
|
||||||
|
# use GitHub OAuthenticator for local users
|
||||||
|
c.JupyterHub.authenticator_class = 'oauthenticator.LocalGitHubOAuthenticator'
|
||||||
|
c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL']
|
||||||
|
|
||||||
|
# create system users that don't exist yet
|
||||||
|
c.LocalAuthenticator.create_system_users = True
|
||||||
|
|
||||||
|
# specify users and admin
|
||||||
|
c.Authenticator.whitelist = {'rgbkrk', 'minrk', 'jhamrick'}
|
||||||
|
c.Authenticator.admin_users = {'jhamrick', 'rgbkrk'}
|
||||||
|
|
||||||
|
# uses the default spawner
|
||||||
|
# To use a different spawner, uncomment `spawner_class` and set to desired
|
||||||
|
# spawner (e.g. SudoSpawner). Follow instructions for desired spawner
|
||||||
|
# configuration.
|
||||||
|
# c.JupyterHub.spawner_class = 'sudospawner.SudoSpawner'
|
||||||
|
|
||||||
|
# start single-user notebook servers in ~/assignments,
|
||||||
|
# with ~/assignments/Welcome.ipynb as the default landing page
|
||||||
|
# this config could also be put in
|
||||||
|
# /etc/jupyter/jupyter_notebook_config.py
|
||||||
|
c.Spawner.notebook_dir = '~/assignments'
|
||||||
|
c.Spawner.args = ['--NotebookApp.default_url=/notebooks/Welcome.ipynb']
|
||||||
|
```
|
||||||
|
|
||||||
|
Using the GitHub Authenticator requires a few additional
|
||||||
|
environment variable to be set prior to launching JupyterHub:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export GITHUB_CLIENT_ID=github_id
|
||||||
|
export GITHUB_CLIENT_SECRET=github_secret
|
||||||
|
export OAUTH_CALLBACK_URL=https://example.com/hub/oauth_callback
|
||||||
|
export CONFIGPROXY_AUTH_TOKEN=super-secret
|
||||||
|
# append log output to log file /var/log/jupyterhub.log
|
||||||
|
jupyterhub -f /etc/jupyterhub/jupyterhub_config.py &>> /var/log/jupyterhub.log
|
||||||
|
```
|
192
docs/source/reference/config-proxy.md
Normal file
192
docs/source/reference/config-proxy.md
Normal file
@@ -0,0 +1,192 @@
|
|||||||
|
# Using a reverse proxy
|
||||||
|
|
||||||
|
In the following example, we show configuration files for a JupyterHub server
|
||||||
|
running locally on port `8000` but accessible from the outside on the standard
|
||||||
|
SSL port `443`. This could be useful if the JupyterHub server machine is also
|
||||||
|
hosting other domains or content on `443`. The goal in this example is to
|
||||||
|
satisfy the following:
|
||||||
|
|
||||||
|
* JupyterHub is running on a server, accessed *only* via `HUB.DOMAIN.TLD:443`
|
||||||
|
* On the same machine, `NO_HUB.DOMAIN.TLD` strictly serves different content,
|
||||||
|
also on port `443`
|
||||||
|
* `nginx` or `apache` is used as the public access point (which means that
|
||||||
|
only nginx/apache will bind to `443`)
|
||||||
|
* After testing, the server in question should be able to score at least an A on the
|
||||||
|
Qualys SSL Labs [SSL Server Test](https://www.ssllabs.com/ssltest/)
|
||||||
|
|
||||||
|
Let's start out with needed JupyterHub configuration in `jupyterhub_config.py`:
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Force the proxy to only listen to connections to 127.0.0.1
|
||||||
|
c.JupyterHub.ip = '127.0.0.1'
|
||||||
|
```
|
||||||
|
|
||||||
|
For high-quality SSL configuration, we also generate Diffie-Helman parameters.
|
||||||
|
This can take a few minutes:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
openssl dhparam -out /etc/ssl/certs/dhparam.pem 4096
|
||||||
|
```
|
||||||
|
|
||||||
|
## nginx
|
||||||
|
|
||||||
|
This **`nginx` config file** is fairly standard fare except for the two
|
||||||
|
`location` blocks within the main section for HUB.DOMAIN.tld.
|
||||||
|
To create a new site for jupyterhub in your nginx config, make a new file
|
||||||
|
in `sites.enabled`, e.g. `/etc/nginx/sites.enabled/jupyterhub.conf`:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# top-level http config for websocket headers
|
||||||
|
# If Upgrade is defined, Connection = upgrade
|
||||||
|
# If Upgrade is empty, Connection = close
|
||||||
|
map $http_upgrade $connection_upgrade {
|
||||||
|
default upgrade;
|
||||||
|
'' close;
|
||||||
|
}
|
||||||
|
|
||||||
|
# HTTP server to redirect all 80 traffic to SSL/HTTPS
|
||||||
|
server {
|
||||||
|
listen 80;
|
||||||
|
server_name HUB.DOMAIN.TLD;
|
||||||
|
|
||||||
|
# Tell all requests to port 80 to be 302 redirected to HTTPS
|
||||||
|
return 302 https://$host$request_uri;
|
||||||
|
}
|
||||||
|
|
||||||
|
# HTTPS server to handle JupyterHub
|
||||||
|
server {
|
||||||
|
listen 443;
|
||||||
|
ssl on;
|
||||||
|
|
||||||
|
server_name HUB.DOMAIN.TLD;
|
||||||
|
|
||||||
|
ssl_certificate /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem;
|
||||||
|
ssl_certificate_key /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem;
|
||||||
|
|
||||||
|
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
|
||||||
|
ssl_prefer_server_ciphers on;
|
||||||
|
ssl_dhparam /etc/ssl/certs/dhparam.pem;
|
||||||
|
ssl_ciphers 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA';
|
||||||
|
ssl_session_timeout 1d;
|
||||||
|
ssl_session_cache shared:SSL:50m;
|
||||||
|
ssl_stapling on;
|
||||||
|
ssl_stapling_verify on;
|
||||||
|
add_header Strict-Transport-Security max-age=15768000;
|
||||||
|
|
||||||
|
# Managing literal requests to the JupyterHub front end
|
||||||
|
location / {
|
||||||
|
proxy_pass http://127.0.0.1:8000;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
|
||||||
|
# websocket headers
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection $connection_upgrade;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Managing requests to verify letsencrypt host
|
||||||
|
location ~ /.well-known {
|
||||||
|
allow all;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
If `nginx` is not running on port 443, substitute `$http_host` for `$host` on
|
||||||
|
the lines setting the `Host` header.
|
||||||
|
|
||||||
|
`nginx` will now be the front facing element of JupyterHub on `443` which means
|
||||||
|
it is also free to bind other servers, like `NO_HUB.DOMAIN.TLD` to the same port
|
||||||
|
on the same machine and network interface. In fact, one can simply use the same
|
||||||
|
server blocks as above for `NO_HUB` and simply add line for the root directory
|
||||||
|
of the site as well as the applicable location call:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
server {
|
||||||
|
listen 80;
|
||||||
|
server_name NO_HUB.DOMAIN.TLD;
|
||||||
|
|
||||||
|
# Tell all requests to port 80 to be 302 redirected to HTTPS
|
||||||
|
return 302 https://$host$request_uri;
|
||||||
|
}
|
||||||
|
|
||||||
|
server {
|
||||||
|
listen 443;
|
||||||
|
ssl on;
|
||||||
|
|
||||||
|
# INSERT OTHER SSL PARAMETERS HERE AS ABOVE
|
||||||
|
# SSL cert may differ
|
||||||
|
|
||||||
|
# Set the appropriate root directory
|
||||||
|
root /var/www/html
|
||||||
|
|
||||||
|
# Set URI handling
|
||||||
|
location / {
|
||||||
|
try_files $uri $uri/ =404;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Managing requests to verify letsencrypt host
|
||||||
|
location ~ /.well-known {
|
||||||
|
allow all;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Now restart `nginx`, restart the JupyterHub, and enjoy accessing
|
||||||
|
`https://HUB.DOMAIN.TLD` while serving other content securely on
|
||||||
|
`https://NO_HUB.DOMAIN.TLD`.
|
||||||
|
|
||||||
|
|
||||||
|
## Apache
|
||||||
|
|
||||||
|
As with nginx above, you can use [Apache](https://httpd.apache.org) as the reverse proxy.
|
||||||
|
First, we will need to enable the apache modules that we are going to need:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
a2enmod ssl rewrite proxy proxy_http proxy_wstunnel
|
||||||
|
```
|
||||||
|
|
||||||
|
Our Apache configuration is equivalent to the nginx configuration above:
|
||||||
|
|
||||||
|
- Redirect HTTP to HTTPS
|
||||||
|
- Good SSL Configuration
|
||||||
|
- Support for websockets on any proxied URL
|
||||||
|
- JupyterHub is running locally at http://127.0.0.1:8000
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# redirect HTTP to HTTPS
|
||||||
|
Listen 80
|
||||||
|
<VirtualHost HUB.DOMAIN.TLD:80>
|
||||||
|
ServerName HUB.DOMAIN.TLD
|
||||||
|
Redirect / https://HUB.DOMAIN.TLD/
|
||||||
|
</VirtualHost>
|
||||||
|
|
||||||
|
Listen 443
|
||||||
|
<VirtualHost HUB.DOMAIN.TLD:443>
|
||||||
|
|
||||||
|
ServerName HUB.DOMAIN.TLD
|
||||||
|
|
||||||
|
# configure SSL
|
||||||
|
SSLEngine on
|
||||||
|
SSLCertificateFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem
|
||||||
|
SSLCertificateKeyFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem
|
||||||
|
SSLProtocol All -SSLv2 -SSLv3
|
||||||
|
SSLOpenSSLConfCmd DHParameters /etc/ssl/certs/dhparam.pem
|
||||||
|
SSLCipherSuite EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH
|
||||||
|
|
||||||
|
# Use RewriteEngine to handle websocket connection upgrades
|
||||||
|
RewriteEngine On
|
||||||
|
RewriteCond %{HTTP:Connection} Upgrade [NC]
|
||||||
|
RewriteCond %{HTTP:Upgrade} websocket [NC]
|
||||||
|
RewriteRule /(.*) ws://127.0.0.1:8000/$1 [P,L]
|
||||||
|
|
||||||
|
<Location "/">
|
||||||
|
# preserve Host header to avoid cross-origin problems
|
||||||
|
ProxyPreserveHost on
|
||||||
|
# proxy to JupyterHub
|
||||||
|
ProxyPass http://127.0.0.1:8000/
|
||||||
|
ProxyPassReverse http://127.0.0.1:8000/
|
||||||
|
</Location>
|
||||||
|
</VirtualHost>
|
||||||
|
```
|
254
docs/source/reference/config-sudo.md
Normal file
254
docs/source/reference/config-sudo.md
Normal file
@@ -0,0 +1,254 @@
|
|||||||
|
# Run JupyterHub without root privileges using `sudo`
|
||||||
|
|
||||||
|
**Note:** Setting up `sudo` permissions involves many pieces of system
|
||||||
|
configuration. It is quite easy to get wrong and very difficult to debug.
|
||||||
|
Only do this if you are very sure you must.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
There are many Authenticators and Spawners available for JupyterHub. Some, such
|
||||||
|
as DockerSpawner or OAuthenticator, do not need any elevated permissions. This
|
||||||
|
document describes how to get the full default behavior of JupyterHub while
|
||||||
|
running notebook servers as real system users on a shared system without
|
||||||
|
running the Hub itself as root.
|
||||||
|
|
||||||
|
Since JupyterHub needs to spawn processes as other users, the simplest way
|
||||||
|
is to run it as root, spawning user servers with [setuid](http://linux.die.net/man/2/setuid).
|
||||||
|
But this isn't especially safe, because you have a process running on the
|
||||||
|
public web as root.
|
||||||
|
|
||||||
|
A **more prudent way** to run the server while preserving functionality is to
|
||||||
|
create a dedicated user with `sudo` access restricted to launching and
|
||||||
|
monitoring single-user servers.
|
||||||
|
|
||||||
|
## Create a user
|
||||||
|
|
||||||
|
To do this, first create a user that will run the Hub:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo useradd rhea
|
||||||
|
```
|
||||||
|
|
||||||
|
This user shouldn't have a login shell or password (possible with -r).
|
||||||
|
|
||||||
|
## Set up sudospawner
|
||||||
|
|
||||||
|
Next, you will need [sudospawner](https://github.com/jupyter/sudospawner)
|
||||||
|
to enable monitoring the single-user servers with sudo:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo pip install sudospawner
|
||||||
|
```
|
||||||
|
|
||||||
|
Now we have to configure sudo to allow the Hub user (`rhea`) to launch
|
||||||
|
the sudospawner script on behalf of our hub users (here `zoe` and `wash`).
|
||||||
|
We want to confine these permissions to only what we really need.
|
||||||
|
|
||||||
|
## Edit `/etc/sudoers`
|
||||||
|
|
||||||
|
To do this we add to `/etc/sudoers` (use `visudo` for safe editing of sudoers):
|
||||||
|
|
||||||
|
- specify the list of users `JUPYTER_USERS` for whom `rhea` can spawn servers
|
||||||
|
- set the command `JUPYTER_CMD` that `rhea` can execute on behalf of users
|
||||||
|
- give `rhea` permission to run `JUPYTER_CMD` on behalf of `JUPYTER_USERS`
|
||||||
|
without entering a password
|
||||||
|
|
||||||
|
|
||||||
|
For example:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# comma-separated whitelist of users that can spawn single-user servers
|
||||||
|
# this should include all of your Hub users
|
||||||
|
Runas_Alias JUPYTER_USERS = rhea, zoe, wash
|
||||||
|
|
||||||
|
# the command(s) the Hub can run on behalf of the above users without needing a password
|
||||||
|
# the exact path may differ, depending on how sudospawner was installed
|
||||||
|
Cmnd_Alias JUPYTER_CMD = /usr/local/bin/sudospawner
|
||||||
|
|
||||||
|
# actually give the Hub user permission to run the above command on behalf
|
||||||
|
# of the above users without prompting for a password
|
||||||
|
rhea ALL=(JUPYTER_USERS) NOPASSWD:JUPYTER_CMD
|
||||||
|
```
|
||||||
|
|
||||||
|
It might be useful to modify `secure_path` to add commands in path.
|
||||||
|
|
||||||
|
As an alternative to adding every user to the `/etc/sudoers` file, you can
|
||||||
|
use a group in the last line above, instead of `JUPYTER_USERS`:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
rhea ALL=(%jupyterhub) NOPASSWD:JUPYTER_CMD
|
||||||
|
```
|
||||||
|
|
||||||
|
If the `jupyterhub` group exists, there will be no need to edit `/etc/sudoers`
|
||||||
|
again. A new user will gain access to the application when added to the group:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ adduser -G jupyterhub newuser
|
||||||
|
```
|
||||||
|
|
||||||
|
## Test `sudo` setup
|
||||||
|
|
||||||
|
Test that the new user doesn't need to enter a password to run the sudospawner
|
||||||
|
command.
|
||||||
|
|
||||||
|
This should prompt for your password to switch to rhea, but *not* prompt for
|
||||||
|
any password for the second switch. It should show some help output about
|
||||||
|
logging options:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ sudo -u rhea sudo -n -u $USER /usr/local/bin/sudospawner --help
|
||||||
|
Usage: /usr/local/bin/sudospawner [OPTIONS]
|
||||||
|
|
||||||
|
Options:
|
||||||
|
|
||||||
|
--help show this help information
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
And this should fail:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ sudo -u rhea sudo -n -u $USER echo 'fail'
|
||||||
|
sudo: a password is required
|
||||||
|
```
|
||||||
|
|
||||||
|
## Enable PAM for non-root
|
||||||
|
|
||||||
|
By default, [PAM authentication](http://en.wikipedia.org/wiki/Pluggable_authentication_module)
|
||||||
|
is used by JupyterHub. To use PAM, the process may need to be able to read
|
||||||
|
the shadow password database.
|
||||||
|
|
||||||
|
### Shadow group (Linux)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ ls -l /etc/shadow
|
||||||
|
-rw-r----- 1 root shadow 2197 Jul 21 13:41 shadow
|
||||||
|
```
|
||||||
|
|
||||||
|
If there's already a shadow group, you are set. If its permissions are more like:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ ls -l /etc/shadow
|
||||||
|
-rw------- 1 root wheel 2197 Jul 21 13:41 shadow
|
||||||
|
```
|
||||||
|
|
||||||
|
Then you may want to add a shadow group, and make the shadow file group-readable:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ sudo groupadd shadow
|
||||||
|
$ sudo chgrp shadow /etc/shadow
|
||||||
|
$ sudo chmod g+r /etc/shadow
|
||||||
|
```
|
||||||
|
|
||||||
|
We want our new user to be able to read the shadow passwords, so add it to the shadow group:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ sudo usermod -a -G shadow rhea
|
||||||
|
```
|
||||||
|
|
||||||
|
If you want jupyterhub to serve pages on a restricted port (such as port 80 for http),
|
||||||
|
then you will need to give `node` permission to do so:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo setcap 'cap_net_bind_service=+ep' /usr/bin/node
|
||||||
|
```
|
||||||
|
However, you may want to further understand the consequences of this.
|
||||||
|
|
||||||
|
You may also be interested in limiting the amount of CPU any process can use
|
||||||
|
on your server. `cpulimit` is a useful tool that is available for many Linux
|
||||||
|
distributions' packaging system. This can be used to keep any user's process
|
||||||
|
from using too many CPU cycles. You can configure it according to [these
|
||||||
|
instructions](http://ubuntuforums.org/showthread.php?t=992706).
|
||||||
|
|
||||||
|
|
||||||
|
### Shadow group (FreeBSD)
|
||||||
|
|
||||||
|
**NOTE:** This has not been tested and may not work as expected.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ ls -l /etc/spwd.db /etc/master.passwd
|
||||||
|
-rw------- 1 root wheel 2516 Aug 22 13:35 /etc/master.passwd
|
||||||
|
-rw------- 1 root wheel 40960 Aug 22 13:35 /etc/spwd.db
|
||||||
|
```
|
||||||
|
|
||||||
|
Add a shadow group if there isn't one, and make the shadow file group-readable:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ sudo pw group add shadow
|
||||||
|
$ sudo chgrp shadow /etc/spwd.db
|
||||||
|
$ sudo chmod g+r /etc/spwd.db
|
||||||
|
$ sudo chgrp shadow /etc/master.passwd
|
||||||
|
$ sudo chmod g+r /etc/master.passwd
|
||||||
|
```
|
||||||
|
|
||||||
|
We want our new user to be able to read the shadow passwords, so add it to the
|
||||||
|
shadow group:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ sudo pw user mod rhea -G shadow
|
||||||
|
```
|
||||||
|
|
||||||
|
## Test that PAM works
|
||||||
|
|
||||||
|
We can verify that PAM is working, with:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ sudo -u rhea python3 -c "import pamela, getpass; print(pamela.authenticate('$USER', getpass.getpass()))"
|
||||||
|
Password: [enter your unix password]
|
||||||
|
```
|
||||||
|
|
||||||
|
## Make a directory for JupyterHub
|
||||||
|
|
||||||
|
JupyterHub stores its state in a database, so it needs write access to a directory.
|
||||||
|
The simplest way to deal with this is to make a directory owned by your Hub user,
|
||||||
|
and use that as the CWD when launching the server.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ sudo mkdir /etc/jupyterhub
|
||||||
|
$ sudo chown rhea /etc/jupyterhub
|
||||||
|
```
|
||||||
|
|
||||||
|
## Start jupyterhub
|
||||||
|
|
||||||
|
Finally, start the server as our newly configured user, `rhea`:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ cd /etc/jupyterhub
|
||||||
|
$ sudo -u rhea jupyterhub --JupyterHub.spawner_class=sudospawner.SudoSpawner
|
||||||
|
```
|
||||||
|
|
||||||
|
And try logging in.
|
||||||
|
|
||||||
|
### Troubleshooting: SELinux
|
||||||
|
|
||||||
|
If you still get a generic `Permission denied` `PermissionError`, it's possible SELinux is blocking you.
|
||||||
|
Here's how you can make a module to allow this.
|
||||||
|
First, put this in a file sudo_exec_selinux.te:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
module sudo_exec 1.1;
|
||||||
|
|
||||||
|
require {
|
||||||
|
type unconfined_t;
|
||||||
|
type sudo_exec_t;
|
||||||
|
class file { read entrypoint };
|
||||||
|
}
|
||||||
|
|
||||||
|
#============= unconfined_t ==============
|
||||||
|
allow unconfined_t sudo_exec_t:file entrypoint;
|
||||||
|
```
|
||||||
|
|
||||||
|
Then run all of these commands as root:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ checkmodule -M -m -o sudo_exec_selinux.mod sudo_exec_selinux.te
|
||||||
|
$ semodule_package -o sudo_exec_selinux.pp -m sudo_exec_selinux.mod
|
||||||
|
$ semodule -i sudo_exec_selinux.pp
|
||||||
|
```
|
||||||
|
|
||||||
|
### Troubleshooting: PAM session errors
|
||||||
|
|
||||||
|
If the PAM authentication doesn't work and you see errors for
|
||||||
|
`login:session-auth`, or similar, consider updating to `master`
|
||||||
|
and/or incorporating this commit https://github.com/jupyter/jupyterhub/commit/40368b8f555f04ffdd662ffe99d32392a088b1d2
|
||||||
|
and configuration option, `c.PAMAuthenticator.open_sessions = False`.
|
147
docs/source/reference/config-user-env.md
Normal file
147
docs/source/reference/config-user-env.md
Normal file
@@ -0,0 +1,147 @@
|
|||||||
|
# Configuring user environments
|
||||||
|
|
||||||
|
Deploying JupyterHub means you are providing Jupyter notebook environments for
|
||||||
|
multiple users. Often, this includes a desire to configure the user
|
||||||
|
environment in some way.
|
||||||
|
|
||||||
|
Since the `jupyterhub-singleuser` server extends the standard Jupyter notebook
|
||||||
|
server, most configuration and documentation that applies to Jupyter Notebook
|
||||||
|
applies to the single-user environments. Configuration of user environments
|
||||||
|
typically does not occur through JupyterHub itself, but rather through system-
|
||||||
|
wide configuration of Jupyter, which is inherited by `jupyterhub-singleuser`.
|
||||||
|
|
||||||
|
**Tip:** When searching for configuration tips for JupyterHub user
|
||||||
|
environments, try removing JupyterHub from your search because there are a lot
|
||||||
|
more people out there configuring Jupyter than JupyterHub and the
|
||||||
|
configuration is the same.
|
||||||
|
|
||||||
|
This section will focus on user environments, including:
|
||||||
|
|
||||||
|
- Installing packages
|
||||||
|
- Configuring Jupyter and IPython
|
||||||
|
- Installing kernelspecs
|
||||||
|
- Using containers vs. multi-user hosts
|
||||||
|
|
||||||
|
|
||||||
|
## Installing packages
|
||||||
|
|
||||||
|
To make packages available to users, you generally will install packages
|
||||||
|
system-wide or in a shared environment.
|
||||||
|
|
||||||
|
This installation location should always be in the same environment that
|
||||||
|
`jupyterhub-singleuser` itself is installed in, and must be *readable and
|
||||||
|
executable* by your users. If you want users to be able to install additional
|
||||||
|
packages, it must also be *writable* by your users.
|
||||||
|
|
||||||
|
If you are using a standard system Python install, you would use:
|
||||||
|
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo python3 -m pip install numpy
|
||||||
|
```
|
||||||
|
|
||||||
|
to install the numpy package in the default system Python 3 environment
|
||||||
|
(typically `/usr/local`).
|
||||||
|
|
||||||
|
You may also use conda to install packages. If you do, you should make sure
|
||||||
|
that the conda environment has appropriate permissions for users to be able to
|
||||||
|
run Python code in the env.
|
||||||
|
|
||||||
|
|
||||||
|
## Configuring Jupyter and IPython
|
||||||
|
|
||||||
|
[Jupyter](https://jupyter-notebook.readthedocs.io/en/stable/config_overview.html)
|
||||||
|
and [IPython](https://ipython.readthedocs.io/en/stable/development/config.html)
|
||||||
|
have their own configuration systems.
|
||||||
|
|
||||||
|
As a JupyterHub administrator, you will typically want to install and configure
|
||||||
|
environments for all JupyterHub users. For example, you wish for each student in
|
||||||
|
a class to have the same user environment configuration.
|
||||||
|
|
||||||
|
Jupyter and IPython support **"system-wide"** locations for configuration, which
|
||||||
|
is the logical place to put global configuration that you want to affect all
|
||||||
|
users. It's generally more efficient to configure user environments "system-wide",
|
||||||
|
and it's a good idea to avoid creating files in users' home directories.
|
||||||
|
|
||||||
|
The typical locations for these config files are:
|
||||||
|
- **system-wide** in `/etc/{jupyter|ipython}`
|
||||||
|
- **env-wide** (environment wide) in `{sys.prefix}/etc/{jupyter|ipython}`.
|
||||||
|
|
||||||
|
### Example: Enable an extension system-wide
|
||||||
|
|
||||||
|
For example, to enable the `cython` IPython extension for all of your users,
|
||||||
|
create the file `/etc/ipython/ipython_config.py`:
|
||||||
|
|
||||||
|
```python
|
||||||
|
c.InteractiveShellApp.extensions.append("cython")
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example: Enable a Jupyter notebook configuration setting for all users
|
||||||
|
|
||||||
|
To enable Jupyter notebook's internal idle-shutdown behavior (requires
|
||||||
|
notebook ≥ 5.4), set the following in the `/etc/jupyter/jupyter_notebook_config.py`
|
||||||
|
file:
|
||||||
|
|
||||||
|
```python
|
||||||
|
# shutdown the server after no activity for an hour
|
||||||
|
c.NotebookApp.shutdown_no_activity_timeout = 60 * 60
|
||||||
|
# shutdown kernels after no activity for 20 minutes
|
||||||
|
c.MappingKernelManager.cull_idle_timeout = 20 * 60
|
||||||
|
# check for idle kernels every two minutes
|
||||||
|
c.MappingKernelManager.cull_interval = 2 * 60
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Installing kernelspecs
|
||||||
|
|
||||||
|
You may have multiple Jupyter kernels installed and want to make sure that
|
||||||
|
they are available to all of your users. This means installing kernelspecs
|
||||||
|
either system-wide (e.g. in /usr/local/) or in the `sys.prefix` of JupyterHub
|
||||||
|
itself.
|
||||||
|
|
||||||
|
Jupyter kernelspec installation is system wide by default, but some kernels
|
||||||
|
may default to installing kernelspecs in your home directory. These will need
|
||||||
|
to be moved system-wide to ensure that they are accessible.
|
||||||
|
|
||||||
|
You can see where your kernelspecs are with:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
jupyter kernelspec list
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example: Installing kernels system-wide
|
||||||
|
|
||||||
|
Assuming I have a Python 2 and Python 3 environment that I want to make
|
||||||
|
sure are available, I can install their specs system-wide (in /usr/local) with:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
/path/to/python3 -m IPython kernel install --prefix=/usr/local
|
||||||
|
/path/to/python2 -m IPython kernel install --prefix=/usr/local
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Multi-user hosts vs. Containers
|
||||||
|
|
||||||
|
There are two broad categories of user environments that depend on what
|
||||||
|
Spawner you choose:
|
||||||
|
|
||||||
|
- Multi-user hosts (shared system)
|
||||||
|
- Container-based
|
||||||
|
|
||||||
|
How you configure user environments for each category can differ a bit
|
||||||
|
depending on what Spawner you are using.
|
||||||
|
|
||||||
|
The first category is a **shared system (multi-user host)** where
|
||||||
|
each user has a JupyterHub account and a home directory as well as being
|
||||||
|
a real system user. In this example, shared configuration and installation
|
||||||
|
must be in a 'system-wide' location, such as `/etc/` or `/usr/local`
|
||||||
|
or a custom prefix such as `/opt/conda`.
|
||||||
|
|
||||||
|
When JupyterHub uses **container-based** Spawners (e.g. KubeSpawner or
|
||||||
|
DockerSpawner), the 'system-wide' environment is really the container image
|
||||||
|
which you are using for users.
|
||||||
|
|
||||||
|
In both cases, you want to *avoid putting configuration in user home
|
||||||
|
directories* because users can change those configuration settings. Also,
|
||||||
|
home directories typically persist once they are created, so they are
|
||||||
|
difficult for admins to update later.
|
62
docs/source/reference/database.md
Normal file
62
docs/source/reference/database.md
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
# The Hub's Database
|
||||||
|
|
||||||
|
JupyterHub uses a database to store information about users, services, and other
|
||||||
|
data needed for operating the Hub.
|
||||||
|
|
||||||
|
## Default SQLite database
|
||||||
|
|
||||||
|
The default database for JupyterHub is a [SQLite](https://sqlite.org) database.
|
||||||
|
We have chosen SQLite as JupyterHub's default for its lightweight simplicity
|
||||||
|
in certain uses such as testing, small deployments and workshops.
|
||||||
|
|
||||||
|
For production systems, SQLite has some disadvantages when used with JupyterHub:
|
||||||
|
|
||||||
|
- `upgrade-db` may not work, and you may need to start with a fresh database
|
||||||
|
- `downgrade-db` **will not** work if you want to rollback to an earlier
|
||||||
|
version, so backup the `jupyterhub.sqlite` file before upgrading
|
||||||
|
|
||||||
|
The sqlite documentation provides a helpful page about [when to use SQLite and
|
||||||
|
where traditional RDBMS may be a better choice](https://sqlite.org/whentouse.html).
|
||||||
|
|
||||||
|
## Using an RDBMS (PostgreSQL, MySQL)
|
||||||
|
|
||||||
|
When running a long term deployment or a production system, we recommend using
|
||||||
|
a traditional RDBMS database, such as [PostgreSQL](https://www.postgresql.org)
|
||||||
|
or [MySQL](https://www.mysql.com), that supports the SQL `ALTER TABLE`
|
||||||
|
statement.
|
||||||
|
|
||||||
|
## Notes and Tips
|
||||||
|
|
||||||
|
### SQLite
|
||||||
|
|
||||||
|
The SQLite database should not be used on NFS. SQLite uses reader/writer locks
|
||||||
|
to control access to the database. This locking mechanism might not work
|
||||||
|
correctly if the database file is kept on an NFS filesystem. This is because
|
||||||
|
`fcntl()` file locking is broken on many NFS implementations. Therefore, you
|
||||||
|
should avoid putting SQLite database files on NFS since it will not handle well
|
||||||
|
multiple processes which might try to access the file at the same time.
|
||||||
|
|
||||||
|
### PostgreSQL
|
||||||
|
|
||||||
|
We recommend using PostgreSQL for production if you are unsure whether to use
|
||||||
|
MySQL or PostgreSQL or if you do not have a strong preference. There is
|
||||||
|
additional configuration required for MySQL that is not needed for PostgreSQL.
|
||||||
|
|
||||||
|
### MySQL / MariaDB
|
||||||
|
|
||||||
|
- You should use the `pymysql` sqlalchemy provider (the other one, MySQLdb,
|
||||||
|
isn't available for py3).
|
||||||
|
- You also need to set `pool_recycle` to some value (typically 60 - 300)
|
||||||
|
which depends on your MySQL setup. This is necessary since MySQL kills
|
||||||
|
connections serverside if they've been idle for a while, and the connection
|
||||||
|
from the hub will be idle for longer than most connections. This behavior
|
||||||
|
will lead to frustrating 'the connection has gone away' errors from
|
||||||
|
sqlalchemy if `pool_recycle` is not set.
|
||||||
|
- If you use `utf8mb4` collation with MySQL earlier than 5.7.7 or MariaDB
|
||||||
|
earlier than 10.2.1 you may get an `1709, Index column size too large` error.
|
||||||
|
To fix this you need to set `innodb_large_prefix` to enabled and
|
||||||
|
`innodb_file_format` to `Barracuda` to allow for the index sizes jupyterhub
|
||||||
|
uses. `row_format` will be set to `DYNAMIC` as long as those options are set
|
||||||
|
correctly. Later versions of MariaDB and MySQL should set these values by
|
||||||
|
default, as well as have a default `DYNAMIC` `row_format` and pose no trouble
|
||||||
|
to users.
|
@@ -11,5 +11,11 @@ Technical Reference
|
|||||||
services
|
services
|
||||||
proxy
|
proxy
|
||||||
rest
|
rest
|
||||||
|
database
|
||||||
upgrading
|
upgrading
|
||||||
|
templates
|
||||||
|
config-user-env
|
||||||
config-examples
|
config-examples
|
||||||
|
config-ghoauth
|
||||||
|
config-proxy
|
||||||
|
config-sudo
|
||||||
|
@@ -1,22 +1,26 @@
|
|||||||
# Writing a custom Proxy implementation
|
# Writing a custom Proxy implementation
|
||||||
|
|
||||||
JupyterHub 0.8 introduced the ability to write a custom implementation of the proxy.
|
JupyterHub 0.8 introduced the ability to write a custom implementation of the
|
||||||
This enables deployments with different needs than the default proxy,
|
proxy. This enables deployments with different needs than the default proxy,
|
||||||
configurable-http-proxy (CHP).
|
configurable-http-proxy (CHP). CHP is a single-process nodejs proxy that the
|
||||||
CHP is a single-process nodejs proxy that the Hub manages by default as a subprocess
|
Hub manages by default as a subprocess (it can be run externally, as well, and
|
||||||
(it can be run externally, as well, and typically is in production deployments).
|
typically is in production deployments).
|
||||||
|
|
||||||
The upside to CHP, and why we use it by default, is that it's easy to install and run (if you have nodejs, you are set!).
|
The upside to CHP, and why we use it by default, is that it's easy to install
|
||||||
The downsides are that it's a single process and does not support any persistence of the routing table.
|
and run (if you have nodejs, you are set!). The downsides are that it's a
|
||||||
So if the proxy process dies, your whole JupyterHub instance is inaccessible until the Hub notices, restarts the proxy, and restores the routing table.
|
single process and does not support any persistence of the routing table. So
|
||||||
For deployments that want to avoid such a single point of failure,
|
if the proxy process dies, your whole JupyterHub instance is inaccessible
|
||||||
or leverage existing proxy infrastructure in their chosen deployment (such as Kubernetes ingress objects),
|
until the Hub notices, restarts the proxy, and restores the routing table. For
|
||||||
the Proxy API provides a way to do that.
|
deployments that want to avoid such a single point of failure, or leverage
|
||||||
|
existing proxy infrastructure in their chosen deployment (such as Kubernetes
|
||||||
|
ingress objects), the Proxy API provides a way to do that.
|
||||||
|
|
||||||
In general, for a proxy to be usable by JupyterHub, it must:
|
In general, for a proxy to be usable by JupyterHub, it must:
|
||||||
|
|
||||||
1. support websockets without prior knowledge of the URL where websockets may occur
|
1. support websockets without prior knowledge of the URL where websockets may
|
||||||
2. support trie-based routing (i.e. allow different routes on `/foo` and `/foo/bar` and route based on specificity)
|
occur
|
||||||
|
2. support trie-based routing (i.e. allow different routes on `/foo` and
|
||||||
|
`/foo/bar` and route based on specificity)
|
||||||
3. adding or removing a route should not cause existing connections to drop
|
3. adding or removing a route should not cause existing connections to drop
|
||||||
|
|
||||||
Optionally, if the JupyterHub deployment is to use host-based routing,
|
Optionally, if the JupyterHub deployment is to use host-based routing,
|
||||||
@@ -35,10 +39,10 @@ class MyProxy(Proxy):
|
|||||||
...
|
...
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
## Starting and stopping the proxy
|
## Starting and stopping the proxy
|
||||||
|
|
||||||
If your proxy should be launched when the Hub starts, you must define how to start and stop your proxy:
|
If your proxy should be launched when the Hub starts, you must define how
|
||||||
|
to start and stop your proxy:
|
||||||
|
|
||||||
```python
|
```python
|
||||||
from tornado import gen
|
from tornado import gen
|
||||||
@@ -55,8 +59,8 @@ class MyProxy(Proxy):
|
|||||||
|
|
||||||
These methods **may** be coroutines.
|
These methods **may** be coroutines.
|
||||||
|
|
||||||
`c.Proxy.should_start` is a configurable flag that determines whether the Hub should call these methods when the Hub itself starts and stops.
|
`c.Proxy.should_start` is a configurable flag that determines whether the
|
||||||
|
Hub should call these methods when the Hub itself starts and stops.
|
||||||
|
|
||||||
### Purely external proxies
|
### Purely external proxies
|
||||||
|
|
||||||
@@ -70,31 +74,30 @@ class MyProxy(Proxy):
|
|||||||
should_start = False
|
should_start = False
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Routes
|
||||||
|
|
||||||
## Adding and removing routes
|
At its most basic, a Proxy implementation defines a mechanism to add, remove,
|
||||||
|
and retrieve routes. A proxy that implements these three methods is complete.
|
||||||
At its most basic, a Proxy implementation defines a mechanism to add, remove, and retrieve routes.
|
|
||||||
A proxy that implements these three methods is complete.
|
|
||||||
Each of these methods **may** be a coroutine.
|
Each of these methods **may** be a coroutine.
|
||||||
|
|
||||||
**Definition:** routespec
|
**Definition:** routespec
|
||||||
|
|
||||||
A routespec, which will appear in these methods, is a string describing a route to be proxied,
|
A routespec, which will appear in these methods, is a string describing a
|
||||||
such as `/user/name/`. A routespec will:
|
route to be proxied, such as `/user/name/`. A routespec will:
|
||||||
|
|
||||||
1. always end with `/`
|
1. always end with `/`
|
||||||
2. always start with `/` if it is a path-based route `/proxy/path/`
|
2. always start with `/` if it is a path-based route `/proxy/path/`
|
||||||
3. precede the leading `/` with a host for host-based routing, e.g. `host.tld/proxy/path/`
|
3. precede the leading `/` with a host for host-based routing, e.g.
|
||||||
|
`host.tld/proxy/path/`
|
||||||
|
|
||||||
### Adding a route
|
### Adding a route
|
||||||
|
|
||||||
When adding a route, JupyterHub may pass a JSON-serializable dict as a `data` argument
|
When adding a route, JupyterHub may pass a JSON-serializable dict as a `data`
|
||||||
that should be attached to the proxy route.
|
argument that should be attached to the proxy route. When that route is
|
||||||
When that route is retrieved, the `data` argument should be returned as well.
|
retrieved, the `data` argument should be returned as well. If your proxy
|
||||||
If your proxy implementation doesn't support storing data attached to routes,
|
implementation doesn't support storing data attached to routes, then your
|
||||||
then your Python wrapper may have to handle storing the `data` piece itself,
|
Python wrapper may have to handle storing the `data` piece itself, e.g in a
|
||||||
e.g in a simple file or database.
|
simple file or database.
|
||||||
|
|
||||||
```python
|
```python
|
||||||
@gen.coroutine
|
@gen.coroutine
|
||||||
@@ -113,12 +116,10 @@ proxy.add_route('/user/pgeorgiou/', 'http://127.0.0.1:1227',
|
|||||||
{'user': 'pgeorgiou'})
|
{'user': 'pgeorgiou'})
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
### Removing routes
|
### Removing routes
|
||||||
|
|
||||||
`delete_route()` is given a routespec to delete.
|
`delete_route()` is given a routespec to delete. If there is no such route,
|
||||||
If there is no such route, `delete_route` should still succeed,
|
`delete_route` should still succeed, but a warning may be issued.
|
||||||
but a warning may be issued.
|
|
||||||
|
|
||||||
```python
|
```python
|
||||||
@gen.coroutine
|
@gen.coroutine
|
||||||
@@ -126,18 +127,17 @@ def delete_route(self, routespec):
|
|||||||
"""Delete the route"""
|
"""Delete the route"""
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
### Retrieving routes
|
### Retrieving routes
|
||||||
|
|
||||||
For retrieval, you only *need* to implement a single method that retrieves all routes.
|
For retrieval, you only *need* to implement a single method that retrieves all
|
||||||
The return value for this function should be a dictionary, keyed by `routespec`,
|
routes. The return value for this function should be a dictionary, keyed by
|
||||||
of dicts whose keys are the same three arguments passed to `add_route`
|
`routespec`, of dicts whose keys are the same three arguments passed to
|
||||||
(`routespec`, `target`, `data`)
|
`add_route` (`routespec`, `target`, `data`)
|
||||||
|
|
||||||
```python
|
```python
|
||||||
@gen.coroutine
|
@gen.coroutine
|
||||||
def get_all_routes(self):
|
def get_all_routes(self):
|
||||||
"""Return all routes, keyed by routespec""""
|
"""Return all routes, keyed by routespec"""
|
||||||
```
|
```
|
||||||
|
|
||||||
```python
|
```python
|
||||||
@@ -150,15 +150,15 @@ def get_all_routes(self):
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Note on activity tracking
|
||||||
|
|
||||||
|
JupyterHub can track activity of users, for use in services such as culling
|
||||||
#### Note on activity tracking
|
idle servers. As of JupyterHub 0.8, this activity tracking is the
|
||||||
|
responsibility of the proxy. If your proxy implementation can track activity
|
||||||
JupyterHub can track activity of users, for use in services such as culling idle servers.
|
to endpoints, it may add a `last_activity` key to the `data` of routes
|
||||||
As of JupyterHub 0.8, this activity tracking is the responsibility of the proxy.
|
retrieved in `.get_all_routes()`. If present, the value of `last_activity`
|
||||||
If your proxy implementation can track activity to endpoints,
|
should be an [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) UTC date
|
||||||
it may add a `last_activity` key to the `data` of routes retrieved in `.get_all_routes()`.
|
string:
|
||||||
If present, the value of `last_activity` should be an [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) UTC date string:
|
|
||||||
|
|
||||||
```python
|
```python
|
||||||
{
|
{
|
||||||
@@ -173,11 +173,9 @@ If present, the value of `last_activity` should be an [ISO8601](https://en.wikip
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
If the proxy does not track activity, then only activity to the Hub itself is
|
||||||
|
tracked, and services such as cull-idle will not work.
|
||||||
|
|
||||||
If the proxy does not track activity, then only activity to the Hub itself is tracked,
|
Now that `notebook-5.0` tracks activity internally, we can retrieve activity
|
||||||
and services such as cull-idle will not work.
|
information from the single-user servers instead, removing the need to track
|
||||||
|
activity in the proxy. But this is not yet implemented in JupyterHub 0.8.0.
|
||||||
Now that `notebook-5.0` tracks activity internally,
|
|
||||||
we can retrieve activity information from the single-user servers instead,
|
|
||||||
removing the need to track activity in the proxy.
|
|
||||||
But this is not yet implemented in JupyterHub 0.8.0.
|
|
||||||
|
@@ -15,7 +15,7 @@ This section provides the following information about Services:
|
|||||||
## Definition of a Service
|
## Definition of a Service
|
||||||
|
|
||||||
When working with JupyterHub, a **Service** is defined as a process that interacts
|
When working with JupyterHub, a **Service** is defined as a process that interacts
|
||||||
with the Hub's REST API. A Service may perform a specific
|
with the Hub's REST API. A Service may perform a specific
|
||||||
action or task. For example, the following tasks can each be a unique Service:
|
action or task. For example, the following tasks can each be a unique Service:
|
||||||
|
|
||||||
- shutting down individuals' single user notebook servers that have been idle
|
- shutting down individuals' single user notebook servers that have been idle
|
||||||
@@ -205,9 +205,9 @@ To use HubAuth, you must set the `.api_token`, either programmatically when cons
|
|||||||
or via the `JUPYTERHUB_API_TOKEN` environment variable.
|
or via the `JUPYTERHUB_API_TOKEN` environment variable.
|
||||||
|
|
||||||
Most of the logic for authentication implementation is found in the
|
Most of the logic for authentication implementation is found in the
|
||||||
[`HubAuth.user_for_cookie`](services.auth.html#jupyterhub.services.auth.HubAuth.user_for_cookie)
|
[`HubAuth.user_for_cookie`][HubAuth.user_for_cookie]
|
||||||
and in the
|
and in the
|
||||||
[`HubAuth.user_for_token`](services.auth.html#jupyterhub.services.auth.HubAuth.user_for_token)
|
[`HubAuth.user_for_token`][HubAuth.user_for_token]
|
||||||
methods, which makes a request of the Hub, and returns:
|
methods, which makes a request of the Hub, and returns:
|
||||||
|
|
||||||
- None, if no user could be identified, or
|
- None, if no user could be identified, or
|
||||||
@@ -359,14 +359,16 @@ and taking note of the following process:
|
|||||||
```
|
```
|
||||||
|
|
||||||
An example of using an Externally-Managed Service and authentication is
|
An example of using an Externally-Managed Service and authentication is
|
||||||
in [nbviewer README]_ section on securing the notebook viewer,
|
in [nbviewer README][nbviewer example] section on securing the notebook viewer,
|
||||||
and an example of its configuration is found [here](https://github.com/jupyter/nbviewer/blob/master/nbviewer/providers/base.py#L94).
|
and an example of its configuration is found [here](https://github.com/jupyter/nbviewer/blob/master/nbviewer/providers/base.py#L94).
|
||||||
nbviewer can also be run as a Hub-Managed Service as described [nbviewer README]_
|
nbviewer can also be run as a Hub-Managed Service as described [nbviewer README][nbviewer example]
|
||||||
section on securing the notebook viewer.
|
section on securing the notebook viewer.
|
||||||
|
|
||||||
|
|
||||||
[requests]: http://docs.python-requests.org/en/master/
|
[requests]: http://docs.python-requests.org/en/master/
|
||||||
[services_auth]: ../api/services.auth.html
|
[services_auth]: ../api/services.auth.html
|
||||||
[HubAuth]: ../api/services.auth.html#jupyterhub.services.auth.HubAuth
|
[HubAuth]: ../api/services.auth.html#jupyterhub.services.auth.HubAuth
|
||||||
|
[HubAuth.user_for_cookie]: ../api/services.auth.html#jupyterhub.services.auth.HubAuth.user_for_cookie
|
||||||
|
[HubAuth.user_for_token]: ../api/services.auth.html#jupyterhub.services.auth.HubAuth.user_for_token
|
||||||
[HubAuthenticated]: ../api/services.auth.html#jupyterhub.services.auth.HubAuthenticated
|
[HubAuthenticated]: ../api/services.auth.html#jupyterhub.services.auth.HubAuthenticated
|
||||||
[nbviewer example]: https://github.com/jupyter/nbviewer#securing-the-notebook-viewer
|
[nbviewer example]: https://github.com/jupyter/nbviewer#securing-the-notebook-viewer
|
||||||
|
@@ -46,7 +46,16 @@ Most `Spawner.start` functions will look similar to this example:
|
|||||||
def start(self):
|
def start(self):
|
||||||
self.ip = '127.0.0.1'
|
self.ip = '127.0.0.1'
|
||||||
self.port = random_port()
|
self.port = random_port()
|
||||||
yield self._actually_start_server_somehow()
|
# get environment variables,
|
||||||
|
# several of which are required for configuring the single-user server
|
||||||
|
env = self.get_env()
|
||||||
|
cmd = []
|
||||||
|
# get jupyterhub command to run,
|
||||||
|
# typically ['jupyterhub-singleuser']
|
||||||
|
cmd.extend(self.cmd)
|
||||||
|
cmd.extend(self.get_args())
|
||||||
|
|
||||||
|
yield self._actually_start_server_somehow(cmd, env)
|
||||||
return (self.ip, self.port)
|
return (self.ip, self.port)
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -170,9 +179,12 @@ If you are interested in building a custom spawner, you can read [this tutorial]
|
|||||||
Some spawners of the single-user notebook servers allow setting limits or
|
Some spawners of the single-user notebook servers allow setting limits or
|
||||||
guarantees on resources, such as CPU and memory. To provide a consistent
|
guarantees on resources, such as CPU and memory. To provide a consistent
|
||||||
experience for sysadmins and users, we provide a standard way to set and
|
experience for sysadmins and users, we provide a standard way to set and
|
||||||
discover these resource limits and guarantees, such as for memory and CPU. For
|
discover these resource limits and guarantees, such as for memory and CPU.
|
||||||
the limits and guarantees to be useful, the spawner must implement support for
|
For the limits and guarantees to be useful, **the spawner must implement
|
||||||
them.
|
support for them**. For example, LocalProcessSpawner, the default
|
||||||
|
spawner, does not support limits and guarantees. One of the spawners
|
||||||
|
that supports limits and guarantees is the `systemdspawner`.
|
||||||
|
|
||||||
|
|
||||||
### Memory Limits & Guarantees
|
### Memory Limits & Guarantees
|
||||||
|
|
||||||
@@ -184,14 +196,14 @@ allocate. Attempting to use more memory than this limit will cause errors. The
|
|||||||
single-user notebook server can discover its own memory limit by looking at
|
single-user notebook server can discover its own memory limit by looking at
|
||||||
the environment variable `MEM_LIMIT`, which is specified in absolute bytes.
|
the environment variable `MEM_LIMIT`, which is specified in absolute bytes.
|
||||||
|
|
||||||
`c.Spawner.mem_guarantee`: Sometimes, a **guarantee** of a *minumum amount of
|
`c.Spawner.mem_guarantee`: Sometimes, a **guarantee** of a *minimum amount of
|
||||||
memory* is desirable. In this case, you can set `c.Spawner.mem_guarantee` to
|
memory* is desirable. In this case, you can set `c.Spawner.mem_guarantee` to
|
||||||
to provide a guarantee that at minimum this much memory will always be
|
to provide a guarantee that at minimum this much memory will always be
|
||||||
available for the single-user notebook server to use. The environment variable
|
available for the single-user notebook server to use. The environment variable
|
||||||
`MEM_GUARANTEE` will also be set in the single-user notebook server.
|
`MEM_GUARANTEE` will also be set in the single-user notebook server.
|
||||||
|
|
||||||
The spawner's underlying system or cluster is responsible for enforcing these
|
**The spawner's underlying system or cluster is responsible for enforcing these
|
||||||
limits and providing these guarantees. If these values are set to `None`, no
|
limits and providing these guarantees.** If these values are set to `None`, no
|
||||||
limits or guarantees are provided, and no environment values are set.
|
limits or guarantees are provided, and no environment values are set.
|
||||||
|
|
||||||
### CPU Limits & Guarantees
|
### CPU Limits & Guarantees
|
||||||
@@ -208,6 +220,6 @@ higher priority applications might be taking up CPU.
|
|||||||
guarantee for CPU usage. The environment variable `CPU_GUARANTEE` will be set
|
guarantee for CPU usage. The environment variable `CPU_GUARANTEE` will be set
|
||||||
in the single-user notebook server when a guarantee is being provided.
|
in the single-user notebook server when a guarantee is being provided.
|
||||||
|
|
||||||
The spawner's underlying system or cluster is responsible for enforcing these
|
**The spawner's underlying system or cluster is responsible for enforcing these
|
||||||
limits and providing these guarantees. If these values are set to `None`, no
|
limits and providing these guarantees.** If these values are set to `None`, no
|
||||||
limits or guarantees are provided, and no environment values are set.
|
limits or guarantees are provided, and no environment values are set.
|
||||||
|
93
docs/source/reference/templates.md
Normal file
93
docs/source/reference/templates.md
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
# Working with templates and UI
|
||||||
|
|
||||||
|
The pages of the JupyterHub application are generated from
|
||||||
|
[Jinja](http://jinja.pocoo.org/) templates. These allow the header, for
|
||||||
|
example, to be defined once and incorporated into all pages. By providing
|
||||||
|
your own templates, you can have complete control over JupyterHub's
|
||||||
|
appearance.
|
||||||
|
|
||||||
|
## Custom Templates
|
||||||
|
|
||||||
|
JupyterHub will look for custom templates in all of the paths in the
|
||||||
|
`JupyterHub.template_paths` configuration option, falling back on the
|
||||||
|
[default templates](https://github.com/jupyterhub/jupyterhub/tree/master/share/jupyterhub/templates)
|
||||||
|
if no custom template with that name is found. This fallback
|
||||||
|
behavior is new in version 0.9; previous versions searched only those paths
|
||||||
|
explicitly included in `template_paths`. You may override as many
|
||||||
|
or as few templates as you desire.
|
||||||
|
|
||||||
|
## Extending Templates
|
||||||
|
|
||||||
|
Jinja provides a mechanism to [extend templates](http://jinja.pocoo.org/docs/2.10/templates/#template-inheritance).
|
||||||
|
A base template can define a `block`, and child templates can replace or
|
||||||
|
supplement the material in the block. The
|
||||||
|
[JupyterHub templates](https://github.com/jupyterhub/jupyterhub/tree/master/share/jupyterhub/templates)
|
||||||
|
make extensive use of blocks, which allows you to customize parts of the
|
||||||
|
interface easily.
|
||||||
|
|
||||||
|
In general, a child template can extend a base template, `base.html`, by beginning with:
|
||||||
|
|
||||||
|
```html
|
||||||
|
{% extends "base.html" %}
|
||||||
|
```
|
||||||
|
|
||||||
|
This works, unless you are trying to extend the default template for the same
|
||||||
|
file name. Starting in version 0.9, you may refer to the base file with a
|
||||||
|
`templates/` prefix. Thus, if you are writing a custom `base.html`, start the
|
||||||
|
file with this block:
|
||||||
|
|
||||||
|
```html
|
||||||
|
{% extends "templates/base.html" %}
|
||||||
|
```
|
||||||
|
|
||||||
|
By defining `block`s with same name as in the base template, child templates
|
||||||
|
can replace those sections with custom content. The content from the base
|
||||||
|
template can be included with the `{{ super() }}` directive.
|
||||||
|
|
||||||
|
### Example
|
||||||
|
|
||||||
|
To add an additional message to the spawn-pending page, below the existing
|
||||||
|
text about the server starting up, place this content in a file named
|
||||||
|
`spawn_pending.html` in a directory included in the
|
||||||
|
`JupyterHub.template_paths` configuration option.
|
||||||
|
|
||||||
|
```html
|
||||||
|
{% extends "templates/spawn_pending.html" %}
|
||||||
|
|
||||||
|
{% block message %}
|
||||||
|
{{ super() }}
|
||||||
|
<p>Patience is a virtue.</p>
|
||||||
|
{% endblock %}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Page Announcements
|
||||||
|
|
||||||
|
To add announcements to be displayed on a page, you have two options:
|
||||||
|
|
||||||
|
- Extend the page templates as described above
|
||||||
|
- Use configuration variables
|
||||||
|
|
||||||
|
### Announcement Configuration Variables
|
||||||
|
|
||||||
|
If you set the configuration variable `JupyterHub.template_vars =
|
||||||
|
{'announcement': 'some_text}`, the given `some_text` will be placed on
|
||||||
|
the top of all pages. The more specific variables
|
||||||
|
`announcement_login`, `announcement_spawn`, `announcement_home`, and
|
||||||
|
`announcement_logout` are more specific and only show on their
|
||||||
|
respective pages (overriding the global `announcement` variable).
|
||||||
|
Note that changing these variables require a restart, unlike direct
|
||||||
|
template extension.
|
||||||
|
|
||||||
|
You can get the same effect by extending templates, which allows you
|
||||||
|
to update the messages without restarting. Set
|
||||||
|
`c.JupyterHub.template_paths` as mentioned above, and then create a
|
||||||
|
template (for example, `login.html`) with:
|
||||||
|
|
||||||
|
```html
|
||||||
|
{% extends "templates/login.html" %}
|
||||||
|
{% set announcement = 'some message' %}
|
||||||
|
```
|
||||||
|
|
||||||
|
Extending `page.html` puts the message on all pages, but note that
|
||||||
|
extending `page.html` take precedence over an extension of a specific
|
||||||
|
page (unlike the variable-based approach above).
|
@@ -2,30 +2,22 @@
|
|||||||
|
|
||||||
From time to time, you may wish to upgrade JupyterHub to take advantage
|
From time to time, you may wish to upgrade JupyterHub to take advantage
|
||||||
of new releases. Much of this process is automated using scripts,
|
of new releases. Much of this process is automated using scripts,
|
||||||
such as those generated by alembic for database upgrades. Before upgrading a
|
such as those generated by alembic for database upgrades. Whether you
|
||||||
JupyterHub deployment, it's critical to backup your data and configurations
|
are using the default SQLite database or an RDBMS, such as PostgreSQL or
|
||||||
before shutting down the JupyterHub process and server.
|
MySQL, the process follows similar steps.
|
||||||
|
|
||||||
## Databases: SQLite (default) or RDBMS (PostgreSQL, MySQL)
|
**Before upgrading a JupyterHub deployment**, it's critical to backup your data
|
||||||
|
and configurations before shutting down the JupyterHub process and server.
|
||||||
|
|
||||||
The default database for JupyterHub is a [SQLite](https://sqlite.org) database.
|
## Note about upgrading the SQLite database
|
||||||
We have chosen SQLite as JupyterHub's default for its lightweight simplicity
|
|
||||||
in certain uses such as testing, small deployments and workshops.
|
|
||||||
|
|
||||||
When running a long term deployment or a production system, we recommend using
|
When used in production systems, SQLite has some disadvantages when it
|
||||||
a traditional RDBMS database, such as [PostgreSQL](https://www.postgresql.org)
|
comes to upgrading JupyterHub. These are:
|
||||||
or [MySQL](https://www.mysql.com), that supports the SQL `ALTER TABLE`
|
|
||||||
statement.
|
|
||||||
|
|
||||||
For production systems, SQLite has some disadvantages when used with JupyterHub:
|
|
||||||
|
|
||||||
- `upgrade-db` may not work, and you may need to start with a fresh database
|
- `upgrade-db` may not work, and you may need to start with a fresh database
|
||||||
- `downgrade-db` **will not** work if you want to rollback to an earlier
|
- `downgrade-db` **will not** work if you want to rollback to an earlier
|
||||||
version, so backup the `jupyterhub.sqlite` file before upgrading
|
version, so backup the `jupyterhub.sqlite` file before upgrading
|
||||||
|
|
||||||
The sqlite documentation provides a helpful page about [when to use sqlite and
|
|
||||||
where traditional RDBMS may be a better choice](https://sqlite.org/whentouse.html).
|
|
||||||
|
|
||||||
## The upgrade process
|
## The upgrade process
|
||||||
|
|
||||||
Five fundamental process steps are needed when upgrading JupyterHub and its
|
Five fundamental process steps are needed when upgrading JupyterHub and its
|
||||||
|
@@ -166,7 +166,7 @@ startup
|
|||||||
statsd
|
statsd
|
||||||
stdin
|
stdin
|
||||||
stdout
|
stdout
|
||||||
stoppped
|
stopped
|
||||||
subclasses
|
subclasses
|
||||||
subcommand
|
subcommand
|
||||||
subdomain
|
subdomain
|
||||||
|
@@ -9,6 +9,7 @@ problem and how to resolve it.
|
|||||||
- sudospawner fails to run
|
- sudospawner fails to run
|
||||||
- What is the default behavior when none of the lists (admin, whitelist,
|
- What is the default behavior when none of the lists (admin, whitelist,
|
||||||
group whitelist) are set?
|
group whitelist) are set?
|
||||||
|
- JupyterHub Docker container not accessible at localhost
|
||||||
|
|
||||||
[*Errors*](#errors)
|
[*Errors*](#errors)
|
||||||
- 500 error after spawning my single-user server
|
- 500 error after spawning my single-user server
|
||||||
@@ -63,6 +64,17 @@ this to a particular set of users, and the admin_users lets you specify who
|
|||||||
among them may use the admin interface (not necessary, unless you need to do
|
among them may use the admin interface (not necessary, unless you need to do
|
||||||
things like inspect other users' servers, or modify the userlist at runtime).
|
things like inspect other users' servers, or modify the userlist at runtime).
|
||||||
|
|
||||||
|
### JupyterHub Docker container not accessible at localhost
|
||||||
|
|
||||||
|
Even though the command to start your Docker container exposes port 8000
|
||||||
|
(`docker run -p 8000:8000 -d --name jupyterhub jupyterhub/jupyterhub jupyterhub`),
|
||||||
|
it is possible that the IP address itself is not accessible/visible. As a result
|
||||||
|
when you try http://localhost:8000 in your browser, you are unable to connect
|
||||||
|
even though the container is running properly. One workaround is to explicitly
|
||||||
|
tell Jupyterhub to start at `0.0.0.0` which is visible to everyone. Try this
|
||||||
|
command:
|
||||||
|
`docker run -p 8000:8000 -d --name jupyterhub jupyterhub/jupyterhub jupyterhub --ip 0.0.0.0 --port 8000`
|
||||||
|
|
||||||
|
|
||||||
## Errors
|
## Errors
|
||||||
|
|
||||||
@@ -89,7 +101,7 @@ check if the cookie corresponds to the right user. This request is logged.
|
|||||||
If everything is working, the response logged will be similar to this:
|
If everything is working, the response logged will be similar to this:
|
||||||
|
|
||||||
```
|
```
|
||||||
200 GET /hub/api/authorizations/cookie/jupyter-hub-token-name/[secret] (@10.0.1.4) 6.10ms
|
200 GET /hub/api/authorizations/cookie/jupyterhub-token-name/[secret] (@10.0.1.4) 6.10ms
|
||||||
```
|
```
|
||||||
|
|
||||||
You should see a similar 200 message, as above, in the Hub log when you first
|
You should see a similar 200 message, as above, in the Hub log when you first
|
||||||
@@ -99,7 +111,7 @@ may mean that your single-user notebook server isn't connecting to your Hub.
|
|||||||
If you see 403 (forbidden) like this, it's a token problem:
|
If you see 403 (forbidden) like this, it's a token problem:
|
||||||
|
|
||||||
```
|
```
|
||||||
403 GET /hub/api/authorizations/cookie/jupyter-hub-token-name/[secret] (@10.0.1.4) 4.14ms
|
403 GET /hub/api/authorizations/cookie/jupyterhub-token-name/[secret] (@10.0.1.4) 4.14ms
|
||||||
```
|
```
|
||||||
|
|
||||||
Check the logs of the single-user notebook server, which may have more detailed
|
Check the logs of the single-user notebook server, which may have more detailed
|
||||||
|
@@ -1,4 +1,4 @@
|
|||||||
.. upgrade-dot-eight:
|
.. _upgrade-dot-eight:
|
||||||
|
|
||||||
Upgrading to JupyterHub version 0.8
|
Upgrading to JupyterHub version 0.8
|
||||||
===================================
|
===================================
|
||||||
|
@@ -7,14 +7,18 @@ from sphinx.ext.autodoc import ClassDocumenter, AttributeDocumenter
|
|||||||
|
|
||||||
class ConfigurableDocumenter(ClassDocumenter):
|
class ConfigurableDocumenter(ClassDocumenter):
|
||||||
"""Specialized Documenter subclass for traits with config=True"""
|
"""Specialized Documenter subclass for traits with config=True"""
|
||||||
|
|
||||||
objtype = 'configurable'
|
objtype = 'configurable'
|
||||||
directivetype = 'class'
|
directivetype = 'class'
|
||||||
|
|
||||||
def get_object_members(self, want_all):
|
def get_object_members(self, want_all):
|
||||||
"""Add traits with .tag(config=True) to members list"""
|
"""Add traits with .tag(config=True) to members list"""
|
||||||
check, members = super().get_object_members(want_all)
|
check, members = super().get_object_members(want_all)
|
||||||
get_traits = self.object.class_own_traits if self.options.inherited_members \
|
get_traits = (
|
||||||
else self.object.class_traits
|
self.object.class_own_traits
|
||||||
|
if self.options.inherited_members
|
||||||
|
else self.object.class_traits
|
||||||
|
)
|
||||||
trait_members = []
|
trait_members = []
|
||||||
for name, trait in sorted(get_traits(config=True).items()):
|
for name, trait in sorted(get_traits(config=True).items()):
|
||||||
# put help in __doc__ where autodoc will look for it
|
# put help in __doc__ where autodoc will look for it
|
||||||
@@ -42,10 +46,7 @@ class TraitDocumenter(AttributeDocumenter):
|
|||||||
default_s = ''
|
default_s = ''
|
||||||
else:
|
else:
|
||||||
default_s = repr(default)
|
default_s = repr(default)
|
||||||
sig = ' = {}({})'.format(
|
sig = ' = {}({})'.format(self.object.__class__.__name__, default_s)
|
||||||
self.object.__class__.__name__,
|
|
||||||
default_s,
|
|
||||||
)
|
|
||||||
return super().add_directive_header(sig)
|
return super().add_directive_header(sig)
|
||||||
|
|
||||||
|
|
||||||
|
@@ -25,6 +25,9 @@ Another use would be to copy initial content, such as tutorial files or referenc
|
|||||||
You can define your own bootstrap process by implementing a `pre_spawn_hook` on any spawner.
|
You can define your own bootstrap process by implementing a `pre_spawn_hook` on any spawner.
|
||||||
The Spawner itself is passed as parameter to your hook and you can easily get the contextual information out of the spawning process.
|
The Spawner itself is passed as parameter to your hook and you can easily get the contextual information out of the spawning process.
|
||||||
|
|
||||||
|
Similarly, there may be cases where you would like to clean up after a spawner stops.
|
||||||
|
You may implement a `post_stop_hook` that is always executed after the spawner stops.
|
||||||
|
|
||||||
If you implement a hook, make sure that it is *idempotent*. It will be executed every time
|
If you implement a hook, make sure that it is *idempotent*. It will be executed every time
|
||||||
a notebook server is spawned to the user. That means you should somehow
|
a notebook server is spawned to the user. That means you should somehow
|
||||||
ensure that things which should run only once are not running again and again.
|
ensure that things which should run only once are not running again and again.
|
||||||
|
@@ -1,8 +1,14 @@
|
|||||||
# Example for a Spawner.pre_spawn_hook
|
"""
|
||||||
# create a directory for the user before the spawner starts
|
Example for a Spawner.pre_spawn_hook
|
||||||
|
create a directory for the user before the spawner starts
|
||||||
|
"""
|
||||||
|
# pylint: disable=import-error
|
||||||
import os
|
import os
|
||||||
|
import shutil
|
||||||
|
from jupyter_client.localinterfaces import public_ips
|
||||||
|
|
||||||
def create_dir_hook(spawner):
|
def create_dir_hook(spawner):
|
||||||
|
""" Create directory """
|
||||||
username = spawner.user.name # get the username
|
username = spawner.user.name # get the username
|
||||||
volume_path = os.path.join('/volumes/jupyterhub', username)
|
volume_path = os.path.join('/volumes/jupyterhub', username)
|
||||||
if not os.path.exists(volume_path):
|
if not os.path.exists(volume_path):
|
||||||
@@ -10,17 +16,25 @@ def create_dir_hook(spawner):
|
|||||||
# now do whatever you think your user needs
|
# now do whatever you think your user needs
|
||||||
# ...
|
# ...
|
||||||
|
|
||||||
# attach the hook function to the spawner
|
def clean_dir_hook(spawner):
|
||||||
|
""" Delete directory """
|
||||||
|
username = spawner.user.name # get the username
|
||||||
|
temp_path = os.path.join('/volumes/jupyterhub', username, 'temp')
|
||||||
|
if os.path.exists(temp_path) and os.path.isdir(temp_path):
|
||||||
|
shutil.rmtree(temp_path)
|
||||||
|
|
||||||
|
# attach the hook functions to the spawner
|
||||||
|
# pylint: disable=undefined-variable
|
||||||
c.Spawner.pre_spawn_hook = create_dir_hook
|
c.Spawner.pre_spawn_hook = create_dir_hook
|
||||||
|
c.Spawner.post_stop_hook = clean_dir_hook
|
||||||
|
|
||||||
# Use the DockerSpawner to serve your users' notebooks
|
# Use the DockerSpawner to serve your users' notebooks
|
||||||
c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner'
|
c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner'
|
||||||
from jupyter_client.localinterfaces import public_ips
|
|
||||||
c.JupyterHub.hub_ip = public_ips()[0]
|
c.JupyterHub.hub_ip = public_ips()[0]
|
||||||
c.DockerSpawner.hub_ip_connect = public_ips()[0]
|
c.DockerSpawner.hub_ip_connect = public_ips()[0]
|
||||||
c.DockerSpawner.container_ip = "0.0.0.0"
|
c.DockerSpawner.container_ip = "0.0.0.0"
|
||||||
|
|
||||||
# You can now mount the volume to the docker container as we've
|
# You can now mount the volume to the docker container as we've
|
||||||
# made sure the directory exists
|
# made sure the directory exists
|
||||||
|
# pylint: disable=bad-whitespace
|
||||||
c.DockerSpawner.volumes = { '/volumes/jupyterhub/{username}/': '/home/jovyan/work' }
|
c.DockerSpawner.volumes = { '/volumes/jupyterhub/{username}/': '/home/jovyan/work' }
|
||||||
|
|
||||||
|
@@ -15,7 +15,7 @@ c.JupyterHub.services = [
|
|||||||
{
|
{
|
||||||
'name': 'cull-idle',
|
'name': 'cull-idle',
|
||||||
'admin': True,
|
'admin': True,
|
||||||
'command': 'python cull_idle_servers.py --timeout=3600'.split(),
|
'command': 'python3 cull_idle_servers.py --timeout=3600'.split(),
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
```
|
```
|
||||||
@@ -37,5 +37,5 @@ variable. Run `cull_idle_servers.py` manually.
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
export JUPYTERHUB_API_TOKEN=`jupyterhub token`
|
export JUPYTERHUB_API_TOKEN=`jupyterhub token`
|
||||||
python cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
|
python3 cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
|
||||||
```
|
```
|
||||||
|
343
examples/cull-idle/cull_idle_servers.py
Normal file → Executable file
343
examples/cull-idle/cull_idle_servers.py
Normal file → Executable file
@@ -1,4 +1,4 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python3
|
||||||
"""script to monitor and cull idle single-user servers
|
"""script to monitor and cull idle single-user servers
|
||||||
|
|
||||||
Caveats:
|
Caveats:
|
||||||
@@ -16,102 +16,348 @@ You can run this as a service managed by JupyterHub with this in your config::
|
|||||||
{
|
{
|
||||||
'name': 'cull-idle',
|
'name': 'cull-idle',
|
||||||
'admin': True,
|
'admin': True,
|
||||||
'command': 'python cull_idle_servers.py --timeout=3600'.split(),
|
'command': 'python3 cull_idle_servers.py --timeout=3600'.split(),
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
|
||||||
Or run it manually by generating an API token and storing it in `JUPYTERHUB_API_TOKEN`:
|
Or run it manually by generating an API token and storing it in `JUPYTERHUB_API_TOKEN`:
|
||||||
|
|
||||||
export JUPYTERHUB_API_TOKEN=`jupyterhub token`
|
export JUPYTERHUB_API_TOKEN=`jupyterhub token`
|
||||||
python cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
|
python3 cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
|
||||||
|
|
||||||
|
This script uses the same ``--timeout`` and ``--max-age`` values for
|
||||||
|
culling users and users' servers. If you want a different value for
|
||||||
|
users and servers, you should add this script to the services list
|
||||||
|
twice, just with different ``name``s, different values, and one with
|
||||||
|
the ``--cull-users`` option.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import datetime
|
from datetime import datetime, timezone
|
||||||
|
from functools import partial
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from dateutil.parser import parse as parse_date
|
try:
|
||||||
|
from urllib.parse import quote
|
||||||
|
except ImportError:
|
||||||
|
from urllib import quote
|
||||||
|
|
||||||
from tornado.gen import coroutine
|
import dateutil.parser
|
||||||
|
|
||||||
|
from tornado.gen import coroutine, multi
|
||||||
|
from tornado.locks import Semaphore
|
||||||
from tornado.log import app_log
|
from tornado.log import app_log
|
||||||
from tornado.httpclient import AsyncHTTPClient, HTTPRequest
|
from tornado.httpclient import AsyncHTTPClient, HTTPRequest
|
||||||
from tornado.ioloop import IOLoop, PeriodicCallback
|
from tornado.ioloop import IOLoop, PeriodicCallback
|
||||||
from tornado.options import define, options, parse_command_line
|
from tornado.options import define, options, parse_command_line
|
||||||
|
|
||||||
|
|
||||||
|
def parse_date(date_string):
|
||||||
|
"""Parse a timestamp
|
||||||
|
|
||||||
|
If it doesn't have a timezone, assume utc
|
||||||
|
|
||||||
|
Returned datetime object will always be timezone-aware
|
||||||
|
"""
|
||||||
|
dt = dateutil.parser.parse(date_string)
|
||||||
|
if not dt.tzinfo:
|
||||||
|
# assume naïve timestamps are UTC
|
||||||
|
dt = dt.replace(tzinfo=timezone.utc)
|
||||||
|
return dt
|
||||||
|
|
||||||
|
|
||||||
|
def format_td(td):
|
||||||
|
"""
|
||||||
|
Nicely format a timedelta object
|
||||||
|
|
||||||
|
as HH:MM:SS
|
||||||
|
"""
|
||||||
|
if td is None:
|
||||||
|
return "unknown"
|
||||||
|
if isinstance(td, str):
|
||||||
|
return td
|
||||||
|
seconds = int(td.total_seconds())
|
||||||
|
h = seconds // 3600
|
||||||
|
seconds = seconds % 3600
|
||||||
|
m = seconds // 60
|
||||||
|
seconds = seconds % 60
|
||||||
|
return "{h:02}:{m:02}:{seconds:02}".format(h=h, m=m, seconds=seconds)
|
||||||
|
|
||||||
|
|
||||||
@coroutine
|
@coroutine
|
||||||
def cull_idle(url, api_token, timeout, cull_users=False):
|
def cull_idle(url, api_token, inactive_limit, cull_users=False, max_age=0, concurrency=10):
|
||||||
"""Shutdown idle single-user servers
|
"""Shutdown idle single-user servers
|
||||||
|
|
||||||
If cull_users, inactive *users* will be deleted as well.
|
If cull_users, inactive *users* will be deleted as well.
|
||||||
"""
|
"""
|
||||||
auth_header = {
|
auth_header = {
|
||||||
'Authorization': 'token %s' % api_token
|
'Authorization': 'token %s' % api_token,
|
||||||
}
|
}
|
||||||
req = HTTPRequest(url=url + '/users',
|
req = HTTPRequest(
|
||||||
|
url=url + '/users',
|
||||||
headers=auth_header,
|
headers=auth_header,
|
||||||
)
|
)
|
||||||
now = datetime.datetime.utcnow()
|
now = datetime.now(timezone.utc)
|
||||||
cull_limit = now - datetime.timedelta(seconds=timeout)
|
|
||||||
client = AsyncHTTPClient()
|
client = AsyncHTTPClient()
|
||||||
resp = yield client.fetch(req)
|
|
||||||
|
if concurrency:
|
||||||
|
semaphore = Semaphore(concurrency)
|
||||||
|
@coroutine
|
||||||
|
def fetch(req):
|
||||||
|
"""client.fetch wrapped in a semaphore to limit concurrency"""
|
||||||
|
yield semaphore.acquire()
|
||||||
|
try:
|
||||||
|
return (yield client.fetch(req))
|
||||||
|
finally:
|
||||||
|
yield semaphore.release()
|
||||||
|
else:
|
||||||
|
fetch = client.fetch
|
||||||
|
|
||||||
|
resp = yield fetch(req)
|
||||||
users = json.loads(resp.body.decode('utf8', 'replace'))
|
users = json.loads(resp.body.decode('utf8', 'replace'))
|
||||||
futures = []
|
futures = []
|
||||||
|
|
||||||
@coroutine
|
@coroutine
|
||||||
def cull_one(user, last_activity):
|
def handle_server(user, server_name, server):
|
||||||
"""cull one user"""
|
"""Handle (maybe) culling a single server
|
||||||
|
|
||||||
# shutdown server first. Hub doesn't allow deleting users with running servers.
|
Returns True if server is now stopped (user removable),
|
||||||
if user['server']:
|
False otherwise.
|
||||||
app_log.info("Culling server for %s (inactive since %s)", user['name'], last_activity)
|
"""
|
||||||
req = HTTPRequest(url=url + '/users/%s/server' % user['name'],
|
log_name = user['name']
|
||||||
method='DELETE',
|
if server_name:
|
||||||
headers=auth_header,
|
log_name = '%s/%s' % (user['name'], server_name)
|
||||||
|
if server.get('pending'):
|
||||||
|
app_log.warning(
|
||||||
|
"Not culling server %s with pending %s",
|
||||||
|
log_name, server['pending'])
|
||||||
|
return False
|
||||||
|
|
||||||
|
# jupyterhub < 0.9 defined 'server.url' once the server was ready
|
||||||
|
# as an *implicit* signal that the server was ready.
|
||||||
|
# 0.9 adds a dedicated, explicit 'ready' field.
|
||||||
|
# By current (0.9) definitions, servers that have no pending
|
||||||
|
# events and are not ready shouldn't be in the model,
|
||||||
|
# but let's check just to be safe.
|
||||||
|
|
||||||
|
if not server.get('ready', bool(server['url'])):
|
||||||
|
app_log.warning(
|
||||||
|
"Not culling not-ready not-pending server %s: %s",
|
||||||
|
log_name, server)
|
||||||
|
return False
|
||||||
|
|
||||||
|
if server.get('started'):
|
||||||
|
age = now - parse_date(server['started'])
|
||||||
|
else:
|
||||||
|
# started may be undefined on jupyterhub < 0.9
|
||||||
|
age = None
|
||||||
|
|
||||||
|
# check last activity
|
||||||
|
# last_activity can be None in 0.9
|
||||||
|
if server['last_activity']:
|
||||||
|
inactive = now - parse_date(server['last_activity'])
|
||||||
|
else:
|
||||||
|
# no activity yet, use start date
|
||||||
|
# last_activity may be None with jupyterhub 0.9,
|
||||||
|
# which introduces the 'started' field which is never None
|
||||||
|
# for running servers
|
||||||
|
inactive = age
|
||||||
|
|
||||||
|
should_cull = (inactive is not None and
|
||||||
|
inactive.total_seconds() >= inactive_limit)
|
||||||
|
if should_cull:
|
||||||
|
app_log.info(
|
||||||
|
"Culling server %s (inactive for %s)",
|
||||||
|
log_name, format_td(inactive))
|
||||||
|
|
||||||
|
if max_age and not should_cull:
|
||||||
|
# only check started if max_age is specified
|
||||||
|
# so that we can still be compatible with jupyterhub 0.8
|
||||||
|
# which doesn't define the 'started' field
|
||||||
|
if age is not None and age.total_seconds() >= max_age:
|
||||||
|
app_log.info(
|
||||||
|
"Culling server %s (age: %s, inactive for %s)",
|
||||||
|
log_name, format_td(age), format_td(inactive))
|
||||||
|
should_cull = True
|
||||||
|
|
||||||
|
if not should_cull:
|
||||||
|
app_log.debug(
|
||||||
|
"Not culling server %s (age: %s, inactive for %s)",
|
||||||
|
log_name, format_td(age), format_td(inactive))
|
||||||
|
return False
|
||||||
|
|
||||||
|
if server_name:
|
||||||
|
# culling a named server
|
||||||
|
delete_url = url + "/users/%s/servers/%s" % (
|
||||||
|
quote(user['name']), quote(server['name'])
|
||||||
)
|
)
|
||||||
yield client.fetch(req)
|
else:
|
||||||
if cull_users:
|
delete_url = url + '/users/%s/server' % quote(user['name'])
|
||||||
app_log.info("Culling user %s (inactive since %s)", user['name'], last_activity)
|
|
||||||
req = HTTPRequest(url=url + '/users/%s' % user['name'],
|
req = HTTPRequest(
|
||||||
method='DELETE',
|
url=delete_url, method='DELETE', headers=auth_header,
|
||||||
headers=auth_header,
|
)
|
||||||
|
resp = yield fetch(req)
|
||||||
|
if resp.code == 202:
|
||||||
|
app_log.warning(
|
||||||
|
"Server %s is slow to stop",
|
||||||
|
log_name,
|
||||||
)
|
)
|
||||||
yield client.fetch(req)
|
# return False to prevent culling user with pending shutdowns
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
@coroutine
|
||||||
|
def handle_user(user):
|
||||||
|
"""Handle one user.
|
||||||
|
|
||||||
|
Create a list of their servers, and async exec them. Wait for
|
||||||
|
that to be done, and if all servers are stopped, possibly cull
|
||||||
|
the user.
|
||||||
|
"""
|
||||||
|
# shutdown servers first.
|
||||||
|
# Hub doesn't allow deleting users with running servers.
|
||||||
|
# jupyterhub 0.9 always provides a 'servers' model.
|
||||||
|
# 0.8 only does this when named servers are enabled.
|
||||||
|
if 'servers' in user:
|
||||||
|
servers = user['servers']
|
||||||
|
else:
|
||||||
|
# jupyterhub < 0.9 without named servers enabled.
|
||||||
|
# create servers dict with one entry for the default server
|
||||||
|
# from the user model.
|
||||||
|
# only if the server is running.
|
||||||
|
servers = {}
|
||||||
|
if user['server']:
|
||||||
|
servers[''] = {
|
||||||
|
'last_activity': user['last_activity'],
|
||||||
|
'pending': user['pending'],
|
||||||
|
'url': user['server'],
|
||||||
|
}
|
||||||
|
server_futures = [
|
||||||
|
handle_server(user, server_name, server)
|
||||||
|
for server_name, server in servers.items()
|
||||||
|
]
|
||||||
|
results = yield multi(server_futures)
|
||||||
|
if not cull_users:
|
||||||
|
return
|
||||||
|
# some servers are still running, cannot cull users
|
||||||
|
still_alive = len(results) - sum(results)
|
||||||
|
if still_alive:
|
||||||
|
app_log.debug(
|
||||||
|
"Not culling user %s with %i servers still alive",
|
||||||
|
user['name'], still_alive)
|
||||||
|
return False
|
||||||
|
|
||||||
|
should_cull = False
|
||||||
|
if user.get('created'):
|
||||||
|
age = now - parse_date(user['created'])
|
||||||
|
else:
|
||||||
|
# created may be undefined on jupyterhub < 0.9
|
||||||
|
age = None
|
||||||
|
|
||||||
|
# check last activity
|
||||||
|
# last_activity can be None in 0.9
|
||||||
|
if user['last_activity']:
|
||||||
|
inactive = now - parse_date(user['last_activity'])
|
||||||
|
else:
|
||||||
|
# no activity yet, use start date
|
||||||
|
# last_activity may be None with jupyterhub 0.9,
|
||||||
|
# which introduces the 'created' field which is never None
|
||||||
|
inactive = age
|
||||||
|
|
||||||
|
should_cull = (inactive is not None and
|
||||||
|
inactive.total_seconds() >= inactive_limit)
|
||||||
|
if should_cull:
|
||||||
|
app_log.info(
|
||||||
|
"Culling user %s (inactive for %s)",
|
||||||
|
user['name'], inactive)
|
||||||
|
|
||||||
|
if max_age and not should_cull:
|
||||||
|
# only check created if max_age is specified
|
||||||
|
# so that we can still be compatible with jupyterhub 0.8
|
||||||
|
# which doesn't define the 'started' field
|
||||||
|
if age is not None and age.total_seconds() >= max_age:
|
||||||
|
app_log.info(
|
||||||
|
"Culling user %s (age: %s, inactive for %s)",
|
||||||
|
user['name'], format_td(age), format_td(inactive))
|
||||||
|
should_cull = True
|
||||||
|
|
||||||
|
if not should_cull:
|
||||||
|
app_log.debug(
|
||||||
|
"Not culling user %s (created: %s, last active: %s)",
|
||||||
|
user['name'], format_td(age), format_td(inactive))
|
||||||
|
return False
|
||||||
|
|
||||||
|
req = HTTPRequest(
|
||||||
|
url=url + '/users/%s' % user['name'],
|
||||||
|
method='DELETE',
|
||||||
|
headers=auth_header,
|
||||||
|
)
|
||||||
|
yield fetch(req)
|
||||||
|
return True
|
||||||
|
|
||||||
for user in users:
|
for user in users:
|
||||||
if not user['server'] and not cull_users:
|
futures.append((user['name'], handle_user(user)))
|
||||||
# server not running and not culling users, nothing to do
|
|
||||||
continue
|
|
||||||
last_activity = parse_date(user['last_activity'])
|
|
||||||
if last_activity < cull_limit:
|
|
||||||
futures.append((user['name'], cull_one(user, last_activity)))
|
|
||||||
else:
|
|
||||||
app_log.debug("Not culling %s (active since %s)", user['name'], last_activity)
|
|
||||||
|
|
||||||
for (name, f) in futures:
|
for (name, f) in futures:
|
||||||
yield f
|
try:
|
||||||
app_log.debug("Finished culling %s", name)
|
result = yield f
|
||||||
|
except Exception:
|
||||||
|
app_log.exception("Error processing %s", name)
|
||||||
|
else:
|
||||||
|
if result:
|
||||||
|
app_log.debug("Finished culling %s", name)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
define('url', default=os.environ.get('JUPYTERHUB_API_URL'), help="The JupyterHub API URL")
|
define(
|
||||||
define('timeout', default=600, help="The idle timeout (in seconds)")
|
'url',
|
||||||
define('cull_every', default=0, help="The interval (in seconds) for checking for idle servers to cull")
|
default=os.environ.get('JUPYTERHUB_API_URL'),
|
||||||
define('cull_users', default=False,
|
help="The JupyterHub API URL",
|
||||||
help="""Cull users in addition to servers.
|
|
||||||
This is for use in temporary-user cases such as tmpnb.""",
|
|
||||||
)
|
)
|
||||||
|
define('timeout', default=600, help="The idle timeout (in seconds)")
|
||||||
|
define('cull_every', default=0,
|
||||||
|
help="The interval (in seconds) for checking for idle servers to cull")
|
||||||
|
define('max_age', default=0,
|
||||||
|
help="The maximum age (in seconds) of servers that should be culled even if they are active")
|
||||||
|
define('cull_users', default=False,
|
||||||
|
help="""Cull users in addition to servers.
|
||||||
|
This is for use in temporary-user cases such as tmpnb.""",
|
||||||
|
)
|
||||||
|
define('concurrency', default=10,
|
||||||
|
help="""Limit the number of concurrent requests made to the Hub.
|
||||||
|
|
||||||
|
Deleting a lot of users at the same time can slow down the Hub,
|
||||||
|
so limit the number of API requests we have outstanding at any given time.
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
parse_command_line()
|
parse_command_line()
|
||||||
if not options.cull_every:
|
if not options.cull_every:
|
||||||
options.cull_every = options.timeout // 2
|
options.cull_every = options.timeout // 2
|
||||||
|
|
||||||
api_token = os.environ['JUPYTERHUB_API_TOKEN']
|
api_token = os.environ['JUPYTERHUB_API_TOKEN']
|
||||||
|
|
||||||
|
try:
|
||||||
|
AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
|
||||||
|
except ImportError as e:
|
||||||
|
app_log.warning(
|
||||||
|
"Could not load pycurl: %s\n"
|
||||||
|
"pycurl is recommended if you have a large number of users.",
|
||||||
|
e)
|
||||||
|
|
||||||
loop = IOLoop.current()
|
loop = IOLoop.current()
|
||||||
cull = lambda : cull_idle(options.url, api_token, options.timeout, options.cull_users)
|
cull = partial(
|
||||||
# run once before scheduling periodic call
|
cull_idle,
|
||||||
loop.run_sync(cull)
|
url=options.url,
|
||||||
|
api_token=api_token,
|
||||||
|
inactive_limit=options.timeout,
|
||||||
|
cull_users=options.cull_users,
|
||||||
|
max_age=options.max_age,
|
||||||
|
concurrency=options.concurrency,
|
||||||
|
)
|
||||||
|
# schedule first cull immediately
|
||||||
|
# because PeriodicCallback doesn't start until the end of the first interval
|
||||||
|
loop.add_callback(cull)
|
||||||
# schedule periodic cull
|
# schedule periodic cull
|
||||||
pc = PeriodicCallback(cull, 1e3 * options.cull_every)
|
pc = PeriodicCallback(cull, 1e3 * options.cull_every)
|
||||||
pc.start()
|
pc.start()
|
||||||
@@ -119,4 +365,3 @@ if __name__ == '__main__':
|
|||||||
loop.start()
|
loop.start()
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
pass
|
pass
|
||||||
|
|
@@ -3,6 +3,6 @@ c.JupyterHub.services = [
|
|||||||
{
|
{
|
||||||
'name': 'cull-idle',
|
'name': 'cull-idle',
|
||||||
'admin': True,
|
'admin': True,
|
||||||
'command': 'python cull_idle_servers.py --timeout=3600'.split(),
|
'command': 'python3 cull_idle_servers.py --timeout=3600'.split(),
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
90
examples/external-oauth/README.md
Normal file
90
examples/external-oauth/README.md
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
# Using JupyterHub as an OAuth provider
|
||||||
|
|
||||||
|
JupyterHub 0.9 introduces the ability to use JupyterHub as an OAuth provider
|
||||||
|
for external services that may not be otherwise integrated with JupyterHub.
|
||||||
|
The main feature this enables is using JupyterHub like a 'regular' OAuth 2
|
||||||
|
provider for services running anywhere.
|
||||||
|
|
||||||
|
There are two examples here. `whoami-oauth` (in the service-whoami directory) uses `jupyterhub.services.HubOAuthenticated`
|
||||||
|
to authenticate requests with the Hub for a service run on its own host.
|
||||||
|
This is an implementation of OAuth 2.0 provided by the jupyterhub package,
|
||||||
|
which configures all of the necessary URLs from environment variables.
|
||||||
|
|
||||||
|
The second is `whoami-oauth-basic`, which implements the full OAuth process
|
||||||
|
without any inheritance, so it can be used as a reference for OAuth
|
||||||
|
implementations in other web servers or languages.
|
||||||
|
|
||||||
|
## Run the example
|
||||||
|
|
||||||
|
1. generate an API token:
|
||||||
|
|
||||||
|
export JUPYTERHUB_API_TOKEN=`openssl rand -hex 32`
|
||||||
|
|
||||||
|
2. launch a version of the the whoami service.
|
||||||
|
For `whoami-oauth`:
|
||||||
|
|
||||||
|
bash launch-service.sh &
|
||||||
|
|
||||||
|
or for `whoami-oauth-basic`:
|
||||||
|
|
||||||
|
bash launch-service-basic.sh &
|
||||||
|
|
||||||
|
3. Launch JupyterHub:
|
||||||
|
|
||||||
|
jupyterhub
|
||||||
|
|
||||||
|
4. Visit http://127.0.0.1:5555/
|
||||||
|
|
||||||
|
After logging in with your local-system credentials, you should see a JSON dump of your user info:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"admin": false,
|
||||||
|
"last_activity": "2016-05-27T14:05:18.016372",
|
||||||
|
"name": "queequeg",
|
||||||
|
"pending": null,
|
||||||
|
"server": "/user/queequeg"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
The essential pieces for using JupyterHub as an OAuth provider are:
|
||||||
|
|
||||||
|
1. registering your service with jupyterhub:
|
||||||
|
|
||||||
|
```python
|
||||||
|
c.JupyterHub.services = [
|
||||||
|
{
|
||||||
|
# the name of your service
|
||||||
|
# should be simple and unique.
|
||||||
|
# mostly used to identify your service in logging
|
||||||
|
"name": "my-service",
|
||||||
|
# the oauth client id of your service
|
||||||
|
# must be unique but isn't private
|
||||||
|
# can be randomly generated or hand-written
|
||||||
|
"oauth_client_id": "abc123",
|
||||||
|
# the API token and client secret of the service
|
||||||
|
# should be generated securely,
|
||||||
|
# e.g. via `openssl rand -hex 32`
|
||||||
|
"api_token": "abc123...",
|
||||||
|
# the redirect target for jupyterhub to send users
|
||||||
|
# after successful authentication
|
||||||
|
"oauth_redirect_uri": "https://service-host/oauth_callback"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Telling your service how to authenticate with JupyterHub.
|
||||||
|
|
||||||
|
The relevant OAuth URLs and keys for using JupyterHub as an OAuth provider are:
|
||||||
|
|
||||||
|
1. the client_id, used in oauth requests
|
||||||
|
2. the api token registered with jupyterhub is the client_secret for oauth requests
|
||||||
|
3. oauth url of the Hub, which is "/hub/api/oauth2/authorize", e.g. `https://myhub.horse/hub/api/oauth2/authorize`
|
||||||
|
4. a redirect handler to receive the authenticated response
|
||||||
|
(at `oauth_redirect_uri` registered in jupyterhub config)
|
||||||
|
5. the token URL for completing the oauth process is "/hub/api/oauth2/token",
|
||||||
|
e.g. `https://myhub.horse/hub/api/oauth2/token`.
|
||||||
|
The reply is JSON and the token is in the field `access_token`.
|
||||||
|
6. Users can be identified by oauth token by making a request to `/hub/api/user`
|
||||||
|
with the new token in the `Authorization` header.
|
18
examples/external-oauth/jupyterhub_config.py
Normal file
18
examples/external-oauth/jupyterhub_config.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
import os
|
||||||
|
|
||||||
|
# get the oauth client's API token.
|
||||||
|
# this could come from anywhere
|
||||||
|
api_token = os.getenv("JUPYTERHUB_API_TOKEN")
|
||||||
|
if not api_token:
|
||||||
|
raise ValueError("Make sure to `export JUPYTERHUB_API_TOKEN=$(openssl rand -hex 32)`")
|
||||||
|
|
||||||
|
# tell JupyterHub to register the service as an external oauth client
|
||||||
|
|
||||||
|
c.JupyterHub.services = [
|
||||||
|
{
|
||||||
|
'name': 'external-oauth',
|
||||||
|
'oauth_client_id': "whoami-oauth-client-test",
|
||||||
|
'api_token': api_token,
|
||||||
|
'oauth_redirect_uri': 'http://127.0.0.1:5555/oauth_callback',
|
||||||
|
},
|
||||||
|
]
|
20
examples/external-oauth/launch-service-basic.sh
Normal file
20
examples/external-oauth/launch-service-basic.sh
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# script to launch whoami-oauth-basic service
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# the service needs to know:
|
||||||
|
# 1. API token
|
||||||
|
if [[ -z "${JUPYTERHUB_API_TOKEN}" ]]; then
|
||||||
|
echo 'set API token with export JUPYTERHUB_API_TOKEN=$(openssl rand -hex 32)'
|
||||||
|
fi
|
||||||
|
|
||||||
|
# 2. oauth client ID
|
||||||
|
export JUPYTERHUB_CLIENT_ID='whoami-oauth-client-test'
|
||||||
|
# 3. where the Hub is
|
||||||
|
export JUPYTERHUB_URL='http://127.0.0.1:8000'
|
||||||
|
|
||||||
|
# 4. where to run
|
||||||
|
export JUPYTERHUB_SERVICE_URL='http://127.0.0.1:5555'
|
||||||
|
|
||||||
|
# launch the service
|
||||||
|
exec python3 whoami-oauth-basic.py
|
21
examples/external-oauth/launch-service.sh
Normal file
21
examples/external-oauth/launch-service.sh
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# script to launch whoami-oauth service
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# the service needs to know:
|
||||||
|
# 1. API token
|
||||||
|
if [[ -z "${JUPYTERHUB_API_TOKEN}" ]]; then
|
||||||
|
echo 'set API token with export JUPYTERHUB_API_TOKEN=$(openssl rand -hex 32)'
|
||||||
|
fi
|
||||||
|
|
||||||
|
# 2. oauth client ID
|
||||||
|
export JUPYTERHUB_CLIENT_ID="whoami-oauth-client-test"
|
||||||
|
# 3. what URL to run on
|
||||||
|
export JUPYTERHUB_SERVICE_PREFIX='/'
|
||||||
|
export JUPYTERHUB_SERVICE_URL='http://127.0.0.1:5555'
|
||||||
|
export JUPYTERHUB_OAUTH_CALLBACK_URL="$JUPYTERHUB_SERVICE_URL/oauth_callback"
|
||||||
|
# 4. where the Hub is
|
||||||
|
export JUPYTERHUB_HOST='http://127.0.0.1:8000'
|
||||||
|
|
||||||
|
# launch the service
|
||||||
|
exec python3 ../service-whoami/whoami-oauth.py
|
135
examples/external-oauth/whoami-oauth-basic.py
Normal file
135
examples/external-oauth/whoami-oauth-basic.py
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
"""Basic implementation of OAuth without any inheritance
|
||||||
|
|
||||||
|
Implements OAuth handshake manually
|
||||||
|
so all URLs and requests necessary for OAuth with JupyterHub should be in one place
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from urllib.parse import urlencode, urlparse
|
||||||
|
|
||||||
|
from tornado.auth import OAuth2Mixin
|
||||||
|
from tornado.httpclient import AsyncHTTPClient, HTTPRequest
|
||||||
|
from tornado.httputil import url_concat
|
||||||
|
from tornado.ioloop import IOLoop
|
||||||
|
from tornado import log
|
||||||
|
from tornado import web
|
||||||
|
|
||||||
|
|
||||||
|
class JupyterHubLoginHandler(web.RequestHandler):
|
||||||
|
"""Login Handler
|
||||||
|
|
||||||
|
this handler both begins and ends the OAuth process
|
||||||
|
"""
|
||||||
|
|
||||||
|
async def token_for_code(self, code):
|
||||||
|
"""Complete OAuth by requesting an access token for an oauth code"""
|
||||||
|
params = dict(
|
||||||
|
client_id=self.settings['client_id'],
|
||||||
|
client_secret=self.settings['api_token'],
|
||||||
|
grant_type='authorization_code',
|
||||||
|
code=code,
|
||||||
|
redirect_uri=self.settings['redirect_uri'],
|
||||||
|
)
|
||||||
|
req = HTTPRequest(self.settings['token_url'], method='POST',
|
||||||
|
body=urlencode(params).encode('utf8'),
|
||||||
|
headers={
|
||||||
|
'Content-Type': 'application/x-www-form-urlencoded',
|
||||||
|
},
|
||||||
|
)
|
||||||
|
response = await AsyncHTTPClient().fetch(req)
|
||||||
|
data = json.loads(response.body.decode('utf8', 'replace'))
|
||||||
|
return data['access_token']
|
||||||
|
|
||||||
|
async def get(self):
|
||||||
|
code = self.get_argument('code', None)
|
||||||
|
if code:
|
||||||
|
# code is set, we are the oauth callback
|
||||||
|
# complete oauth
|
||||||
|
token = await self.token_for_code(code)
|
||||||
|
# login successful, set cookie and redirect back to home
|
||||||
|
self.set_secure_cookie('whoami-oauth-token', token)
|
||||||
|
self.redirect('/')
|
||||||
|
else:
|
||||||
|
# we are the login handler,
|
||||||
|
# begin oauth process which will come back later with an
|
||||||
|
# authorization_code
|
||||||
|
self.redirect(url_concat(
|
||||||
|
self.settings['authorize_url'],
|
||||||
|
dict(
|
||||||
|
redirect_uri=self.settings['redirect_uri'],
|
||||||
|
client_id=self.settings['client_id'],
|
||||||
|
response_type='code',
|
||||||
|
)
|
||||||
|
))
|
||||||
|
|
||||||
|
|
||||||
|
class WhoAmIHandler(web.RequestHandler):
|
||||||
|
"""Serve the JSON model for the authenticated user"""
|
||||||
|
|
||||||
|
def get_current_user(self):
|
||||||
|
"""The login handler stored a JupyterHub API token in a cookie
|
||||||
|
|
||||||
|
@web.authenticated calls this method.
|
||||||
|
If a Falsy value is returned, the request is redirected to `login_url`.
|
||||||
|
If a Truthy value is returned, the request is allowed to proceed.
|
||||||
|
"""
|
||||||
|
token = self.get_secure_cookie('whoami-oauth-token')
|
||||||
|
|
||||||
|
if token:
|
||||||
|
# secure cookies are bytes, decode to str
|
||||||
|
return token.decode('ascii', 'replace')
|
||||||
|
|
||||||
|
async def user_for_token(self, token):
|
||||||
|
"""Retrieve the user for a given token, via /hub/api/user"""
|
||||||
|
|
||||||
|
req = HTTPRequest(
|
||||||
|
self.settings['user_url'],
|
||||||
|
headers={
|
||||||
|
'Authorization': f'token {token}'
|
||||||
|
},
|
||||||
|
)
|
||||||
|
response = await AsyncHTTPClient().fetch(req)
|
||||||
|
return json.loads(response.body.decode('utf8', 'replace'))
|
||||||
|
|
||||||
|
@web.authenticated
|
||||||
|
async def get(self):
|
||||||
|
user_token = self.get_current_user()
|
||||||
|
user_model = await self.user_for_token(user_token)
|
||||||
|
self.set_header('content-type', 'application/json')
|
||||||
|
self.write(json.dumps(user_model, indent=1, sort_keys=True))
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
log.enable_pretty_logging()
|
||||||
|
|
||||||
|
# construct OAuth URLs from jupyterhub base URL
|
||||||
|
hub_api = os.environ['JUPYTERHUB_URL'].rstrip('/') + '/hub/api'
|
||||||
|
authorize_url = hub_api + '/oauth2/authorize'
|
||||||
|
token_url = hub_api + '/oauth2/token'
|
||||||
|
user_url = hub_api + '/user'
|
||||||
|
|
||||||
|
app = web.Application([
|
||||||
|
('/oauth_callback', JupyterHubLoginHandler),
|
||||||
|
('/', WhoAmIHandler),
|
||||||
|
],
|
||||||
|
login_url='/oauth_callback',
|
||||||
|
cookie_secret=os.urandom(32),
|
||||||
|
api_token=os.environ['JUPYTERHUB_API_TOKEN'],
|
||||||
|
client_id=os.environ['JUPYTERHUB_CLIENT_ID'],
|
||||||
|
redirect_uri=os.environ['JUPYTERHUB_SERVICE_URL'].rstrip('/') + '/oauth_callback',
|
||||||
|
authorize_url=authorize_url,
|
||||||
|
token_url=token_url,
|
||||||
|
user_url=user_url,
|
||||||
|
)
|
||||||
|
|
||||||
|
url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL'])
|
||||||
|
log.app_log.info("Running basic whoami service on %s",
|
||||||
|
os.environ['JUPYTERHUB_SERVICE_URL'])
|
||||||
|
app.listen(url.port, url.hostname)
|
||||||
|
IOLoop.current().start()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
BIN
examples/external-oauth/whoami.png
Normal file
BIN
examples/external-oauth/whoami.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 35 KiB |
60
examples/service-announcement/README.md
Normal file
60
examples/service-announcement/README.md
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
|
||||||
|
# Simple Announcement Service Example
|
||||||
|
|
||||||
|
This is a simple service that allows administrators to manage announcements
|
||||||
|
that appear when JupyterHub renders pages.
|
||||||
|
|
||||||
|
To run the service as a hub-managed service simply include in your JupyterHub
|
||||||
|
configuration file something like:
|
||||||
|
|
||||||
|
c.JupyterHub.services = [
|
||||||
|
{
|
||||||
|
'name': 'announcement',
|
||||||
|
'url': 'http://127.0.0.1:8888',
|
||||||
|
'command': ["python", "-m", "announcement"],
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
This starts the announcements service up at `/services/announcement` when
|
||||||
|
JupyterHub launches. By default the announcement text is empty.
|
||||||
|
|
||||||
|
The `announcement` module has a configurable port (default 8888) and an API
|
||||||
|
prefix setting. By default the API prefix is `JUPYTERHUB_SERVICE_PREFIX` if
|
||||||
|
that environment variable is set or `/` if it is not.
|
||||||
|
|
||||||
|
## Managing the Announcement
|
||||||
|
|
||||||
|
Admin users can set the announcement text with an API token:
|
||||||
|
|
||||||
|
$ curl -X POST -H "Authorization: token <token>" \
|
||||||
|
-d "{'announcement':'JupyterHub will be upgraded on August 14!'}" \
|
||||||
|
https://.../services/announcement
|
||||||
|
|
||||||
|
Anyone can read the announcement:
|
||||||
|
|
||||||
|
$ curl https://.../services/announcement | python -m json.tool
|
||||||
|
{
|
||||||
|
announcement: "JupyterHub will be upgraded on August 14!",
|
||||||
|
timestamp: "...",
|
||||||
|
user: "..."
|
||||||
|
}
|
||||||
|
|
||||||
|
The time the announcement was posted is recorded in the `timestamp` field and
|
||||||
|
the user who posted the announcement is recorded in the `user` field.
|
||||||
|
|
||||||
|
To clear the announcement text, just DELETE. Only admin users can do this.
|
||||||
|
|
||||||
|
$ curl -X POST -H "Authorization: token <token>" \
|
||||||
|
https://.../services/announcement
|
||||||
|
|
||||||
|
## Seeing the Announcement in JupyterHub
|
||||||
|
|
||||||
|
To be able to render the announcement, include the provide `page.html` template
|
||||||
|
that extends the base `page.html` template. Set `c.JupyterHub.template_paths`
|
||||||
|
in JupyterHub's configuration to include the path to the extending template.
|
||||||
|
The template changes the `announcement` element and does a JQuery `$.get()` call
|
||||||
|
to retrieve the announcement text.
|
||||||
|
|
||||||
|
JupyterHub's configurable announcement template variables can be set for various
|
||||||
|
pages like login, logout, spawn, and home. Including the template provided in
|
||||||
|
this example overrides all of those.
|
73
examples/service-announcement/announcement.py
Normal file
73
examples/service-announcement/announcement.py
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
|
||||||
|
import argparse
|
||||||
|
import datetime
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
|
||||||
|
from jupyterhub.services.auth import HubAuthenticated
|
||||||
|
from tornado import escape, gen, ioloop, web
|
||||||
|
|
||||||
|
|
||||||
|
class AnnouncementRequestHandler(HubAuthenticated, web.RequestHandler):
|
||||||
|
"""Dynamically manage page announcements"""
|
||||||
|
|
||||||
|
hub_users = []
|
||||||
|
allow_admin = True
|
||||||
|
|
||||||
|
def initialize(self, storage):
|
||||||
|
"""Create storage for announcement text"""
|
||||||
|
self.storage = storage
|
||||||
|
|
||||||
|
@web.authenticated
|
||||||
|
def post(self):
|
||||||
|
"""Update announcement"""
|
||||||
|
doc = escape.json_decode(self.request.body)
|
||||||
|
self.storage["announcement"] = doc["announcement"]
|
||||||
|
self.storage["timestamp"] = datetime.datetime.now().isoformat()
|
||||||
|
self.storage["user"] = user["name"]
|
||||||
|
self.write_to_json(self.storage)
|
||||||
|
|
||||||
|
def get(self):
|
||||||
|
"""Retrieve announcement"""
|
||||||
|
self.write_to_json(self.storage)
|
||||||
|
|
||||||
|
@web.authenticated
|
||||||
|
def delete(self):
|
||||||
|
"""Clear announcement"""
|
||||||
|
self.storage["announcement"] = ""
|
||||||
|
self.write_to_json(self.storage)
|
||||||
|
|
||||||
|
def write_to_json(self, doc):
|
||||||
|
"""Write dictionary document as JSON"""
|
||||||
|
self.set_header("Content-Type", "application/json; charset=UTF-8")
|
||||||
|
self.write(escape.utf8(json.dumps(doc)))
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
args = parse_arguments()
|
||||||
|
application = create_application(**vars(args))
|
||||||
|
application.listen(args.port)
|
||||||
|
ioloop.IOLoop.current().start()
|
||||||
|
|
||||||
|
|
||||||
|
def parse_arguments():
|
||||||
|
parser = argparse.ArgumentParser()
|
||||||
|
parser.add_argument("--api-prefix", "-a",
|
||||||
|
default=os.environ.get("JUPYTERHUB_SERVICE_PREFIX", "/"),
|
||||||
|
help="application API prefix")
|
||||||
|
parser.add_argument("--port", "-p",
|
||||||
|
default=8888,
|
||||||
|
help="port for API to listen on",
|
||||||
|
type=int)
|
||||||
|
return parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
|
def create_application(api_prefix="/",
|
||||||
|
handler=AnnouncementRequestHandler,
|
||||||
|
**kwargs):
|
||||||
|
storage = dict(announcement="", timestamp="", user="")
|
||||||
|
return web.Application([(api_prefix, handler, dict(storage=storage))])
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
15
examples/service-announcement/jupyterhub_config.py
Normal file
15
examples/service-announcement/jupyterhub_config.py
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
|
||||||
|
# To run the announcement service managed by the hub, add this.
|
||||||
|
|
||||||
|
c.JupyterHub.services = [
|
||||||
|
{
|
||||||
|
'name': 'announcement',
|
||||||
|
'url': 'http://127.0.0.1:8888',
|
||||||
|
'command': ["python", "-m", "announcement"],
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
# The announcements need to get on the templates somehow, see page.html
|
||||||
|
# for an example of how to do this.
|
||||||
|
|
||||||
|
c.JupyterHub.template_paths = ["templates"]
|
14
examples/service-announcement/templates/page.html
Normal file
14
examples/service-announcement/templates/page.html
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
{% extends "templates/page.html" %}
|
||||||
|
{% block announcement %}
|
||||||
|
<div class="container text-center announcement">
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block script %}
|
||||||
|
{{ super() }}
|
||||||
|
<script>
|
||||||
|
$.get("/services/announcement/", function(data) {
|
||||||
|
$(".announcement").html(data["announcement"]);
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
{% endblock %}
|
@@ -26,6 +26,10 @@ After logging in with your local-system credentials, you should see a JSON dump
|
|||||||
|
|
||||||
This relies on the Hub starting the whoami services, via config (see [jupyterhub_config.py](./jupyterhub_config.py)).
|
This relies on the Hub starting the whoami services, via config (see [jupyterhub_config.py](./jupyterhub_config.py)).
|
||||||
|
|
||||||
|
You may set the `hub_users` configuration in the service script
|
||||||
|
to restrict access to the service to a whitelist of allowed users.
|
||||||
|
By default, any authenticated user is allowed.
|
||||||
|
|
||||||
A similar service could be run externally, by setting the JupyterHub service environment variables:
|
A similar service could be run externally, by setting the JupyterHub service environment variables:
|
||||||
|
|
||||||
JUPYTERHUB_API_TOKEN
|
JUPYTERHUB_API_TOKEN
|
||||||
|
@@ -17,7 +17,11 @@ from jupyterhub.services.auth import HubOAuthenticated, HubOAuthCallbackHandler
|
|||||||
from jupyterhub.utils import url_path_join
|
from jupyterhub.utils import url_path_join
|
||||||
|
|
||||||
class WhoAmIHandler(HubOAuthenticated, RequestHandler):
|
class WhoAmIHandler(HubOAuthenticated, RequestHandler):
|
||||||
hub_users = {getuser()} # the users allowed to access this service
|
# hub_users can be a set of users who are allowed to access the service
|
||||||
|
# `getuser()` here would mean only the user who started the service
|
||||||
|
# can access the service:
|
||||||
|
|
||||||
|
# hub_users = {getuser()}
|
||||||
|
|
||||||
@authenticated
|
@authenticated
|
||||||
def get(self):
|
def get(self):
|
||||||
|
@@ -15,7 +15,11 @@ from jupyterhub.services.auth import HubAuthenticated
|
|||||||
|
|
||||||
|
|
||||||
class WhoAmIHandler(HubAuthenticated, RequestHandler):
|
class WhoAmIHandler(HubAuthenticated, RequestHandler):
|
||||||
hub_users = {getuser()} # the users allowed to access me
|
# hub_users can be a set of users who are allowed to access the service
|
||||||
|
# `getuser()` here would mean only the user who started the service
|
||||||
|
# can access the service:
|
||||||
|
|
||||||
|
# hub_users = {getuser()}
|
||||||
|
|
||||||
@authenticated
|
@authenticated
|
||||||
def get(self):
|
def get(self):
|
||||||
|
3
hooks/README.md
Normal file
3
hooks/README.md
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
# Docker Cloud build hooks
|
||||||
|
|
||||||
|
These are the hooks
|
4
hooks/post_build
Executable file
4
hooks/post_build
Executable file
@@ -0,0 +1,4 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -exuo pipefail
|
||||||
|
|
||||||
|
docker build --build-arg BASE_IMAGE=$DOCKER_REPO:$DOCKER_TAG -t ${DOCKER_REPO}-onbuild:$DOCKER_TAG onbuild
|
33
hooks/post_push
Executable file
33
hooks/post_push
Executable file
@@ -0,0 +1,33 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -exuo pipefail
|
||||||
|
|
||||||
|
export ONBUILD=${DOCKER_REPO}-onbuild
|
||||||
|
# push ONBUILD image
|
||||||
|
docker push $ONBUILD:$DOCKER_TAG
|
||||||
|
|
||||||
|
function get_hub_version() {
|
||||||
|
rm -f hub_version
|
||||||
|
docker run --rm -v $PWD:/version -u $(id -u) -i $DOCKER_REPO:$DOCKER_TAG sh -c 'jupyterhub --version > /version/hub_version'
|
||||||
|
hub_xyz=$(cat hub_version)
|
||||||
|
split=( ${hub_xyz//./ } )
|
||||||
|
hub_xy="${split[0]}.${split[1]}"
|
||||||
|
# add .dev on hub_xy so it's 1.0.dev
|
||||||
|
if [[ ! -z "${split[3]}" ]]; then
|
||||||
|
hub_xy="${hub_xy}.${split[3]}"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
get_hub_version
|
||||||
|
|
||||||
|
# when building master, push 0.9.0.dev as well
|
||||||
|
docker tag $DOCKER_REPO:$DOCKER_TAG $DOCKER_REPO:$hub_xyz
|
||||||
|
docker push $DOCKER_REPO:$hub_xyz
|
||||||
|
docker tag $ONBUILD:$DOCKER_TAG $ONBUILD:$hub_xyz
|
||||||
|
docker push $ONBUILD:$hub_xyz
|
||||||
|
|
||||||
|
# when building 0.9.x, push 0.9 as well
|
||||||
|
docker tag $DOCKER_REPO:$DOCKER_TAG $DOCKER_REPO:$hub_xy
|
||||||
|
docker push $DOCKER_REPO:$hub_xy
|
||||||
|
docker tag $ONBUILD:$DOCKER_TAG $ONBUILD:$hub_xy
|
||||||
|
docker push $ONBUILD:$hub_xyz
|
@@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
|
|
||||||
def get_data_files():
|
def get_data_files():
|
||||||
"""Walk up until we find share/jupyter/hub"""
|
"""Walk up until we find share/jupyterhub"""
|
||||||
import sys
|
import sys
|
||||||
from os.path import join, abspath, dirname, exists, split
|
from os.path import join, abspath, dirname, exists, split
|
||||||
path = abspath(dirname(__file__))
|
path = abspath(dirname(__file__))
|
||||||
@@ -12,10 +12,10 @@ def get_data_files():
|
|||||||
for path in starting_points:
|
for path in starting_points:
|
||||||
# walk up, looking for prefix/share/jupyter
|
# walk up, looking for prefix/share/jupyter
|
||||||
while path != '/':
|
while path != '/':
|
||||||
share_jupyter = join(path, 'share', 'jupyter', 'hub')
|
share_jupyterhub = join(path, 'share', 'jupyterhub')
|
||||||
static = join(share_jupyter, 'static')
|
static = join(share_jupyterhub, 'static')
|
||||||
if all(exists(join(static, f)) for f in ['components', 'css']):
|
if all(exists(join(static, f)) for f in ['components', 'css']):
|
||||||
return share_jupyter
|
return share_jupyterhub
|
||||||
path, _ = split(path)
|
path, _ = split(path)
|
||||||
# didn't find it, give up
|
# didn't find it, give up
|
||||||
return ''
|
return ''
|
||||||
|
@@ -5,12 +5,19 @@
|
|||||||
|
|
||||||
version_info = (
|
version_info = (
|
||||||
0,
|
0,
|
||||||
8,
|
9,
|
||||||
1,
|
4,
|
||||||
# 'dev',
|
"", # release (b1, rc1, or "" for final or dev)
|
||||||
|
# "dev", # dev or nothing
|
||||||
)
|
)
|
||||||
|
|
||||||
__version__ = '.'.join(map(str, version_info))
|
# pep 440 version: no dot before beta/rc, but before .dev
|
||||||
|
# 0.1.0rc1
|
||||||
|
# 0.1.0a1
|
||||||
|
# 0.1.0b1.dev
|
||||||
|
# 0.1.0.dev
|
||||||
|
|
||||||
|
__version__ = ".".join(map(str, version_info[:3])) + ".".join(version_info[3:])
|
||||||
|
|
||||||
|
|
||||||
def _check_version(hub_version, singleuser_version, log):
|
def _check_version(hub_version, singleuser_version, log):
|
||||||
|
@@ -30,11 +30,9 @@ if 'jupyterhub' in sys.modules:
|
|||||||
else:
|
else:
|
||||||
fileConfig(config.config_file_name)
|
fileConfig(config.config_file_name)
|
||||||
|
|
||||||
# add your model's MetaData object here
|
# add your model's MetaData object here for 'autogenerate' support
|
||||||
# for 'autogenerate' support
|
from jupyterhub import orm
|
||||||
# from myapp import mymodel
|
target_metadata = orm.Base.metadata
|
||||||
# target_metadata = mymodel.Base.metadata
|
|
||||||
target_metadata = None
|
|
||||||
|
|
||||||
# other values from the config, defined by the needs of env.py,
|
# other values from the config, defined by the needs of env.py,
|
||||||
# can be acquired:
|
# can be acquired:
|
||||||
|
42
jupyterhub/alembic/versions/1cebaf56856c_session_id.py
Normal file
42
jupyterhub/alembic/versions/1cebaf56856c_session_id.py
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
"""Add session_id to auth tokens
|
||||||
|
|
||||||
|
Revision ID: 1cebaf56856c
|
||||||
|
Revises: 3ec6993fe20c
|
||||||
|
Create Date: 2017-12-07 14:43:51.500740
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '1cebaf56856c'
|
||||||
|
down_revision = '3ec6993fe20c'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
import logging
|
||||||
|
logger = logging.getLogger('alembic')
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
tables = ('oauth_access_tokens', 'oauth_codes')
|
||||||
|
|
||||||
|
|
||||||
|
def add_column_if_table_exists(table, column):
|
||||||
|
engine = op.get_bind().engine
|
||||||
|
if table not in engine.table_names():
|
||||||
|
# table doesn't exist, no need to upgrade
|
||||||
|
# because jupyterhub will create it on launch
|
||||||
|
logger.warning("Skipping upgrade of absent table: %s", table)
|
||||||
|
return
|
||||||
|
op.add_column(table, column)
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
for table in tables:
|
||||||
|
add_column_if_table_exists(table, sa.Column('session_id', sa.Unicode(255)))
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
# sqlite cannot downgrade because of limited ALTER TABLE support (no DROP COLUMN)
|
||||||
|
for table in tables:
|
||||||
|
op.drop_column(table, 'session_id')
|
44
jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py
Normal file
44
jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
"""token tracking
|
||||||
|
|
||||||
|
Revision ID: 56cc5a70207e
|
||||||
|
Revises: 1cebaf56856c
|
||||||
|
Create Date: 2017-12-19 15:21:09.300513
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '56cc5a70207e'
|
||||||
|
down_revision = '1cebaf56856c'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
import logging
|
||||||
|
logger = logging.getLogger('alembic')
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
tables = op.get_bind().engine.table_names()
|
||||||
|
op.add_column('api_tokens', sa.Column('created', sa.DateTime(), nullable=True))
|
||||||
|
op.add_column('api_tokens', sa.Column('last_activity', sa.DateTime(), nullable=True))
|
||||||
|
op.add_column('api_tokens', sa.Column('note', sa.Unicode(length=1023), nullable=True))
|
||||||
|
if 'oauth_access_tokens' in tables:
|
||||||
|
op.add_column('oauth_access_tokens', sa.Column('created', sa.DateTime(), nullable=True))
|
||||||
|
op.add_column('oauth_access_tokens', sa.Column('last_activity', sa.DateTime(), nullable=True))
|
||||||
|
if op.get_context().dialect.name == 'sqlite':
|
||||||
|
logger.warning("sqlite cannot use ALTER TABLE to create foreign keys. Upgrade will be incomplete.")
|
||||||
|
else:
|
||||||
|
op.create_foreign_key(None, 'oauth_access_tokens', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')
|
||||||
|
op.create_foreign_key(None, 'oauth_codes', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_constraint(None, 'oauth_codes', type_='foreignkey')
|
||||||
|
op.drop_constraint(None, 'oauth_access_tokens', type_='foreignkey')
|
||||||
|
op.drop_column('oauth_access_tokens', 'last_activity')
|
||||||
|
op.drop_column('oauth_access_tokens', 'created')
|
||||||
|
op.drop_column('api_tokens', 'note')
|
||||||
|
op.drop_column('api_tokens', 'last_activity')
|
||||||
|
op.drop_column('api_tokens', 'created')
|
24
jupyterhub/alembic/versions/896818069c98_token_expires.py
Normal file
24
jupyterhub/alembic/versions/896818069c98_token_expires.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
"""Add APIToken.expires_at
|
||||||
|
|
||||||
|
Revision ID: 896818069c98
|
||||||
|
Revises: d68c98b66cd4
|
||||||
|
Create Date: 2018-05-07 11:35:58.050542
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '896818069c98'
|
||||||
|
down_revision = 'd68c98b66cd4'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.add_column('api_tokens', sa.Column('expires_at', sa.DateTime(), nullable=True))
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_column('api_tokens', 'expires_at')
|
47
jupyterhub/alembic/versions/99a28a4418e1_user_created.py
Normal file
47
jupyterhub/alembic/versions/99a28a4418e1_user_created.py
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
"""user.created and spawner.started
|
||||||
|
|
||||||
|
Revision ID: 99a28a4418e1
|
||||||
|
Revises: 56cc5a70207e
|
||||||
|
Create Date: 2018-03-21 14:27:17.466841
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '99a28a4418e1'
|
||||||
|
down_revision = '56cc5a70207e'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.add_column('users', sa.Column('created', sa.DateTime, nullable=True))
|
||||||
|
c = op.get_bind()
|
||||||
|
# fill created date with current time
|
||||||
|
now = datetime.utcnow()
|
||||||
|
c.execute("""
|
||||||
|
UPDATE users
|
||||||
|
SET created='%s'
|
||||||
|
""" % (now,)
|
||||||
|
)
|
||||||
|
|
||||||
|
tables = c.engine.table_names()
|
||||||
|
|
||||||
|
if 'spawners' in tables:
|
||||||
|
op.add_column('spawners', sa.Column('started', sa.DateTime, nullable=True))
|
||||||
|
# fill started value with now for running servers
|
||||||
|
c.execute("""
|
||||||
|
UPDATE spawners
|
||||||
|
SET started='%s'
|
||||||
|
WHERE server_id IS NOT NULL
|
||||||
|
""" % (now,)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_column('users', 'created')
|
||||||
|
op.drop_column('spawners', 'started')
|
@@ -0,0 +1,29 @@
|
|||||||
|
"""client-description
|
||||||
|
|
||||||
|
Revision ID: d68c98b66cd4
|
||||||
|
Revises: 99a28a4418e1
|
||||||
|
Create Date: 2018-04-13 10:50:17.968636
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'd68c98b66cd4'
|
||||||
|
down_revision = '99a28a4418e1'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
tables = op.get_bind().engine.table_names()
|
||||||
|
if 'oauth_clients' in tables:
|
||||||
|
op.add_column(
|
||||||
|
'oauth_clients',
|
||||||
|
sa.Column('description', sa.Unicode(length=1023))
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_column('oauth_clients', 'description')
|
@@ -3,13 +3,15 @@
|
|||||||
# Copyright (c) Jupyter Development Team.
|
# Copyright (c) Jupyter Development Team.
|
||||||
# Distributed under the terms of the Modified BSD License.
|
# Distributed under the terms of the Modified BSD License.
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
import json
|
import json
|
||||||
from urllib.parse import quote
|
from urllib.parse import quote
|
||||||
|
|
||||||
from oauth2.web.tornado import OAuth2Handler
|
from oauth2.web.tornado import OAuth2Handler
|
||||||
from tornado import web, gen
|
from tornado import web
|
||||||
|
|
||||||
from .. import orm
|
from .. import orm
|
||||||
|
from ..user import User
|
||||||
from ..utils import token_authenticated
|
from ..utils import token_authenticated
|
||||||
from .base import BaseHandler, APIHandler
|
from .base import BaseHandler, APIHandler
|
||||||
|
|
||||||
@@ -22,7 +24,11 @@ class TokenAPIHandler(APIHandler):
|
|||||||
orm_token = orm.OAuthAccessToken.find(self.db, token)
|
orm_token = orm.OAuthAccessToken.find(self.db, token)
|
||||||
if orm_token is None:
|
if orm_token is None:
|
||||||
raise web.HTTPError(404)
|
raise web.HTTPError(404)
|
||||||
|
|
||||||
|
# record activity whenever we see a token
|
||||||
|
now = orm_token.last_activity = datetime.utcnow()
|
||||||
if orm_token.user:
|
if orm_token.user:
|
||||||
|
orm_token.user.last_activity = now
|
||||||
model = self.user_model(self.users[orm_token.user])
|
model = self.user_model(self.users[orm_token.user])
|
||||||
elif orm_token.service:
|
elif orm_token.service:
|
||||||
model = self.service_model(orm_token.service)
|
model = self.service_model(orm_token.service)
|
||||||
@@ -31,17 +37,22 @@ class TokenAPIHandler(APIHandler):
|
|||||||
self.db.delete(orm_token)
|
self.db.delete(orm_token)
|
||||||
self.db.commit()
|
self.db.commit()
|
||||||
raise web.HTTPError(404)
|
raise web.HTTPError(404)
|
||||||
|
self.db.commit()
|
||||||
self.write(json.dumps(model))
|
self.write(json.dumps(model))
|
||||||
|
|
||||||
@gen.coroutine
|
async def post(self):
|
||||||
def post(self):
|
warn_msg = (
|
||||||
user = self.get_current_user()
|
"Using deprecated token creation endpoint %s."
|
||||||
|
" Use /hub/api/users/:user/tokens instead."
|
||||||
|
) % self.request.uri
|
||||||
|
self.log.warning(warn_msg)
|
||||||
|
requester = user = self.get_current_user()
|
||||||
if user is None:
|
if user is None:
|
||||||
# allow requesting a token with username and password
|
# allow requesting a token with username and password
|
||||||
# for authenticators where that's possible
|
# for authenticators where that's possible
|
||||||
data = self.get_json_body()
|
data = self.get_json_body()
|
||||||
try:
|
try:
|
||||||
user = yield self.login_user(data)
|
requester = user = await self.login_user(data)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.log.error("Failure trying to authenticate with form data: %s" % e)
|
self.log.error("Failure trying to authenticate with form data: %s" % e)
|
||||||
user = None
|
user = None
|
||||||
@@ -49,17 +60,25 @@ class TokenAPIHandler(APIHandler):
|
|||||||
raise web.HTTPError(403)
|
raise web.HTTPError(403)
|
||||||
else:
|
else:
|
||||||
data = self.get_json_body()
|
data = self.get_json_body()
|
||||||
# admin users can request
|
# admin users can request tokens for other users
|
||||||
if data and data.get('username') != user.name:
|
if data and data.get('username'):
|
||||||
if user.admin:
|
user = self.find_user(data['username'])
|
||||||
user = self.find_user(data['username'])
|
if user is not requester and not requester.admin:
|
||||||
if user is None:
|
|
||||||
raise web.HTTPError(400, "No such user '%s'" % data['username'])
|
|
||||||
else:
|
|
||||||
raise web.HTTPError(403, "Only admins can request tokens for other users.")
|
raise web.HTTPError(403, "Only admins can request tokens for other users.")
|
||||||
api_token = user.new_api_token()
|
if requester.admin and user is None:
|
||||||
|
raise web.HTTPError(400, "No such user '%s'" % data['username'])
|
||||||
|
|
||||||
|
note = (data or {}).get('note')
|
||||||
|
if not note:
|
||||||
|
note = "Requested via deprecated api"
|
||||||
|
if requester is not user:
|
||||||
|
kind = 'user' if isinstance(user, User) else 'service'
|
||||||
|
note += " by %s %s" % (kind, requester.name)
|
||||||
|
|
||||||
|
api_token = user.new_api_token(note=note)
|
||||||
self.write(json.dumps({
|
self.write(json.dumps({
|
||||||
'token': api_token,
|
'token': api_token,
|
||||||
|
'warning': warn_msg,
|
||||||
'user': self.user_model(user),
|
'user': self.user_model(user),
|
||||||
}))
|
}))
|
||||||
|
|
||||||
|
@@ -2,24 +2,36 @@
|
|||||||
# Copyright (c) Jupyter Development Team.
|
# Copyright (c) Jupyter Development Team.
|
||||||
# Distributed under the terms of the Modified BSD License.
|
# Distributed under the terms of the Modified BSD License.
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
import json
|
import json
|
||||||
|
|
||||||
from http.client import responses
|
from http.client import responses
|
||||||
|
|
||||||
|
from sqlalchemy.exc import SQLAlchemyError
|
||||||
from tornado import web
|
from tornado import web
|
||||||
|
|
||||||
|
from .. import orm
|
||||||
from ..handlers import BaseHandler
|
from ..handlers import BaseHandler
|
||||||
from ..utils import url_path_join
|
from ..utils import isoformat, url_path_join
|
||||||
|
|
||||||
|
|
||||||
class APIHandler(BaseHandler):
|
class APIHandler(BaseHandler):
|
||||||
|
"""Base class for API endpoints
|
||||||
|
|
||||||
|
Differences from page handlers:
|
||||||
|
|
||||||
|
- JSON responses and errors
|
||||||
|
- strict referer checking for Cookie-authenticated requests
|
||||||
|
- strict content-security-policy
|
||||||
|
- methods for REST API models
|
||||||
|
"""
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def content_security_policy(self):
|
def content_security_policy(self):
|
||||||
return '; '.join([super().content_security_policy, "default-src 'none'"])
|
return '; '.join([super().content_security_policy, "default-src 'none'"])
|
||||||
|
|
||||||
def set_default_headers(self):
|
def get_content_type(self):
|
||||||
self.set_header('Content-Type', 'application/json')
|
return 'application/json'
|
||||||
super().set_default_headers()
|
|
||||||
|
|
||||||
def check_referer(self):
|
def check_referer(self):
|
||||||
"""Check Origin for cross-site API requests.
|
"""Check Origin for cross-site API requests.
|
||||||
@@ -75,6 +87,7 @@ class APIHandler(BaseHandler):
|
|||||||
"""Write JSON errors instead of HTML"""
|
"""Write JSON errors instead of HTML"""
|
||||||
exc_info = kwargs.get('exc_info')
|
exc_info = kwargs.get('exc_info')
|
||||||
message = ''
|
message = ''
|
||||||
|
exception = None
|
||||||
status_message = responses.get(status_code, 'Unknown Error')
|
status_message = responses.get(status_code, 'Unknown Error')
|
||||||
if exc_info:
|
if exc_info:
|
||||||
exception = exc_info[1]
|
exception = exc_info[1]
|
||||||
@@ -88,13 +101,85 @@ class APIHandler(BaseHandler):
|
|||||||
reason = getattr(exception, 'reason', '')
|
reason = getattr(exception, 'reason', '')
|
||||||
if reason:
|
if reason:
|
||||||
status_message = reason
|
status_message = reason
|
||||||
|
|
||||||
|
if exception and isinstance(exception, SQLAlchemyError):
|
||||||
|
self.log.warning("Rolling back session due to database error %s", exception)
|
||||||
|
self.db.rollback()
|
||||||
|
|
||||||
|
self.set_header('Content-Type', 'application/json')
|
||||||
|
if isinstance(exception, web.HTTPError):
|
||||||
|
# allow setting headers from exceptions
|
||||||
|
# since exception handler clears headers
|
||||||
|
headers = getattr(exception, 'headers', None)
|
||||||
|
if headers:
|
||||||
|
for key, value in headers.items():
|
||||||
|
self.set_header(key, value)
|
||||||
|
# Content-Length must be recalculated.
|
||||||
|
self.clear_header('Content-Length')
|
||||||
|
|
||||||
self.write(json.dumps({
|
self.write(json.dumps({
|
||||||
'status': status_code,
|
'status': status_code,
|
||||||
'message': message or status_message,
|
'message': message or status_message,
|
||||||
}))
|
}))
|
||||||
|
|
||||||
def user_model(self, user):
|
def server_model(self, spawner, include_state=False):
|
||||||
|
"""Get the JSON model for a Spawner"""
|
||||||
|
return {
|
||||||
|
'name': spawner.name,
|
||||||
|
'last_activity': isoformat(spawner.orm_spawner.last_activity),
|
||||||
|
'started': isoformat(spawner.orm_spawner.started),
|
||||||
|
'pending': spawner.pending,
|
||||||
|
'ready': spawner.ready,
|
||||||
|
'state': spawner.get_state() if include_state else None,
|
||||||
|
'url': url_path_join(spawner.user.url, spawner.name, '/'),
|
||||||
|
'progress_url': spawner._progress_url,
|
||||||
|
}
|
||||||
|
|
||||||
|
def token_model(self, token):
|
||||||
|
"""Get the JSON model for an APIToken"""
|
||||||
|
expires_at = None
|
||||||
|
if isinstance(token, orm.APIToken):
|
||||||
|
kind = 'api_token'
|
||||||
|
extra = {
|
||||||
|
'note': token.note,
|
||||||
|
}
|
||||||
|
expires_at = token.expires_at
|
||||||
|
elif isinstance(token, orm.OAuthAccessToken):
|
||||||
|
kind = 'oauth'
|
||||||
|
extra = {
|
||||||
|
'oauth_client': token.client.description or token.client.client_id,
|
||||||
|
}
|
||||||
|
if token.expires_at:
|
||||||
|
expires_at = datetime.fromtimestamp(token.expires_at)
|
||||||
|
else:
|
||||||
|
raise TypeError(
|
||||||
|
"token must be an APIToken or OAuthAccessToken, not %s"
|
||||||
|
% type(token))
|
||||||
|
|
||||||
|
if token.user:
|
||||||
|
owner_key = 'user'
|
||||||
|
owner = token.user.name
|
||||||
|
|
||||||
|
else:
|
||||||
|
owner_key = 'service'
|
||||||
|
owner = token.service.name
|
||||||
|
|
||||||
|
model = {
|
||||||
|
owner_key: owner,
|
||||||
|
'id': token.api_id,
|
||||||
|
'kind': kind,
|
||||||
|
'created': isoformat(token.created),
|
||||||
|
'last_activity': isoformat(token.last_activity),
|
||||||
|
'expires_at': isoformat(expires_at),
|
||||||
|
}
|
||||||
|
model.update(extra)
|
||||||
|
return model
|
||||||
|
|
||||||
|
def user_model(self, user, include_servers=False, include_state=False):
|
||||||
"""Get the JSON model for a User object"""
|
"""Get the JSON model for a User object"""
|
||||||
|
if isinstance(user, orm.User):
|
||||||
|
user = self.users[user.id]
|
||||||
|
|
||||||
model = {
|
model = {
|
||||||
'kind': 'user',
|
'kind': 'user',
|
||||||
'name': user.name,
|
'name': user.name,
|
||||||
@@ -102,19 +187,22 @@ class APIHandler(BaseHandler):
|
|||||||
'groups': [ g.name for g in user.groups ],
|
'groups': [ g.name for g in user.groups ],
|
||||||
'server': user.url if user.running else None,
|
'server': user.url if user.running else None,
|
||||||
'pending': None,
|
'pending': None,
|
||||||
'last_activity': user.last_activity.isoformat(),
|
'created': isoformat(user.created),
|
||||||
|
'last_activity': isoformat(user.last_activity),
|
||||||
}
|
}
|
||||||
model['pending'] = user.spawners[''].pending or None
|
if '' in user.spawners:
|
||||||
|
model['pending'] = user.spawners[''].pending
|
||||||
|
|
||||||
if self.allow_named_servers:
|
if not include_servers:
|
||||||
servers = model['servers'] = {}
|
model['servers'] = None
|
||||||
for name, spawner in user.spawners.items():
|
return model
|
||||||
if spawner.ready:
|
|
||||||
servers[name] = s = {'name': name}
|
servers = model['servers'] = {}
|
||||||
if spawner.pending:
|
for name, spawner in user.spawners.items():
|
||||||
s['pending'] = spawner.pending
|
# include 'active' servers, not just ready
|
||||||
if spawner.server:
|
# (this includes pending events)
|
||||||
s['url'] = url_path_join(user.url, name, '/')
|
if spawner.active:
|
||||||
|
servers[name] = self.server_model(spawner, include_state=include_state)
|
||||||
return model
|
return model
|
||||||
|
|
||||||
def group_model(self, group):
|
def group_model(self, group):
|
||||||
@@ -137,6 +225,7 @@ class APIHandler(BaseHandler):
|
|||||||
'name': str,
|
'name': str,
|
||||||
'admin': bool,
|
'admin': bool,
|
||||||
'groups': list,
|
'groups': list,
|
||||||
|
'auth_state': dict,
|
||||||
}
|
}
|
||||||
|
|
||||||
_group_model_types = {
|
_group_model_types = {
|
||||||
@@ -178,5 +267,14 @@ class APIHandler(BaseHandler):
|
|||||||
|
|
||||||
|
|
||||||
def options(self, *args, **kwargs):
|
def options(self, *args, **kwargs):
|
||||||
self.set_header('Access-Control-Allow-Headers', 'accept, content-type')
|
|
||||||
self.finish()
|
self.finish()
|
||||||
|
|
||||||
|
|
||||||
|
class API404(APIHandler):
|
||||||
|
"""404 for API requests
|
||||||
|
|
||||||
|
Ensures JSON 404 errors for malformed URLs
|
||||||
|
"""
|
||||||
|
async def prepare(self):
|
||||||
|
await super().prepare()
|
||||||
|
raise web.HTTPError(404)
|
||||||
|
@@ -41,6 +41,37 @@ class GroupListAPIHandler(_GroupAPIHandler):
|
|||||||
data = [ self.group_model(g) for g in self.db.query(orm.Group) ]
|
data = [ self.group_model(g) for g in self.db.query(orm.Group) ]
|
||||||
self.write(json.dumps(data))
|
self.write(json.dumps(data))
|
||||||
|
|
||||||
|
@admin_only
|
||||||
|
async def post(self):
|
||||||
|
"""POST creates Multiple groups """
|
||||||
|
model = self.get_json_body()
|
||||||
|
if not model or not isinstance(model, dict) or not model.get('groups'):
|
||||||
|
raise web.HTTPError(400, "Must specify at least one group to create")
|
||||||
|
|
||||||
|
groupnames = model.pop("groups",[])
|
||||||
|
self._check_group_model(model)
|
||||||
|
|
||||||
|
created = []
|
||||||
|
for name in groupnames:
|
||||||
|
existing = orm.Group.find(self.db, name=name)
|
||||||
|
if existing is not None:
|
||||||
|
raise web.HTTPError(409, "Group %s already exists" % name)
|
||||||
|
|
||||||
|
usernames = model.get('users', [])
|
||||||
|
# check that users exist
|
||||||
|
users = self._usernames_to_users(usernames)
|
||||||
|
# create the group
|
||||||
|
self.log.info("Creating new group %s with %i users",
|
||||||
|
name, len(users),
|
||||||
|
)
|
||||||
|
self.log.debug("Users: %s", usernames)
|
||||||
|
group = orm.Group(name=name, users=users)
|
||||||
|
self.db.add(group)
|
||||||
|
self.db.commit()
|
||||||
|
created.append(group)
|
||||||
|
self.write(json.dumps([self.group_model(group) for group in created]))
|
||||||
|
self.set_status(201)
|
||||||
|
|
||||||
|
|
||||||
class GroupAPIHandler(_GroupAPIHandler):
|
class GroupAPIHandler(_GroupAPIHandler):
|
||||||
"""View and modify groups by name"""
|
"""View and modify groups by name"""
|
||||||
@@ -51,8 +82,7 @@ class GroupAPIHandler(_GroupAPIHandler):
|
|||||||
self.write(json.dumps(self.group_model(group)))
|
self.write(json.dumps(self.group_model(group)))
|
||||||
|
|
||||||
@admin_only
|
@admin_only
|
||||||
@gen.coroutine
|
async def post(self, name):
|
||||||
def post(self, name):
|
|
||||||
"""POST creates a group by name"""
|
"""POST creates a group by name"""
|
||||||
model = self.get_json_body()
|
model = self.get_json_body()
|
||||||
if model is None:
|
if model is None:
|
||||||
@@ -62,7 +92,7 @@ class GroupAPIHandler(_GroupAPIHandler):
|
|||||||
|
|
||||||
existing = orm.Group.find(self.db, name=name)
|
existing = orm.Group.find(self.db, name=name)
|
||||||
if existing is not None:
|
if existing is not None:
|
||||||
raise web.HTTPError(400, "Group %s already exists" % name)
|
raise web.HTTPError(409, "Group %s already exists" % name)
|
||||||
|
|
||||||
usernames = model.get('users', [])
|
usernames = model.get('users', [])
|
||||||
# check that users exist
|
# check that users exist
|
||||||
@@ -109,9 +139,8 @@ class GroupUsersAPIHandler(_GroupAPIHandler):
|
|||||||
self.db.commit()
|
self.db.commit()
|
||||||
self.write(json.dumps(self.group_model(group)))
|
self.write(json.dumps(self.group_model(group)))
|
||||||
|
|
||||||
@gen.coroutine
|
|
||||||
@admin_only
|
@admin_only
|
||||||
def delete(self, name):
|
async def delete(self, name):
|
||||||
"""DELETE removes users from a group"""
|
"""DELETE removes users from a group"""
|
||||||
group = self.find_group(name)
|
group = self.find_group(name)
|
||||||
data = self.get_json_body()
|
data = self.get_json_body()
|
||||||
|
@@ -16,29 +16,26 @@ from .base import APIHandler
|
|||||||
class ProxyAPIHandler(APIHandler):
|
class ProxyAPIHandler(APIHandler):
|
||||||
|
|
||||||
@admin_only
|
@admin_only
|
||||||
@gen.coroutine
|
async def get(self):
|
||||||
def get(self):
|
|
||||||
"""GET /api/proxy fetches the routing table
|
"""GET /api/proxy fetches the routing table
|
||||||
|
|
||||||
This is the same as fetching the routing table directly from the proxy,
|
This is the same as fetching the routing table directly from the proxy,
|
||||||
but without clients needing to maintain separate
|
but without clients needing to maintain separate
|
||||||
"""
|
"""
|
||||||
routes = yield self.proxy.get_all_routes()
|
routes = await self.proxy.get_all_routes()
|
||||||
self.write(json.dumps(routes))
|
self.write(json.dumps(routes))
|
||||||
|
|
||||||
@admin_only
|
@admin_only
|
||||||
@gen.coroutine
|
async def post(self):
|
||||||
def post(self):
|
|
||||||
"""POST checks the proxy to ensure that it's up to date.
|
"""POST checks the proxy to ensure that it's up to date.
|
||||||
|
|
||||||
Can be used to jumpstart a newly launched proxy
|
Can be used to jumpstart a newly launched proxy
|
||||||
without waiting for the check_routes interval.
|
without waiting for the check_routes interval.
|
||||||
"""
|
"""
|
||||||
yield self.proxy.check_routes(self.users, self.services)
|
await self.proxy.check_routes(self.users, self.services)
|
||||||
|
|
||||||
@admin_only
|
@admin_only
|
||||||
@gen.coroutine
|
async def patch(self):
|
||||||
def patch(self):
|
|
||||||
"""PATCH updates the location of the proxy
|
"""PATCH updates the location of the proxy
|
||||||
|
|
||||||
Can be used to notify the Hub that a new proxy is in charge
|
Can be used to notify the Hub that a new proxy is in charge
|
||||||
@@ -58,8 +55,7 @@ class ProxyAPIHandler(APIHandler):
|
|||||||
if 'auth_token' in model:
|
if 'auth_token' in model:
|
||||||
self.proxy.auth_token = model['auth_token']
|
self.proxy.auth_token = model['auth_token']
|
||||||
self.log.info("Updated proxy at %s", self.proxy)
|
self.log.info("Updated proxy at %s", self.proxy)
|
||||||
yield self.proxy.check_routes(self.users, self.services)
|
await self.proxy.check_routes(self.users, self.services)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
default_handlers = [
|
default_handlers = [
|
||||||
|
@@ -23,6 +23,7 @@ def service_model(service):
|
|||||||
'prefix': service.server.base_url if service.server else '',
|
'prefix': service.server.base_url if service.server else '',
|
||||||
'command': service.command,
|
'command': service.command,
|
||||||
'pid': service.proc.pid if service.proc else 0,
|
'pid': service.proc.pid if service.proc else 0,
|
||||||
|
'info': service.info
|
||||||
}
|
}
|
||||||
|
|
||||||
class ServiceListAPIHandler(APIHandler):
|
class ServiceListAPIHandler(APIHandler):
|
||||||
|
@@ -3,12 +3,17 @@
|
|||||||
# Copyright (c) Jupyter Development Team.
|
# Copyright (c) Jupyter Development Team.
|
||||||
# Distributed under the terms of the Modified BSD License.
|
# Distributed under the terms of the Modified BSD License.
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
from datetime import datetime
|
||||||
import json
|
import json
|
||||||
|
|
||||||
from tornado import gen, web
|
from async_generator import aclosing
|
||||||
|
from tornado import web
|
||||||
|
from tornado.iostream import StreamClosedError
|
||||||
|
|
||||||
from .. import orm
|
from .. import orm
|
||||||
from ..utils import admin_only
|
from ..user import User
|
||||||
|
from ..utils import admin_only, iterate_until, maybe_future, url_path_join
|
||||||
from .base import APIHandler
|
from .base import APIHandler
|
||||||
|
|
||||||
|
|
||||||
@@ -17,8 +22,8 @@ class SelfAPIHandler(APIHandler):
|
|||||||
|
|
||||||
Based on the authentication info. Acts as a 'whoami' for auth tokens.
|
Based on the authentication info. Acts as a 'whoami' for auth tokens.
|
||||||
"""
|
"""
|
||||||
@web.authenticated
|
|
||||||
def get(self):
|
async def get(self):
|
||||||
user = self.get_current_user()
|
user = self.get_current_user()
|
||||||
if user is None:
|
if user is None:
|
||||||
# whoami can be accessed via oauth token
|
# whoami can be accessed via oauth token
|
||||||
@@ -31,13 +36,14 @@ class SelfAPIHandler(APIHandler):
|
|||||||
class UserListAPIHandler(APIHandler):
|
class UserListAPIHandler(APIHandler):
|
||||||
@admin_only
|
@admin_only
|
||||||
def get(self):
|
def get(self):
|
||||||
users = [ self._user_from_orm(u) for u in self.db.query(orm.User) ]
|
data = [
|
||||||
data = [ self.user_model(u) for u in users ]
|
self.user_model(u, include_servers=True, include_state=True)
|
||||||
|
for u in self.db.query(orm.User)
|
||||||
|
]
|
||||||
self.write(json.dumps(data))
|
self.write(json.dumps(data))
|
||||||
|
|
||||||
@admin_only
|
@admin_only
|
||||||
@gen.coroutine
|
async def post(self):
|
||||||
def post(self):
|
|
||||||
data = self.get_json_body()
|
data = self.get_json_body()
|
||||||
if not data or not isinstance(data, dict) or not data.get('usernames'):
|
if not data or not isinstance(data, dict) or not data.get('usernames'):
|
||||||
raise web.HTTPError(400, "Must specify at least one user to create")
|
raise web.HTTPError(400, "Must specify at least one user to create")
|
||||||
@@ -69,7 +75,7 @@ class UserListAPIHandler(APIHandler):
|
|||||||
raise web.HTTPError(400, msg)
|
raise web.HTTPError(400, msg)
|
||||||
|
|
||||||
if not to_create:
|
if not to_create:
|
||||||
raise web.HTTPError(400, "All %i users already exist" % len(usernames))
|
raise web.HTTPError(409, "All %i users already exist" % len(usernames))
|
||||||
|
|
||||||
created = []
|
created = []
|
||||||
for name in to_create:
|
for name in to_create:
|
||||||
@@ -78,10 +84,10 @@ class UserListAPIHandler(APIHandler):
|
|||||||
user.admin = True
|
user.admin = True
|
||||||
self.db.commit()
|
self.db.commit()
|
||||||
try:
|
try:
|
||||||
yield gen.maybe_future(self.authenticator.add_user(user))
|
await maybe_future(self.authenticator.add_user(user))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.log.error("Failed to create user: %s" % name, exc_info=True)
|
self.log.error("Failed to create user: %s" % name, exc_info=True)
|
||||||
del self.users[user]
|
self.users.delete(user)
|
||||||
raise web.HTTPError(400, "Failed to create user %s: %s" % (name, str(e)))
|
raise web.HTTPError(400, "Failed to create user %s: %s" % (name, str(e)))
|
||||||
else:
|
else:
|
||||||
created.append(user)
|
created.append(user)
|
||||||
@@ -105,20 +111,28 @@ def admin_or_self(method):
|
|||||||
return method(self, name, *args, **kwargs)
|
return method(self, name, *args, **kwargs)
|
||||||
return m
|
return m
|
||||||
|
|
||||||
|
|
||||||
class UserAPIHandler(APIHandler):
|
class UserAPIHandler(APIHandler):
|
||||||
|
|
||||||
@admin_or_self
|
@admin_or_self
|
||||||
def get(self, name):
|
async def get(self, name):
|
||||||
user = self.find_user(name)
|
user = self.find_user(name)
|
||||||
self.write(json.dumps(self.user_model(user)))
|
model = self.user_model(user, include_servers=True, include_state=self.get_current_user().admin)
|
||||||
|
# auth state will only be shown if the requestor is an admin
|
||||||
|
# this means users can't see their own auth state unless they
|
||||||
|
# are admins, Hub admins often are also marked as admins so they
|
||||||
|
# will see their auth state but normal users won't
|
||||||
|
requestor = self.get_current_user()
|
||||||
|
if requestor.admin:
|
||||||
|
model['auth_state'] = await user.get_auth_state()
|
||||||
|
self.write(json.dumps(model))
|
||||||
|
|
||||||
@admin_only
|
@admin_only
|
||||||
@gen.coroutine
|
async def post(self, name):
|
||||||
def post(self, name):
|
|
||||||
data = self.get_json_body()
|
data = self.get_json_body()
|
||||||
user = self.find_user(name)
|
user = self.find_user(name)
|
||||||
if user is not None:
|
if user is not None:
|
||||||
raise web.HTTPError(400, "User %s already exists" % name)
|
raise web.HTTPError(409, "User %s already exists" % name)
|
||||||
|
|
||||||
user = self.user_from_username(name)
|
user = self.user_from_username(name)
|
||||||
if data:
|
if data:
|
||||||
@@ -128,19 +142,18 @@ class UserAPIHandler(APIHandler):
|
|||||||
self.db.commit()
|
self.db.commit()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
yield gen.maybe_future(self.authenticator.add_user(user))
|
await maybe_future(self.authenticator.add_user(user))
|
||||||
except Exception:
|
except Exception:
|
||||||
self.log.error("Failed to create user: %s" % name, exc_info=True)
|
self.log.error("Failed to create user: %s" % name, exc_info=True)
|
||||||
# remove from registry
|
# remove from registry
|
||||||
del self.users[user]
|
self.users.delete(user)
|
||||||
raise web.HTTPError(400, "Failed to create user: %s" % name)
|
raise web.HTTPError(400, "Failed to create user: %s" % name)
|
||||||
|
|
||||||
self.write(json.dumps(self.user_model(user)))
|
self.write(json.dumps(self.user_model(user)))
|
||||||
self.set_status(201)
|
self.set_status(201)
|
||||||
|
|
||||||
@admin_only
|
@admin_only
|
||||||
@gen.coroutine
|
async def delete(self, name):
|
||||||
def delete(self, name):
|
|
||||||
user = self.find_user(name)
|
user = self.find_user(name)
|
||||||
if user is None:
|
if user is None:
|
||||||
raise web.HTTPError(404)
|
raise web.HTTPError(404)
|
||||||
@@ -149,18 +162,18 @@ class UserAPIHandler(APIHandler):
|
|||||||
if user.spawner._stop_pending:
|
if user.spawner._stop_pending:
|
||||||
raise web.HTTPError(400, "%s's server is in the process of stopping, please wait." % name)
|
raise web.HTTPError(400, "%s's server is in the process of stopping, please wait." % name)
|
||||||
if user.running:
|
if user.running:
|
||||||
yield self.stop_single_user(user)
|
await self.stop_single_user(user)
|
||||||
if user.spawner._stop_pending:
|
if user.spawner._stop_pending:
|
||||||
raise web.HTTPError(400, "%s's server is in the process of stopping, please wait." % name)
|
raise web.HTTPError(400, "%s's server is in the process of stopping, please wait." % name)
|
||||||
|
|
||||||
yield gen.maybe_future(self.authenticator.delete_user(user))
|
await maybe_future(self.authenticator.delete_user(user))
|
||||||
# remove from registry
|
# remove from registry
|
||||||
del self.users[user]
|
self.users.delete(user)
|
||||||
|
|
||||||
self.set_status(204)
|
self.set_status(204)
|
||||||
|
|
||||||
@admin_only
|
@admin_only
|
||||||
def patch(self, name):
|
async def patch(self, name):
|
||||||
user = self.find_user(name)
|
user = self.find_user(name)
|
||||||
if user is None:
|
if user is None:
|
||||||
raise web.HTTPError(404)
|
raise web.HTTPError(404)
|
||||||
@@ -171,17 +184,168 @@ class UserAPIHandler(APIHandler):
|
|||||||
if self.find_user(data['name']):
|
if self.find_user(data['name']):
|
||||||
raise web.HTTPError(400, "User %s already exists, username must be unique" % data['name'])
|
raise web.HTTPError(400, "User %s already exists, username must be unique" % data['name'])
|
||||||
for key, value in data.items():
|
for key, value in data.items():
|
||||||
setattr(user, key, value)
|
if key == 'auth_state':
|
||||||
|
await user.save_auth_state(value)
|
||||||
|
else:
|
||||||
|
setattr(user, key, value)
|
||||||
self.db.commit()
|
self.db.commit()
|
||||||
self.write(json.dumps(self.user_model(user)))
|
user_ = self.user_model(user)
|
||||||
|
user_['auth_state'] = await user.get_auth_state()
|
||||||
|
self.write(json.dumps(user_))
|
||||||
|
|
||||||
|
|
||||||
|
class UserTokenListAPIHandler(APIHandler):
|
||||||
|
"""API endpoint for listing/creating tokens"""
|
||||||
|
@admin_or_self
|
||||||
|
def get(self, name):
|
||||||
|
"""Get tokens for a given user"""
|
||||||
|
user = self.find_user(name)
|
||||||
|
if not user:
|
||||||
|
raise web.HTTPError(404, "No such user: %s" % name)
|
||||||
|
|
||||||
|
now = datetime.utcnow()
|
||||||
|
|
||||||
|
api_tokens = []
|
||||||
|
def sort_key(token):
|
||||||
|
return token.last_activity or token.created
|
||||||
|
|
||||||
|
for token in sorted(user.api_tokens, key=sort_key):
|
||||||
|
if token.expires_at and token.expires_at < now:
|
||||||
|
# exclude expired tokens
|
||||||
|
self.db.delete(token)
|
||||||
|
self.db.commit()
|
||||||
|
continue
|
||||||
|
api_tokens.append(self.token_model(token))
|
||||||
|
|
||||||
|
oauth_tokens = []
|
||||||
|
# OAuth tokens use integer timestamps
|
||||||
|
now_timestamp = now.timestamp()
|
||||||
|
for token in sorted(user.oauth_tokens, key=sort_key):
|
||||||
|
if token.expires_at and token.expires_at < now_timestamp:
|
||||||
|
# exclude expired tokens
|
||||||
|
self.db.delete(token)
|
||||||
|
self.db.commit()
|
||||||
|
continue
|
||||||
|
oauth_tokens.append(self.token_model(token))
|
||||||
|
self.write(json.dumps({
|
||||||
|
'api_tokens': api_tokens,
|
||||||
|
'oauth_tokens': oauth_tokens,
|
||||||
|
}))
|
||||||
|
|
||||||
|
async def post(self, name):
|
||||||
|
body = self.get_json_body() or {}
|
||||||
|
if not isinstance(body, dict):
|
||||||
|
raise web.HTTPError(400, "Body must be a JSON dict or empty")
|
||||||
|
|
||||||
|
requester = self.get_current_user()
|
||||||
|
if requester is None:
|
||||||
|
# defer to Authenticator for identifying the user
|
||||||
|
# can be username+password or an upstream auth token
|
||||||
|
try:
|
||||||
|
name = await self.authenticator.authenticate(self, body.get('auth'))
|
||||||
|
except web.HTTPError as e:
|
||||||
|
# turn any authentication error into 403
|
||||||
|
raise web.HTTPError(403)
|
||||||
|
except Exception as e:
|
||||||
|
# suppress and log error here in case Authenticator
|
||||||
|
# isn't prepared to handle auth via this data
|
||||||
|
self.log.error("Error authenticating request for %s: %s",
|
||||||
|
self.request.uri, e)
|
||||||
|
raise web.HTTPError(403)
|
||||||
|
requester = self.find_user(name)
|
||||||
|
if requester is None:
|
||||||
|
# couldn't identify requester
|
||||||
|
raise web.HTTPError(403)
|
||||||
|
user = self.find_user(name)
|
||||||
|
if requester is not user and not requester.admin:
|
||||||
|
raise web.HTTPError(403, "Only admins can request tokens for other users")
|
||||||
|
if not user:
|
||||||
|
raise web.HTTPError(404, "No such user: %s" % name)
|
||||||
|
if requester is not user:
|
||||||
|
kind = 'user' if isinstance(requester, User) else 'service'
|
||||||
|
|
||||||
|
note = body.get('note')
|
||||||
|
if not note:
|
||||||
|
note = "Requested via api"
|
||||||
|
if requester is not user:
|
||||||
|
note += " by %s %s" % (kind, requester.name)
|
||||||
|
|
||||||
|
api_token = user.new_api_token(note=note, expires_in=body.get('expires_in', None))
|
||||||
|
if requester is not user:
|
||||||
|
self.log.info("%s %s requested API token for %s", kind.title(), requester.name, user.name)
|
||||||
|
else:
|
||||||
|
user_kind = 'user' if isinstance(user, User) else 'service'
|
||||||
|
self.log.info("%s %s requested new API token", user_kind.title(), user.name)
|
||||||
|
# retrieve the model
|
||||||
|
token_model = self.token_model(orm.APIToken.find(self.db, api_token))
|
||||||
|
token_model['token'] = api_token
|
||||||
|
self.write(json.dumps(token_model))
|
||||||
|
|
||||||
|
|
||||||
|
class UserTokenAPIHandler(APIHandler):
|
||||||
|
"""API endpoint for retrieving/deleting individual tokens"""
|
||||||
|
|
||||||
|
def find_token_by_id(self, user, token_id):
|
||||||
|
"""Find a token object by token-id key
|
||||||
|
|
||||||
|
Raises 404 if not found for any reason
|
||||||
|
(e.g. wrong owner, invalid key format, etc.)
|
||||||
|
"""
|
||||||
|
not_found = "No such token %s for user %s" % (token_id, user.name)
|
||||||
|
prefix, id = token_id[0], token_id[1:]
|
||||||
|
if prefix == 'a':
|
||||||
|
Token = orm.APIToken
|
||||||
|
elif prefix == 'o':
|
||||||
|
Token = orm.OAuthAccessToken
|
||||||
|
else:
|
||||||
|
raise web.HTTPError(404, not_found)
|
||||||
|
try:
|
||||||
|
id = int(id)
|
||||||
|
except ValueError:
|
||||||
|
raise web.HTTPError(404, not_found)
|
||||||
|
|
||||||
|
orm_token = self.db.query(Token).filter(Token.id==id).first()
|
||||||
|
if orm_token is None or orm_token.user is not user.orm_user:
|
||||||
|
raise web.HTTPError(404, "Token not found %s", orm_token)
|
||||||
|
return orm_token
|
||||||
|
|
||||||
|
@admin_or_self
|
||||||
|
def get(self, name, token_id):
|
||||||
|
""""""
|
||||||
|
user = self.find_user(name)
|
||||||
|
if not user:
|
||||||
|
raise web.HTTPError(404, "No such user: %s" % name)
|
||||||
|
token = self.find_token_by_id(user, token_id)
|
||||||
|
self.write(json.dumps(self.token_model(token)))
|
||||||
|
|
||||||
|
@admin_or_self
|
||||||
|
def delete(self, name, token_id):
|
||||||
|
"""Delete a token"""
|
||||||
|
user = self.find_user(name)
|
||||||
|
if not user:
|
||||||
|
raise web.HTTPError(404, "No such user: %s" % name)
|
||||||
|
token = self.find_token_by_id(user, token_id)
|
||||||
|
# deleting an oauth token deletes *all* oauth tokens for that client
|
||||||
|
if isinstance(token, orm.OAuthAccessToken):
|
||||||
|
client_id = token.client_id
|
||||||
|
tokens = [
|
||||||
|
token for token in user.oauth_tokens
|
||||||
|
if token.client_id == client_id
|
||||||
|
]
|
||||||
|
else:
|
||||||
|
tokens = [token]
|
||||||
|
for token in tokens:
|
||||||
|
self.db.delete(token)
|
||||||
|
self.db.commit()
|
||||||
|
self.set_header('Content-Type', 'text/plain')
|
||||||
|
self.set_status(204)
|
||||||
|
|
||||||
|
|
||||||
class UserServerAPIHandler(APIHandler):
|
class UserServerAPIHandler(APIHandler):
|
||||||
"""Start and stop single-user servers"""
|
"""Start and stop single-user servers"""
|
||||||
|
|
||||||
@gen.coroutine
|
|
||||||
@admin_or_self
|
@admin_or_self
|
||||||
def post(self, name, server_name=''):
|
async def post(self, name, server_name=''):
|
||||||
user = self.find_user(name)
|
user = self.find_user(name)
|
||||||
if server_name and not self.allow_named_servers:
|
if server_name and not self.allow_named_servers:
|
||||||
raise web.HTTPError(400, "Named servers are not enabled.")
|
raise web.HTTPError(400, "Named servers are not enabled.")
|
||||||
@@ -199,21 +363,20 @@ class UserServerAPIHandler(APIHandler):
|
|||||||
# set _spawn_pending flag to prevent races while we wait
|
# set _spawn_pending flag to prevent races while we wait
|
||||||
spawner._spawn_pending = True
|
spawner._spawn_pending = True
|
||||||
try:
|
try:
|
||||||
state = yield spawner.poll_and_notify()
|
state = await spawner.poll_and_notify()
|
||||||
finally:
|
finally:
|
||||||
spawner._spawn_pending = False
|
spawner._spawn_pending = False
|
||||||
if state is None:
|
if state is None:
|
||||||
raise web.HTTPError(400, "%s is already running" % spawner._log_name)
|
raise web.HTTPError(400, "%s is already running" % spawner._log_name)
|
||||||
|
|
||||||
options = self.get_json_body()
|
options = self.get_json_body()
|
||||||
yield self.spawn_single_user(user, server_name, options=options)
|
await self.spawn_single_user(user, server_name, options=options)
|
||||||
status = 202 if spawner.pending == 'spawn' else 201
|
status = 202 if spawner.pending == 'spawn' else 201
|
||||||
self.set_header('Content-Type', 'text/plain')
|
self.set_header('Content-Type', 'text/plain')
|
||||||
self.set_status(status)
|
self.set_status(status)
|
||||||
|
|
||||||
@gen.coroutine
|
|
||||||
@admin_or_self
|
@admin_or_self
|
||||||
def delete(self, name, server_name=''):
|
async def delete(self, name, server_name=''):
|
||||||
user = self.find_user(name)
|
user = self.find_user(name)
|
||||||
if server_name:
|
if server_name:
|
||||||
if not self.allow_named_servers:
|
if not self.allow_named_servers:
|
||||||
@@ -234,10 +397,10 @@ class UserServerAPIHandler(APIHandler):
|
|||||||
(spawner._log_name, '(pending: %s)' % spawner.pending if spawner.pending else '')
|
(spawner._log_name, '(pending: %s)' % spawner.pending if spawner.pending else '')
|
||||||
)
|
)
|
||||||
# include notify, so that a server that died is noticed immediately
|
# include notify, so that a server that died is noticed immediately
|
||||||
status = yield spawner.poll_and_notify()
|
status = await spawner.poll_and_notify()
|
||||||
if status is not None:
|
if status is not None:
|
||||||
raise web.HTTPError(400, "%s is not running" % spawner._log_name)
|
raise web.HTTPError(400, "%s is not running" % spawner._log_name)
|
||||||
yield self.stop_single_user(user, server_name)
|
await self.stop_single_user(user, server_name)
|
||||||
status = 202 if spawner._stop_pending else 204
|
status = 202 if spawner._stop_pending else 204
|
||||||
self.set_header('Content-Type', 'text/plain')
|
self.set_header('Content-Type', 'text/plain')
|
||||||
self.set_status(status)
|
self.set_status(status)
|
||||||
@@ -263,11 +426,131 @@ class UserAdminAccessAPIHandler(APIHandler):
|
|||||||
raise web.HTTPError(404)
|
raise web.HTTPError(404)
|
||||||
|
|
||||||
|
|
||||||
|
class SpawnProgressAPIHandler(APIHandler):
|
||||||
|
"""EventStream handler for pending spawns"""
|
||||||
|
|
||||||
|
keepalive_interval = 8
|
||||||
|
|
||||||
|
def get_content_type(self):
|
||||||
|
return 'text/event-stream'
|
||||||
|
|
||||||
|
async def send_event(self, event):
|
||||||
|
try:
|
||||||
|
self.write('data: {}\n\n'.format(json.dumps(event)))
|
||||||
|
await self.flush()
|
||||||
|
except StreamClosedError:
|
||||||
|
self.log.warning("Stream closed while handling %s", self.request.uri)
|
||||||
|
# raise Finish to halt the handler
|
||||||
|
raise web.Finish()
|
||||||
|
|
||||||
|
_finished = False
|
||||||
|
def on_finish(self):
|
||||||
|
self._finished = True
|
||||||
|
|
||||||
|
async def keepalive(self):
|
||||||
|
"""Write empty lines periodically
|
||||||
|
|
||||||
|
to avoid being closed by intermediate proxies
|
||||||
|
when there's a large gap between events.
|
||||||
|
"""
|
||||||
|
while not self._finished:
|
||||||
|
try:
|
||||||
|
self.write("\n\n")
|
||||||
|
except (StreamClosedError, RuntimeError):
|
||||||
|
return
|
||||||
|
await asyncio.sleep(self.keepalive_interval)
|
||||||
|
|
||||||
|
@admin_or_self
|
||||||
|
async def get(self, username, server_name=''):
|
||||||
|
self.set_header('Cache-Control', 'no-cache')
|
||||||
|
if server_name is None:
|
||||||
|
server_name = ''
|
||||||
|
user = self.find_user(username)
|
||||||
|
if user is None:
|
||||||
|
# no such user
|
||||||
|
raise web.HTTPError(404)
|
||||||
|
if server_name not in user.spawners:
|
||||||
|
# user has no such server
|
||||||
|
raise web.HTTPError(404)
|
||||||
|
spawner = user.spawners[server_name]
|
||||||
|
|
||||||
|
# start sending keepalive to avoid proxies closing the connection
|
||||||
|
asyncio.ensure_future(self.keepalive())
|
||||||
|
# cases:
|
||||||
|
# - spawner already started and ready
|
||||||
|
# - spawner not running at all
|
||||||
|
# - spawner failed
|
||||||
|
# - spawner pending start (what we expect)
|
||||||
|
url = url_path_join(user.url, server_name, '/')
|
||||||
|
ready_event = {
|
||||||
|
'progress': 100,
|
||||||
|
'ready': True,
|
||||||
|
'message': "Server ready at {}".format(url),
|
||||||
|
'html_message': 'Server ready at <a href="{0}">{0}</a>'.format(url),
|
||||||
|
'url': url,
|
||||||
|
}
|
||||||
|
failed_event = {
|
||||||
|
'progress': 100,
|
||||||
|
'failed': True,
|
||||||
|
'message': "Spawn failed",
|
||||||
|
}
|
||||||
|
|
||||||
|
if spawner.ready:
|
||||||
|
# spawner already ready. Trigger progress-completion immediately
|
||||||
|
self.log.info("Server %s is already started", spawner._log_name)
|
||||||
|
await self.send_event(ready_event)
|
||||||
|
return
|
||||||
|
|
||||||
|
spawn_future = spawner._spawn_future
|
||||||
|
|
||||||
|
if not spawner._spawn_pending:
|
||||||
|
# not pending, no progress to fetch
|
||||||
|
# check if spawner has just failed
|
||||||
|
f = spawn_future
|
||||||
|
if f and f.done() and f.exception():
|
||||||
|
failed_event['message'] = "Spawn failed: %s" % f.exception()
|
||||||
|
await self.send_event(failed_event)
|
||||||
|
return
|
||||||
|
else:
|
||||||
|
raise web.HTTPError(400, "%s is not starting...", spawner._log_name)
|
||||||
|
|
||||||
|
# retrieve progress events from the Spawner
|
||||||
|
async with aclosing(iterate_until(spawn_future, spawner._generate_progress())) as events:
|
||||||
|
async for event in events:
|
||||||
|
# don't allow events to sneakily set the 'ready' flag
|
||||||
|
if 'ready' in event:
|
||||||
|
event.pop('ready', None)
|
||||||
|
await self.send_event(event)
|
||||||
|
|
||||||
|
# progress finished, wait for spawn to actually resolve,
|
||||||
|
# in case progress finished early
|
||||||
|
# (ignore errors, which will be logged elsewhere)
|
||||||
|
await asyncio.wait([spawn_future])
|
||||||
|
|
||||||
|
# progress and spawn finished, check if spawn succeeded
|
||||||
|
if spawner.ready:
|
||||||
|
# spawner is ready, signal completion and redirect
|
||||||
|
self.log.info("Server %s is ready", spawner._log_name)
|
||||||
|
await self.send_event(ready_event)
|
||||||
|
else:
|
||||||
|
# what happened? Maybe spawn failed?
|
||||||
|
f = spawn_future
|
||||||
|
if f and f.done() and f.exception():
|
||||||
|
failed_event['message'] = "Spawn failed: %s" % f.exception()
|
||||||
|
else:
|
||||||
|
self.log.warning("Server %s didn't start for unknown reason", spawner._log_name)
|
||||||
|
await self.send_event(failed_event)
|
||||||
|
|
||||||
|
|
||||||
default_handlers = [
|
default_handlers = [
|
||||||
(r"/api/user", SelfAPIHandler),
|
(r"/api/user", SelfAPIHandler),
|
||||||
(r"/api/users", UserListAPIHandler),
|
(r"/api/users", UserListAPIHandler),
|
||||||
(r"/api/users/([^/]+)", UserAPIHandler),
|
(r"/api/users/([^/]+)", UserAPIHandler),
|
||||||
(r"/api/users/([^/]+)/server", UserServerAPIHandler),
|
(r"/api/users/([^/]+)/server", UserServerAPIHandler),
|
||||||
|
(r"/api/users/([^/]+)/server/progress", SpawnProgressAPIHandler),
|
||||||
|
(r"/api/users/([^/]+)/tokens", UserTokenListAPIHandler),
|
||||||
|
(r"/api/users/([^/]+)/tokens/([^/]*)", UserTokenAPIHandler),
|
||||||
(r"/api/users/([^/]+)/servers/([^/]*)", UserServerAPIHandler),
|
(r"/api/users/([^/]+)/servers/([^/]*)", UserServerAPIHandler),
|
||||||
|
(r"/api/users/([^/]+)/servers/([^/]*)/progress", SpawnProgressAPIHandler),
|
||||||
(r"/api/users/([^/]+)/admin-access", UserAdminAccessAPIHandler),
|
(r"/api/users/([^/]+)/admin-access", UserAdminAccessAPIHandler),
|
||||||
]
|
]
|
||||||
|
File diff suppressed because it is too large
Load Diff
@@ -3,28 +3,30 @@
|
|||||||
# Copyright (c) IPython Development Team.
|
# Copyright (c) IPython Development Team.
|
||||||
# Distributed under the terms of the Modified BSD License.
|
# Distributed under the terms of the Modified BSD License.
|
||||||
|
|
||||||
|
from concurrent.futures import ThreadPoolExecutor
|
||||||
import pipes
|
import pipes
|
||||||
import re
|
import re
|
||||||
from shutil import which
|
from shutil import which
|
||||||
import sys
|
import sys
|
||||||
from subprocess import Popen, PIPE, STDOUT
|
from subprocess import Popen, PIPE, STDOUT
|
||||||
|
|
||||||
from tornado import gen
|
|
||||||
try:
|
try:
|
||||||
import pamela
|
import pamela
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
pamela = None
|
pamela = None
|
||||||
_pamela_error = e
|
_pamela_error = e
|
||||||
|
|
||||||
|
from tornado.concurrent import run_on_executor
|
||||||
|
from tornado import gen
|
||||||
|
|
||||||
from traitlets.config import LoggingConfigurable
|
from traitlets.config import LoggingConfigurable
|
||||||
from traitlets import Bool, Set, Unicode, Dict, Any, default, observe
|
from traitlets import Bool, Set, Unicode, Dict, Any, default, observe
|
||||||
|
|
||||||
from .handlers.login import LoginHandler
|
from .handlers.login import LoginHandler
|
||||||
from .utils import url_path_join
|
from .utils import maybe_future, url_path_join
|
||||||
from .traitlets import Command
|
from .traitlets import Command
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def getgrnam(name):
|
def getgrnam(name):
|
||||||
"""Wrapper function to protect against `grp` not being available
|
"""Wrapper function to protect against `grp` not being available
|
||||||
on Windows
|
on Windows
|
||||||
@@ -47,7 +49,7 @@ class Authenticator(LoggingConfigurable):
|
|||||||
|
|
||||||
Encrypting auth_state requires the cryptography package.
|
Encrypting auth_state requires the cryptography package.
|
||||||
|
|
||||||
Additionally, the JUPYTERHUB_CRYPTO_KEY envirionment variable must
|
Additionally, the JUPYTERHUB_CRYPT_KEY environment variable must
|
||||||
contain one (or more, separated by ;) 32B encryption keys.
|
contain one (or more, separated by ;) 32B encryption keys.
|
||||||
These can be either base64 or hex-encoded.
|
These can be either base64 or hex-encoded.
|
||||||
|
|
||||||
@@ -86,6 +88,20 @@ class Authenticator(LoggingConfigurable):
|
|||||||
"""
|
"""
|
||||||
).tag(config=True)
|
).tag(config=True)
|
||||||
|
|
||||||
|
blacklist = Set(
|
||||||
|
help="""
|
||||||
|
Blacklist of usernames that are not allowed to log in.
|
||||||
|
|
||||||
|
Use this with supported authenticators to restrict which users can not log in. This is an
|
||||||
|
additional blacklist that further restricts users, beyond whatever restrictions the
|
||||||
|
authenticator has in place.
|
||||||
|
|
||||||
|
If empty, does not perform any additional restriction.
|
||||||
|
|
||||||
|
.. versionadded: 0.9
|
||||||
|
"""
|
||||||
|
).tag(config=True)
|
||||||
|
|
||||||
@observe('whitelist')
|
@observe('whitelist')
|
||||||
def _check_whitelist(self, change):
|
def _check_whitelist(self, change):
|
||||||
short_names = [name for name in change['new'] if len(name) <= 1]
|
short_names = [name for name in change['new'] if len(name) <= 1]
|
||||||
@@ -203,8 +219,22 @@ class Authenticator(LoggingConfigurable):
|
|||||||
return True
|
return True
|
||||||
return username in self.whitelist
|
return username in self.whitelist
|
||||||
|
|
||||||
@gen.coroutine
|
def check_blacklist(self, username):
|
||||||
def get_authenticated_user(self, handler, data):
|
"""Check if a username is blocked to authenticate based on blacklist configuration
|
||||||
|
|
||||||
|
Return True if username is allowed, False otherwise.
|
||||||
|
No blacklist means any username is allowed.
|
||||||
|
|
||||||
|
Names are normalized *before* being checked against the blacklist.
|
||||||
|
|
||||||
|
.. versionadded: 0.9
|
||||||
|
"""
|
||||||
|
if not self.blacklist:
|
||||||
|
# No blacklist means any name is allowed
|
||||||
|
return True
|
||||||
|
return username not in self.blacklist
|
||||||
|
|
||||||
|
async def get_authenticated_user(self, handler, data):
|
||||||
"""Authenticate the user who is attempting to log in
|
"""Authenticate the user who is attempting to log in
|
||||||
|
|
||||||
Returns user dict if successful, None otherwise.
|
Returns user dict if successful, None otherwise.
|
||||||
@@ -224,7 +254,7 @@ class Authenticator(LoggingConfigurable):
|
|||||||
.. versionchanged:: 0.8
|
.. versionchanged:: 0.8
|
||||||
return dict instead of username
|
return dict instead of username
|
||||||
"""
|
"""
|
||||||
authenticated = yield self.authenticate(handler, data)
|
authenticated = await maybe_future(self.authenticate(handler, data))
|
||||||
if authenticated is None:
|
if authenticated is None:
|
||||||
return
|
return
|
||||||
if isinstance(authenticated, dict):
|
if isinstance(authenticated, dict):
|
||||||
@@ -235,6 +265,7 @@ class Authenticator(LoggingConfigurable):
|
|||||||
'name': authenticated,
|
'name': authenticated,
|
||||||
}
|
}
|
||||||
authenticated.setdefault('auth_state', None)
|
authenticated.setdefault('auth_state', None)
|
||||||
|
authenticated.setdefault('admin', None)
|
||||||
|
|
||||||
# normalize the username
|
# normalize the username
|
||||||
authenticated['name'] = username = self.normalize_username(authenticated['name'])
|
authenticated['name'] = username = self.normalize_username(authenticated['name'])
|
||||||
@@ -242,15 +273,21 @@ class Authenticator(LoggingConfigurable):
|
|||||||
self.log.warning("Disallowing invalid username %r.", username)
|
self.log.warning("Disallowing invalid username %r.", username)
|
||||||
return
|
return
|
||||||
|
|
||||||
whitelist_pass = yield gen.maybe_future(self.check_whitelist(username))
|
blacklist_pass = await maybe_future(self.check_blacklist(username))
|
||||||
|
whitelist_pass = await maybe_future(self.check_whitelist(username))
|
||||||
|
if blacklist_pass:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
self.log.warning("User %r in blacklist. Stop authentication", username)
|
||||||
|
return
|
||||||
|
|
||||||
if whitelist_pass:
|
if whitelist_pass:
|
||||||
return authenticated
|
return authenticated
|
||||||
else:
|
else:
|
||||||
self.log.warning("User %r not in whitelist.", username)
|
self.log.warning("User %r not in whitelist.", username)
|
||||||
return
|
return
|
||||||
|
|
||||||
@gen.coroutine
|
async def authenticate(self, handler, data):
|
||||||
def authenticate(self, handler, data):
|
|
||||||
"""Authenticate a user with login form data
|
"""Authenticate a user with login form data
|
||||||
|
|
||||||
This must be a tornado gen.coroutine.
|
This must be a tornado gen.coroutine.
|
||||||
@@ -269,10 +306,10 @@ class Authenticator(LoggingConfigurable):
|
|||||||
Returns:
|
Returns:
|
||||||
user (str or dict or None): The username of the authenticated user,
|
user (str or dict or None): The username of the authenticated user,
|
||||||
or None if Authentication failed.
|
or None if Authentication failed.
|
||||||
If the Authenticator has state associated with the user,
|
The Authenticator may return a dict instead, which MUST have a
|
||||||
it can return a dict with the keys 'name' and 'auth_state',
|
key 'name' holding the username, and may have two optional keys
|
||||||
where 'name' is the username and 'auth_state' is a dictionary
|
set - 'auth_state', a dictionary of of auth state that will be
|
||||||
of auth state that will be persisted.
|
persisted; and 'admin', the admin setting value for the user.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def pre_spawn_start(self, user, spawner):
|
def pre_spawn_start(self, user, spawner):
|
||||||
@@ -475,20 +512,19 @@ class LocalAuthenticator(Authenticator):
|
|||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@gen.coroutine
|
async def add_user(self, user):
|
||||||
def add_user(self, user):
|
|
||||||
"""Hook called whenever a new user is added
|
"""Hook called whenever a new user is added
|
||||||
|
|
||||||
If self.create_system_users, the user will attempt to be created if it doesn't exist.
|
If self.create_system_users, the user will attempt to be created if it doesn't exist.
|
||||||
"""
|
"""
|
||||||
user_exists = yield gen.maybe_future(self.system_user_exists(user))
|
user_exists = await maybe_future(self.system_user_exists(user))
|
||||||
if not user_exists:
|
if not user_exists:
|
||||||
if self.create_system_users:
|
if self.create_system_users:
|
||||||
yield gen.maybe_future(self.add_system_user(user))
|
await maybe_future(self.add_system_user(user))
|
||||||
else:
|
else:
|
||||||
raise KeyError("User %s does not exist." % user.name)
|
raise KeyError("User %s does not exist." % user.name)
|
||||||
|
|
||||||
yield gen.maybe_future(super().add_user(user))
|
await maybe_future(super().add_user(user))
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def system_user_exists(user):
|
def system_user_exists(user):
|
||||||
@@ -519,6 +555,12 @@ class LocalAuthenticator(Authenticator):
|
|||||||
class PAMAuthenticator(LocalAuthenticator):
|
class PAMAuthenticator(LocalAuthenticator):
|
||||||
"""Authenticate local UNIX users with PAM"""
|
"""Authenticate local UNIX users with PAM"""
|
||||||
|
|
||||||
|
# run PAM in a thread, since it can be slow
|
||||||
|
executor = Any()
|
||||||
|
@default('executor')
|
||||||
|
def _default_executor(self):
|
||||||
|
return ThreadPoolExecutor(1)
|
||||||
|
|
||||||
encoding = Unicode('utf8',
|
encoding = Unicode('utf8',
|
||||||
help="""
|
help="""
|
||||||
The text encoding to use when communicating with PAM
|
The text encoding to use when communicating with PAM
|
||||||
@@ -544,12 +586,25 @@ class PAMAuthenticator(LocalAuthenticator):
|
|||||||
"""
|
"""
|
||||||
).tag(config=True)
|
).tag(config=True)
|
||||||
|
|
||||||
|
check_account = Bool(True,
|
||||||
|
help="""
|
||||||
|
Whether to check the user's account status via PAM during authentication.
|
||||||
|
|
||||||
|
The PAM account stack performs non-authentication based account
|
||||||
|
management. It is typically used to restrict/permit access to a
|
||||||
|
service and this step is needed to access the host's user access control.
|
||||||
|
|
||||||
|
Disabling this can be dangerous as authenticated but unauthorized users may
|
||||||
|
be granted access and, therefore, arbitrary execution on the system.
|
||||||
|
"""
|
||||||
|
).tag(config=True)
|
||||||
|
|
||||||
def __init__(self, **kwargs):
|
def __init__(self, **kwargs):
|
||||||
if pamela is None:
|
if pamela is None:
|
||||||
raise _pamela_error from None
|
raise _pamela_error from None
|
||||||
super().__init__(**kwargs)
|
super().__init__(**kwargs)
|
||||||
|
|
||||||
@gen.coroutine
|
@run_on_executor
|
||||||
def authenticate(self, handler, data):
|
def authenticate(self, handler, data):
|
||||||
"""Authenticate with PAM, and return the username if login is successful.
|
"""Authenticate with PAM, and return the username if login is successful.
|
||||||
|
|
||||||
@@ -557,32 +612,44 @@ class PAMAuthenticator(LocalAuthenticator):
|
|||||||
"""
|
"""
|
||||||
username = data['username']
|
username = data['username']
|
||||||
try:
|
try:
|
||||||
pamela.authenticate(username, data['password'], service=self.service)
|
pamela.authenticate(username, data['password'], service=self.service, encoding=self.encoding)
|
||||||
except pamela.PAMError as e:
|
except pamela.PAMError as e:
|
||||||
if handler is not None:
|
if handler is not None:
|
||||||
self.log.warning("PAM Authentication failed (%s@%s): %s", username, handler.request.remote_ip, e)
|
self.log.warning("PAM Authentication failed (%s@%s): %s", username, handler.request.remote_ip, e)
|
||||||
else:
|
else:
|
||||||
self.log.warning("PAM Authentication failed: %s", e)
|
self.log.warning("PAM Authentication failed: %s", e)
|
||||||
else:
|
else:
|
||||||
return username
|
if not self.check_account:
|
||||||
|
return username
|
||||||
|
try:
|
||||||
|
pamela.check_account(username, service=self.service, encoding=self.encoding)
|
||||||
|
except pamela.PAMError as e:
|
||||||
|
if handler is not None:
|
||||||
|
self.log.warning("PAM Account Check failed (%s@%s): %s", username, handler.request.remote_ip, e)
|
||||||
|
else:
|
||||||
|
self.log.warning("PAM Account Check failed: %s", e)
|
||||||
|
else:
|
||||||
|
return username
|
||||||
|
|
||||||
|
@run_on_executor
|
||||||
def pre_spawn_start(self, user, spawner):
|
def pre_spawn_start(self, user, spawner):
|
||||||
"""Open PAM session for user if so configured"""
|
"""Open PAM session for user if so configured"""
|
||||||
if not self.open_sessions:
|
if not self.open_sessions:
|
||||||
return
|
return
|
||||||
try:
|
try:
|
||||||
pamela.open_session(user.name, service=self.service)
|
pamela.open_session(user.name, service=self.service, encoding=self.encoding)
|
||||||
except pamela.PAMError as e:
|
except pamela.PAMError as e:
|
||||||
self.log.warning("Failed to open PAM session for %s: %s", user.name, e)
|
self.log.warning("Failed to open PAM session for %s: %s", user.name, e)
|
||||||
self.log.warning("Disabling PAM sessions from now on.")
|
self.log.warning("Disabling PAM sessions from now on.")
|
||||||
self.open_sessions = False
|
self.open_sessions = False
|
||||||
|
|
||||||
|
@run_on_executor
|
||||||
def post_spawn_stop(self, user, spawner):
|
def post_spawn_stop(self, user, spawner):
|
||||||
"""Close PAM session for user if we were configured to opened one"""
|
"""Close PAM session for user if we were configured to opened one"""
|
||||||
if not self.open_sessions:
|
if not self.open_sessions:
|
||||||
return
|
return
|
||||||
try:
|
try:
|
||||||
pamela.close_session(user.name, service=self.service)
|
pamela.close_session(user.name, service=self.service, encoding=self.encoding)
|
||||||
except pamela.PAMError as e:
|
except pamela.PAMError as e:
|
||||||
self.log.warning("Failed to close PAM session for %s: %s", user.name, e)
|
self.log.warning("Failed to close PAM session for %s: %s", user.name, e)
|
||||||
self.log.warning("Disabling PAM sessions from now on.")
|
self.log.warning("Disabling PAM sessions from now on.")
|
||||||
|
@@ -19,6 +19,7 @@ except ImportError:
|
|||||||
class InvalidToken(Exception):
|
class InvalidToken(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
from .utils import maybe_future
|
||||||
|
|
||||||
KEY_ENV = 'JUPYTERHUB_CRYPT_KEY'
|
KEY_ENV = 'JUPYTERHUB_CRYPT_KEY'
|
||||||
|
|
||||||
@@ -132,7 +133,7 @@ class CryptKeeper(SingletonConfigurable):
|
|||||||
def encrypt(self, data):
|
def encrypt(self, data):
|
||||||
"""Encrypt an object with cryptography"""
|
"""Encrypt an object with cryptography"""
|
||||||
self.check_available()
|
self.check_available()
|
||||||
return self.executor.submit(self._encrypt, data)
|
return maybe_future(self.executor.submit(self._encrypt, data))
|
||||||
|
|
||||||
def _decrypt(self, encrypted):
|
def _decrypt(self, encrypted):
|
||||||
decrypted = self.fernet.decrypt(encrypted)
|
decrypted = self.fernet.decrypt(encrypted)
|
||||||
@@ -141,7 +142,7 @@ class CryptKeeper(SingletonConfigurable):
|
|||||||
def decrypt(self, encrypted):
|
def decrypt(self, encrypted):
|
||||||
"""Decrypt an object with cryptography"""
|
"""Decrypt an object with cryptography"""
|
||||||
self.check_available()
|
self.check_available()
|
||||||
return self.executor.submit(self._decrypt, encrypted)
|
return maybe_future(self.executor.submit(self._decrypt, encrypted))
|
||||||
|
|
||||||
|
|
||||||
def encrypt(data):
|
def encrypt(data):
|
||||||
@@ -158,4 +159,3 @@ def decrypt(data):
|
|||||||
Returns a Future whose result will be the decrypted, deserialized data.
|
Returns a Future whose result will be the decrypted, deserialized data.
|
||||||
"""
|
"""
|
||||||
return CryptKeeper.instance().decrypt(data)
|
return CryptKeeper.instance().decrypt(data)
|
||||||
|
|
@@ -130,13 +130,51 @@ def upgrade_if_needed(db_url, backup=True, log=None):
|
|||||||
upgrade(db_url)
|
upgrade(db_url)
|
||||||
|
|
||||||
|
|
||||||
def _alembic(*args):
|
def shell(args=None):
|
||||||
|
"""Start an IPython shell hooked up to the jupyerhub database"""
|
||||||
|
from .app import JupyterHub
|
||||||
|
hub = JupyterHub()
|
||||||
|
hub.load_config_file(hub.config_file)
|
||||||
|
db_url = hub.db_url
|
||||||
|
db = orm.new_session_factory(db_url, **hub.db_kwargs)()
|
||||||
|
ns = {
|
||||||
|
'db': db,
|
||||||
|
'db_url': db_url,
|
||||||
|
'orm': orm,
|
||||||
|
}
|
||||||
|
|
||||||
|
import IPython
|
||||||
|
IPython.start_ipython(args, user_ns=ns)
|
||||||
|
|
||||||
|
|
||||||
|
def _alembic(args):
|
||||||
"""Run an alembic command with a temporary alembic.ini"""
|
"""Run an alembic command with a temporary alembic.ini"""
|
||||||
with _temp_alembic_ini('sqlite:///jupyterhub.sqlite') as alembic_ini:
|
from .app import JupyterHub
|
||||||
|
hub = JupyterHub()
|
||||||
|
hub.load_config_file(hub.config_file)
|
||||||
|
db_url = hub.db_url
|
||||||
|
with _temp_alembic_ini(db_url) as alembic_ini:
|
||||||
check_call(
|
check_call(
|
||||||
['alembic', '-c', alembic_ini] + list(args)
|
['alembic', '-c', alembic_ini] + args
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def main(args=None):
|
||||||
|
if args is None:
|
||||||
|
args = sys.argv[1:]
|
||||||
|
# dumb option parsing, since we want to pass things through
|
||||||
|
# to subcommands
|
||||||
|
choices = ['shell', 'alembic']
|
||||||
|
if not args or args[0] not in choices:
|
||||||
|
print("Select a command from: %s" % ', '.join(choices))
|
||||||
|
return 1
|
||||||
|
cmd, args = args[0], args[1:]
|
||||||
|
|
||||||
|
if cmd == 'shell':
|
||||||
|
shell(args)
|
||||||
|
elif cmd == 'alembic':
|
||||||
|
_alembic(args)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
_alembic(*sys.argv[1:])
|
sys.exit(main())
|
||||||
|
@@ -1,8 +1,8 @@
|
|||||||
from .base import *
|
from .base import *
|
||||||
from .login import *
|
from .login import *
|
||||||
|
|
||||||
from . import base, pages, login
|
from . import base, pages, login, metrics
|
||||||
|
|
||||||
default_handlers = []
|
default_handlers = []
|
||||||
for mod in (base, pages, login):
|
for mod in (base, pages, login, metrics):
|
||||||
default_handlers.extend(mod.default_handlers)
|
default_handlers.extend(mod.default_handlers)
|
||||||
|
@@ -4,15 +4,21 @@
|
|||||||
# Distributed under the terms of the Modified BSD License.
|
# Distributed under the terms of the Modified BSD License.
|
||||||
|
|
||||||
import copy
|
import copy
|
||||||
import re
|
from datetime import datetime, timedelta
|
||||||
from datetime import timedelta
|
|
||||||
from http.client import responses
|
from http.client import responses
|
||||||
|
import json
|
||||||
|
import math
|
||||||
|
import random
|
||||||
|
import re
|
||||||
|
import time
|
||||||
from urllib.parse import urlparse, urlunparse, parse_qs, urlencode
|
from urllib.parse import urlparse, urlunparse, parse_qs, urlencode
|
||||||
|
import uuid
|
||||||
|
|
||||||
from jinja2 import TemplateNotFound
|
from jinja2 import TemplateNotFound
|
||||||
|
|
||||||
|
from sqlalchemy.exc import SQLAlchemyError
|
||||||
from tornado.log import app_log
|
from tornado.log import app_log
|
||||||
from tornado.httputil import url_concat
|
from tornado.httputil import url_concat, HTTPHeaders
|
||||||
from tornado.ioloop import IOLoop
|
from tornado.ioloop import IOLoop
|
||||||
from tornado.web import RequestHandler
|
from tornado.web import RequestHandler
|
||||||
from tornado import gen, web
|
from tornado import gen, web
|
||||||
@@ -21,7 +27,11 @@ from .. import __version__
|
|||||||
from .. import orm
|
from .. import orm
|
||||||
from ..objects import Server
|
from ..objects import Server
|
||||||
from ..spawner import LocalProcessSpawner
|
from ..spawner import LocalProcessSpawner
|
||||||
from ..utils import url_path_join
|
from ..utils import maybe_future, url_path_join
|
||||||
|
from ..metrics import (
|
||||||
|
SERVER_SPAWN_DURATION_SECONDS, ServerSpawnStatus,
|
||||||
|
PROXY_ADD_DURATION_SECONDS, ProxyAddStatus,
|
||||||
|
)
|
||||||
|
|
||||||
# pattern for the authentication token header
|
# pattern for the authentication token header
|
||||||
auth_header_pat = re.compile(r'^(?:token|bearer)\s+([^\s]+)$', flags=re.IGNORECASE)
|
auth_header_pat = re.compile(r'^(?:token|bearer)\s+([^\s]+)$', flags=re.IGNORECASE)
|
||||||
@@ -31,9 +41,13 @@ reasons = {
|
|||||||
'timeout': "Failed to reach your server."
|
'timeout': "Failed to reach your server."
|
||||||
" Please try again later."
|
" Please try again later."
|
||||||
" Contact admin if the issue persists.",
|
" Contact admin if the issue persists.",
|
||||||
'error': "Failed to start your server. Please contact admin.",
|
'error': "Failed to start your server on the last attempt. "
|
||||||
|
" Please contact admin if the issue persists.",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# constant, not configurable
|
||||||
|
SESSION_COOKIE_NAME = 'jupyterhub-session-id'
|
||||||
|
|
||||||
class BaseHandler(RequestHandler):
|
class BaseHandler(RequestHandler):
|
||||||
"""Base Handler class with access to common methods and properties."""
|
"""Base Handler class with access to common methods and properties."""
|
||||||
|
|
||||||
@@ -50,6 +64,10 @@ class BaseHandler(RequestHandler):
|
|||||||
def base_url(self):
|
def base_url(self):
|
||||||
return self.settings.get('base_url', '/')
|
return self.settings.get('base_url', '/')
|
||||||
|
|
||||||
|
@property
|
||||||
|
def default_url(self):
|
||||||
|
return self.settings.get('default_url', '')
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def version_hash(self):
|
def version_hash(self):
|
||||||
return self.settings.get('version_hash', '')
|
return self.settings.get('version_hash', '')
|
||||||
@@ -77,6 +95,7 @@ class BaseHandler(RequestHandler):
|
|||||||
@property
|
@property
|
||||||
def services(self):
|
def services(self):
|
||||||
return self.settings.setdefault('services', {})
|
return self.settings.setdefault('services', {})
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def hub(self):
|
def hub(self):
|
||||||
return self.settings['hub']
|
return self.settings['hub']
|
||||||
@@ -125,20 +144,28 @@ class BaseHandler(RequestHandler):
|
|||||||
"report-uri " + self.csp_report_uri,
|
"report-uri " + self.csp_report_uri,
|
||||||
])
|
])
|
||||||
|
|
||||||
|
def get_content_type(self):
|
||||||
|
return 'text/html'
|
||||||
|
|
||||||
def set_default_headers(self):
|
def set_default_headers(self):
|
||||||
"""
|
"""
|
||||||
Set any headers passed as tornado_settings['headers'].
|
Set any headers passed as tornado_settings['headers'].
|
||||||
|
|
||||||
By default sets Content-Security-Policy of frame-ancestors 'self'.
|
By default sets Content-Security-Policy of frame-ancestors 'self'.
|
||||||
|
Also responsible for setting content-type header
|
||||||
"""
|
"""
|
||||||
headers = self.settings.get('headers', {})
|
# wrap in HTTPHeaders for case-insensitivity
|
||||||
|
headers = HTTPHeaders(self.settings.get('headers', {}))
|
||||||
headers.setdefault("X-JupyterHub-Version", __version__)
|
headers.setdefault("X-JupyterHub-Version", __version__)
|
||||||
|
|
||||||
for header_name, header_content in headers.items():
|
for header_name, header_content in headers.items():
|
||||||
self.set_header(header_name, header_content)
|
self.set_header(header_name, header_content)
|
||||||
|
|
||||||
|
if 'Access-Control-Allow-Headers' not in headers:
|
||||||
|
self.set_header('Access-Control-Allow-Headers', 'accept, content-type, authorization')
|
||||||
if 'Content-Security-Policy' not in headers:
|
if 'Content-Security-Policy' not in headers:
|
||||||
self.set_header('Content-Security-Policy', self.content_security_policy)
|
self.set_header('Content-Security-Policy', self.content_security_policy)
|
||||||
|
self.set_header('Content-Type', self.get_content_type())
|
||||||
|
|
||||||
#---------------------------------------------------------------
|
#---------------------------------------------------------------
|
||||||
# Login and cookie-related
|
# Login and cookie-related
|
||||||
@@ -152,6 +179,10 @@ class BaseHandler(RequestHandler):
|
|||||||
def cookie_max_age_days(self):
|
def cookie_max_age_days(self):
|
||||||
return self.settings.get('cookie_max_age_days', None)
|
return self.settings.get('cookie_max_age_days', None)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def redirect_to_server(self):
|
||||||
|
return self.settings.get('redirect_to_server', True)
|
||||||
|
|
||||||
def get_auth_token(self):
|
def get_auth_token(self):
|
||||||
"""Get the authorization token from Authorization header"""
|
"""Get the authorization token from Authorization header"""
|
||||||
auth_header = self.request.headers.get('Authorization', '')
|
auth_header = self.request.headers.get('Authorization', '')
|
||||||
@@ -173,8 +204,10 @@ class BaseHandler(RequestHandler):
|
|||||||
orm_token = orm.OAuthAccessToken.find(self.db, token)
|
orm_token = orm.OAuthAccessToken.find(self.db, token)
|
||||||
if orm_token is None:
|
if orm_token is None:
|
||||||
return None
|
return None
|
||||||
else:
|
orm_token.last_activity = \
|
||||||
return self._user_from_orm(orm_token.user)
|
orm_token.user.last_activity = datetime.utcnow()
|
||||||
|
self.db.commit()
|
||||||
|
return self._user_from_orm(orm_token.user)
|
||||||
|
|
||||||
def get_current_user_token(self):
|
def get_current_user_token(self):
|
||||||
"""get_current_user from Authorization header token"""
|
"""get_current_user from Authorization header token"""
|
||||||
@@ -185,6 +218,13 @@ class BaseHandler(RequestHandler):
|
|||||||
if orm_token is None:
|
if orm_token is None:
|
||||||
return None
|
return None
|
||||||
else:
|
else:
|
||||||
|
# record token activity
|
||||||
|
now = datetime.utcnow()
|
||||||
|
orm_token.last_activity = now
|
||||||
|
if orm_token.user:
|
||||||
|
orm_token.user.last_activity = now
|
||||||
|
|
||||||
|
self.db.commit()
|
||||||
return orm_token.service or self._user_from_orm(orm_token.user)
|
return orm_token.service or self._user_from_orm(orm_token.user)
|
||||||
|
|
||||||
def _user_for_cookie(self, cookie_name, cookie_value=None):
|
def _user_for_cookie(self, cookie_name, cookie_value=None):
|
||||||
@@ -209,6 +249,10 @@ class BaseHandler(RequestHandler):
|
|||||||
self.log.warning("Invalid cookie token")
|
self.log.warning("Invalid cookie token")
|
||||||
# have cookie, but it's not valid. Clear it and start over.
|
# have cookie, but it's not valid. Clear it and start over.
|
||||||
clear()
|
clear()
|
||||||
|
return
|
||||||
|
# update user activity
|
||||||
|
user.last_activity = datetime.utcnow()
|
||||||
|
self.db.commit()
|
||||||
return user
|
return user
|
||||||
|
|
||||||
def _user_from_orm(self, orm_user):
|
def _user_from_orm(self, orm_user):
|
||||||
@@ -223,10 +267,17 @@ class BaseHandler(RequestHandler):
|
|||||||
|
|
||||||
def get_current_user(self):
|
def get_current_user(self):
|
||||||
"""get current username"""
|
"""get current username"""
|
||||||
user = self.get_current_user_token()
|
if not hasattr(self, '_jupyterhub_user'):
|
||||||
if user is not None:
|
try:
|
||||||
return user
|
user = self.get_current_user_token()
|
||||||
return self.get_current_user_cookie()
|
if user is None:
|
||||||
|
user = self.get_current_user_cookie()
|
||||||
|
self._jupyterhub_user = user
|
||||||
|
except Exception:
|
||||||
|
# don't let errors here raise more than once
|
||||||
|
self._jupyterhub_user = None
|
||||||
|
raise
|
||||||
|
return self._jupyterhub_user
|
||||||
|
|
||||||
def find_user(self, name):
|
def find_user(self, name):
|
||||||
"""Get a user by name
|
"""Get a user by name
|
||||||
@@ -245,17 +296,46 @@ class BaseHandler(RequestHandler):
|
|||||||
self.db.add(u)
|
self.db.add(u)
|
||||||
self.db.commit()
|
self.db.commit()
|
||||||
user = self._user_from_orm(u)
|
user = self._user_from_orm(u)
|
||||||
self.authenticator.add_user(user)
|
|
||||||
return user
|
return user
|
||||||
|
|
||||||
def clear_login_cookie(self, name=None):
|
def clear_login_cookie(self, name=None):
|
||||||
kwargs = {}
|
kwargs = {}
|
||||||
if self.subdomain_host:
|
if self.subdomain_host:
|
||||||
kwargs['domain'] = self.domain
|
kwargs['domain'] = self.domain
|
||||||
self.clear_cookie(self.hub.cookie_name, path=self.hub.base_url, **kwargs)
|
user = self.get_current_user_cookie()
|
||||||
self.clear_cookie('jupyterhub-services', path=url_path_join(self.base_url, 'services'))
|
session_id = self.get_session_cookie()
|
||||||
|
if session_id:
|
||||||
|
# clear session id
|
||||||
|
self.clear_cookie(SESSION_COOKIE_NAME, **kwargs)
|
||||||
|
|
||||||
def _set_user_cookie(self, user, server):
|
if user:
|
||||||
|
# user is logged in, clear any tokens associated with the current session
|
||||||
|
# don't clear session tokens if not logged in,
|
||||||
|
# because that could be a malicious logout request!
|
||||||
|
count = 0
|
||||||
|
for access_token in (
|
||||||
|
self.db.query(orm.OAuthAccessToken)
|
||||||
|
.filter(orm.OAuthAccessToken.user_id==user.id)
|
||||||
|
.filter(orm.OAuthAccessToken.session_id==session_id)
|
||||||
|
):
|
||||||
|
self.db.delete(access_token)
|
||||||
|
count += 1
|
||||||
|
if count:
|
||||||
|
self.log.debug("Deleted %s access tokens for %s", count, user.name)
|
||||||
|
self.db.commit()
|
||||||
|
|
||||||
|
|
||||||
|
# clear hub cookie
|
||||||
|
self.clear_cookie(self.hub.cookie_name, path=self.hub.base_url, **kwargs)
|
||||||
|
# clear services cookie
|
||||||
|
self.clear_cookie('jupyterhub-services', path=url_path_join(self.base_url, 'services'), **kwargs)
|
||||||
|
|
||||||
|
def _set_cookie(self, key, value, encrypted=True, **overrides):
|
||||||
|
"""Setting any cookie should go through here
|
||||||
|
|
||||||
|
if encrypted use tornado's set_secure_cookie,
|
||||||
|
otherwise set plaintext cookies.
|
||||||
|
"""
|
||||||
# tornado <4.2 have a bug that consider secure==True as soon as
|
# tornado <4.2 have a bug that consider secure==True as soon as
|
||||||
# 'secure' kwarg is passed to set_secure_cookie
|
# 'secure' kwarg is passed to set_secure_cookie
|
||||||
kwargs = {
|
kwargs = {
|
||||||
@@ -265,14 +345,47 @@ class BaseHandler(RequestHandler):
|
|||||||
kwargs['secure'] = True
|
kwargs['secure'] = True
|
||||||
if self.subdomain_host:
|
if self.subdomain_host:
|
||||||
kwargs['domain'] = self.domain
|
kwargs['domain'] = self.domain
|
||||||
self.log.debug("Setting cookie for %s: %s, %s", user.name, server.cookie_name, kwargs)
|
|
||||||
self.set_secure_cookie(
|
kwargs.update(self.settings.get('cookie_options', {}))
|
||||||
|
kwargs.update(overrides)
|
||||||
|
|
||||||
|
if encrypted:
|
||||||
|
set_cookie = self.set_secure_cookie
|
||||||
|
else:
|
||||||
|
set_cookie = self.set_cookie
|
||||||
|
|
||||||
|
self.log.debug("Setting cookie %s: %s", key, kwargs)
|
||||||
|
set_cookie(key, value, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def _set_user_cookie(self, user, server):
|
||||||
|
self.log.debug("Setting cookie for %s: %s", user.name, server.cookie_name)
|
||||||
|
self._set_cookie(
|
||||||
server.cookie_name,
|
server.cookie_name,
|
||||||
user.cookie_id,
|
user.cookie_id,
|
||||||
|
encrypted=True,
|
||||||
path=server.base_url,
|
path=server.base_url,
|
||||||
**kwargs
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def get_session_cookie(self):
|
||||||
|
"""Get the session id from a cookie
|
||||||
|
|
||||||
|
Returns None if no session id is stored
|
||||||
|
"""
|
||||||
|
return self.get_cookie(SESSION_COOKIE_NAME, None)
|
||||||
|
|
||||||
|
def set_session_cookie(self):
|
||||||
|
"""Set a new session id cookie
|
||||||
|
|
||||||
|
new session id is returned
|
||||||
|
|
||||||
|
Session id cookie is *not* encrypted,
|
||||||
|
so other services on this domain can read it.
|
||||||
|
"""
|
||||||
|
session_id = uuid.uuid4().hex
|
||||||
|
self._set_cookie(SESSION_COOKIE_NAME, session_id, encrypted=False)
|
||||||
|
return session_id
|
||||||
|
|
||||||
def set_service_cookie(self, user):
|
def set_service_cookie(self, user):
|
||||||
"""set the login cookie for services"""
|
"""set the login cookie for services"""
|
||||||
self._set_user_cookie(user, orm.Server(
|
self._set_user_cookie(user, orm.Server(
|
||||||
@@ -295,50 +408,90 @@ class BaseHandler(RequestHandler):
|
|||||||
if self.db.query(orm.Service).filter(orm.Service.server != None).first():
|
if self.db.query(orm.Service).filter(orm.Service.server != None).first():
|
||||||
self.set_service_cookie(user)
|
self.set_service_cookie(user)
|
||||||
|
|
||||||
|
if not self.get_session_cookie():
|
||||||
|
self.set_session_cookie()
|
||||||
|
|
||||||
# create and set a new cookie token for the hub
|
# create and set a new cookie token for the hub
|
||||||
if not self.get_current_user_cookie():
|
if not self.get_current_user_cookie():
|
||||||
self.set_hub_cookie(user)
|
self.set_hub_cookie(user)
|
||||||
|
|
||||||
def authenticate(self, data):
|
def authenticate(self, data):
|
||||||
return gen.maybe_future(self.authenticator.get_authenticated_user(self, data))
|
return maybe_future(self.authenticator.get_authenticated_user(self, data))
|
||||||
|
|
||||||
def get_next_url(self, user=None):
|
def get_next_url(self, user=None):
|
||||||
"""Get the next_url for login redirect
|
"""Get the next_url for login redirect
|
||||||
|
|
||||||
Defaults to hub base_url /hub/ if user is not running,
|
Default URL after login:
|
||||||
otherwise user.url.
|
|
||||||
|
- if redirect_to_server (default): send to user's own server
|
||||||
|
- else: /hub/home
|
||||||
"""
|
"""
|
||||||
next_url = self.get_argument('next', default='')
|
next_url = self.get_argument('next', default='')
|
||||||
if (next_url + '/').startswith('%s://%s/' % (self.request.protocol, self.request.host)):
|
if (next_url + '/').startswith(
|
||||||
|
(
|
||||||
|
'%s://%s/' % (self.request.protocol, self.request.host),
|
||||||
|
'//%s/' % self.request.host,
|
||||||
|
)
|
||||||
|
):
|
||||||
# treat absolute URLs for our host as absolute paths:
|
# treat absolute URLs for our host as absolute paths:
|
||||||
next_url = urlparse(next_url).path
|
parsed = urlparse(next_url)
|
||||||
if not next_url.startswith('/'):
|
next_url = parsed.path
|
||||||
|
if parsed.query:
|
||||||
|
next_url = next_url + '?' + parsed.query
|
||||||
|
if parsed.hash:
|
||||||
|
next_url = next_url + '#' + parsed.hash
|
||||||
|
if next_url and (urlparse(next_url).netloc or not next_url.startswith('/')):
|
||||||
|
self.log.warning("Disallowing redirect outside JupyterHub: %r", next_url)
|
||||||
next_url = ''
|
next_url = ''
|
||||||
|
if next_url and next_url.startswith(url_path_join(self.base_url, 'user/')):
|
||||||
|
# add /hub/ prefix, to ensure we redirect to the right user's server.
|
||||||
|
# The next request will be handled by SpawnHandler,
|
||||||
|
# ultimately redirecting to the logged-in user's server.
|
||||||
|
without_prefix = next_url[len(self.base_url):]
|
||||||
|
next_url = url_path_join(self.hub.base_url, without_prefix)
|
||||||
|
self.log.warning("Redirecting %s to %s. For sharing public links, use /user-redirect/",
|
||||||
|
self.request.uri, next_url,
|
||||||
|
)
|
||||||
|
|
||||||
if not next_url:
|
if not next_url:
|
||||||
if user and user.running:
|
# custom default URL
|
||||||
|
next_url = self.default_url
|
||||||
|
|
||||||
|
if not next_url:
|
||||||
|
# default URL after login
|
||||||
|
# if self.redirect_to_server, default login URL initiates spawn,
|
||||||
|
# otherwise send to Hub home page (control panel)
|
||||||
|
if user and self.redirect_to_server:
|
||||||
next_url = user.url
|
next_url = user.url
|
||||||
else:
|
else:
|
||||||
next_url = self.hub.base_url
|
next_url = url_path_join(self.hub.base_url, 'home')
|
||||||
return next_url
|
return next_url
|
||||||
|
|
||||||
@gen.coroutine
|
async def login_user(self, data=None):
|
||||||
def login_user(self, data=None):
|
|
||||||
"""Login a user"""
|
"""Login a user"""
|
||||||
auth_timer = self.statsd.timer('login.authenticate').start()
|
auth_timer = self.statsd.timer('login.authenticate').start()
|
||||||
authenticated = yield self.authenticate(data)
|
authenticated = await self.authenticate(data)
|
||||||
auth_timer.stop(send=False)
|
auth_timer.stop(send=False)
|
||||||
|
|
||||||
if authenticated:
|
if authenticated:
|
||||||
username = authenticated['name']
|
username = authenticated['name']
|
||||||
auth_state = authenticated.get('auth_state')
|
auth_state = authenticated.get('auth_state')
|
||||||
|
admin = authenticated.get('admin')
|
||||||
|
new_user = username not in self.users
|
||||||
user = self.user_from_username(username)
|
user = self.user_from_username(username)
|
||||||
|
if new_user:
|
||||||
|
await maybe_future(self.authenticator.add_user(user))
|
||||||
|
# Only set `admin` if the authenticator returned an explicit value.
|
||||||
|
if admin is not None and admin != user.admin:
|
||||||
|
user.admin = admin
|
||||||
|
self.db.commit()
|
||||||
# always set auth_state and commit,
|
# always set auth_state and commit,
|
||||||
# because there could be key-rotation or clearing of previous values
|
# because there could be key-rotation or clearing of previous values
|
||||||
# going on.
|
# going on.
|
||||||
if not self.authenticator.enable_auth_state:
|
if not self.authenticator.enable_auth_state:
|
||||||
# auth_state is not enabled. Force None.
|
# auth_state is not enabled. Force None.
|
||||||
auth_state = None
|
auth_state = None
|
||||||
yield user.save_auth_state(auth_state)
|
await user.save_auth_state(auth_state)
|
||||||
self.db.commit()
|
self.db.commit()
|
||||||
self.set_login_cookie(user)
|
self.set_login_cookie(user)
|
||||||
self.statsd.incr('login.success')
|
self.statsd.incr('login.success')
|
||||||
@@ -375,9 +528,9 @@ class BaseHandler(RequestHandler):
|
|||||||
def active_server_limit(self):
|
def active_server_limit(self):
|
||||||
return self.settings.get('active_server_limit', 0)
|
return self.settings.get('active_server_limit', 0)
|
||||||
|
|
||||||
@gen.coroutine
|
async def spawn_single_user(self, user, server_name='', options=None):
|
||||||
def spawn_single_user(self, user, server_name='', options=None):
|
|
||||||
# in case of error, include 'try again from /hub/home' message
|
# in case of error, include 'try again from /hub/home' message
|
||||||
|
spawn_start_time = time.perf_counter()
|
||||||
self.extra_error_html = self.spawn_home_error
|
self.extra_error_html = self.spawn_home_error
|
||||||
|
|
||||||
user_server_name = user.name
|
user_server_name = user.name
|
||||||
@@ -387,6 +540,9 @@ class BaseHandler(RequestHandler):
|
|||||||
|
|
||||||
if server_name in user.spawners and user.spawners[server_name].pending:
|
if server_name in user.spawners and user.spawners[server_name].pending:
|
||||||
pending = user.spawners[server_name].pending
|
pending = user.spawners[server_name].pending
|
||||||
|
SERVER_SPAWN_DURATION_SECONDS.labels(
|
||||||
|
status=ServerSpawnStatus.already_pending
|
||||||
|
).observe(time.perf_counter() - spawn_start_time)
|
||||||
raise RuntimeError("%s pending %s" % (user_server_name, pending))
|
raise RuntimeError("%s pending %s" % (user_server_name, pending))
|
||||||
|
|
||||||
# count active servers and pending spawns
|
# count active servers and pending spawns
|
||||||
@@ -401,26 +557,52 @@ class BaseHandler(RequestHandler):
|
|||||||
active_server_limit = self.active_server_limit
|
active_server_limit = self.active_server_limit
|
||||||
|
|
||||||
if concurrent_spawn_limit and spawn_pending_count >= concurrent_spawn_limit:
|
if concurrent_spawn_limit and spawn_pending_count >= concurrent_spawn_limit:
|
||||||
self.log.info(
|
SERVER_SPAWN_DURATION_SECONDS.labels(
|
||||||
'%s pending spawns, throttling',
|
status=ServerSpawnStatus.throttled
|
||||||
spawn_pending_count,
|
).observe(time.perf_counter() - spawn_start_time)
|
||||||
|
# Suggest number of seconds client should wait before retrying
|
||||||
|
# This helps prevent thundering herd problems, where users simply
|
||||||
|
# immediately retry when we are overloaded.
|
||||||
|
retry_range = self.settings['spawn_throttle_retry_range']
|
||||||
|
retry_time = int(random.uniform(*retry_range))
|
||||||
|
|
||||||
|
# round suggestion to nicer human value (nearest 10 seconds or minute)
|
||||||
|
if retry_time <= 90:
|
||||||
|
# round human seconds up to nearest 10
|
||||||
|
human_retry_time = "%i0 seconds" % math.ceil(retry_time / 10.)
|
||||||
|
else:
|
||||||
|
# round number of minutes
|
||||||
|
human_retry_time = "%i minutes" % math.round(retry_time / 60.)
|
||||||
|
|
||||||
|
self.log.warning(
|
||||||
|
'%s pending spawns, throttling. Suggested retry in %s seconds.',
|
||||||
|
spawn_pending_count, retry_time,
|
||||||
)
|
)
|
||||||
raise web.HTTPError(
|
err = web.HTTPError(
|
||||||
429,
|
429,
|
||||||
"User startup rate limit exceeded. Try again in a few minutes.",
|
"Too many users trying to log in right now. Try again in {}.".format(human_retry_time)
|
||||||
)
|
)
|
||||||
|
# can't call set_header directly here because it gets ignored
|
||||||
|
# when errors are raised
|
||||||
|
# we handle err.headers ourselves in Handler.write_error
|
||||||
|
err.headers = {'Retry-After': retry_time}
|
||||||
|
raise err
|
||||||
|
|
||||||
if active_server_limit and active_count >= active_server_limit:
|
if active_server_limit and active_count >= active_server_limit:
|
||||||
self.log.info(
|
self.log.info(
|
||||||
'%s servers active, no space available',
|
'%s servers active, no space available',
|
||||||
active_count,
|
active_count,
|
||||||
)
|
)
|
||||||
|
SERVER_SPAWN_DURATION_SECONDS.labels(
|
||||||
|
status=ServerSpawnStatus.too_many_users
|
||||||
|
).observe(time.perf_counter() - spawn_start_time)
|
||||||
raise web.HTTPError(429, "Active user limit exceeded. Try again in a few minutes.")
|
raise web.HTTPError(429, "Active user limit exceeded. Try again in a few minutes.")
|
||||||
|
|
||||||
tic = IOLoop.current().time()
|
tic = IOLoop.current().time()
|
||||||
|
|
||||||
self.log.debug("Initiating spawn for %s", user_server_name)
|
self.log.debug("Initiating spawn for %s", user_server_name)
|
||||||
|
|
||||||
spawn_future = user.spawn(server_name, options)
|
spawn_future = user.spawn(server_name, options, handler=self)
|
||||||
|
|
||||||
self.log.debug("%i%s concurrent spawns",
|
self.log.debug("%i%s concurrent spawns",
|
||||||
spawn_pending_count,
|
spawn_pending_count,
|
||||||
@@ -434,25 +616,39 @@ class BaseHandler(RequestHandler):
|
|||||||
# while we are waiting for _proxy_pending to be set
|
# while we are waiting for _proxy_pending to be set
|
||||||
spawner._spawn_pending = True
|
spawner._spawn_pending = True
|
||||||
|
|
||||||
@gen.coroutine
|
async def finish_user_spawn():
|
||||||
def finish_user_spawn():
|
|
||||||
"""Finish the user spawn by registering listeners and notifying the proxy.
|
"""Finish the user spawn by registering listeners and notifying the proxy.
|
||||||
|
|
||||||
If the spawner is slow to start, this is passed as an async callback,
|
If the spawner is slow to start, this is passed as an async callback,
|
||||||
otherwise it is called immediately.
|
otherwise it is called immediately.
|
||||||
"""
|
"""
|
||||||
# wait for spawn Future
|
# wait for spawn Future
|
||||||
yield spawn_future
|
await spawn_future
|
||||||
toc = IOLoop.current().time()
|
toc = IOLoop.current().time()
|
||||||
self.log.info("User %s took %.3f seconds to start", user_server_name, toc-tic)
|
self.log.info("User %s took %.3f seconds to start", user_server_name, toc-tic)
|
||||||
self.statsd.timing('spawner.success', (toc - tic) * 1000)
|
self.statsd.timing('spawner.success', (toc - tic) * 1000)
|
||||||
|
SERVER_SPAWN_DURATION_SECONDS.labels(
|
||||||
|
status=ServerSpawnStatus.success
|
||||||
|
).observe(time.perf_counter() - spawn_start_time)
|
||||||
|
proxy_add_start_time = time.perf_counter()
|
||||||
spawner._proxy_pending = True
|
spawner._proxy_pending = True
|
||||||
try:
|
try:
|
||||||
yield self.proxy.add_user(user, server_name)
|
await self.proxy.add_user(user, server_name)
|
||||||
|
|
||||||
|
PROXY_ADD_DURATION_SECONDS.labels(
|
||||||
|
status='success'
|
||||||
|
).observe(
|
||||||
|
time.perf_counter() - proxy_add_start_time
|
||||||
|
)
|
||||||
except Exception:
|
except Exception:
|
||||||
self.log.exception("Failed to add %s to proxy!", user_server_name)
|
self.log.exception("Failed to add %s to proxy!", user_server_name)
|
||||||
self.log.error("Stopping %s to avoid inconsistent state", user_server_name)
|
self.log.error("Stopping %s to avoid inconsistent state", user_server_name)
|
||||||
yield user.stop()
|
await user.stop()
|
||||||
|
PROXY_ADD_DURATION_SECONDS.labels(
|
||||||
|
status='failure'
|
||||||
|
).observe(
|
||||||
|
time.perf_counter() - proxy_add_start_time
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
spawner.add_poll_callback(self.user_stopped, user, server_name)
|
spawner.add_poll_callback(self.user_stopped, user, server_name)
|
||||||
finally:
|
finally:
|
||||||
@@ -460,7 +656,8 @@ class BaseHandler(RequestHandler):
|
|||||||
|
|
||||||
# hook up spawner._spawn_future so that other requests can await
|
# hook up spawner._spawn_future so that other requests can await
|
||||||
# this result
|
# this result
|
||||||
finish_spawn_future = spawner._spawn_future = finish_user_spawn()
|
finish_spawn_future = spawner._spawn_future = maybe_future(finish_user_spawn())
|
||||||
|
|
||||||
def _clear_spawn_future(f):
|
def _clear_spawn_future(f):
|
||||||
# clear spawner._spawn_future when it's done
|
# clear spawner._spawn_future when it's done
|
||||||
# keep an exception around, though, to prevent repeated implicit spawns
|
# keep an exception around, though, to prevent repeated implicit spawns
|
||||||
@@ -469,10 +666,44 @@ class BaseHandler(RequestHandler):
|
|||||||
spawner._spawn_future = None
|
spawner._spawn_future = None
|
||||||
# Now we're all done. clear _spawn_pending flag
|
# Now we're all done. clear _spawn_pending flag
|
||||||
spawner._spawn_pending = False
|
spawner._spawn_pending = False
|
||||||
|
|
||||||
finish_spawn_future.add_done_callback(_clear_spawn_future)
|
finish_spawn_future.add_done_callback(_clear_spawn_future)
|
||||||
|
|
||||||
|
# when spawn finishes (success or failure)
|
||||||
|
# update failure count and abort if consecutive failure limit
|
||||||
|
# is reached
|
||||||
|
def _track_failure_count(f):
|
||||||
|
if f.exception() is None:
|
||||||
|
# spawn succeeded, reset failure count
|
||||||
|
self.settings['failure_count'] = 0
|
||||||
|
return
|
||||||
|
# spawn failed, increment count and abort if limit reached
|
||||||
|
self.settings.setdefault('failure_count', 0)
|
||||||
|
self.settings['failure_count'] += 1
|
||||||
|
failure_count = self.settings['failure_count']
|
||||||
|
failure_limit = spawner.consecutive_failure_limit
|
||||||
|
if failure_limit and 1 < failure_count < failure_limit:
|
||||||
|
self.log.warning(
|
||||||
|
"%i consecutive spawns failed. "
|
||||||
|
"Hub will exit if failure count reaches %i before succeeding",
|
||||||
|
failure_count, failure_limit,
|
||||||
|
)
|
||||||
|
if failure_limit and failure_count >= failure_limit:
|
||||||
|
self.log.critical(
|
||||||
|
"Aborting due to %i consecutive spawn failures", failure_count
|
||||||
|
)
|
||||||
|
# abort in 2 seconds to allow pending handlers to resolve
|
||||||
|
# mostly propagating errors for the current failures
|
||||||
|
def abort():
|
||||||
|
raise SystemExit(1)
|
||||||
|
IOLoop.current().call_later(2, abort)
|
||||||
|
|
||||||
|
finish_spawn_future.add_done_callback(_track_failure_count)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
yield gen.with_timeout(timedelta(seconds=self.slow_spawn_timeout), finish_spawn_future)
|
await gen.with_timeout(
|
||||||
|
timedelta(seconds=self.slow_spawn_timeout), finish_spawn_future
|
||||||
|
)
|
||||||
except gen.TimeoutError:
|
except gen.TimeoutError:
|
||||||
# waiting_for_response indicates server process has started,
|
# waiting_for_response indicates server process has started,
|
||||||
# but is yet to become responsive.
|
# but is yet to become responsive.
|
||||||
@@ -485,10 +716,13 @@ class BaseHandler(RequestHandler):
|
|||||||
|
|
||||||
# start has finished, but the server hasn't come up
|
# start has finished, but the server hasn't come up
|
||||||
# check if the server died while we were waiting
|
# check if the server died while we were waiting
|
||||||
status = yield spawner.poll()
|
status = await spawner.poll()
|
||||||
if status is not None:
|
if status is not None:
|
||||||
toc = IOLoop.current().time()
|
toc = IOLoop.current().time()
|
||||||
self.statsd.timing('spawner.failure', (toc - tic) * 1000)
|
self.statsd.timing('spawner.failure', (toc - tic) * 1000)
|
||||||
|
SERVER_SPAWN_DURATION_SECONDS.labels(
|
||||||
|
status=ServerSpawnStatus.failure
|
||||||
|
).observe(time.perf_counter() - spawn_start_time)
|
||||||
raise web.HTTPError(500, "Spawner failed to start [status=%s]. The logs for %s may contain details." % (
|
raise web.HTTPError(500, "Spawner failed to start [status=%s]. The logs for %s may contain details." % (
|
||||||
status, spawner._log_name))
|
status, spawner._log_name))
|
||||||
|
|
||||||
@@ -506,21 +740,19 @@ class BaseHandler(RequestHandler):
|
|||||||
self.log.warning("User %s is slow to be added to the proxy (timeout=%s)",
|
self.log.warning("User %s is slow to be added to the proxy (timeout=%s)",
|
||||||
user_server_name, self.slow_spawn_timeout)
|
user_server_name, self.slow_spawn_timeout)
|
||||||
|
|
||||||
@gen.coroutine
|
async def user_stopped(self, user, server_name):
|
||||||
def user_stopped(self, user, server_name):
|
|
||||||
"""Callback that fires when the spawner has stopped"""
|
"""Callback that fires when the spawner has stopped"""
|
||||||
spawner = user.spawners[server_name]
|
spawner = user.spawners[server_name]
|
||||||
status = yield spawner.poll()
|
status = await spawner.poll()
|
||||||
if status is None:
|
if status is None:
|
||||||
status = 'unknown'
|
status = 'unknown'
|
||||||
self.log.warning("User %s server stopped, with exit code: %s",
|
self.log.warning("User %s server stopped, with exit code: %s",
|
||||||
user.name, status,
|
user.name, status,
|
||||||
)
|
)
|
||||||
yield self.proxy.delete_user(user, server_name)
|
await self.proxy.delete_user(user, server_name)
|
||||||
yield user.stop(server_name)
|
await user.stop(server_name)
|
||||||
|
|
||||||
@gen.coroutine
|
async def stop_single_user(self, user, name=''):
|
||||||
def stop_single_user(self, user, name=''):
|
|
||||||
if name not in user.spawners:
|
if name not in user.spawners:
|
||||||
raise KeyError("User %s has no such spawner %r", user.name, name)
|
raise KeyError("User %s has no such spawner %r", user.name, name)
|
||||||
spawner = user.spawners[name]
|
spawner = user.spawners[name]
|
||||||
@@ -530,8 +762,7 @@ class BaseHandler(RequestHandler):
|
|||||||
# to avoid races
|
# to avoid races
|
||||||
spawner._stop_pending = True
|
spawner._stop_pending = True
|
||||||
|
|
||||||
@gen.coroutine
|
async def stop():
|
||||||
def stop():
|
|
||||||
"""Stop the server
|
"""Stop the server
|
||||||
|
|
||||||
1. remove it from the proxy
|
1. remove it from the proxy
|
||||||
@@ -540,8 +771,8 @@ class BaseHandler(RequestHandler):
|
|||||||
"""
|
"""
|
||||||
tic = IOLoop.current().time()
|
tic = IOLoop.current().time()
|
||||||
try:
|
try:
|
||||||
yield self.proxy.delete_user(user, name)
|
await self.proxy.delete_user(user, name)
|
||||||
yield user.stop(name)
|
await user.stop(name)
|
||||||
finally:
|
finally:
|
||||||
spawner._stop_pending = False
|
spawner._stop_pending = False
|
||||||
toc = IOLoop.current().time()
|
toc = IOLoop.current().time()
|
||||||
@@ -549,13 +780,10 @@ class BaseHandler(RequestHandler):
|
|||||||
self.statsd.timing('spawner.stop', (toc - tic) * 1000)
|
self.statsd.timing('spawner.stop', (toc - tic) * 1000)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
yield gen.with_timeout(timedelta(seconds=self.slow_stop_timeout), stop())
|
await gen.with_timeout(timedelta(seconds=self.slow_stop_timeout), stop())
|
||||||
except gen.TimeoutError:
|
except gen.TimeoutError:
|
||||||
if spawner._stop_pending:
|
# hit timeout, but stop is still pending
|
||||||
# hit timeout, but stop is still pending
|
self.log.warning("User %s:%s server is slow to stop", user.name, name)
|
||||||
self.log.warning("User %s:%s server is slow to stop", user.name, name)
|
|
||||||
else:
|
|
||||||
raise
|
|
||||||
|
|
||||||
#---------------------------------------------------------------
|
#---------------------------------------------------------------
|
||||||
# template rendering
|
# template rendering
|
||||||
@@ -579,14 +807,16 @@ class BaseHandler(RequestHandler):
|
|||||||
return self.settings['jinja2_env'].get_template(name)
|
return self.settings['jinja2_env'].get_template(name)
|
||||||
|
|
||||||
def render_template(self, name, **ns):
|
def render_template(self, name, **ns):
|
||||||
ns.update(self.template_namespace)
|
template_ns = {}
|
||||||
|
template_ns.update(self.template_namespace)
|
||||||
|
template_ns.update(ns)
|
||||||
template = self.get_template(name)
|
template = self.get_template(name)
|
||||||
return template.render(**ns)
|
return template.render(**template_ns)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def template_namespace(self):
|
def template_namespace(self):
|
||||||
user = self.get_current_user()
|
user = self.get_current_user()
|
||||||
return dict(
|
ns = dict(
|
||||||
base_url=self.hub.base_url,
|
base_url=self.hub.base_url,
|
||||||
prefix=self.base_url,
|
prefix=self.base_url,
|
||||||
user=user,
|
user=user,
|
||||||
@@ -596,6 +826,9 @@ class BaseHandler(RequestHandler):
|
|||||||
static_url=self.static_url,
|
static_url=self.static_url,
|
||||||
version_hash=self.version_hash,
|
version_hash=self.version_hash,
|
||||||
)
|
)
|
||||||
|
if self.settings['template_vars']:
|
||||||
|
ns.update(self.settings['template_vars'])
|
||||||
|
return ns
|
||||||
|
|
||||||
def write_error(self, status_code, **kwargs):
|
def write_error(self, status_code, **kwargs):
|
||||||
"""render custom error pages"""
|
"""render custom error pages"""
|
||||||
@@ -616,6 +849,10 @@ class BaseHandler(RequestHandler):
|
|||||||
if reason:
|
if reason:
|
||||||
message = reasons.get(reason, reason)
|
message = reasons.get(reason, reason)
|
||||||
|
|
||||||
|
if exception and isinstance(exception, SQLAlchemyError):
|
||||||
|
self.log.warning("Rolling back session due to database error %s", exception)
|
||||||
|
self.db.rollback()
|
||||||
|
|
||||||
# build template namespace
|
# build template namespace
|
||||||
ns = dict(
|
ns = dict(
|
||||||
status_code=status_code,
|
status_code=status_code,
|
||||||
@@ -626,12 +863,27 @@ class BaseHandler(RequestHandler):
|
|||||||
)
|
)
|
||||||
|
|
||||||
self.set_header('Content-Type', 'text/html')
|
self.set_header('Content-Type', 'text/html')
|
||||||
|
if isinstance(exception, web.HTTPError):
|
||||||
|
# allow setting headers from exceptions
|
||||||
|
# since exception handler clears headers
|
||||||
|
headers = getattr(exception, 'headers', None)
|
||||||
|
if headers:
|
||||||
|
for key, value in headers.items():
|
||||||
|
self.set_header(key, value)
|
||||||
|
# Content-Length must be recalculated.
|
||||||
|
self.clear_header('Content-Length')
|
||||||
|
|
||||||
# render the template
|
# render the template
|
||||||
try:
|
try:
|
||||||
html = self.render_template('%s.html' % status_code, **ns)
|
html = self.render_template('%s.html' % status_code, **ns)
|
||||||
except TemplateNotFound:
|
except TemplateNotFound:
|
||||||
self.log.debug("No template for %d", status_code)
|
self.log.debug("No template for %d", status_code)
|
||||||
html = self.render_template('error.html', **ns)
|
try:
|
||||||
|
html = self.render_template('error.html', **ns)
|
||||||
|
except:
|
||||||
|
# In this case, any side effect must be avoided.
|
||||||
|
ns['no_spawner_check'] = True
|
||||||
|
html = self.render_template('error.html', **ns)
|
||||||
|
|
||||||
self.write(html)
|
self.write(html)
|
||||||
|
|
||||||
@@ -649,6 +901,11 @@ class PrefixRedirectHandler(BaseHandler):
|
|||||||
"""
|
"""
|
||||||
def get(self):
|
def get(self):
|
||||||
uri = self.request.uri
|
uri = self.request.uri
|
||||||
|
# Since self.base_url will end with trailing slash.
|
||||||
|
# Ensure uri will end with trailing slash when matching
|
||||||
|
# with self.base_url.
|
||||||
|
if not uri.endswith('/'):
|
||||||
|
uri += '/'
|
||||||
if uri.startswith(self.base_url):
|
if uri.startswith(self.base_url):
|
||||||
path = self.request.uri[len(self.base_url):]
|
path = self.request.uri[len(self.base_url):]
|
||||||
else:
|
else:
|
||||||
@@ -668,13 +925,46 @@ class UserSpawnHandler(BaseHandler):
|
|||||||
which will in turn send her to /user/alice/notebooks/mynotebook.ipynb.
|
which will in turn send her to /user/alice/notebooks/mynotebook.ipynb.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@gen.coroutine
|
def _fail_api_request(self, user):
|
||||||
def get(self, name, user_path):
|
"""Fail an API request to a not-running server"""
|
||||||
|
self.set_status(404)
|
||||||
|
self.set_header("Content-Type", "application/json")
|
||||||
|
self.write(json.dumps({"message": "%s is not running" % user.name}))
|
||||||
|
self.finish()
|
||||||
|
|
||||||
|
async def get(self, name, user_path):
|
||||||
if not user_path:
|
if not user_path:
|
||||||
user_path = '/'
|
user_path = '/'
|
||||||
current_user = self.get_current_user()
|
current_user = self.get_current_user()
|
||||||
|
if (
|
||||||
|
current_user
|
||||||
|
and current_user.name != name
|
||||||
|
and current_user.admin
|
||||||
|
and self.settings.get('admin_access', False)
|
||||||
|
):
|
||||||
|
# allow admins to spawn on behalf of users
|
||||||
|
user = self.find_user(name)
|
||||||
|
if user is None:
|
||||||
|
# no such user
|
||||||
|
raise web.HTTPError(404, "No such user %s" % name)
|
||||||
|
self.log.info("Admin %s requesting spawn on behalf of %s",
|
||||||
|
current_user.name, user.name)
|
||||||
|
admin_spawn = True
|
||||||
|
should_spawn = True
|
||||||
|
else:
|
||||||
|
user = current_user
|
||||||
|
admin_spawn = False
|
||||||
|
# For non-admins, we should spawn if the user matches
|
||||||
|
# otherwise redirect users to their own server
|
||||||
|
should_spawn = (current_user and current_user.name == name)
|
||||||
|
|
||||||
if current_user and current_user.name == name:
|
if "api" in user_path.split("/") and not user.active:
|
||||||
|
# API request for not-running server (e.g. notebook UI left open)
|
||||||
|
# Avoid triggering a spawn.
|
||||||
|
self._fail_api_request(user)
|
||||||
|
return
|
||||||
|
|
||||||
|
if should_spawn:
|
||||||
# if spawning fails for any reason, point users to /hub/home to retry
|
# if spawning fails for any reason, point users to /hub/home to retry
|
||||||
self.extra_error_html = self.spawn_home_error
|
self.extra_error_html = self.spawn_home_error
|
||||||
|
|
||||||
@@ -693,8 +983,8 @@ class UserSpawnHandler(BaseHandler):
|
|||||||
Make sure to connect to the proxied public URL %s
|
Make sure to connect to the proxied public URL %s
|
||||||
""", self.request.full_url(), self.proxy.public_url)
|
""", self.request.full_url(), self.proxy.public_url)
|
||||||
|
|
||||||
# logged in as correct user, check for pending spawn
|
# logged in as valid user, check for pending spawn
|
||||||
spawner = current_user.spawner
|
spawner = user.spawner
|
||||||
|
|
||||||
# First, check for previous failure.
|
# First, check for previous failure.
|
||||||
if (
|
if (
|
||||||
@@ -713,51 +1003,70 @@ class UserSpawnHandler(BaseHandler):
|
|||||||
raise copy.copy(exc).with_traceback(exc.__traceback__)
|
raise copy.copy(exc).with_traceback(exc.__traceback__)
|
||||||
|
|
||||||
# check for pending spawn
|
# check for pending spawn
|
||||||
if spawner.pending and spawner._spawn_future:
|
if spawner.pending == 'spawn' and spawner._spawn_future:
|
||||||
# wait on the pending spawn
|
# wait on the pending spawn
|
||||||
self.log.debug("Waiting for %s pending %s", spawner._log_name, spawner.pending)
|
self.log.debug("Waiting for %s pending %s", spawner._log_name, spawner.pending)
|
||||||
try:
|
try:
|
||||||
yield gen.with_timeout(timedelta(seconds=self.slow_spawn_timeout), spawner._spawn_future)
|
await gen.with_timeout(timedelta(seconds=self.slow_spawn_timeout), spawner._spawn_future)
|
||||||
except gen.TimeoutError:
|
except gen.TimeoutError:
|
||||||
self.log.info("Pending spawn for %s didn't finish in %.1f seconds", spawner._log_name, self.slow_spawn_timeout)
|
self.log.info("Pending spawn for %s didn't finish in %.1f seconds", spawner._log_name, self.slow_spawn_timeout)
|
||||||
pass
|
pass
|
||||||
|
|
||||||
# we may have waited above, check pending again:
|
# we may have waited above, check pending again:
|
||||||
|
# page could be pending spawn *or* stop
|
||||||
if spawner.pending:
|
if spawner.pending:
|
||||||
self.log.info("%s is pending %s", spawner._log_name, spawner.pending)
|
self.log.info("%s is pending %s", spawner._log_name, spawner.pending)
|
||||||
# spawn has started, but not finished
|
# spawn has started, but not finished
|
||||||
self.statsd.incr('redirects.user_spawn_pending', 1)
|
self.statsd.incr('redirects.user_spawn_pending', 1)
|
||||||
html = self.render_template("spawn_pending.html", user=current_user)
|
url_parts = []
|
||||||
|
if spawner.pending == "stop":
|
||||||
|
page = "stop_pending.html"
|
||||||
|
else:
|
||||||
|
page = "spawn_pending.html"
|
||||||
|
html = self.render_template(
|
||||||
|
page,
|
||||||
|
user=user,
|
||||||
|
spawner=spawner,
|
||||||
|
progress_url=spawner._progress_url,
|
||||||
|
)
|
||||||
self.finish(html)
|
self.finish(html)
|
||||||
return
|
return
|
||||||
|
|
||||||
# spawn has supposedly finished, check on the status
|
# spawn has supposedly finished, check on the status
|
||||||
if spawner.ready:
|
if spawner.ready:
|
||||||
status = yield spawner.poll()
|
status = await spawner.poll()
|
||||||
else:
|
else:
|
||||||
status = 0
|
status = 0
|
||||||
|
|
||||||
# server is not running, trigger spawn
|
# server is not running, trigger spawn
|
||||||
if status is not None:
|
if status is not None:
|
||||||
if spawner.options_form:
|
if spawner.options_form:
|
||||||
self.redirect(url_concat(url_path_join(self.hub.base_url, 'spawn'),
|
url_parts = [self.hub.base_url, 'spawn']
|
||||||
|
if current_user.name != user.name:
|
||||||
|
# spawning on behalf of another user
|
||||||
|
url_parts.append(user.name)
|
||||||
|
self.redirect(url_concat(url_path_join(*url_parts),
|
||||||
{'next': self.request.uri}))
|
{'next': self.request.uri}))
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
yield self.spawn_single_user(current_user)
|
await self.spawn_single_user(user)
|
||||||
|
|
||||||
# spawn didn't finish, show pending page
|
# spawn didn't finish, show pending page
|
||||||
if spawner.pending:
|
if spawner.pending:
|
||||||
self.log.info("%s is pending %s", spawner._log_name, spawner.pending)
|
self.log.info("%s is pending %s", spawner._log_name, spawner.pending)
|
||||||
# spawn has started, but not finished
|
# spawn has started, but not finished
|
||||||
self.statsd.incr('redirects.user_spawn_pending', 1)
|
self.statsd.incr('redirects.user_spawn_pending', 1)
|
||||||
html = self.render_template("spawn_pending.html", user=current_user)
|
html = self.render_template(
|
||||||
|
"spawn_pending.html",
|
||||||
|
user=user,
|
||||||
|
progress_url=spawner._progress_url,
|
||||||
|
)
|
||||||
self.finish(html)
|
self.finish(html)
|
||||||
return
|
return
|
||||||
|
|
||||||
# We do exponential backoff here - since otherwise we can get stuck in a redirect loop!
|
# We do exponential backoff here - since otherwise we can get stuck in a redirect loop!
|
||||||
# This is important in many distributed proxy implementations - those are often eventually
|
# This is important in many distributed proxy implementations - those are often eventually
|
||||||
# consistent and can take upto a couple of seconds to actually apply throughout the cluster.
|
# consistent and can take up to a couple of seconds to actually apply throughout the cluster.
|
||||||
try:
|
try:
|
||||||
redirects = int(self.get_argument('redirects', 0))
|
redirects = int(self.get_argument('redirects', 0))
|
||||||
except ValueError:
|
except ValueError:
|
||||||
@@ -782,18 +1091,16 @@ class UserSpawnHandler(BaseHandler):
|
|||||||
)
|
)
|
||||||
raise web.HTTPError(500, msg)
|
raise web.HTTPError(500, msg)
|
||||||
|
|
||||||
# set login cookie anew
|
|
||||||
self.set_login_cookie(current_user)
|
|
||||||
without_prefix = self.request.uri[len(self.hub.base_url):]
|
without_prefix = self.request.uri[len(self.hub.base_url):]
|
||||||
target = url_path_join(self.base_url, without_prefix)
|
target = url_path_join(self.base_url, without_prefix)
|
||||||
if self.subdomain_host:
|
if self.subdomain_host:
|
||||||
target = current_user.host + target
|
target = user.host + target
|
||||||
|
|
||||||
# record redirect count in query parameter
|
# record redirect count in query parameter
|
||||||
if redirects:
|
if redirects:
|
||||||
self.log.warning("Redirect loop detected on %s", self.request.uri)
|
self.log.warning("Redirect loop detected on %s", self.request.uri)
|
||||||
# add capped exponential backoff where cap is 10s
|
# add capped exponential backoff where cap is 10s
|
||||||
yield gen.sleep(min(1 * (2 ** redirects), 10))
|
await gen.sleep(min(1 * (2 ** redirects), 10))
|
||||||
# rewrite target url with new `redirects` query value
|
# rewrite target url with new `redirects` query value
|
||||||
url_parts = urlparse(target)
|
url_parts = urlparse(target)
|
||||||
query_parts = parse_qs(url_parts.query)
|
query_parts = parse_qs(url_parts.query)
|
||||||
@@ -809,6 +1116,9 @@ class UserSpawnHandler(BaseHandler):
|
|||||||
# logged in as a different user, redirect
|
# logged in as a different user, redirect
|
||||||
self.statsd.incr('redirects.user_to_user', 1)
|
self.statsd.incr('redirects.user_to_user', 1)
|
||||||
target = url_path_join(current_user.url, user_path or '')
|
target = url_path_join(current_user.url, user_path or '')
|
||||||
|
if self.request.query:
|
||||||
|
# FIXME: use urlunparse instead?
|
||||||
|
target += '?' + self.request.query
|
||||||
self.redirect(target)
|
self.redirect(target)
|
||||||
else:
|
else:
|
||||||
# not logged in, clear any cookies and reload
|
# not logged in, clear any cookies and reload
|
||||||
@@ -862,6 +1172,7 @@ class AddSlashHandler(BaseHandler):
|
|||||||
self.redirect(urlunparse(dest))
|
self.redirect(urlunparse(dest))
|
||||||
|
|
||||||
default_handlers = [
|
default_handlers = [
|
||||||
|
(r'', AddSlashHandler), # add trailing / to `/hub`
|
||||||
(r'/user/([^/]+)(/.*)?', UserSpawnHandler),
|
(r'/user/([^/]+)(/.*)?', UserSpawnHandler),
|
||||||
(r'/user-redirect/(.*)?', UserRedirectHandler),
|
(r'/user-redirect/(.*)?', UserRedirectHandler),
|
||||||
(r'/security/csp-report', CSPReportHandler),
|
(r'/security/csp-report', CSPReportHandler),
|
||||||
|
@@ -42,8 +42,7 @@ class LoginHandler(BaseHandler):
|
|||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
@gen.coroutine
|
async def get(self):
|
||||||
def get(self):
|
|
||||||
self.statsd.incr('login.request')
|
self.statsd.incr('login.request')
|
||||||
user = self.get_current_user()
|
user = self.get_current_user()
|
||||||
if user:
|
if user:
|
||||||
@@ -58,7 +57,7 @@ class LoginHandler(BaseHandler):
|
|||||||
# auto_login without a custom login handler
|
# auto_login without a custom login handler
|
||||||
# means that auth info is already in the request
|
# means that auth info is already in the request
|
||||||
# (e.g. REMOTE_USER header)
|
# (e.g. REMOTE_USER header)
|
||||||
user = yield self.login_user()
|
user = await self.login_user()
|
||||||
if user is None:
|
if user is None:
|
||||||
# auto_login failed, just 403
|
# auto_login failed, just 403
|
||||||
raise web.HTTPError(403)
|
raise web.HTTPError(403)
|
||||||
@@ -72,27 +71,20 @@ class LoginHandler(BaseHandler):
|
|||||||
username = self.get_argument('username', default='')
|
username = self.get_argument('username', default='')
|
||||||
self.finish(self._render(username=username))
|
self.finish(self._render(username=username))
|
||||||
|
|
||||||
@gen.coroutine
|
async def post(self):
|
||||||
def post(self):
|
|
||||||
# parse the arguments dict
|
# parse the arguments dict
|
||||||
data = {}
|
data = {}
|
||||||
for arg in self.request.arguments:
|
for arg in self.request.arguments:
|
||||||
data[arg] = self.get_argument(arg, strip=False)
|
data[arg] = self.get_argument(arg, strip=False)
|
||||||
|
|
||||||
auth_timer = self.statsd.timer('login.authenticate').start()
|
auth_timer = self.statsd.timer('login.authenticate').start()
|
||||||
user = yield self.login_user(data)
|
user = await self.login_user(data)
|
||||||
auth_timer.stop(send=False)
|
auth_timer.stop(send=False)
|
||||||
|
|
||||||
if user:
|
if user:
|
||||||
already_running = False
|
# register current user for subsequent requests to user (e.g. logging the request)
|
||||||
if user.spawner.ready:
|
self.get_current_user = lambda: user
|
||||||
status = yield user.spawner.poll()
|
self.redirect(self.get_next_url(user))
|
||||||
already_running = (status is None)
|
|
||||||
if not already_running and not user.spawner.options_form \
|
|
||||||
and not user.spawner.pending:
|
|
||||||
# logging in triggers spawn
|
|
||||||
yield self.spawn_single_user(user)
|
|
||||||
self.redirect(self.get_next_url())
|
|
||||||
else:
|
else:
|
||||||
html = self._render(
|
html = self._render(
|
||||||
login_error='Invalid username or password',
|
login_error='Invalid username or password',
|
||||||
|
16
jupyterhub/handlers/metrics.py
Normal file
16
jupyterhub/handlers/metrics.py
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
from prometheus_client import REGISTRY, CONTENT_TYPE_LATEST, generate_latest
|
||||||
|
from tornado import gen
|
||||||
|
|
||||||
|
from .base import BaseHandler
|
||||||
|
|
||||||
|
class MetricsHandler(BaseHandler):
|
||||||
|
"""
|
||||||
|
Handler to serve Prometheus metrics
|
||||||
|
"""
|
||||||
|
async def get(self):
|
||||||
|
self.set_header('Content-Type', CONTENT_TYPE_LATEST)
|
||||||
|
self.write(generate_latest(REGISTRY))
|
||||||
|
|
||||||
|
default_handlers = [
|
||||||
|
(r'/metrics$', MetricsHandler)
|
||||||
|
]
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user