mirror of
https://github.com/jupyterhub/jupyterhub.git
synced 2025-10-07 18:14:10 +00:00
Compare commits
581 Commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
907bbb8e9d | ||
![]() |
d4e0c01189 | ||
![]() |
50370d42b0 | ||
![]() |
aa190a80b7 | ||
![]() |
e48bae77aa | ||
![]() |
96cf0f99ed | ||
![]() |
f380968049 | ||
![]() |
02468f4625 | ||
![]() |
24611f94cf | ||
![]() |
dc75a9a4b7 | ||
![]() |
33f459a23a | ||
![]() |
bdcc251002 | ||
![]() |
86052ba7b4 | ||
![]() |
62ebcf55c9 | ||
![]() |
80ac2475a0 | ||
![]() |
5179d922f5 | ||
![]() |
26f085a8ed | ||
![]() |
b7d302cc72 | ||
![]() |
f2941e3631 | ||
![]() |
26a6401af4 | ||
![]() |
5c8ce338a1 | ||
![]() |
5addc7bbaf | ||
![]() |
da095170bf | ||
![]() |
1aab0a69bd | ||
![]() |
fc8e04b62f | ||
![]() |
c6c53b4e10 | ||
![]() |
9b0219a2d8 | ||
![]() |
6e212fa476 | ||
![]() |
58f9237b12 | ||
![]() |
74fd925219 | ||
![]() |
2696bb97d2 | ||
![]() |
9cefb27704 | ||
![]() |
5e75357b06 | ||
![]() |
79bebb4bc9 | ||
![]() |
0ed88f212b | ||
![]() |
a8c1cab5fe | ||
![]() |
e1a6b1a70f | ||
![]() |
c95ed16786 | ||
![]() |
ec784803b4 | ||
![]() |
302d7a22d3 | ||
![]() |
eccd5a460b | ||
![]() |
80437229a1 | ||
![]() |
237ffba641 | ||
![]() |
2695c5e49f | ||
![]() |
b7a608fdfd | ||
![]() |
c3413bad78 | ||
![]() |
dceb244e5b | ||
![]() |
cb31a0b162 | ||
![]() |
7ced657d79 | ||
![]() |
8dd9168077 | ||
![]() |
7c6591aefe | ||
![]() |
58c91e3fd4 | ||
![]() |
db4cf7ae62 | ||
![]() |
a17f5e4f1b | ||
![]() |
6cf7f2b0a7 | ||
![]() |
7e21ea9a48 | ||
![]() |
3f29198bae | ||
![]() |
d4293650ff | ||
![]() |
d65dd16881 | ||
![]() |
f36e163581 | ||
![]() |
f215adcfa2 | ||
![]() |
1549af6f56 | ||
![]() |
c553f82580 | ||
![]() |
196b4ebc9f | ||
![]() |
8710ce1687 | ||
![]() |
f65e8d7369 | ||
![]() |
dc5d9f02c7 | ||
![]() |
2f3f8d7826 | ||
![]() |
297da070fc | ||
![]() |
10ea92dcea | ||
![]() |
2e5f01f232 | ||
![]() |
1a080c4261 | ||
![]() |
0e08963355 | ||
![]() |
cd9e39bf54 | ||
![]() |
580e840165 | ||
![]() |
09a8fd5254 | ||
![]() |
8898faa141 | ||
![]() |
fdbb1dad79 | ||
![]() |
c39244168b | ||
![]() |
9591fd88c5 | ||
![]() |
3558ce958e | ||
![]() |
804a9b7be8 | ||
![]() |
3cae550b13 | ||
![]() |
138bad5913 | ||
![]() |
09011815af | ||
![]() |
7b0c845c3a | ||
![]() |
6a47123ec9 | ||
![]() |
19fab6bbf8 | ||
![]() |
90e6b63e59 | ||
![]() |
bd78217cf3 | ||
![]() |
b0833985e6 | ||
![]() |
a6f73b035f | ||
![]() |
251440ec64 | ||
![]() |
22a1df6fa0 | ||
![]() |
6389751c22 | ||
![]() |
8498691763 | ||
![]() |
1750ff0324 | ||
![]() |
2ce4c46afd | ||
![]() |
a20f5e44d1 | ||
![]() |
cd746d72d4 | ||
![]() |
f7eaff0828 | ||
![]() |
849f119a47 | ||
![]() |
52b68381f6 | ||
![]() |
46d495e1e2 | ||
![]() |
acc6c22355 | ||
![]() |
8143182971 | ||
![]() |
04a22cd482 | ||
![]() |
4376224084 | ||
![]() |
a9fe88c343 | ||
![]() |
6eb95e1c66 | ||
![]() |
a46287c4a6 | ||
![]() |
bc86ee1c31 | ||
![]() |
10a6c5144d | ||
![]() |
4e5f43aeae | ||
![]() |
ff56db0c8b | ||
![]() |
95a9b97649 | ||
![]() |
a5b5208823 | ||
![]() |
783295fabd | ||
![]() |
1c942ec97c | ||
![]() |
3b6d2655ab | ||
![]() |
8a18d0daab | ||
![]() |
e9f7ccbd25 | ||
![]() |
68d9f35c0b | ||
![]() |
28d78134c1 | ||
![]() |
fd92ac852d | ||
![]() |
8399f5288e | ||
![]() |
f99b7cb7eb | ||
![]() |
bb5166077f | ||
![]() |
b72e4b66ca | ||
![]() |
ed85cd25d6 | ||
![]() |
3f90697e18 | ||
![]() |
73271a3e55 | ||
![]() |
6f9ea712de | ||
![]() |
6ee244e7cb | ||
![]() |
d66a4af79b | ||
![]() |
ea7b1caa4e | ||
![]() |
9cd880fb35 | ||
![]() |
658c152707 | ||
![]() |
6f1ba77608 | ||
![]() |
2344d696ca | ||
![]() |
bd816310cb | ||
![]() |
2bcf759a9f | ||
![]() |
82a04f7032 | ||
![]() |
4281babee4 | ||
![]() |
d89f2965cf | ||
![]() |
e2a2a9903a | ||
![]() |
4401cdc16a | ||
![]() |
e8d3fb2920 | ||
![]() |
f7ccc137ea | ||
![]() |
07bbb4ea02 | ||
![]() |
b189e70c9b | ||
![]() |
de4c9c1463 | ||
![]() |
8bdb73ced4 | ||
![]() |
dee9050939 | ||
![]() |
ae3c214708 | ||
![]() |
d6e81867bf | ||
![]() |
d30a5ee0a5 | ||
![]() |
88bb80be0f | ||
![]() |
bba1ba1678 | ||
![]() |
b50daf20d0 | ||
![]() |
5c6c7cdff5 | ||
![]() |
3f9b2a0c28 | ||
![]() |
453e119808 | ||
![]() |
a021f910c8 | ||
![]() |
e6c2afc4db | ||
![]() |
e6c7b28057 | ||
![]() |
b1840e8be7 | ||
![]() |
15e4b1ad8b | ||
![]() |
2517afcee0 | ||
![]() |
15c7ba3078 | ||
![]() |
f2cb24781a | ||
![]() |
e1d346b8c3 | ||
![]() |
97bdf4811c | ||
![]() |
45c871d779 | ||
![]() |
976fa9c907 | ||
![]() |
771c60ca37 | ||
![]() |
e15eeccd35 | ||
![]() |
ce535b55bc | ||
![]() |
33cb62c2ee | ||
![]() |
32fe3cf61d | ||
![]() |
73a05498ce | ||
![]() |
034147f604 | ||
![]() |
b629e520a9 | ||
![]() |
30280cc6a4 | ||
![]() |
f7f0b72776 | ||
![]() |
251289fc05 | ||
![]() |
6437093a67 | ||
![]() |
be5a878da5 | ||
![]() |
8dc73a852d | ||
![]() |
e37d82951e | ||
![]() |
acc311830e | ||
![]() |
6b1046697a | ||
![]() |
c5befc5b2a | ||
![]() |
e743a5733b | ||
![]() |
5f98801c99 | ||
![]() |
9858a3db9d | ||
![]() |
65c1a525b9 | ||
![]() |
8bd055d4bd | ||
![]() |
5ee14db1f9 | ||
![]() |
58069d015b | ||
![]() |
f2684b59ec | ||
![]() |
e0c0d03c5f | ||
![]() |
1ac47d2bb0 | ||
![]() |
bc75c71ca3 | ||
![]() |
c49fc14528 | ||
![]() |
078bd8c627 | ||
![]() |
33ba9fb5cf | ||
![]() |
4e7e586cb9 | ||
![]() |
62fa795052 | ||
![]() |
b6d9f89518 | ||
![]() |
afbf867169 | ||
![]() |
dace6ac156 | ||
![]() |
cbf2b8cb78 | ||
![]() |
96c5de63d8 | ||
![]() |
b8b57843a6 | ||
![]() |
e3fd4ad77d | ||
![]() |
c08148266a | ||
![]() |
a6a2d04c46 | ||
![]() |
8f7061fb9b | ||
![]() |
7b5235138f | ||
![]() |
7e3fa8c38d | ||
![]() |
151acd5bec | ||
![]() |
23ca2039f6 | ||
![]() |
b291103592 | ||
![]() |
e962c9993b | ||
![]() |
955b769d3f | ||
![]() |
9b914e8f01 | ||
![]() |
307ad636dc | ||
![]() |
2952f62726 | ||
![]() |
6d6e48f434 | ||
![]() |
a189196855 | ||
![]() |
d30e62a205 | ||
![]() |
e56d416210 | ||
![]() |
c0f37c48a1 | ||
![]() |
a3ed387455 | ||
![]() |
beedc94179 | ||
![]() |
5229604782 | ||
![]() |
cf665517dd | ||
![]() |
4663edd8a7 | ||
![]() |
312e7974d9 | ||
![]() |
ca8aa53b32 | ||
![]() |
7122ca1c24 | ||
![]() |
97cdb1a5d8 | ||
![]() |
31d3f7a20b | ||
![]() |
6f8a34127b | ||
![]() |
ee1a86d192 | ||
![]() |
707b300bd6 | ||
![]() |
c9e12182a2 | ||
![]() |
9b7186e9b8 | ||
![]() |
4eb07f9d48 | ||
![]() |
4f78cbbd1b | ||
![]() |
d962e8bcbc | ||
![]() |
ba695a0230 | ||
![]() |
dfed2437a8 | ||
![]() |
ecfcb4ec64 | ||
![]() |
b9335311de | ||
![]() |
354468db0a | ||
![]() |
340a736722 | ||
![]() |
7bf93cb7e6 | ||
![]() |
4fa9535fd4 | ||
![]() |
1abd3217aa | ||
![]() |
d0360d5c98 | ||
![]() |
74365ad05e | ||
![]() |
9dc24c0995 | ||
![]() |
fd40e27be4 | ||
![]() |
05b2bf4c96 | ||
![]() |
a0fcbcbc7d | ||
![]() |
3117ea9d34 | ||
![]() |
8973dea33e | ||
![]() |
3e7d0dbd23 | ||
![]() |
b26b1bc038 | ||
![]() |
74b1102dea | ||
![]() |
a89226279f | ||
![]() |
8b490c8ef0 | ||
![]() |
77a98e7875 | ||
![]() |
c02592d5ba | ||
![]() |
52d7dacbaa | ||
![]() |
9a8457deff | ||
![]() |
5039b3ac6f | ||
![]() |
00705223b6 | ||
![]() |
9f6ab4c419 | ||
![]() |
9012c7310d | ||
![]() |
a3edebcad9 | ||
![]() |
f2abb6a73f | ||
![]() |
e96e5b740a | ||
![]() |
ee067ad97a | ||
![]() |
d01b3a88b6 | ||
![]() |
5a22c978cf | ||
![]() |
f8a0e7d1be | ||
![]() |
25a65564b1 | ||
![]() |
c858023c88 | ||
![]() |
c3e470db26 | ||
![]() |
5908c4da7a | ||
![]() |
b08dbbd106 | ||
![]() |
3b320c75e9 | ||
![]() |
1aa6dc6686 | ||
![]() |
fdc4385e62 | ||
![]() |
5094448762 | ||
![]() |
98c7fa919f | ||
![]() |
5b9f51417f | ||
![]() |
7a91f89474 | ||
![]() |
bf7afa16e5 | ||
![]() |
0d57baae82 | ||
![]() |
446d197cf7 | ||
![]() |
2582f0bbe6 | ||
![]() |
1ee993c664 | ||
![]() |
542c20065f | ||
![]() |
39f663d03c | ||
![]() |
6474a55302 | ||
![]() |
8566d4c5ab | ||
![]() |
e374e93cfb | ||
![]() |
7bd4f6490c | ||
![]() |
25373f510d | ||
![]() |
82cab39e1c | ||
![]() |
22507cc1cd | ||
![]() |
2bded65c7e | ||
![]() |
a3a0c60804 | ||
![]() |
704b172887 | ||
![]() |
135717f8cb | ||
![]() |
1d87ba8534 | ||
![]() |
97cd27775b | ||
![]() |
fe2e9c282e | ||
![]() |
fab125975b | ||
![]() |
cefd7e3b1b | ||
![]() |
344a3e7b24 | ||
![]() |
a0ee237ada | ||
![]() |
e81eb9a5f8 | ||
![]() |
98d3b538af | ||
![]() |
3614a0e368 | ||
![]() |
0421497b1e | ||
![]() |
8b3c2fa12f | ||
![]() |
a58bea6d93 | ||
![]() |
c7c41cd761 | ||
![]() |
b282ec73c7 | ||
![]() |
dad26be2c6 | ||
![]() |
58d602e549 | ||
![]() |
5e14904205 | ||
![]() |
97293ab7ce | ||
![]() |
b6f634368c | ||
![]() |
7b4de150cc | ||
![]() |
7a268c94b0 | ||
![]() |
7a1fa78632 | ||
![]() |
19f02da64d | ||
![]() |
5bf1aac9cb | ||
![]() |
0ae034083c | ||
![]() |
5010af941b | ||
![]() |
015df7e060 | ||
![]() |
e025d58f6e | ||
![]() |
b151d333d3 | ||
![]() |
304c005a85 | ||
![]() |
e2591e8e36 | ||
![]() |
f3c22cb6d0 | ||
![]() |
b2527984bc | ||
![]() |
b8d2271191 | ||
![]() |
b8978b0235 | ||
![]() |
63ef6419cd | ||
![]() |
25dc429455 | ||
![]() |
7550e63fd0 | ||
![]() |
0561968fac | ||
![]() |
7811bf518b | ||
![]() |
bc7116ad94 | ||
![]() |
70eec33d06 | ||
![]() |
773973825f | ||
![]() |
a184d372f4 | ||
![]() |
ca1606a021 | ||
![]() |
5c6d7eb309 | ||
![]() |
4de6b39788 | ||
![]() |
f0494cc7d6 | ||
![]() |
9d98d1ee63 | ||
![]() |
f1238e17b1 | ||
![]() |
4201c8a6f3 | ||
![]() |
53396ed454 | ||
![]() |
8695823165 | ||
![]() |
ec8d008678 | ||
![]() |
a949ad14f8 | ||
![]() |
48e7bd4f10 | ||
![]() |
4b11f8f26b | ||
![]() |
b056444863 | ||
![]() |
872f021ddc | ||
![]() |
079b0c1b91 | ||
![]() |
2664b50a18 | ||
![]() |
6970df4dda | ||
![]() |
22c3064ec4 | ||
![]() |
d6ab65a2e7 | ||
![]() |
aa23b01a57 | ||
![]() |
d82de98001 | ||
![]() |
7df8597484 | ||
![]() |
1b99b1275c | ||
![]() |
d16461052b | ||
![]() |
9640364713 | ||
![]() |
18e0600727 | ||
![]() |
17fffda74e | ||
![]() |
3ac4f48f82 | ||
![]() |
6f8ae98ed0 | ||
![]() |
47b2ce6180 | ||
![]() |
d18d84e187 | ||
![]() |
c1dcdf49e5 | ||
![]() |
079005eab1 | ||
![]() |
dc8cea3a3e | ||
![]() |
efca88cf8b | ||
![]() |
c05a6b96b7 | ||
![]() |
a831ff3b61 | ||
![]() |
b814a09fe6 | ||
![]() |
fb48c8626a | ||
![]() |
fbdeb4c386 | ||
![]() |
4cf9ecc819 | ||
![]() |
e9573b6e24 | ||
![]() |
d5f0137052 | ||
![]() |
d9f5adb1fb | ||
![]() |
0c6aa064ac | ||
![]() |
646c853cf4 | ||
![]() |
fb3bc95623 | ||
![]() |
c8b4cab022 | ||
![]() |
06fb94b4ea | ||
![]() |
9f6cef4fb4 | ||
![]() |
0315dd5612 | ||
![]() |
e4e5bebc1a | ||
![]() |
c688e9ebad | ||
![]() |
6d6041a3c1 | ||
![]() |
dde7b5ea68 | ||
![]() |
9bf533b340 | ||
![]() |
f1a105abec | ||
![]() |
e6587b5dc8 | ||
![]() |
b2ad045a2d | ||
![]() |
89734d8c5f | ||
![]() |
53736099ba | ||
![]() |
2fcfa136c1 | ||
![]() |
9f85209a1b | ||
![]() |
cea1b2fd4d | ||
![]() |
312252b670 | ||
![]() |
4d6b30c17b | ||
![]() |
0beb9c2670 | ||
![]() |
a0289af59f | ||
![]() |
40363834c8 | ||
![]() |
0c9e5fd10b | ||
![]() |
3d90e5cdf6 | ||
![]() |
8e3f1f0955 | ||
![]() |
7c64415096 | ||
![]() |
e3fd1dba0e | ||
![]() |
9866a0fadc | ||
![]() |
f87f24d9e5 | ||
![]() |
4729ae4769 | ||
![]() |
691c4c158f | ||
![]() |
3c597339ba | ||
![]() |
e5fe174e03 | ||
![]() |
1c25a9d026 | ||
![]() |
2db378e9c1 | ||
![]() |
a4067ee681 | ||
![]() |
edb0831028 | ||
![]() |
dac3b0a6f5 | ||
![]() |
9a180cc8ad | ||
![]() |
e81764610e | ||
![]() |
e4e2b627fe | ||
![]() |
ec55f56725 | ||
![]() |
1e4f871bcc | ||
![]() |
69f72919bd | ||
![]() |
dc0336fa45 | ||
![]() |
8c341d262e | ||
![]() |
2b15464e12 | ||
![]() |
a686235ffb | ||
![]() |
29171a4d05 | ||
![]() |
e9123f55e0 | ||
![]() |
ee004486bd | ||
![]() |
498e234c37 | ||
![]() |
b29f19e206 | ||
![]() |
1e00343262 | ||
![]() |
3cd526c019 | ||
![]() |
ea99c58da5 | ||
![]() |
c64f23a64a | ||
![]() |
2099cd37fa | ||
![]() |
2559632079 | ||
![]() |
352df39454 | ||
![]() |
ce3a940b11 | ||
![]() |
6594e88390 | ||
![]() |
339758ec42 | ||
![]() |
0b4c7defd4 | ||
![]() |
6d71e9065b | ||
![]() |
631ab4d4eb | ||
![]() |
589ff47ae6 | ||
![]() |
877034d012 | ||
![]() |
3d440bf8f5 | ||
![]() |
138b2be010 | ||
![]() |
b729944480 | ||
![]() |
870afd9fac | ||
![]() |
e808814725 | ||
![]() |
122cf2250d | ||
![]() |
fa1d962507 | ||
![]() |
6504692c5c | ||
![]() |
bd36962643 | ||
![]() |
f5ccfc3f8a | ||
![]() |
c1a7e0513b | ||
![]() |
af71e79371 | ||
![]() |
bf911cf3a5 | ||
![]() |
6059a1c444 | ||
![]() |
c4966a4bf2 | ||
![]() |
cb9f356a69 | ||
![]() |
9d02f6a408 | ||
![]() |
ee76772e1b | ||
![]() |
f0a030a86d | ||
![]() |
1a31e56f33 | ||
![]() |
04e9e0e687 | ||
![]() |
cec917c2a2 | ||
![]() |
08989a8797 | ||
![]() |
b734c331e4 | ||
![]() |
fe477a6809 | ||
![]() |
6391a4a7f7 | ||
![]() |
e68220d4b3 | ||
![]() |
b873149f9b | ||
![]() |
86aebbcaea | ||
![]() |
fd260cf32f | ||
![]() |
69101a5b14 | ||
![]() |
151d6cbc48 | ||
![]() |
04675e5fcb | ||
![]() |
b38c6fe06a | ||
![]() |
089a12bdc9 | ||
![]() |
d9a0a2003f | ||
![]() |
ad704d9925 | ||
![]() |
0cca79eeee | ||
![]() |
457bea7c34 | ||
![]() |
2479679eeb | ||
![]() |
937405d2d8 | ||
![]() |
d1bed1b9cc | ||
![]() |
acc60bce57 | ||
![]() |
43807ff06b | ||
![]() |
b8a63bcc0c | ||
![]() |
66c1815a78 | ||
![]() |
4e5cfa2077 | ||
![]() |
ebaf5d31b7 | ||
![]() |
760a640c6a | ||
![]() |
4fc06e9504 | ||
![]() |
c283ccb122 | ||
![]() |
80df842b2b | ||
![]() |
f1a8a72a9f | ||
![]() |
0296e16232 | ||
![]() |
f6f7081483 | ||
![]() |
7f7cd0a314 | ||
![]() |
5ffb5763a5 | ||
![]() |
4382037110 | ||
![]() |
963cd88440 | ||
![]() |
885f99ac08 | ||
![]() |
7c3919980a | ||
![]() |
d8860d6f24 | ||
![]() |
6b992e37e3 | ||
![]() |
a3424355fa | ||
![]() |
569a91296d | ||
![]() |
8b583cb445 | ||
![]() |
038a85af43 | ||
![]() |
9165beb41c | ||
![]() |
b285de4412 | ||
![]() |
5826035fe9 | ||
![]() |
b953ac295b | ||
![]() |
8a95066b2e | ||
![]() |
00a4aef607 | ||
![]() |
9e2663491e | ||
![]() |
e01ce7b665 | ||
![]() |
a57df48f28 | ||
![]() |
5d7e008055 | ||
![]() |
ba31b3ecb7 | ||
![]() |
3c5eb934bf | ||
![]() |
82e15df6e9 | ||
![]() |
e3c83c0c29 | ||
![]() |
94542334c4 | ||
![]() |
95494b3ace | ||
![]() |
a131cfb79e | ||
![]() |
f002c67343 | ||
![]() |
b9caf95c72 | ||
![]() |
5356954240 | ||
![]() |
126c73002e | ||
![]() |
65b4502a78 | ||
![]() |
3406161d75 | ||
![]() |
e45f00f0f7 | ||
![]() |
71f4a30562 | ||
![]() |
20ba414b41 | ||
![]() |
f5250f04c5 | ||
![]() |
c2ea20a87a | ||
![]() |
b14989d4a5 | ||
![]() |
04578e329c | ||
![]() |
be05e438ca | ||
![]() |
24d9215029 | ||
![]() |
54dcca7ba9 | ||
![]() |
056a7351a3 |
21
.circleci/config.yml
Normal file
21
.circleci/config.yml
Normal file
@@ -0,0 +1,21 @@
|
||||
# Python CircleCI 2.0 configuration file
|
||||
# Updating CircleCI configuration from v1 to v2
|
||||
# Check https://circleci.com/docs/2.0/language-python/ for more details
|
||||
#
|
||||
version: 2
|
||||
jobs:
|
||||
build:
|
||||
machine: true
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: build images
|
||||
command: |
|
||||
docker build -t jupyterhub/jupyterhub .
|
||||
docker build -t jupyterhub/jupyterhub-onbuild onbuild
|
||||
docker build -t jupyterhub/jupyterhub:alpine -f dockerfiles/Dockerfile.alpine .
|
||||
docker build -t jupyterhub/singleuser singleuser
|
||||
- run:
|
||||
name: smoke test jupyterhub
|
||||
command: |
|
||||
docker run --rm -it jupyterhub/jupyterhub jupyterhub --help
|
10
.gitignore
vendored
10
.gitignore
vendored
@@ -14,11 +14,13 @@ docs/source/_static/rest-api
|
||||
/jupyterhub_config.py
|
||||
jupyterhub_cookie_secret
|
||||
jupyterhub.sqlite
|
||||
share/jupyter/hub/static/components
|
||||
share/jupyter/hub/static/css/style.min.css
|
||||
share/jupyter/hub/static/css/style.min.css.map
|
||||
package-lock.json
|
||||
share/jupyterhub/static/components
|
||||
share/jupyterhub/static/css/style.min.css
|
||||
share/jupyterhub/static/css/style.min.css.map
|
||||
*.egg-info
|
||||
MANIFEST
|
||||
.coverage
|
||||
htmlcov
|
||||
|
||||
.idea/
|
||||
.pytest_cache
|
||||
|
33
.travis.yml
33
.travis.yml
@@ -3,13 +3,14 @@ sudo: false
|
||||
cache:
|
||||
- pip
|
||||
python:
|
||||
- nightly
|
||||
- 3.6
|
||||
- 3.5
|
||||
- 3.4
|
||||
- nightly
|
||||
env:
|
||||
global:
|
||||
- ASYNC_TEST_TIMEOUT=15
|
||||
- MYSQL_HOST=127.0.0.1
|
||||
- MYSQL_TCP_PORT=13306
|
||||
services:
|
||||
- postgres
|
||||
- docker
|
||||
@@ -20,6 +21,7 @@ before_install:
|
||||
- npm install
|
||||
- npm install -g configurable-http-proxy
|
||||
- |
|
||||
# setup database
|
||||
if [[ $JUPYTERHUB_TEST_DB_URL == mysql* ]]; then
|
||||
unset MYSQL_UNIX_PORT
|
||||
DB=mysql bash ci/docker-db.sh
|
||||
@@ -30,27 +32,22 @@ before_install:
|
||||
pip install psycopg2
|
||||
fi
|
||||
install:
|
||||
- pip install -U pip
|
||||
- pip install --upgrade pip
|
||||
- pip install --pre -r dev-requirements.txt .
|
||||
- pip freeze
|
||||
|
||||
# running tests
|
||||
script:
|
||||
- |
|
||||
if [[ ! -z "$JUPYTERHUB_TEST_DB_URL" ]]; then
|
||||
# if testing upgrade-db, run `jupyterhub token` with 0.7
|
||||
# to initialize an old db. Used in upgrade-tests
|
||||
export JUPYTERHUB_TEST_UPGRADE_DB_URL=${JUPYTERHUB_TEST_DB_URL}_upgrade
|
||||
# use virtualenv instead of venv because venv doesn't work here
|
||||
python -m pip install virtualenv
|
||||
python -m virtualenv old-hub-env
|
||||
./old-hub-env/bin/python -m pip install jupyterhub==0.7.2 psycopg2 'mysql-connector<2.2'
|
||||
./old-hub-env/bin/jupyterhub token kaylee \
|
||||
--JupyterHub.db_url=$JUPYTERHUB_TEST_UPGRADE_DB_URL \
|
||||
--Authenticator.whitelist="{'kaylee'}" \
|
||||
--JupyterHub.authenticator_class=jupyterhub.auth.Authenticator
|
||||
fi
|
||||
- pytest -v --maxfail=2 --cov=jupyterhub jupyterhub/tests
|
||||
# run tests
|
||||
set -e
|
||||
pytest -v --maxfail=2 --cov=jupyterhub jupyterhub/tests
|
||||
- |
|
||||
# build docs
|
||||
pushd docs
|
||||
pip install -r requirements.txt
|
||||
make html
|
||||
popd
|
||||
after_success:
|
||||
- codecov
|
||||
|
||||
@@ -61,8 +58,6 @@ matrix:
|
||||
env: JUPYTERHUB_TEST_SUBDOMAIN_HOST=http://localhost.jovyan.org:8000
|
||||
- python: 3.6
|
||||
env:
|
||||
- MYSQL_HOST=127.0.0.1
|
||||
- MYSQL_TCP_PORT=13306
|
||||
- JUPYTERHUB_TEST_DB_URL=mysql+mysqlconnector://root@127.0.0.1:$MYSQL_TCP_PORT/jupyterhub
|
||||
- python: 3.6
|
||||
env:
|
||||
|
@@ -1,3 +1,98 @@
|
||||
# Contributing
|
||||
|
||||
Welcome! As a [Jupyter](https://jupyter.org) project, we follow the [Jupyter contributor guide](https://jupyter.readthedocs.io/en/latest/contributor/content-contributor.html).
|
||||
|
||||
|
||||
## Set up your development system
|
||||
|
||||
For a development install, clone the [repository](https://github.com/jupyterhub/jupyterhub)
|
||||
and then install from source:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/jupyterhub/jupyterhub
|
||||
cd jupyterhub
|
||||
npm install -g configurable-http-proxy
|
||||
pip3 install -r dev-requirements.txt -e .
|
||||
```
|
||||
|
||||
### Troubleshooting a development install
|
||||
|
||||
If the `pip3 install` command fails and complains about `lessc` being
|
||||
unavailable, you may need to explicitly install some additional JavaScript
|
||||
dependencies:
|
||||
|
||||
npm install
|
||||
|
||||
This will fetch client-side JavaScript dependencies necessary to compile CSS.
|
||||
|
||||
You may also need to manually update JavaScript and CSS after some development
|
||||
updates, with:
|
||||
|
||||
```bash
|
||||
python3 setup.py js # fetch updated client-side js
|
||||
python3 setup.py css # recompile CSS from LESS sources
|
||||
```
|
||||
|
||||
## Running the test suite
|
||||
|
||||
We use [pytest](http://doc.pytest.org/en/latest/) for running tests.
|
||||
|
||||
1. Set up a development install as described above.
|
||||
|
||||
2. Set environment variable for `ASYNC_TEST_TIMEOUT` to 15 seconds:
|
||||
|
||||
```bash
|
||||
export ASYNC_TEST_TIMEOUT=15
|
||||
```
|
||||
|
||||
3. Run tests.
|
||||
|
||||
To run all the tests:
|
||||
|
||||
```bash
|
||||
pytest -v jupyterhub/tests
|
||||
```
|
||||
|
||||
To run an individual test file (i.e. `test_api.py`):
|
||||
|
||||
```bash
|
||||
pytest -v jupyterhub/tests/test_api.py
|
||||
```
|
||||
|
||||
### Troubleshooting tests
|
||||
|
||||
If you see test failures because of timeouts, you may wish to increase the
|
||||
`ASYNC_TEST_TIMEOUT` used by the
|
||||
[pytest-tornado-plugin](https://github.com/eugeniy/pytest-tornado/blob/c79f68de2222eb7cf84edcfe28650ebf309a4d0c/README.rst#markers)
|
||||
from the default of 5 seconds:
|
||||
|
||||
```bash
|
||||
export ASYNC_TEST_TIMEOUT=15
|
||||
```
|
||||
|
||||
If you see many test errors and failures, double check that you have installed
|
||||
`configurable-http-proxy`.
|
||||
|
||||
## Building the Docs locally
|
||||
|
||||
1. Install the development system as described above.
|
||||
|
||||
2. Install the dependencies for documentation:
|
||||
|
||||
```bash
|
||||
python3 -m pip install -r docs/requirements.txt
|
||||
```
|
||||
|
||||
3. Build the docs:
|
||||
|
||||
```bash
|
||||
cd docs
|
||||
make clean
|
||||
make html
|
||||
```
|
||||
|
||||
4. View the docs:
|
||||
|
||||
```bash
|
||||
open build/html/index.html
|
||||
```
|
22
Dockerfile
22
Dockerfile
@@ -21,29 +21,25 @@
|
||||
# your jupyterhub_config.py will be added automatically
|
||||
# from your docker directory.
|
||||
|
||||
FROM debian:jessie
|
||||
MAINTAINER Jupyter Project <jupyter@googlegroups.com>
|
||||
FROM ubuntu:18.04
|
||||
LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"
|
||||
|
||||
# install nodejs, utf8 locale, set CDN because default httpredir is unreliable
|
||||
ENV DEBIAN_FRONTEND noninteractive
|
||||
RUN REPO=http://cdn-fastly.deb.debian.org && \
|
||||
echo "deb $REPO/debian jessie main\ndeb $REPO/debian-security jessie/updates main" > /etc/apt/sources.list && \
|
||||
apt-get -y update && \
|
||||
RUN apt-get -y update && \
|
||||
apt-get -y upgrade && \
|
||||
apt-get -y install wget locales git bzip2 &&\
|
||||
/usr/sbin/update-locale LANG=C.UTF-8 && \
|
||||
locale-gen C.UTF-8 && \
|
||||
apt-get remove -y locales && \
|
||||
apt-get -y install wget git bzip2 && \
|
||||
apt-get purge && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
# install Python + NodeJS with conda
|
||||
RUN wget -q https://repo.continuum.io/miniconda/Miniconda3-4.2.12-Linux-x86_64.sh -O /tmp/miniconda.sh && \
|
||||
echo 'd0c7c71cc5659e54ab51f2005a8d96f3 */tmp/miniconda.sh' | md5sum -c - && \
|
||||
RUN wget -q https://repo.continuum.io/miniconda/Miniconda3-4.4.10-Linux-x86_64.sh -O /tmp/miniconda.sh && \
|
||||
echo 'bec6203dbb2f53011e974e9bf4d46e93 */tmp/miniconda.sh' | md5sum -c - && \
|
||||
bash /tmp/miniconda.sh -f -b -p /opt/conda && \
|
||||
/opt/conda/bin/conda install --yes -c conda-forge \
|
||||
python=3.5 sqlalchemy tornado jinja2 traitlets requests pip pycurl \
|
||||
python=3.6 sqlalchemy tornado jinja2 traitlets requests pip pycurl \
|
||||
nodejs configurable-http-proxy && \
|
||||
/opt/conda/bin/pip install --upgrade pip && \
|
||||
rm /tmp/miniconda.sh
|
||||
@@ -52,7 +48,7 @@ ENV PATH=/opt/conda/bin:$PATH
|
||||
ADD . /src/jupyterhub
|
||||
WORKDIR /src/jupyterhub
|
||||
|
||||
RUN python setup.py js && pip install . && \
|
||||
RUN pip install . && \
|
||||
rm -rf $PWD ~/.cache ~/.npm
|
||||
|
||||
RUN mkdir -p /srv/jupyterhub/
|
||||
|
20
MANIFEST.in
20
MANIFEST.in
@@ -1,8 +1,9 @@
|
||||
include README.md
|
||||
include COPYING.md
|
||||
include setupegg.py
|
||||
include bower.json
|
||||
include bower-lite
|
||||
include package.json
|
||||
include package-lock.json
|
||||
include *requirements.txt
|
||||
include Dockerfile
|
||||
|
||||
@@ -18,14 +19,15 @@ graft docs
|
||||
prune docs/node_modules
|
||||
|
||||
# prune some large unused files from components
|
||||
prune share/jupyter/hub/static/components/bootstrap/css
|
||||
exclude share/jupyter/hub/static/components/components/fonts/*.svg
|
||||
exclude share/jupyter/hub/static/components/bootstrap/less/*.js
|
||||
exclude share/jupyter/hub/static/components/font-awesome/css
|
||||
exclude share/jupyter/hub/static/components/font-awesome/fonts/*.svg
|
||||
exclude share/jupyter/hub/static/components/jquery/*migrate*.js
|
||||
prune share/jupyter/hub/static/components/moment/lang
|
||||
prune share/jupyter/hub/static/components/moment/min
|
||||
prune share/jupyterhub/static/components/bootstrap/dist/css
|
||||
exclude share/jupyterhub/static/components/bootstrap/dist/fonts/*.svg
|
||||
prune share/jupyterhub/static/components/font-awesome/css
|
||||
prune share/jupyterhub/static/components/font-awesome/scss
|
||||
exclude share/jupyterhub/static/components/font-awesome/fonts/*.svg
|
||||
prune share/jupyterhub/static/components/jquery/external
|
||||
prune share/jupyterhub/static/components/jquery/src
|
||||
prune share/jupyterhub/static/components/moment/lang
|
||||
prune share/jupyterhub/static/components/moment/min
|
||||
|
||||
# Patterns to exclude from any directory
|
||||
global-exclude *~
|
||||
|
45
README.md
45
README.md
@@ -20,7 +20,7 @@
|
||||
|
||||
With [JupyterHub](https://jupyterhub.readthedocs.io) you can create a
|
||||
**multi-user Hub** which spawns, manages, and proxies multiple instances of the
|
||||
single-user [Jupyter notebook (IPython notebook)](https://jupyter-notebook.readthedocs.io)
|
||||
single-user [Jupyter notebook](https://jupyter-notebook.readthedocs.io)
|
||||
server.
|
||||
|
||||
[Project Jupyter](https://jupyter.org) created JupyterHub to support many
|
||||
@@ -34,11 +34,11 @@ Three main actors make up JupyterHub:
|
||||
|
||||
- multi-user **Hub** (tornado process)
|
||||
- configurable http **proxy** (node-http-proxy)
|
||||
- multiple **single-user Jupyter notebook servers** (Python/IPython/tornado)
|
||||
- multiple **single-user Jupyter notebook servers** (Python/Jupyter/tornado)
|
||||
|
||||
Basic principles for operation are:
|
||||
|
||||
- Hub spawns a proxy.
|
||||
- Hub launches a proxy.
|
||||
- Proxy forwards all requests to Hub by default.
|
||||
- Hub handles login, and spawns single-user servers on demand.
|
||||
- Hub configures proxy to forward url prefixes to the single-user notebook
|
||||
@@ -151,7 +151,7 @@ not, Jupyter Notebook version 4 or greater must be installed.
|
||||
|
||||
The JupyterHub docker image can be started with the following command:
|
||||
|
||||
docker run -d --name jupyterhub jupyterhub/jupyterhub jupyterhub
|
||||
docker run -p 8000:8000 -d --name jupyterhub jupyterhub/jupyterhub jupyterhub
|
||||
|
||||
This command will create a container named `jupyterhub` that you can
|
||||
**stop and resume** with `docker stop/start`.
|
||||
@@ -163,7 +163,7 @@ If you want to run docker on a computer that has a public IP then you should
|
||||
(as in MUST) **secure it with ssl** by adding ssl options to your docker
|
||||
configuration or by using a ssl enabled proxy.
|
||||
|
||||
[Mounting volumes](https://docs.docker.com/engine/userguide/containers/dockervolumes/) will
|
||||
[Mounting volumes](https://docs.docker.com/engine/admin/volumes/volumes/) will
|
||||
allow you to **store data outside the docker image (host system) so it will be persistent**, even when you start
|
||||
a new image.
|
||||
|
||||
@@ -175,38 +175,9 @@ These accounts will be used for authentication in JupyterHub's default configura
|
||||
|
||||
If you would like to contribute to the project, please read our
|
||||
[contributor documentation](http://jupyter.readthedocs.io/en/latest/contributor/content-contributor.html)
|
||||
and the [`CONTRIBUTING.md`](CONTRIBUTING.md).
|
||||
|
||||
For a **development install**, clone the [repository](https://github.com/jupyterhub/jupyterhub)
|
||||
and then install from source:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/jupyterhub/jupyterhub
|
||||
cd jupyterhub
|
||||
pip3 install -r dev-requirements.txt -e .
|
||||
```
|
||||
|
||||
If the `pip3 install` command fails and complains about `lessc` being
|
||||
unavailable, you may need to explicitly install some additional JavaScript
|
||||
dependencies:
|
||||
|
||||
npm install
|
||||
|
||||
This will fetch client-side JavaScript dependencies necessary to compile CSS.
|
||||
|
||||
You may also need to manually update JavaScript and CSS after some development
|
||||
updates, with:
|
||||
|
||||
```bash
|
||||
python3 setup.py js # fetch updated client-side js
|
||||
python3 setup.py css # recompile CSS from LESS sources
|
||||
```
|
||||
|
||||
We use [pytest](http://doc.pytest.org/en/latest/) for **running tests**:
|
||||
|
||||
```bash
|
||||
pytest jupyterhub/tests
|
||||
```
|
||||
and the [`CONTRIBUTING.md`](CONTRIBUTING.md). The `CONTRIBUTING.md` file
|
||||
explains how to set up a development installation, how to run the test suite,
|
||||
and how to contribute to documentation.
|
||||
|
||||
### A note about platform support
|
||||
|
||||
|
36
bower-lite
Executable file
36
bower-lite
Executable file
@@ -0,0 +1,36 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
"""
|
||||
bower-lite
|
||||
|
||||
Since Bower's on its way out,
|
||||
stage frontend dependencies from node_modules into components
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
from os.path import join
|
||||
import shutil
|
||||
|
||||
HERE = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
|
||||
components = join(HERE, "share", "jupyterhub", "static", "components")
|
||||
node_modules = join(HERE, "node_modules")
|
||||
|
||||
if os.path.exists(components):
|
||||
shutil.rmtree(components)
|
||||
os.mkdir(components)
|
||||
|
||||
with open(join(HERE, 'package.json')) as f:
|
||||
package_json = json.load(f)
|
||||
|
||||
dependencies = package_json['dependencies']
|
||||
for dep in dependencies:
|
||||
src = join(node_modules, dep)
|
||||
dest = join(components, dep)
|
||||
print("%s -> %s" % (src, dest))
|
||||
shutil.copytree(src, dest)
|
11
bower.json
11
bower.json
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"name": "jupyterhub-deps",
|
||||
"version": "0.0.0",
|
||||
"dependencies": {
|
||||
"bootstrap": "components/bootstrap#~3.3",
|
||||
"font-awesome": "components/font-awesome#~4.7",
|
||||
"jquery": "components/jquery#~3.2",
|
||||
"moment": "~2.18",
|
||||
"requirejs": "~2.3"
|
||||
}
|
||||
}
|
0
ci/docker-db.sh
Normal file → Executable file
0
ci/docker-db.sh
Normal file → Executable file
8
ci/init-db.sh
Normal file → Executable file
8
ci/init-db.sh
Normal file → Executable file
@@ -21,7 +21,7 @@ esac
|
||||
|
||||
set -x
|
||||
|
||||
$SQL 'DROP DATABASE jupyterhub;' 2>/dev/null || true
|
||||
$SQL "CREATE DATABASE jupyterhub ${EXTRA_CREATE};"
|
||||
$SQL 'DROP DATABASE jupyterhub_upgrade;' 2>/dev/null || true
|
||||
$SQL "CREATE DATABASE jupyterhub_upgrade ${EXTRA_CREATE};"
|
||||
for SUFFIX in '' _upgrade_072 _upgrade_081; do
|
||||
$SQL "DROP DATABASE jupyterhub${SUFFIX};" 2>/dev/null || true
|
||||
$SQL "CREATE DATABASE jupyterhub${SUFFIX} ${EXTRA_CREATE};"
|
||||
done
|
||||
|
24
circle.yml
24
circle.yml
@@ -1,24 +0,0 @@
|
||||
machine:
|
||||
services:
|
||||
- docker
|
||||
|
||||
dependencies:
|
||||
override:
|
||||
- ls
|
||||
|
||||
test:
|
||||
override:
|
||||
- docker build -t jupyterhub/jupyterhub .
|
||||
- docker build -t jupyterhub/jupyterhub-onbuild:${CIRCLE_TAG:-latest} onbuild
|
||||
|
||||
deployment:
|
||||
hub:
|
||||
branch: master
|
||||
commands:
|
||||
- docker login -u $DOCKER_USER -p $DOCKER_PASS -e unused@example.com
|
||||
- docker push jupyterhub/jupyterhub-onbuild
|
||||
release:
|
||||
tag: /.*/
|
||||
commands:
|
||||
- docker login -u $DOCKER_USER -p $DOCKER_PASS -e unused@example.com
|
||||
- docker push jupyterhub/jupyterhub-onbuild:$CIRCLE_TAG
|
@@ -4,6 +4,7 @@ codecov
|
||||
cryptography
|
||||
pytest-cov
|
||||
pytest-tornado
|
||||
pytest>=2.8
|
||||
pytest>=3.3
|
||||
notebook
|
||||
requests-mock
|
||||
virtualenv
|
||||
|
11
dockerfiles/Dockerfile.alpine
Normal file
11
dockerfiles/Dockerfile.alpine
Normal file
@@ -0,0 +1,11 @@
|
||||
FROM python:3.6.3-alpine3.6
|
||||
|
||||
ARG JUPYTERHUB_VERSION=0.8.1
|
||||
|
||||
RUN pip3 install --no-cache jupyterhub==${JUPYTERHUB_VERSION}
|
||||
ENV LANG=en_US.UTF-8
|
||||
|
||||
USER nobody
|
||||
CMD ["jupyterhub"]
|
||||
|
||||
|
21
dockerfiles/README.md
Normal file
21
dockerfiles/README.md
Normal file
@@ -0,0 +1,21 @@
|
||||
## What is Dockerfile.alpine
|
||||
Dockerfile.alpine contains base image for jupyterhub. It does not work independently, but only as part of a full jupyterhub cluster
|
||||
|
||||
## How to use it?
|
||||
|
||||
1. A running configurable-http-proxy, whose API is accessible.
|
||||
2. A jupyterhub_config file.
|
||||
3. Authentication and other libraries required by the specific jupyterhub_config file.
|
||||
|
||||
|
||||
## Steps to test it outside a cluster
|
||||
|
||||
* start configurable-http-proxy in another container
|
||||
* specify CONFIGPROXY_AUTH_TOKEN env in both containers
|
||||
* put both containers on the same network (e.g. docker create network jupyterhub; docker run ... --net jupyterhub)
|
||||
* tell jupyterhub where CHP is (e.g. c.ConfigurableHTTPProxy.api_url = 'http://chp:8001')
|
||||
* tell jupyterhub not to start the proxy itself (c.ConfigurableHTTPProxy.should_start = False)
|
||||
* Use dummy authenticator for ease of testing. Update following in jupyterhub_config file
|
||||
- c.JupyterHub.authenticator_class = 'dummyauthenticator.DummyAuthenticator'
|
||||
- c.DummyAuthenticator.password = "your strong password"
|
||||
|
@@ -2,7 +2,7 @@
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS =
|
||||
SPHINXOPTS = "-W"
|
||||
SPHINXBUILD = sphinx-build
|
||||
PAPER =
|
||||
BUILDDIR = build
|
||||
|
@@ -3,17 +3,17 @@ channels:
|
||||
- conda-forge
|
||||
dependencies:
|
||||
- nodejs
|
||||
- python=3.5
|
||||
- python=3.6
|
||||
- alembic
|
||||
- jinja2
|
||||
- pamela
|
||||
- requests
|
||||
- sqlalchemy>=1
|
||||
- tornado>=4.1
|
||||
- tornado>=5.0
|
||||
- traitlets>=4.1
|
||||
- sphinx>=1.4, !=1.5.4
|
||||
- sphinx_rtd_theme
|
||||
- sphinx>=1.7
|
||||
- pip:
|
||||
- jupyter_alabaster_theme
|
||||
- python-oauth2
|
||||
- recommonmark==0.4.0
|
||||
- async_generator
|
||||
- prometheus_client
|
||||
|
@@ -1,3 +1,3 @@
|
||||
-r ../requirements.txt
|
||||
sphinx>=1.4
|
||||
sphinx>=1.7
|
||||
recommonmark==0.4.0
|
||||
|
@@ -3,7 +3,7 @@ swagger: '2.0'
|
||||
info:
|
||||
title: JupyterHub
|
||||
description: The REST API for JupyterHub
|
||||
version: 0.8.0dev
|
||||
version: 0.9.0dev
|
||||
license:
|
||||
name: BSD-3-Clause
|
||||
schemes:
|
||||
@@ -240,18 +240,36 @@ paths:
|
||||
description: The user's notebook named-server has stopped
|
||||
'202':
|
||||
description: The user's notebook named-server has not yet stopped as it is taking a while to stop
|
||||
/users/{name}/admin-access:
|
||||
post:
|
||||
summary: Grant admin access to this user's notebook server
|
||||
parameters:
|
||||
- name: name
|
||||
description: username
|
||||
in: path
|
||||
required: true
|
||||
type: string
|
||||
/users/{name}/tokens:
|
||||
get:
|
||||
summary: List tokens for the user
|
||||
responses:
|
||||
'200':
|
||||
description: Sets a cookie granting the requesting administrator access to the user's notebook server
|
||||
description: The list of tokens
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/definitions/Token'
|
||||
post:
|
||||
summary: Create a new token for the user
|
||||
responses:
|
||||
'201':
|
||||
description: The newly created token
|
||||
schema:
|
||||
$ref: '#/definitions/Token'
|
||||
/users/{name}/tokens/{token_id}:
|
||||
get:
|
||||
summary: Get the model for a token by id
|
||||
responses:
|
||||
'200':
|
||||
description: The info for the new token
|
||||
schema:
|
||||
$ref: '#/definitions/Token'
|
||||
delete:
|
||||
summary: Delete (revoke) a token by id
|
||||
responses:
|
||||
'204':
|
||||
description: The token has been deleted
|
||||
/user:
|
||||
summary: Return authenticated user's model
|
||||
description:
|
||||
@@ -588,12 +606,55 @@ definitions:
|
||||
description: The user's notebook server's base URL, if running; null if not.
|
||||
pending:
|
||||
type: string
|
||||
enum: ["spawn", "stop"]
|
||||
enum: ["spawn", "stop", null]
|
||||
description: The currently pending action, if any
|
||||
last_activity:
|
||||
type: string
|
||||
format: date-time
|
||||
description: Timestamp of last-seen activity from the user
|
||||
servers:
|
||||
type: object
|
||||
description: The active servers for this user.
|
||||
items:
|
||||
schema:
|
||||
$ref: '#/definitions/Server'
|
||||
Server:
|
||||
type: object
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
description: The server's name. The user's default server has an empty name ('')
|
||||
ready:
|
||||
type: boolean
|
||||
description: |
|
||||
Whether the server is ready for traffic.
|
||||
Will always be false when any transition is pending.
|
||||
pending:
|
||||
type: string
|
||||
enum: ["spawn", "stop", null]
|
||||
description: |
|
||||
The currently pending action, if any.
|
||||
A server is not ready if an action is pending.
|
||||
url:
|
||||
type: string
|
||||
description: |
|
||||
The URL where the server can be accessed
|
||||
(typically /user/:name/:server.name/).
|
||||
progress_url:
|
||||
type: string
|
||||
description: |
|
||||
The URL for an event-stream to retrieve events during a spawn.
|
||||
started:
|
||||
type: string
|
||||
format: date-time
|
||||
description: UTC timestamp when the server was last started.
|
||||
last_activity:
|
||||
type: string
|
||||
format: date-time
|
||||
description: UTC timestamp last-seen activity on this server.
|
||||
state:
|
||||
type: object
|
||||
description: Arbitrary internal state from this server's spawner. Only available on the hub's users list or get-user-by-name method, and only if a hub admin. None otherwise.
|
||||
Group:
|
||||
type: object
|
||||
properties:
|
||||
@@ -628,3 +689,31 @@ definitions:
|
||||
description: The command used to start the service (if managed)
|
||||
items:
|
||||
type: string
|
||||
Token:
|
||||
type: object
|
||||
properties:
|
||||
token:
|
||||
type: string
|
||||
description: The token itself. Only present in responses to requests for a new token.
|
||||
id:
|
||||
type: string
|
||||
description: The id of the API token. Used for modifying or deleting the token.
|
||||
user:
|
||||
type: string
|
||||
description: The user that owns a token (undefined if owned by a service)
|
||||
service:
|
||||
type: string
|
||||
description: The service that owns the token (undefined of owned by a user)
|
||||
note:
|
||||
type: string
|
||||
description: A note about the token, typically describing what it was created for.
|
||||
created:
|
||||
type: string
|
||||
format: date-time
|
||||
description: Timestamp when this token was created
|
||||
last_activity:
|
||||
type: string
|
||||
format: date-time
|
||||
description: |
|
||||
Timestamp of last-seen activity using this token.
|
||||
Can be null if token has never been used.
|
||||
|
106
docs/source/_static/custom.css
Normal file
106
docs/source/_static/custom.css
Normal file
@@ -0,0 +1,106 @@
|
||||
div#helm-chart-schema h2,
|
||||
div#helm-chart-schema h3,
|
||||
div#helm-chart-schema h4,
|
||||
div#helm-chart-schema h5,
|
||||
div#helm-chart-schema h6 {
|
||||
font-family: courier new;
|
||||
}
|
||||
|
||||
h3, h3 ~ * {
|
||||
margin-left: 3% !important;
|
||||
}
|
||||
|
||||
h4, h4 ~ * {
|
||||
margin-left: 6% !important;
|
||||
}
|
||||
|
||||
h5, h5 ~ * {
|
||||
margin-left: 9% !important;
|
||||
}
|
||||
|
||||
h6, h6 ~ * {
|
||||
margin-left: 12% !important;
|
||||
}
|
||||
|
||||
h7, h7 ~ * {
|
||||
margin-left: 15% !important;
|
||||
}
|
||||
|
||||
img.logo {
|
||||
width:100%
|
||||
}
|
||||
|
||||
.right-next {
|
||||
float: right;
|
||||
max-width: 45%;
|
||||
overflow: auto;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.right-next::after{
|
||||
content: ' »';
|
||||
}
|
||||
|
||||
.left-prev {
|
||||
float: left;
|
||||
max-width: 45%;
|
||||
overflow: auto;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.left-prev::before{
|
||||
content: '« ';
|
||||
}
|
||||
|
||||
.prev-next-bottom {
|
||||
margin-top: 3em;
|
||||
}
|
||||
|
||||
.prev-next-top {
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
/* Sidebar TOC and headers */
|
||||
|
||||
div.sphinxsidebarwrapper div {
|
||||
margin-bottom: .8em;
|
||||
}
|
||||
div.sphinxsidebar h3 {
|
||||
font-size: 1.3em;
|
||||
padding-top: 0px;
|
||||
font-weight: 800;
|
||||
margin-left: 0px !important;
|
||||
}
|
||||
|
||||
div.sphinxsidebar p.caption {
|
||||
font-size: 1.2em;
|
||||
margin-bottom: 0px;
|
||||
margin-left: 0px !important;
|
||||
font-weight: 900;
|
||||
color: #767676;
|
||||
}
|
||||
|
||||
div.sphinxsidebar ul {
|
||||
font-size: .8em;
|
||||
margin-top: 0px;
|
||||
padding-left: 3%;
|
||||
margin-left: 0px !important;
|
||||
}
|
||||
|
||||
div.relations ul {
|
||||
font-size: 1em;
|
||||
margin-left: 0px !important;
|
||||
}
|
||||
|
||||
div#searchbox form {
|
||||
margin-left: 0px !important;
|
||||
}
|
||||
|
||||
/* body elements */
|
||||
.toctree-wrapper span.caption-text {
|
||||
color: #767676;
|
||||
font-style: italic;
|
||||
font-weight: 300;
|
||||
}
|
BIN
docs/source/_static/images/logo/favicon.ico
Normal file
BIN
docs/source/_static/images/logo/favicon.ico
Normal file
Binary file not shown.
After Width: | Height: | Size: 4.4 KiB |
BIN
docs/source/_static/images/logo/logo.png
Normal file
BIN
docs/source/_static/images/logo/logo.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 38 KiB |
16
docs/source/_templates/navigation.html
Normal file
16
docs/source/_templates/navigation.html
Normal file
@@ -0,0 +1,16 @@
|
||||
{# Custom template for navigation.html
|
||||
|
||||
alabaster theme does not provide blocks for titles to
|
||||
be overridden so this custom theme handles title and
|
||||
toctree for sidebar
|
||||
#}
|
||||
<h3>{{ _('Table of Contents') }}</h3>
|
||||
{{ toctree(includehidden=theme_sidebar_includehidden, collapse=theme_sidebar_collapse) }}
|
||||
{% if theme_extra_nav_links %}
|
||||
<hr />
|
||||
<ul>
|
||||
{% for text, uri in theme_extra_nav_links.items() %}
|
||||
<li class="toctree-l1"><a href="{{ uri }}">{{ text }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
30
docs/source/_templates/page.html
Normal file
30
docs/source/_templates/page.html
Normal file
@@ -0,0 +1,30 @@
|
||||
{% extends '!page.html' %}
|
||||
|
||||
{# Custom template for page.html
|
||||
|
||||
Alabaster theme does not provide blocks for prev/next at bottom of each page.
|
||||
This is _in addition_ to the prev/next in the sidebar. The "Prev/Next" text
|
||||
or symbols are handled by CSS classes in _static/custom.css
|
||||
#}
|
||||
|
||||
{% macro prev_next(prev, next, prev_title='', next_title='') %}
|
||||
{%- if prev %}
|
||||
<a class='left-prev' href="{{ prev.link|e }}" title="{{ _('previous chapter')}}">{{ prev_title or prev.title }}</a>
|
||||
{%- endif %}
|
||||
{%- if next %}
|
||||
<a class='right-next' href="{{ next.link|e }}" title="{{ _('next chapter')}}">{{ next_title or next.title }}</a>
|
||||
{%- endif %}
|
||||
<div style='clear:both;'></div>
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
{% block body %}
|
||||
<div class='prev-next-top'>
|
||||
{{ prev_next(prev, next, 'Previous', 'Next') }}
|
||||
</div>
|
||||
|
||||
{{super()}}
|
||||
<div class='prev-next-bottom'>
|
||||
{{ prev_next(prev, next) }}
|
||||
</div>
|
||||
{% endblock %}
|
17
docs/source/_templates/relations.html
Normal file
17
docs/source/_templates/relations.html
Normal file
@@ -0,0 +1,17 @@
|
||||
{# Custom template for relations.html
|
||||
|
||||
alabaster theme does not provide previous/next page by default
|
||||
#}
|
||||
<div class="relations">
|
||||
<h3>Navigation</h3>
|
||||
<ul>
|
||||
<li><a href="{{ pathto(master_doc) }}">Documentation Home</a><ul>
|
||||
{%- if prev %}
|
||||
<li><a href="{{ prev.link|e }}" title="Previous">Previous topic</a></li>
|
||||
{%- endif %}
|
||||
{%- if next %}
|
||||
<li><a href="{{ next.link|e }}" title="Next">Next topic</a></li>
|
||||
{%- endif %}
|
||||
</ul>
|
||||
</ul>
|
||||
</div>
|
@@ -7,7 +7,140 @@ command line for details.
|
||||
|
||||
## [Unreleased]
|
||||
|
||||
## [0.8.0] 2017-10-03
|
||||
## 0.9
|
||||
|
||||
### 0.9.0
|
||||
|
||||
JupyterHub 0.9 is a major upgrade of JupyterHub.
|
||||
There are several changes to the database schema,
|
||||
so make sure to backup your database and run:
|
||||
|
||||
jupyterhub upgrade-db
|
||||
|
||||
after upgrading jupyterhub.
|
||||
|
||||
The biggest change for 0.9 is the switch to asyncio coroutines everywhere
|
||||
instead of tornado coroutines. Custom Spawners and Authenticators are still
|
||||
free to use tornado coroutines for async methods, as they will continue to
|
||||
work. As part of this upgrade, JupyterHub 0.9 drops support for Python < 3.5
|
||||
and tornado < 5.0.
|
||||
|
||||
|
||||
#### Changed
|
||||
|
||||
- Require Python >= 3.5
|
||||
- Require tornado >= 5.0
|
||||
- Use asyncio coroutines throughout
|
||||
- Set status 409 for conflicting actions instead of 400,
|
||||
e.g. creating users or groups that already exist.
|
||||
- timestamps in REST API continue to be UTC, but now include 'Z' suffix
|
||||
to identify them as such.
|
||||
- REST API User model always includes `servers` dict,
|
||||
not just when named servers are enabled.
|
||||
- `server` info is no longer available to oauth identification endpoints,
|
||||
only user info and group membership.
|
||||
- `User.last_activity` may be None if a user has not been seen,
|
||||
rather than starting with the user creation time
|
||||
which is now separately stored as `User.created`.
|
||||
- static resources are now found in `$PREFIX/share/jupyterhub` instead of `share/jupyter/hub` for improved consistency.
|
||||
- Deprecate `.extra_log_file` config. Use pipe redirection instead:
|
||||
|
||||
jupyterhub &>> /var/log/jupyterhub.log
|
||||
|
||||
- Add `JupyterHub.bind_url` config for setting the full bind URL of the proxy.
|
||||
Sets ip, port, base_url all at once.
|
||||
- Add `JupyterHub.hub_bind_url` for setting the full host+port of the Hub.
|
||||
`hub_bind_url` supports unix domain sockets, e.g.
|
||||
`unix+http://%2Fsrv%2Fjupyterhub.sock`
|
||||
- Deprecate `JupyterHub.hub_connect_port` config in favor of `JupyterHub.hub_connect_url`. `hub_connect_ip` is not deprecated
|
||||
and can still be used in the common case where only the ip address of the hub differs from the bind ip.
|
||||
|
||||
#### Added
|
||||
|
||||
- Spawners can define a `.progress` method which should be an async generator.
|
||||
The generator should yield events of the form:
|
||||
```python
|
||||
{
|
||||
"message": "some-state-message",
|
||||
"progress": 50,
|
||||
}
|
||||
```
|
||||
These messages will be shown with a progress bar on the spawn-pending page.
|
||||
The `async_generator` package can be used to make async generators
|
||||
compatible with Python 3.5.
|
||||
- track activity of individual API tokens
|
||||
- new REST API for managing API tokens at `/hub/api/user/tokens[/token-id]`
|
||||
- allow viewing/revoking tokens via token page
|
||||
- User creation time is available in the REST API as `User.created`
|
||||
- Server start time is stored as `Server.started`
|
||||
- `Spawner.start` may return a URL for connecting to a notebook instead of `(ip, port)`. This enables Spawners to launch servers that setup their own HTTPS.
|
||||
- Optimize database performance by disabling sqlalchemy expire_on_commit by default.
|
||||
- Add `python -m jupyterhub.dbutil shell` entrypoint for quickly
|
||||
launching an IPython session connected to your JupyterHub database.
|
||||
- Include `User.auth_state` in user model on single-user REST endpoints for admins only.
|
||||
- Include `Server.state` in server model on REST endpoints for admins only.
|
||||
- Add `Authenticator.blacklist` for blacklisting users instead of whitelisting.
|
||||
- Pass `c.JupyterHub.tornado_settings['cookie_options']` down to Spawners
|
||||
so that cookie options (e.g. `expires_days`) can be set globally for the whole application.
|
||||
- SIGINFO (`ctrl-t`) handler showing the current status of all running threads,
|
||||
coroutines, and CPU/memory/FD consumption.
|
||||
- Add async `Spawner.get_options_form` alternative to `.options_form`, so it can be a coroutine.
|
||||
- Add `JupyterHub.redirect_to_server` config to govern whether
|
||||
users should be sent to their server on login or the JuptyerHub home page.
|
||||
- html page templates can be more easily customized and extended.
|
||||
- Allow registering external OAuth clients for using the Hub as an OAuth provider.
|
||||
- Add basic prometheus metrics at `/hub/metrics` endpoint.
|
||||
- Add session-id cookie, enabling immediate revocation of login tokens.
|
||||
- Authenticators may specify that users are admins by specifying the `admin` key when return the user model as a dict.
|
||||
- Added "Start All" button to admin page for launching all user servers at once.
|
||||
|
||||
|
||||
#### Fixed
|
||||
|
||||
- Remove green from theme to improve accessibility
|
||||
- Fix error when proxy deletion fails due to route already being deleted
|
||||
- clear `?redirects` from URL on successful launch
|
||||
- disable send2trash by default, which is rarely desirable for jupyterhub
|
||||
- Put PAM calls in a thread so they don't block the main application
|
||||
in cases where PAM is slow (e.g. LDAP).
|
||||
- Remove implicit spawn from login handler,
|
||||
instead relying on subsequent request for `/user/:name` to trigger spawn.
|
||||
- Fixed several inconsistencies for initial redirects,
|
||||
depending on whether server is running or not and whether the user is logged in or not.
|
||||
- Admin requests for `/user/:name` (when admin-access is enabled) launch the right server if it's not running instead of redirecting to their own.
|
||||
- Major performance improvement starting up JupyterHub with many users,
|
||||
especially when most are inactive.
|
||||
- Various fixes in race conditions and performance improvements with the default proxy.
|
||||
- Fixes for CORS headers
|
||||
- Stop setting `.form-control` on spawner form inputs unconditionally.
|
||||
|
||||
|
||||
## 0.8
|
||||
|
||||
### [0.8.1] 2017-11-07
|
||||
|
||||
JupyterHub 0.8.1 is a collection of bugfixes and small improvements on 0.8.
|
||||
|
||||
#### Added
|
||||
|
||||
- Run tornado with AsyncIO by default
|
||||
- Add `jupyterhub --upgrade-db` flag for automatically upgrading the database as part of startup.
|
||||
This is useful for cases where manually running `jupyterhub upgrade-db`
|
||||
as a separate step is unwieldy.
|
||||
- Avoid creating backups of the database when no changes are to be made by
|
||||
`jupyterhub upgrade-db`.
|
||||
|
||||
#### Fixed
|
||||
|
||||
- Add some further validation to usernames - `/` is not allowed in usernames.
|
||||
- Fix empty logout page when using auto_login
|
||||
- Fix autofill of username field in default login form.
|
||||
- Fix listing of users on the admin page who have not yet started their server.
|
||||
- Fix ever-growing traceback when re-raising Exceptions from spawn failures.
|
||||
- Remove use of deprecated `bower` for javascript client dependencies.
|
||||
|
||||
|
||||
### [0.8.0] 2017-10-03
|
||||
|
||||
JupyterHub 0.8 is a big release!
|
||||
|
||||
@@ -38,7 +171,7 @@ in your Dockerfile is sufficient.
|
||||
|
||||
```python
|
||||
{
|
||||
'username': 'name'
|
||||
'username': 'name',
|
||||
'state': {}
|
||||
}
|
||||
```
|
||||
@@ -235,7 +368,8 @@ Fix removal of `/login` page in 0.4.0, breaking some OAuth providers.
|
||||
First preview release
|
||||
|
||||
|
||||
[Unreleased]: https://github.com/jupyterhub/jupyterhub/compare/0.8.0...HEAD
|
||||
[Unreleased]: https://github.com/jupyterhub/jupyterhub/compare/0.8.1...HEAD
|
||||
[0.8.1]: https://github.com/jupyterhub/jupyterhub/compare/0.8.0...0.8.1
|
||||
[0.8.0]: https://github.com/jupyterhub/jupyterhub/compare/0.7.2...0.8.0
|
||||
[0.7.2]: https://github.com/jupyterhub/jupyterhub/compare/0.7.1...0.7.2
|
||||
[0.7.1]: https://github.com/jupyterhub/jupyterhub/compare/0.7.0...0.7.1
|
||||
|
@@ -21,7 +21,6 @@ extensions = [
|
||||
'sphinx.ext.intersphinx',
|
||||
'sphinx.ext.napoleon',
|
||||
'autodoc_traits',
|
||||
'jupyter_alabaster_theme',
|
||||
]
|
||||
|
||||
templates_path = ['_templates']
|
||||
@@ -67,34 +66,39 @@ source_suffix = ['.rst', '.md']
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages.
|
||||
html_theme = 'jupyter_alabaster_theme'
|
||||
html_theme = 'alabaster'
|
||||
|
||||
#html_theme_options = {}
|
||||
#html_theme_path = []
|
||||
#html_title = None
|
||||
#html_short_title = None
|
||||
#html_logo = None
|
||||
#html_favicon = None
|
||||
html_logo = '_static/images/logo/logo.png'
|
||||
html_favicon = '_static/images/logo/favicon.ico'
|
||||
|
||||
# Paths that contain custom static files (such as style sheets)
|
||||
html_static_path = ['_static']
|
||||
|
||||
#html_extra_path = []
|
||||
#html_last_updated_fmt = '%b %d, %Y'
|
||||
#html_use_smartypants = True
|
||||
#html_sidebars = {}
|
||||
#html_additional_pages = {}
|
||||
#html_domain_indices = True
|
||||
#html_use_index = True
|
||||
#html_split_index = False
|
||||
#html_show_sourcelink = True
|
||||
#html_show_sphinx = True
|
||||
#html_show_copyright = True
|
||||
#html_use_opensearch = ''
|
||||
#html_file_suffix = None
|
||||
#html_search_language = 'en'
|
||||
#html_search_options = {'type': 'default'}
|
||||
#html_search_scorer = 'scorer.js'
|
||||
html_theme_options = {
|
||||
'show_related': True,
|
||||
'description': 'Documentation for JupyterHub',
|
||||
'github_user': 'jupyterhub',
|
||||
'github_repo': 'jupyterhub',
|
||||
'github_banner': False,
|
||||
'github_button': True,
|
||||
'github_type': 'star',
|
||||
'show_powered_by': False,
|
||||
'extra_nav_links': {
|
||||
'GitHub Repo': 'http://github.com/jupyterhub/jupyterhub',
|
||||
'Issue Tracker': 'http://github.com/jupyterhub/jupyterhub/issues',
|
||||
},
|
||||
}
|
||||
|
||||
html_sidebars = {
|
||||
'**': [
|
||||
'about.html',
|
||||
'searchbox.html',
|
||||
'navigation.html',
|
||||
'relations.html',
|
||||
'sourcelink.html',
|
||||
],
|
||||
}
|
||||
|
||||
htmlhelp_basename = 'JupyterHubdoc'
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
@@ -170,9 +174,7 @@ intersphinx_mapping = {'https://docs.python.org/3/': None}
|
||||
|
||||
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
|
||||
if not on_rtd:
|
||||
import jupyter_alabaster_theme
|
||||
html_theme = 'jupyter_alabaster_theme'
|
||||
html_theme_path = [jupyter_alabaster_theme.get_path()]
|
||||
html_theme = 'alabaster'
|
||||
else:
|
||||
# readthedocs.org uses their theme by default, so no need to specify it
|
||||
# build rest-api, since RTD doesn't run make
|
||||
|
@@ -151,9 +151,9 @@ easy to do with RStudio too.
|
||||
- https://getcarina.com/blog/learning-how-to-whale/
|
||||
- http://carolynvanslyck.com/talk/carina/jupyterhub/#/
|
||||
|
||||
### Red Hat
|
||||
|
||||
|
||||
### jcloud.io
|
||||
- Open to public JupyterHub server
|
||||
- https://jcloud.io
|
||||
## Miscellaneous
|
||||
|
||||
- https://medium.com/@ybarraud/setting-up-jupyterhub-with-sudospawner-and-anaconda-844628c0dbee#.rm3yt87e1
|
||||
|
@@ -88,7 +88,7 @@ c.JupyterHub.services = [
|
||||
{
|
||||
'name': 'cull-idle',
|
||||
'admin': True,
|
||||
'command': 'python cull_idle_servers.py --timeout=3600'.split(),
|
||||
'command': 'python3 cull_idle_servers.py --timeout=3600'.split(),
|
||||
}
|
||||
]
|
||||
```
|
||||
@@ -115,7 +115,7 @@ variable. Run `cull_idle_servers.py` manually.
|
||||
|
||||
```bash
|
||||
export JUPYTERHUB_API_TOKEN='token'
|
||||
python cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
|
||||
python3 cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
|
||||
```
|
||||
|
||||
[cull_idle_servers]: https://github.com/jupyterhub/jupyterhub/blob/master/examples/cull-idle/cull_idle_servers.py
|
||||
|
@@ -19,7 +19,7 @@ Three subsystems make up JupyterHub:
|
||||
|
||||
JupyterHub performs the following functions:
|
||||
|
||||
- The Hub spawns a proxy
|
||||
- The Hub launches a proxy
|
||||
- The proxy forwards all requests to the Hub by default
|
||||
- The Hub handles user login and spawns single-user servers on demand
|
||||
- The Hub configures the proxy to forward URL prefixes to the single-user
|
||||
@@ -59,6 +59,9 @@ Contents
|
||||
* :doc:`reference/rest`
|
||||
* :doc:`reference/upgrading`
|
||||
* :doc:`reference/config-examples`
|
||||
* :doc:`reference/config-ghoauth`
|
||||
* :doc:`reference/config-proxy`
|
||||
* :doc:`reference/config-sudo`
|
||||
|
||||
**API Reference**
|
||||
|
||||
|
@@ -37,7 +37,7 @@ If you want to run docker on a computer that has a public IP then you should
|
||||
(as in MUST) **secure it with ssl** by adding ssl options to your docker
|
||||
configuration or using a ssl enabled proxy.
|
||||
|
||||
`Mounting volumes <https://docs.docker.com/engine/userguide/containers/dockervolumes/>`_
|
||||
`Mounting volumes <https://docs.docker.com/engine/admin/volumes/volumes/>`_
|
||||
will allow you to store data outside the docker image (host system) so it will
|
||||
be persistent, even when you start a new image.
|
||||
|
||||
|
@@ -145,7 +145,7 @@ If such state should be persisted, `.authenticate()` should return a dictionary
|
||||
|
||||
```python
|
||||
{
|
||||
'username': 'name',
|
||||
'name': username,
|
||||
'auth_state': {
|
||||
'key': 'value',
|
||||
}
|
||||
@@ -190,7 +190,7 @@ class MyAuthenticator(Authenticator):
|
||||
username = yield identify_user(handler, data)
|
||||
upstream_token = yield token_for_user(username)
|
||||
return {
|
||||
'username': username,
|
||||
'name': username,
|
||||
'auth_state': {
|
||||
'upstream_token': upstream_token,
|
||||
},
|
||||
|
@@ -1,272 +1,8 @@
|
||||
# Configuration examples
|
||||
|
||||
This section provides examples, including configuration files and tips, for the
|
||||
following configurations:
|
||||
The following sections provide examples, including configuration files and tips, for the
|
||||
following:
|
||||
|
||||
- Using GitHub OAuth
|
||||
- Using nginx reverse proxy
|
||||
|
||||
## Using GitHub OAuth
|
||||
|
||||
In this example, we show a configuration file for a fairly standard JupyterHub
|
||||
deployment with the following assumptions:
|
||||
|
||||
* Running JupyterHub on a single cloud server
|
||||
* Using SSL on the standard HTTPS port 443
|
||||
* Using GitHub OAuth (using oauthenticator) for login
|
||||
* Users exist locally on the server
|
||||
* Users' notebooks to be served from `~/assignments` to allow users to browse
|
||||
for notebooks within other users' home directories
|
||||
* You want the landing page for each user to be a `Welcome.ipynb` notebook in
|
||||
their assignments directory.
|
||||
* All runtime files are put into `/srv/jupyterhub` and log files in `/var/log`.
|
||||
|
||||
The `jupyterhub_config.py` file would have these settings:
|
||||
|
||||
```python
|
||||
# jupyterhub_config.py file
|
||||
c = get_config()
|
||||
|
||||
import os
|
||||
pjoin = os.path.join
|
||||
|
||||
runtime_dir = os.path.join('/srv/jupyterhub')
|
||||
ssl_dir = pjoin(runtime_dir, 'ssl')
|
||||
if not os.path.exists(ssl_dir):
|
||||
os.makedirs(ssl_dir)
|
||||
|
||||
# Allows multiple single-server per user
|
||||
c.JupyterHub.allow_named_servers = True
|
||||
|
||||
# https on :443
|
||||
c.JupyterHub.port = 443
|
||||
c.JupyterHub.ssl_key = pjoin(ssl_dir, 'ssl.key')
|
||||
c.JupyterHub.ssl_cert = pjoin(ssl_dir, 'ssl.cert')
|
||||
|
||||
# put the JupyterHub cookie secret and state db
|
||||
# in /var/run/jupyterhub
|
||||
c.JupyterHub.cookie_secret_file = pjoin(runtime_dir, 'cookie_secret')
|
||||
c.JupyterHub.db_url = pjoin(runtime_dir, 'jupyterhub.sqlite')
|
||||
# or `--db=/path/to/jupyterhub.sqlite` on the command-line
|
||||
|
||||
# use GitHub OAuthenticator for local users
|
||||
c.JupyterHub.authenticator_class = 'oauthenticator.LocalGitHubOAuthenticator'
|
||||
c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL']
|
||||
|
||||
# create system users that don't exist yet
|
||||
c.LocalAuthenticator.create_system_users = True
|
||||
|
||||
# specify users and admin
|
||||
c.Authenticator.whitelist = {'rgbkrk', 'minrk', 'jhamrick'}
|
||||
c.Authenticator.admin_users = {'jhamrick', 'rgbkrk'}
|
||||
|
||||
# start single-user notebook servers in ~/assignments,
|
||||
# with ~/assignments/Welcome.ipynb as the default landing page
|
||||
# this config could also be put in
|
||||
# /etc/jupyter/jupyter_notebook_config.py
|
||||
c.Spawner.notebook_dir = '~/assignments'
|
||||
c.Spawner.args = ['--NotebookApp.default_url=/notebooks/Welcome.ipynb']
|
||||
```
|
||||
|
||||
Using the GitHub Authenticator requires a few additional
|
||||
environment variable to be set prior to launching JupyterHub:
|
||||
|
||||
```bash
|
||||
export GITHUB_CLIENT_ID=github_id
|
||||
export GITHUB_CLIENT_SECRET=github_secret
|
||||
export OAUTH_CALLBACK_URL=https://example.com/hub/oauth_callback
|
||||
export CONFIGPROXY_AUTH_TOKEN=super-secret
|
||||
# append log output to log file /var/log/jupyterhub.log
|
||||
jupyterhub -f /etc/jupyterhub/jupyterhub_config.py &>> /var/log/jupyterhub.log
|
||||
```
|
||||
|
||||
## Using a reverse proxy
|
||||
|
||||
In the following example, we show configuration files for a JupyterHub server
|
||||
running locally on port `8000` but accessible from the outside on the standard
|
||||
SSL port `443`. This could be useful if the JupyterHub server machine is also
|
||||
hosting other domains or content on `443`. The goal in this example is to
|
||||
satisfy the following:
|
||||
|
||||
* JupyterHub is running on a server, accessed *only* via `HUB.DOMAIN.TLD:443`
|
||||
* On the same machine, `NO_HUB.DOMAIN.TLD` strictly serves different content,
|
||||
also on port `443`
|
||||
* `nginx` or `apache` is used as the public access point (which means that
|
||||
only nginx/apache will bind to `443`)
|
||||
* After testing, the server in question should be able to score at least an A on the
|
||||
Qualys SSL Labs [SSL Server Test](https://www.ssllabs.com/ssltest/)
|
||||
|
||||
Let's start out with needed JupyterHub configuration in `jupyterhub_config.py`:
|
||||
|
||||
```python
|
||||
# Force the proxy to only listen to connections to 127.0.0.1
|
||||
c.JupyterHub.ip = '127.0.0.1'
|
||||
```
|
||||
|
||||
For high-quality SSL configuration, we also generate Diffie-Hellman parameters.
|
||||
This can take a few minutes:
|
||||
|
||||
```bash
|
||||
openssl dhparam -out /etc/ssl/certs/dhparam.pem 4096
|
||||
```
|
||||
|
||||
### nginx
|
||||
|
||||
The **`nginx` server config file** is fairly standard fare except for the two
|
||||
`location` blocks within the `HUB.DOMAIN.TLD` config file:
|
||||
|
||||
```bash
|
||||
# top-level http config for websocket headers
|
||||
# If Upgrade is defined, Connection = upgrade
|
||||
# If Upgrade is empty, Connection = close
|
||||
map $http_upgrade $connection_upgrade {
|
||||
default upgrade;
|
||||
'' close;
|
||||
}
|
||||
|
||||
# HTTP server to redirect all 80 traffic to SSL/HTTPS
|
||||
server {
|
||||
listen 80;
|
||||
server_name HUB.DOMAIN.TLD;
|
||||
|
||||
# Tell all requests to port 80 to be 302 redirected to HTTPS
|
||||
return 302 https://$host$request_uri;
|
||||
}
|
||||
|
||||
# HTTPS server to handle JupyterHub
|
||||
server {
|
||||
listen 443;
|
||||
ssl on;
|
||||
|
||||
server_name HUB.DOMAIN.TLD;
|
||||
|
||||
ssl_certificate /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem;
|
||||
|
||||
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
|
||||
ssl_prefer_server_ciphers on;
|
||||
ssl_dhparam /etc/ssl/certs/dhparam.pem;
|
||||
ssl_ciphers 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA';
|
||||
ssl_session_timeout 1d;
|
||||
ssl_session_cache shared:SSL:50m;
|
||||
ssl_stapling on;
|
||||
ssl_stapling_verify on;
|
||||
add_header Strict-Transport-Security max-age=15768000;
|
||||
|
||||
# Managing literal requests to the JupyterHub front end
|
||||
location / {
|
||||
proxy_pass http://127.0.0.1:8000;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
|
||||
# websocket headers
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
}
|
||||
|
||||
# Managing requests to verify letsencrypt host
|
||||
location ~ /.well-known {
|
||||
allow all;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
If `nginx` is not running on port 443, substitute `$http_host` for `$host` on
|
||||
the lines setting the `Host` header.
|
||||
|
||||
`nginx` will now be the front facing element of JupyterHub on `443` which means
|
||||
it is also free to bind other servers, like `NO_HUB.DOMAIN.TLD` to the same port
|
||||
on the same machine and network interface. In fact, one can simply use the same
|
||||
server blocks as above for `NO_HUB` and simply add a line for the root directory
|
||||
of the site as well as the applicable location call:
|
||||
|
||||
```bash
|
||||
server {
|
||||
listen 80;
|
||||
server_name NO_HUB.DOMAIN.TLD;
|
||||
|
||||
# Tell all requests to port 80 to be 302 redirected to HTTPS
|
||||
return 302 https://$host$request_uri;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 443;
|
||||
ssl on;
|
||||
|
||||
# INSERT OTHER SSL PARAMETERS HERE AS ABOVE
|
||||
# SSL cert may differ
|
||||
|
||||
# Set the appropriate root directory
|
||||
root /var/www/html;
|
||||
|
||||
# Set URI handling
|
||||
location / {
|
||||
try_files $uri $uri/ =404;
|
||||
}
|
||||
|
||||
# Managing requests to verify letsencrypt host
|
||||
location ~ /.well-known {
|
||||
allow all;
|
||||
}
|
||||
|
||||
}
|
||||
```
|
||||
|
||||
Now restart `nginx`, restart the JupyterHub, and enjoy accessing
|
||||
`https://HUB.DOMAIN.TLD` while serving other content securely on
|
||||
`https://NO_HUB.DOMAIN.TLD`.
|
||||
|
||||
|
||||
### Apache
|
||||
|
||||
As with nginx above, you can use [Apache](https://httpd.apache.org) as the reverse proxy.
|
||||
First, we will need to enable the apache modules that we are going to need:
|
||||
|
||||
```bash
|
||||
a2enmod ssl rewrite proxy proxy_http proxy_wstunnel
|
||||
```
|
||||
|
||||
Our Apache configuration is equivalent to the nginx configuration above:
|
||||
|
||||
- Redirect HTTP to HTTPS
|
||||
- Good SSL Configuration
|
||||
- Support for websockets on any proxied URL
|
||||
- JupyterHub is running locally at http://127.0.0.1:8000
|
||||
|
||||
```bash
|
||||
# redirect HTTP to HTTPS
|
||||
Listen 80
|
||||
<VirtualHost HUB.DOMAIN.TLD:80>
|
||||
ServerName HUB.DOMAIN.TLD
|
||||
Redirect / https://HUB.DOMAIN.TLD/
|
||||
</VirtualHost>
|
||||
|
||||
Listen 443
|
||||
<VirtualHost HUB.DOMAIN.TLD:443>
|
||||
|
||||
ServerName HUB.DOMAIN.TLD
|
||||
|
||||
# configure SSL
|
||||
SSLEngine on
|
||||
SSLCertificateFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem
|
||||
SSLCertificateKeyFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem
|
||||
SSLProtocol All -SSLv2 -SSLv3
|
||||
SSLOpenSSLConfCmd DHParameters /etc/ssl/certs/dhparam.pem
|
||||
SSLCipherSuite EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH
|
||||
|
||||
# Use RewriteEngine to handle websocket connection upgrades
|
||||
RewriteEngine On
|
||||
RewriteCond %{HTTP:Connection} Upgrade [NC]
|
||||
RewriteCond %{HTTP:Upgrade} websocket [NC]
|
||||
RewriteRule /(.*) ws://127.0.0.1:8000/$1 [P,L]
|
||||
|
||||
<Location "/">
|
||||
# preserve Host header to avoid cross-origin problems
|
||||
ProxyPreserveHost on
|
||||
# proxy to JupyterHub
|
||||
ProxyPass http://127.0.0.1:8000/
|
||||
ProxyPassReverse http://127.0.0.1:8000/
|
||||
</Location>
|
||||
</VirtualHost>
|
||||
```
|
||||
- Configuring GitHub OAuth
|
||||
- Using reverse proxy (nginx and Apache)
|
||||
- Run JupyterHub without root privileges using `sudo`
|
||||
|
82
docs/source/reference/config-ghoauth.md
Normal file
82
docs/source/reference/config-ghoauth.md
Normal file
@@ -0,0 +1,82 @@
|
||||
# Configure GitHub OAuth
|
||||
|
||||
In this example, we show a configuration file for a fairly standard JupyterHub
|
||||
deployment with the following assumptions:
|
||||
|
||||
* Running JupyterHub on a single cloud server
|
||||
* Using SSL on the standard HTTPS port 443
|
||||
* Using GitHub OAuth (using oauthenticator) for login
|
||||
* Using the default spawner (to configure other spawners, uncomment and edit
|
||||
`spawner_class` as well as follow the instructions for your desired spawner)
|
||||
* Users exist locally on the server
|
||||
* Users' notebooks to be served from `~/assignments` to allow users to browse
|
||||
for notebooks within other users' home directories
|
||||
* You want the landing page for each user to be a `Welcome.ipynb` notebook in
|
||||
their assignments directory.
|
||||
* All runtime files are put into `/srv/jupyterhub` and log files in `/var/log`.
|
||||
|
||||
|
||||
The `jupyterhub_config.py` file would have these settings:
|
||||
|
||||
```python
|
||||
# jupyterhub_config.py file
|
||||
c = get_config()
|
||||
|
||||
import os
|
||||
pjoin = os.path.join
|
||||
|
||||
runtime_dir = os.path.join('/srv/jupyterhub')
|
||||
ssl_dir = pjoin(runtime_dir, 'ssl')
|
||||
if not os.path.exists(ssl_dir):
|
||||
os.makedirs(ssl_dir)
|
||||
|
||||
# Allows multiple single-user servers per user
|
||||
c.JupyterHub.allow_named_servers = True
|
||||
|
||||
# https on :443
|
||||
c.JupyterHub.port = 443
|
||||
c.JupyterHub.ssl_key = pjoin(ssl_dir, 'ssl.key')
|
||||
c.JupyterHub.ssl_cert = pjoin(ssl_dir, 'ssl.cert')
|
||||
|
||||
# put the JupyterHub cookie secret and state db
|
||||
# in /var/run/jupyterhub
|
||||
c.JupyterHub.cookie_secret_file = pjoin(runtime_dir, 'cookie_secret')
|
||||
c.JupyterHub.db_url = pjoin(runtime_dir, 'jupyterhub.sqlite')
|
||||
# or `--db=/path/to/jupyterhub.sqlite` on the command-line
|
||||
|
||||
# use GitHub OAuthenticator for local users
|
||||
c.JupyterHub.authenticator_class = 'oauthenticator.LocalGitHubOAuthenticator'
|
||||
c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL']
|
||||
|
||||
# create system users that don't exist yet
|
||||
c.LocalAuthenticator.create_system_users = True
|
||||
|
||||
# specify users and admin
|
||||
c.Authenticator.whitelist = {'rgbkrk', 'minrk', 'jhamrick'}
|
||||
c.Authenticator.admin_users = {'jhamrick', 'rgbkrk'}
|
||||
|
||||
# uses the default spawner
|
||||
# To use a different spawner, uncomment `spawner_class` and set to desired
|
||||
# spawner (e.g. SudoSpawner). Follow instructions for desired spawner
|
||||
# configuration.
|
||||
# c.JupyterHub.spawner_class = 'sudospawner.SudoSpawner'
|
||||
|
||||
# start single-user notebook servers in ~/assignments,
|
||||
# with ~/assignments/Welcome.ipynb as the default landing page
|
||||
# this config could also be put in
|
||||
# /etc/jupyter/jupyter_notebook_config.py
|
||||
c.Spawner.notebook_dir = '~/assignments'
|
||||
c.Spawner.args = ['--NotebookApp.default_url=/notebooks/Welcome.ipynb']
|
||||
```
|
||||
|
||||
Using the GitHub Authenticator requires a few additional
|
||||
environment variables to be set prior to launching JupyterHub:
|
||||
|
||||
```bash
|
||||
export GITHUB_CLIENT_ID=github_id
|
||||
export GITHUB_CLIENT_SECRET=github_secret
|
||||
export OAUTH_CALLBACK_URL=https://example.com/hub/oauth_callback
|
||||
export CONFIGPROXY_AUTH_TOKEN=super-secret
|
||||
# append log output to log file /var/log/jupyterhub.log
|
||||
jupyterhub -f /etc/jupyterhub/jupyterhub_config.py &>> /var/log/jupyterhub.log
|
||||
```
|
190
docs/source/reference/config-proxy.md
Normal file
190
docs/source/reference/config-proxy.md
Normal file
@@ -0,0 +1,190 @@
|
||||
# Using a reverse proxy
|
||||
|
||||
In the following example, we show configuration files for a JupyterHub server
|
||||
running locally on port `8000` but accessible from the outside on the standard
|
||||
SSL port `443`. This could be useful if the JupyterHub server machine is also
|
||||
hosting other domains or content on `443`. The goal in this example is to
|
||||
satisfy the following:
|
||||
|
||||
* JupyterHub is running on a server, accessed *only* via `HUB.DOMAIN.TLD:443`
|
||||
* On the same machine, `NO_HUB.DOMAIN.TLD` strictly serves different content,
|
||||
also on port `443`
|
||||
* `nginx` or `apache` is used as the public access point (which means that
|
||||
only nginx/apache will bind to `443`)
|
||||
* After testing, the server in question should be able to score at least an A on the
|
||||
Qualys SSL Labs [SSL Server Test](https://www.ssllabs.com/ssltest/)
|
||||
|
||||
Let's start out with needed JupyterHub configuration in `jupyterhub_config.py`:
|
||||
|
||||
```python
|
||||
# Force the proxy to only listen to connections to 127.0.0.1
|
||||
c.JupyterHub.ip = '127.0.0.1'
|
||||
```
|
||||
|
||||
For high-quality SSL configuration, we also generate Diffie-Hellman parameters.
|
||||
This can take a few minutes:
|
||||
|
||||
```bash
|
||||
openssl dhparam -out /etc/ssl/certs/dhparam.pem 4096
|
||||
```
|
||||
|
||||
## nginx
|
||||
|
||||
The **`nginx` server config file** is fairly standard fare except for the two
|
||||
`location` blocks within the `HUB.DOMAIN.TLD` config file:
|
||||
|
||||
```bash
|
||||
# top-level http config for websocket headers
|
||||
# If Upgrade is defined, Connection = upgrade
|
||||
# If Upgrade is empty, Connection = close
|
||||
map $http_upgrade $connection_upgrade {
|
||||
default upgrade;
|
||||
'' close;
|
||||
}
|
||||
|
||||
# HTTP server to redirect all 80 traffic to SSL/HTTPS
|
||||
server {
|
||||
listen 80;
|
||||
server_name HUB.DOMAIN.TLD;
|
||||
|
||||
# Tell all requests to port 80 to be 302 redirected to HTTPS
|
||||
return 302 https://$host$request_uri;
|
||||
}
|
||||
|
||||
# HTTPS server to handle JupyterHub
|
||||
server {
|
||||
listen 443;
|
||||
ssl on;
|
||||
|
||||
server_name HUB.DOMAIN.TLD;
|
||||
|
||||
ssl_certificate /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem;
|
||||
|
||||
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
|
||||
ssl_prefer_server_ciphers on;
|
||||
ssl_dhparam /etc/ssl/certs/dhparam.pem;
|
||||
ssl_ciphers 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA';
|
||||
ssl_session_timeout 1d;
|
||||
ssl_session_cache shared:SSL:50m;
|
||||
ssl_stapling on;
|
||||
ssl_stapling_verify on;
|
||||
add_header Strict-Transport-Security max-age=15768000;
|
||||
|
||||
# Managing literal requests to the JupyterHub front end
|
||||
location / {
|
||||
proxy_pass http://127.0.0.1:8000;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
|
||||
# websocket headers
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
}
|
||||
|
||||
# Managing requests to verify letsencrypt host
|
||||
location ~ /.well-known {
|
||||
allow all;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
If `nginx` is not running on port 443, substitute `$http_host` for `$host` on
|
||||
the lines setting the `Host` header.
|
||||
|
||||
`nginx` will now be the front facing element of JupyterHub on `443` which means
|
||||
it is also free to bind other servers, like `NO_HUB.DOMAIN.TLD` to the same port
|
||||
on the same machine and network interface. In fact, one can simply use the same
|
||||
server blocks as above for `NO_HUB` and simply add a line for the root directory
|
||||
of the site as well as the applicable location call:
|
||||
|
||||
```bash
|
||||
server {
|
||||
listen 80;
|
||||
server_name NO_HUB.DOMAIN.TLD;
|
||||
|
||||
# Tell all requests to port 80 to be 302 redirected to HTTPS
|
||||
return 302 https://$host$request_uri;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 443;
|
||||
ssl on;
|
||||
|
||||
# INSERT OTHER SSL PARAMETERS HERE AS ABOVE
|
||||
# SSL cert may differ
|
||||
|
||||
# Set the appropriate root directory
|
||||
root /var/www/html;
|
||||
|
||||
# Set URI handling
|
||||
location / {
|
||||
try_files $uri $uri/ =404;
|
||||
}
|
||||
|
||||
# Managing requests to verify letsencrypt host
|
||||
location ~ /.well-known {
|
||||
allow all;
|
||||
}
|
||||
|
||||
}
|
||||
```
|
||||
|
||||
Now restart `nginx`, restart the JupyterHub, and enjoy accessing
|
||||
`https://HUB.DOMAIN.TLD` while serving other content securely on
|
||||
`https://NO_HUB.DOMAIN.TLD`.
|
||||
|
||||
|
||||
## Apache
|
||||
|
||||
As with nginx above, you can use [Apache](https://httpd.apache.org) as the reverse proxy.
|
||||
First, we will need to enable the apache modules that we are going to need:
|
||||
|
||||
```bash
|
||||
a2enmod ssl rewrite proxy proxy_http proxy_wstunnel
|
||||
```
|
||||
|
||||
Our Apache configuration is equivalent to the nginx configuration above:
|
||||
|
||||
- Redirect HTTP to HTTPS
|
||||
- Good SSL Configuration
|
||||
- Support for websockets on any proxied URL
|
||||
- JupyterHub is running locally at http://127.0.0.1:8000
|
||||
|
||||
```bash
|
||||
# redirect HTTP to HTTPS
|
||||
Listen 80
|
||||
<VirtualHost HUB.DOMAIN.TLD:80>
|
||||
ServerName HUB.DOMAIN.TLD
|
||||
Redirect / https://HUB.DOMAIN.TLD/
|
||||
</VirtualHost>
|
||||
|
||||
Listen 443
|
||||
<VirtualHost HUB.DOMAIN.TLD:443>
|
||||
|
||||
ServerName HUB.DOMAIN.TLD
|
||||
|
||||
# configure SSL
|
||||
SSLEngine on
|
||||
SSLCertificateFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem
|
||||
SSLCertificateKeyFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem
|
||||
SSLProtocol All -SSLv2 -SSLv3
|
||||
SSLOpenSSLConfCmd DHParameters /etc/ssl/certs/dhparam.pem
|
||||
SSLCipherSuite EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH
|
||||
|
||||
# Use RewriteEngine to handle websocket connection upgrades
|
||||
RewriteEngine On
|
||||
RewriteCond %{HTTP:Connection} Upgrade [NC]
|
||||
RewriteCond %{HTTP:Upgrade} websocket [NC]
|
||||
RewriteRule /(.*) ws://127.0.0.1:8000/$1 [P,L]
|
||||
|
||||
<Location "/">
|
||||
# preserve Host header to avoid cross-origin problems
|
||||
ProxyPreserveHost on
|
||||
# proxy to JupyterHub
|
||||
ProxyPass http://127.0.0.1:8000/
|
||||
ProxyPassReverse http://127.0.0.1:8000/
|
||||
</Location>
|
||||
</VirtualHost>
|
||||
```
|
254
docs/source/reference/config-sudo.md
Normal file
254
docs/source/reference/config-sudo.md
Normal file
@@ -0,0 +1,254 @@
|
||||
# Run JupyterHub without root privileges using `sudo`
|
||||
|
||||
**Note:** Setting up `sudo` permissions involves many pieces of system
|
||||
configuration. It is quite easy to get wrong and very difficult to debug.
|
||||
Only do this if you are very sure you must.
|
||||
|
||||
## Overview
|
||||
|
||||
There are many Authenticators and Spawners available for JupyterHub. Some, such
|
||||
as DockerSpawner or OAuthenticator, do not need any elevated permissions. This
|
||||
document describes how to get the full default behavior of JupyterHub while
|
||||
running notebook servers as real system users on a shared system without
|
||||
running the Hub itself as root.
|
||||
|
||||
Since JupyterHub needs to spawn processes as other users, the simplest way
|
||||
is to run it as root, spawning user servers with [setuid](http://linux.die.net/man/2/setuid).
|
||||
But this isn't especially safe, because you have a process running on the
|
||||
public web as root.
|
||||
|
||||
A **more prudent way** to run the server while preserving functionality is to
|
||||
create a dedicated user with `sudo` access restricted to launching and
|
||||
monitoring single-user servers.
|
||||
|
||||
## Create a user
|
||||
|
||||
To do this, first create a user that will run the Hub:
|
||||
|
||||
```bash
|
||||
sudo useradd rhea
|
||||
```
|
||||
|
||||
This user shouldn't have a login shell or password (possible with -r).
|
||||
|
||||
## Set up sudospawner
|
||||
|
||||
Next, you will need [sudospawner](https://github.com/jupyter/sudospawner)
|
||||
to enable monitoring the single-user servers with sudo:
|
||||
|
||||
```bash
|
||||
sudo pip install sudospawner
|
||||
```
|
||||
|
||||
Now we have to configure sudo to allow the Hub user (`rhea`) to launch
|
||||
the sudospawner script on behalf of our hub users (here `zoe` and `wash`).
|
||||
We want to confine these permissions to only what we really need.
|
||||
|
||||
## Edit `/etc/sudoers`
|
||||
|
||||
To do this we add to `/etc/sudoers` (use `visudo` for safe editing of sudoers):
|
||||
|
||||
- specify the list of users `JUPYTER_USERS` for whom `rhea` can spawn servers
|
||||
- set the command `JUPYTER_CMD` that `rhea` can execute on behalf of users
|
||||
- give `rhea` permission to run `JUPYTER_CMD` on behalf of `JUPYTER_USERS`
|
||||
without entering a password
|
||||
|
||||
|
||||
For example:
|
||||
|
||||
```bash
|
||||
# comma-separated whitelist of users that can spawn single-user servers
|
||||
# this should include all of your Hub users
|
||||
Runas_Alias JUPYTER_USERS = rhea, zoe, wash
|
||||
|
||||
# the command(s) the Hub can run on behalf of the above users without needing a password
|
||||
# the exact path may differ, depending on how sudospawner was installed
|
||||
Cmnd_Alias JUPYTER_CMD = /usr/local/bin/sudospawner
|
||||
|
||||
# actually give the Hub user permission to run the above command on behalf
|
||||
# of the above users without prompting for a password
|
||||
rhea ALL=(JUPYTER_USERS) NOPASSWD:JUPYTER_CMD
|
||||
```
|
||||
|
||||
It might be useful to modify `secure_path` to add commands in path.
|
||||
|
||||
As an alternative to adding every user to the `/etc/sudoers` file, you can
|
||||
use a group in the last line above, instead of `JUPYTER_USERS`:
|
||||
|
||||
```bash
|
||||
rhea ALL=(%jupyterhub) NOPASSWD:JUPYTER_CMD
|
||||
```
|
||||
|
||||
If the `jupyterhub` group exists, there will be no need to edit `/etc/sudoers`
|
||||
again. A new user will gain access to the application when added to the group:
|
||||
|
||||
```bash
|
||||
$ adduser -G jupyterhub newuser
|
||||
```
|
||||
|
||||
## Test `sudo` setup
|
||||
|
||||
Test that the new user doesn't need to enter a password to run the sudospawner
|
||||
command.
|
||||
|
||||
This should prompt for your password to switch to rhea, but *not* prompt for
|
||||
any password for the second switch. It should show some help output about
|
||||
logging options:
|
||||
|
||||
```bash
|
||||
$ sudo -u rhea sudo -n -u $USER /usr/local/bin/sudospawner --help
|
||||
Usage: /usr/local/bin/sudospawner [OPTIONS]
|
||||
|
||||
Options:
|
||||
|
||||
--help show this help information
|
||||
...
|
||||
```
|
||||
|
||||
And this should fail:
|
||||
|
||||
```bash
|
||||
$ sudo -u rhea sudo -n -u $USER echo 'fail'
|
||||
sudo: a password is required
|
||||
```
|
||||
|
||||
## Enable PAM for non-root
|
||||
|
||||
By default, [PAM authentication](http://en.wikipedia.org/wiki/Pluggable_authentication_module)
|
||||
is used by JupyterHub. To use PAM, the process may need to be able to read
|
||||
the shadow password database.
|
||||
|
||||
### Shadow group (Linux)
|
||||
|
||||
```bash
|
||||
$ ls -l /etc/shadow
|
||||
-rw-r----- 1 root shadow 2197 Jul 21 13:41 shadow
|
||||
```
|
||||
|
||||
If there's already a shadow group, you are set. If its permissions are more like:
|
||||
|
||||
```bash
|
||||
$ ls -l /etc/shadow
|
||||
-rw------- 1 root wheel 2197 Jul 21 13:41 shadow
|
||||
```
|
||||
|
||||
Then you may want to add a shadow group, and make the shadow file group-readable:
|
||||
|
||||
```bash
|
||||
$ sudo groupadd shadow
|
||||
$ sudo chgrp shadow /etc/shadow
|
||||
$ sudo chmod g+r /etc/shadow
|
||||
```
|
||||
|
||||
We want our new user to be able to read the shadow passwords, so add it to the shadow group:
|
||||
|
||||
```bash
|
||||
$ sudo usermod -a -G shadow rhea
|
||||
```
|
||||
|
||||
If you want jupyterhub to serve pages on a restricted port (such as port 80 for http),
|
||||
then you will need to give `node` permission to do so:
|
||||
|
||||
```bash
|
||||
sudo setcap 'cap_net_bind_service=+ep' /usr/bin/node
|
||||
```
|
||||
However, you may want to further understand the consequences of this.
|
||||
|
||||
You may also be interested in limiting the amount of CPU any process can use
|
||||
on your server. `cpulimit` is a useful tool that is available for many Linux
|
||||
distributions' packaging system. This can be used to keep any user's process
|
||||
from using too many CPU cycles. You can configure it according to [these
|
||||
instructions](http://ubuntuforums.org/showthread.php?t=992706).
|
||||
|
||||
|
||||
### Shadow group (FreeBSD)
|
||||
|
||||
**NOTE:** This has not been tested and may not work as expected.
|
||||
|
||||
```bash
|
||||
$ ls -l /etc/spwd.db /etc/master.passwd
|
||||
-rw------- 1 root wheel 2516 Aug 22 13:35 /etc/master.passwd
|
||||
-rw------- 1 root wheel 40960 Aug 22 13:35 /etc/spwd.db
|
||||
```
|
||||
|
||||
Add a shadow group if there isn't one, and make the shadow file group-readable:
|
||||
|
||||
```bash
|
||||
$ sudo pw group add shadow
|
||||
$ sudo chgrp shadow /etc/spwd.db
|
||||
$ sudo chmod g+r /etc/spwd.db
|
||||
$ sudo chgrp shadow /etc/master.passwd
|
||||
$ sudo chmod g+r /etc/master.passwd
|
||||
```
|
||||
|
||||
We want our new user to be able to read the shadow passwords, so add it to the
|
||||
shadow group:
|
||||
|
||||
```bash
|
||||
$ sudo pw user mod rhea -G shadow
|
||||
```
|
||||
|
||||
## Test that PAM works
|
||||
|
||||
We can verify that PAM is working, with:
|
||||
|
||||
```bash
|
||||
$ sudo -u rhea python3 -c "import pamela, getpass; print(pamela.authenticate('$USER', getpass.getpass()))"
|
||||
Password: [enter your unix password]
|
||||
```
|
||||
|
||||
## Make a directory for JupyterHub
|
||||
|
||||
JupyterHub stores its state in a database, so it needs write access to a directory.
|
||||
The simplest way to deal with this is to make a directory owned by your Hub user,
|
||||
and use that as the CWD when launching the server.
|
||||
|
||||
```bash
|
||||
$ sudo mkdir /etc/jupyterhub
|
||||
$ sudo chown rhea /etc/jupyterhub
|
||||
```
|
||||
|
||||
## Start jupyterhub
|
||||
|
||||
Finally, start the server as our newly configured user, `rhea`:
|
||||
|
||||
```bash
|
||||
$ cd /etc/jupyterhub
|
||||
$ sudo -u rhea jupyterhub --JupyterHub.spawner_class=sudospawner.SudoSpawner
|
||||
```
|
||||
|
||||
And try logging in.
|
||||
|
||||
### Troubleshooting: SELinux
|
||||
|
||||
If you still get a generic `Permission denied` `PermissionError`, it's possible SELinux is blocking you.
|
||||
Here's how you can make a module to allow this.
|
||||
First, put this in a file sudo_exec_selinux.te:
|
||||
|
||||
```bash
|
||||
module sudo_exec 1.1;
|
||||
|
||||
require {
|
||||
type unconfined_t;
|
||||
type sudo_exec_t;
|
||||
class file { read entrypoint };
|
||||
}
|
||||
|
||||
#============= unconfined_t ==============
|
||||
allow unconfined_t sudo_exec_t:file entrypoint;
|
||||
```
|
||||
|
||||
Then run all of these commands as root:
|
||||
|
||||
```bash
|
||||
$ checkmodule -M -m -o sudo_exec_selinux.mod sudo_exec_selinux.te
|
||||
$ semodule_package -o sudo_exec_selinux.pp -m sudo_exec_selinux.mod
|
||||
$ semodule -i sudo_exec_selinux.pp
|
||||
```
|
||||
|
||||
### Troubleshooting: PAM session errors
|
||||
|
||||
If the PAM authentication doesn't work and you see errors for
|
||||
`login:session-auth`, or similar, consider updating to `master`
|
||||
and/or incorporating this commit https://github.com/jupyter/jupyterhub/commit/40368b8f555f04ffdd662ffe99d32392a088b1d2
|
||||
and configuration option, `c.PAMAuthenticator.open_sessions = False`.
|
@@ -12,4 +12,8 @@ Technical Reference
|
||||
proxy
|
||||
rest
|
||||
upgrading
|
||||
templates
|
||||
config-examples
|
||||
config-ghoauth
|
||||
config-proxy
|
||||
config-sudo
|
||||
|
@@ -1,22 +1,26 @@
|
||||
# Writing a custom Proxy implementation
|
||||
|
||||
JupyterHub 0.8 introduced the ability to write a custom implementation of the proxy.
|
||||
This enables deployments with different needs than the default proxy,
|
||||
configurable-http-proxy (CHP).
|
||||
CHP is a single-process nodejs proxy that the Hub manages by default as a subprocess
|
||||
(it can be run externally, as well, and typically is in production deployments).
|
||||
JupyterHub 0.8 introduced the ability to write a custom implementation of the
|
||||
proxy. This enables deployments with different needs than the default proxy,
|
||||
configurable-http-proxy (CHP). CHP is a single-process nodejs proxy that the
|
||||
Hub manages by default as a subprocess (it can be run externally, as well, and
|
||||
typically is in production deployments).
|
||||
|
||||
The upside to CHP, and why we use it by default, is that it's easy to install and run (if you have nodejs, you are set!).
|
||||
The downsides are that it's a single process and does not support any persistence of the routing table.
|
||||
So if the proxy process dies, your whole JupyterHub instance is inaccessible until the Hub notices, restarts the proxy, and restores the routing table.
|
||||
For deployments that want to avoid such a single point of failure,
|
||||
or leverage existing proxy infrastructure in their chosen deployment (such as Kubernetes ingress objects),
|
||||
the Proxy API provides a way to do that.
|
||||
The upside to CHP, and why we use it by default, is that it's easy to install
|
||||
and run (if you have nodejs, you are set!). The downsides are that it's a
|
||||
single process and does not support any persistence of the routing table. So
|
||||
if the proxy process dies, your whole JupyterHub instance is inaccessible
|
||||
until the Hub notices, restarts the proxy, and restores the routing table. For
|
||||
deployments that want to avoid such a single point of failure, or leverage
|
||||
existing proxy infrastructure in their chosen deployment (such as Kubernetes
|
||||
ingress objects), the Proxy API provides a way to do that.
|
||||
|
||||
In general, for a proxy to be usable by JupyterHub, it must:
|
||||
|
||||
1. support websockets without prior knowledge of the URL where websockets may occur
|
||||
2. support trie-based routing (i.e. allow different routes on `/foo` and `/foo/bar` and route based on specificity)
|
||||
1. support websockets without prior knowledge of the URL where websockets may
|
||||
occur
|
||||
2. support trie-based routing (i.e. allow different routes on `/foo` and
|
||||
`/foo/bar` and route based on specificity)
|
||||
3. adding or removing a route should not cause existing connections to drop
|
||||
|
||||
Optionally, if the JupyterHub deployment is to use host-based routing,
|
||||
@@ -35,10 +39,10 @@ class MyProxy(Proxy):
|
||||
...
|
||||
```
|
||||
|
||||
|
||||
## Starting and stopping the proxy
|
||||
|
||||
If your proxy should be launched when the Hub starts, you must define how to start and stop your proxy:
|
||||
If your proxy should be launched when the Hub starts, you must define how
|
||||
to start and stop your proxy:
|
||||
|
||||
```python
|
||||
from tornado import gen
|
||||
@@ -55,8 +59,8 @@ class MyProxy(Proxy):
|
||||
|
||||
These methods **may** be coroutines.
|
||||
|
||||
`c.Proxy.should_start` is a configurable flag that determines whether the Hub should call these methods when the Hub itself starts and stops.
|
||||
|
||||
`c.Proxy.should_start` is a configurable flag that determines whether the
|
||||
Hub should call these methods when the Hub itself starts and stops.
|
||||
|
||||
### Purely external proxies
|
||||
|
||||
@@ -70,31 +74,30 @@ class MyProxy(Proxy):
|
||||
should_start = False
|
||||
```
|
||||
|
||||
## Routes
|
||||
|
||||
## Adding and removing routes
|
||||
|
||||
At its most basic, a Proxy implementation defines a mechanism to add, remove, and retrieve routes.
|
||||
A proxy that implements these three methods is complete.
|
||||
At its most basic, a Proxy implementation defines a mechanism to add, remove,
|
||||
and retrieve routes. A proxy that implements these three methods is complete.
|
||||
Each of these methods **may** be a coroutine.
|
||||
|
||||
**Definition:** routespec
|
||||
|
||||
A routespec, which will appear in these methods, is a string describing a route to be proxied,
|
||||
such as `/user/name/`. A routespec will:
|
||||
A routespec, which will appear in these methods, is a string describing a
|
||||
route to be proxied, such as `/user/name/`. A routespec will:
|
||||
|
||||
1. always end with `/`
|
||||
2. always start with `/` if it is a path-based route `/proxy/path/`
|
||||
3. precede the leading `/` with a host for host-based routing, e.g. `host.tld/proxy/path/`
|
||||
|
||||
3. precede the leading `/` with a host for host-based routing, e.g.
|
||||
`host.tld/proxy/path/`
|
||||
|
||||
### Adding a route
|
||||
|
||||
When adding a route, JupyterHub may pass a JSON-serializable dict as a `data` argument
|
||||
that should be attached to the proxy route.
|
||||
When that route is retrieved, the `data` argument should be returned as well.
|
||||
If your proxy implementation doesn't support storing data attached to routes,
|
||||
then your Python wrapper may have to handle storing the `data` piece itself,
|
||||
e.g. in a simple file or database.
|
||||
When adding a route, JupyterHub may pass a JSON-serializable dict as a `data`
|
||||
argument that should be attached to the proxy route. When that route is
|
||||
retrieved, the `data` argument should be returned as well. If your proxy
|
||||
implementation doesn't support storing data attached to routes, then your
|
||||
Python wrapper may have to handle storing the `data` piece itself, e.g. in a
|
||||
simple file or database.
|
||||
|
||||
```python
|
||||
@gen.coroutine
|
||||
@@ -113,12 +116,10 @@ proxy.add_route('/user/pgeorgiou/', 'http://127.0.0.1:1227',
|
||||
{'user': 'pgeorgiou'})
|
||||
```
|
||||
|
||||
|
||||
### Removing routes
|
||||
|
||||
`delete_route()` is given a routespec to delete.
|
||||
If there is no such route, `delete_route` should still succeed,
|
||||
but a warning may be issued.
|
||||
`delete_route()` is given a routespec to delete. If there is no such route,
|
||||
`delete_route` should still succeed, but a warning may be issued.
|
||||
|
||||
```python
|
||||
@gen.coroutine
|
||||
@@ -126,18 +127,17 @@ def delete_route(self, routespec):
|
||||
"""Delete the route"""
|
||||
```
|
||||
|
||||
|
||||
### Retrieving routes
|
||||
|
||||
For retrieval, you only *need* to implement a single method that retrieves all routes.
|
||||
The return value for this function should be a dictionary, keyed by `routespec`,
|
||||
of dicts whose keys are the same three arguments passed to `add_route`
|
||||
(`routespec`, `target`, `data`)
|
||||
For retrieval, you only *need* to implement a single method that retrieves all
|
||||
routes. The return value for this function should be a dictionary, keyed by
|
||||
`routespec`, of dicts whose keys are the same three arguments passed to
|
||||
`add_route` (`routespec`, `target`, `data`)
|
||||
|
||||
```python
|
||||
@gen.coroutine
|
||||
def get_all_routes(self):
|
||||
"""Return all routes, keyed by routespec""""
|
||||
"""Return all routes, keyed by routespec"""
|
||||
```
|
||||
|
||||
```python
|
||||
@@ -150,15 +150,15 @@ def get_all_routes(self):
|
||||
}
|
||||
```
|
||||
|
||||
## Note on activity tracking
|
||||
|
||||
|
||||
#### Note on activity tracking
|
||||
|
||||
JupyterHub can track activity of users, for use in services such as culling idle servers.
|
||||
As of JupyterHub 0.8, this activity tracking is the responsibility of the proxy.
|
||||
If your proxy implementation can track activity to endpoints,
|
||||
it may add a `last_activity` key to the `data` of routes retrieved in `.get_all_routes()`.
|
||||
If present, the value of `last_activity` should be an [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) UTC date string:
|
||||
JupyterHub can track activity of users, for use in services such as culling
|
||||
idle servers. As of JupyterHub 0.8, this activity tracking is the
|
||||
responsibility of the proxy. If your proxy implementation can track activity
|
||||
to endpoints, it may add a `last_activity` key to the `data` of routes
|
||||
retrieved in `.get_all_routes()`. If present, the value of `last_activity`
|
||||
should be an [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) UTC date
|
||||
string:
|
||||
|
||||
```python
|
||||
{
|
||||
@@ -173,11 +173,9 @@ If present, the value of `last_activity` should be an [ISO8601](https://en.wikip
|
||||
}
|
||||
```
|
||||
|
||||
If the proxy does not track activity, then only activity to the Hub itself is
|
||||
tracked, and services such as cull-idle will not work.
|
||||
|
||||
If the proxy does not track activity, then only activity to the Hub itself is tracked,
|
||||
and services such as cull-idle will not work.
|
||||
|
||||
Now that `notebook-5.0` tracks activity internally,
|
||||
we can retrieve activity information from the single-user servers instead,
|
||||
removing the need to track activity in the proxy.
|
||||
But this is not yet implemented in JupyterHub 0.8.0.
|
||||
Now that `notebook-5.0` tracks activity internally, we can retrieve activity
|
||||
information from the single-user servers instead, removing the need to track
|
||||
activity in the proxy. But this is not yet implemented in JupyterHub 0.8.0.
|
||||
|
@@ -114,10 +114,11 @@ r.raise_for_status()
|
||||
r.json()
|
||||
```
|
||||
|
||||
Note that the API token authorizes **JupyterHub** REST API requests. The same
|
||||
token does **not** authorize access to the [Jupyter Notebook REST API][]
|
||||
provided by notebook servers managed by JupyterHub. A different token is used
|
||||
to access the **Jupyter Notebook** API.
|
||||
The same API token can also authorize access to the [Jupyter Notebook REST API][]
|
||||
provided by notebook servers managed by JupyterHub if one of the following is true:
|
||||
|
||||
1. The token is for the same user as the owner of the notebook
|
||||
2. The token is tied to an admin user or service **and** `c.JupyterHub.admin_access` is set to `True`
|
||||
|
||||
## Enabling users to spawn multiple named-servers via the API
|
||||
|
||||
|
@@ -178,7 +178,13 @@ When you run a service that has a url, it will be accessible under a
|
||||
your service to route proxied requests properly, it must take
|
||||
`JUPYTERHUB_SERVICE_PREFIX` into account when routing requests. For example, a
|
||||
web service would normally service its root handler at `'/'`, but the proxied
|
||||
service would need to serve `JUPYTERHUB_SERVICE_PREFIX + '/'`.
|
||||
service would need to serve `JUPYTERHUB_SERVICE_PREFIX`.
|
||||
|
||||
Note that `JUPYTERHUB_SERVICE_PREFIX` will contain a trailing slash. This must
|
||||
be taken into consideration when creating the service routes. If you include an
|
||||
extra slash you might get unexpected behavior. For example if your service has a
|
||||
`/foo` endpoint, the route would be `JUPYTERHUB_SERVICE_PREFIX + foo`, and
|
||||
`/foo/bar` would be `JUPYTERHUB_SERVICE_PREFIX + foo/bar`.
|
||||
|
||||
## Hub Authentication and Services
|
||||
|
||||
@@ -269,7 +275,7 @@ def authenticated(f):
|
||||
return decorated
|
||||
|
||||
|
||||
@app.route(prefix + '/')
|
||||
@app.route(prefix)
|
||||
@authenticated
|
||||
def whoami(user):
|
||||
return Response(
|
||||
|
@@ -170,9 +170,12 @@ If you are interested in building a custom spawner, you can read [this tutorial]
|
||||
Some spawners of the single-user notebook servers allow setting limits or
|
||||
guarantees on resources, such as CPU and memory. To provide a consistent
|
||||
experience for sysadmins and users, we provide a standard way to set and
|
||||
discover these resource limits and guarantees, such as for memory and CPU. For
|
||||
the limits and guarantees to be useful, the spawner must implement support for
|
||||
them.
|
||||
discover these resource limits and guarantees, such as for memory and CPU.
|
||||
For the limits and guarantees to be useful, **the spawner must implement
|
||||
support for them**. For example, LocalProcessSpawner, the default
|
||||
spawner, does not support limits and guarantees. One of the spawners
|
||||
that supports limits and guarantees is the `systemdspawner`.
|
||||
|
||||
|
||||
### Memory Limits & Guarantees
|
||||
|
||||
@@ -190,8 +193,8 @@ to provide a guarantee that at minimum this much memory will always be
|
||||
available for the single-user notebook server to use. The environment variable
|
||||
`MEM_GUARANTEE` will also be set in the single-user notebook server.
|
||||
|
||||
The spawner's underlying system or cluster is responsible for enforcing these
|
||||
limits and providing these guarantees. If these values are set to `None`, no
|
||||
**The spawner's underlying system or cluster is responsible for enforcing these
|
||||
limits and providing these guarantees.** If these values are set to `None`, no
|
||||
limits or guarantees are provided, and no environment values are set.
|
||||
|
||||
### CPU Limits & Guarantees
|
||||
@@ -208,6 +211,6 @@ higher priority applications might be taking up CPU.
|
||||
guarantee for CPU usage. The environment variable `CPU_GUARANTEE` will be set
|
||||
in the single-user notebook server when a guarantee is being provided.
|
||||
|
||||
The spawner's underlying system or cluster is responsible for enforcing these
|
||||
limits and providing these guarantees. If these values are set to `None`, no
|
||||
**The spawner's underlying system or cluster is responsible for enforcing these
|
||||
limits and providing these guarantees.** If these values are set to `None`, no
|
||||
limits or guarantees are provided, and no environment values are set.
|
||||
|
61
docs/source/reference/templates.md
Normal file
61
docs/source/reference/templates.md
Normal file
@@ -0,0 +1,61 @@
|
||||
# Working with templates and UI
|
||||
|
||||
The pages of the JupyterHub application are generated from
|
||||
[Jinja](http://jinja.pocoo.org/) templates. These allow the header, for
|
||||
example, to be defined once and incorporated into all pages. By providing
|
||||
your own templates, you can have complete control over JupyterHub's
|
||||
appearance.
|
||||
|
||||
## Custom Templates
|
||||
|
||||
JupyterHub will look for custom templates in all of the paths in the
|
||||
`JupyterHub.template_paths` configuration option, falling back on the
|
||||
[default templates](https://github.com/jupyterhub/jupyterhub/tree/master/share/jupyterhub/templates)
|
||||
if no custom template with that name is found. This fallback
|
||||
behavior is new in version 0.9; previous versions searched only those paths
|
||||
explicitly included in `template_paths`. You may override as many
|
||||
or as few templates as you desire.
|
||||
|
||||
## Extending Templates
|
||||
|
||||
Jinja provides a mechanism to [extend templates](http://jinja.pocoo.org/docs/2.10/templates/#template-inheritance).
|
||||
A base template can define a `block`, and child templates can replace or
|
||||
supplement the material in the block. The
|
||||
[JupyterHub templates](https://github.com/jupyterhub/jupyterhub/tree/master/share/jupyterhub/templates)
|
||||
make extensive use of blocks, which allows you to customize parts of the
|
||||
interface easily.
|
||||
|
||||
In general, a child template can extend a base template, `base.html`, by beginning with:
|
||||
|
||||
```html
|
||||
{% extends "base.html" %}
|
||||
```
|
||||
|
||||
This works, unless you are trying to extend the default template for the same
|
||||
file name. Starting in version 0.9, you may refer to the base file with a
|
||||
`templates/` prefix. Thus, if you are writing a custom `base.html`, start the
|
||||
file with this block:
|
||||
|
||||
```html
|
||||
{% extends "templates/base.html" %}
|
||||
```
|
||||
|
||||
By defining `block`s with same name as in the base template, child templates
|
||||
can replace those sections with custom content. The content from the base
|
||||
template can be included with the `{{ super() }}` directive.
|
||||
|
||||
### Example
|
||||
|
||||
To add an additional message to the spawn-pending page, below the existing
|
||||
text about the server starting up, place this content in a file named
|
||||
`spawn_pending.html` in a directory included in the
|
||||
`JupyterHub.template_paths` configuration option.
|
||||
|
||||
```html
|
||||
{% extends "templates/spawn_pending.html" %}
|
||||
|
||||
{% block message %}
|
||||
{{ super() }}
|
||||
<p>Patience is a virtue.</p>
|
||||
{% endblock %}
|
||||
```
|
@@ -89,7 +89,7 @@ check if the cookie corresponds to the right user. This request is logged.
|
||||
If everything is working, the response logged will be similar to this:
|
||||
|
||||
```
|
||||
200 GET /hub/api/authorizations/cookie/jupyter-hub-token-name/[secret] (@10.0.1.4) 6.10ms
|
||||
200 GET /hub/api/authorizations/cookie/jupyterhub-token-name/[secret] (@10.0.1.4) 6.10ms
|
||||
```
|
||||
|
||||
You should see a similar 200 message, as above, in the Hub log when you first
|
||||
@@ -99,7 +99,7 @@ may mean that your single-user notebook server isn't connecting to your Hub.
|
||||
If you see 403 (forbidden) like this, it's a token problem:
|
||||
|
||||
```
|
||||
403 GET /hub/api/authorizations/cookie/jupyter-hub-token-name/[secret] (@10.0.1.4) 4.14ms
|
||||
403 GET /hub/api/authorizations/cookie/jupyterhub-token-name/[secret] (@10.0.1.4) 4.14ms
|
||||
```
|
||||
|
||||
Check the logs of the single-user notebook server, which may have more detailed
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. upgrade-dot-eight:
|
||||
.. _upgrade-dot-eight:
|
||||
|
||||
Upgrading to JupyterHub version 0.8
|
||||
===================================
|
||||
|
@@ -25,8 +25,11 @@ Another use would be to copy initial content, such as tutorial files or referenc
|
||||
You can define your own bootstrap process by implementing a `pre_spawn_hook` on any spawner.
|
||||
The Spawner itself is passed as parameter to your hook and you can easily get the contextual information out of the spawning process.
|
||||
|
||||
If you implement a hook, make sure that it is *idempotent*. It will be executed every time
|
||||
a notebook server is spawned to the user. That means you should somehow
|
||||
Similarly, there may be cases where you would like to clean up after a spawner stops.
|
||||
You may implement a `post_stop_hook` that is always executed after the spawner stops.
|
||||
|
||||
If you implement a hook, make sure that it is *idempotent*. It will be executed every time
|
||||
a notebook server is spawned to the user. That means you should somehow
|
||||
ensure that things which should run only once are not running again and again.
|
||||
For example, before you create a directory, check if it exists.
|
||||
|
||||
|
@@ -2,6 +2,7 @@
|
||||
# create a directory for the user before the spawner starts
|
||||
|
||||
import os
|
||||
import shutil
|
||||
def create_dir_hook(spawner):
|
||||
username = spawner.user.name # get the username
|
||||
volume_path = os.path.join('/volumes/jupyterhub', username)
|
||||
@@ -10,8 +11,15 @@ def create_dir_hook(spawner):
|
||||
# now do whatever you think your user needs
|
||||
# ...
|
||||
|
||||
# attach the hook function to the spawner
|
||||
def clean_dir_hook(spawner):
|
||||
username = spawner.user.name # get the username
|
||||
temp_path = os.path.join('/volumes/jupyterhub', username, 'temp')
|
||||
if os.path.exists(temp_path) and os.path.isdir(temp_path):
|
||||
shutil.rmtree(temp_path)
|
||||
|
||||
# attach the hook functions to the spawner
|
||||
c.Spawner.pre_spawn_hook = create_dir_hook
|
||||
c.Spawner.post_stop_hook = clean_dir_hook
|
||||
|
||||
# Use the DockerSpawner to serve your users' notebooks
|
||||
c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner'
|
||||
|
@@ -15,7 +15,7 @@ c.JupyterHub.services = [
|
||||
{
|
||||
'name': 'cull-idle',
|
||||
'admin': True,
|
||||
'command': 'python cull_idle_servers.py --timeout=3600'.split(),
|
||||
'command': 'python3 cull_idle_servers.py --timeout=3600'.split(),
|
||||
}
|
||||
]
|
||||
```
|
||||
@@ -37,5 +37,5 @@ variable. Run `cull_idle_servers.py` manually.
|
||||
|
||||
```bash
|
||||
export JUPYTERHUB_API_TOKEN=`jupyterhub token`
|
||||
python cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
|
||||
python3 cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
|
||||
```
|
||||
|
345
examples/cull-idle/cull_idle_servers.py
Normal file → Executable file
345
examples/cull-idle/cull_idle_servers.py
Normal file → Executable file
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
"""script to monitor and cull idle single-user servers
|
||||
|
||||
Caveats:
|
||||
@@ -16,102 +16,342 @@ You can run this as a service managed by JupyterHub with this in your config::
|
||||
{
|
||||
'name': 'cull-idle',
|
||||
'admin': True,
|
||||
'command': 'python cull_idle_servers.py --timeout=3600'.split(),
|
||||
'command': 'python3 cull_idle_servers.py --timeout=3600'.split(),
|
||||
}
|
||||
]
|
||||
|
||||
Or run it manually by generating an API token and storing it in `JUPYTERHUB_API_TOKEN`:
|
||||
|
||||
export JUPYTERHUB_API_TOKEN=`jupyterhub token`
|
||||
python cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
|
||||
python3 cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
|
||||
|
||||
This script uses the same ``--timeout`` and ``--max-age`` values for
|
||||
culling users and users' servers. If you want a different value for
|
||||
users and servers, you should add this script to the services list
|
||||
twice, just with different ``name``s, different values, and one with
|
||||
the ``--cull-users`` option.
|
||||
"""
|
||||
|
||||
import datetime
|
||||
from datetime import datetime, timezone
|
||||
from functools import partial
|
||||
import json
|
||||
import os
|
||||
|
||||
from dateutil.parser import parse as parse_date
|
||||
try:
|
||||
from urllib.parse import quote
|
||||
except ImportError:
|
||||
from urllib import quote
|
||||
|
||||
from tornado.gen import coroutine
|
||||
import dateutil.parser
|
||||
|
||||
from tornado.gen import coroutine, multi
|
||||
from tornado.locks import Semaphore
|
||||
from tornado.log import app_log
|
||||
from tornado.httpclient import AsyncHTTPClient, HTTPRequest
|
||||
from tornado.ioloop import IOLoop, PeriodicCallback
|
||||
from tornado.options import define, options, parse_command_line
|
||||
|
||||
|
||||
def parse_date(date_string):
|
||||
"""Parse a timestamp
|
||||
|
||||
If it doesn't have a timezone, assume utc
|
||||
|
||||
Returned datetime object will always be timezone-aware
|
||||
"""
|
||||
dt = dateutil.parser.parse(date_string)
|
||||
if not dt.tzinfo:
|
||||
# assume naïve timestamps are UTC
|
||||
dt = dt.replace(tzinfo=timezone.utc)
|
||||
return dt
|
||||
|
||||
|
||||
def format_td(td):
|
||||
"""
|
||||
Nicely format a timedelta object
|
||||
|
||||
as HH:MM:SS
|
||||
"""
|
||||
if td is None:
|
||||
return "unknown"
|
||||
if isinstance(td, str):
|
||||
return td
|
||||
seconds = int(td.total_seconds())
|
||||
h = seconds // 3600
|
||||
seconds = seconds % 3600
|
||||
m = seconds // 60
|
||||
seconds = seconds % 60
|
||||
return "{h:02}:{m:02}:{seconds:02}".format(h=h, m=m, seconds=seconds)
|
||||
|
||||
|
||||
@coroutine
|
||||
def cull_idle(url, api_token, timeout, cull_users=False):
|
||||
def cull_idle(url, api_token, inactive_limit, cull_users=False, max_age=0, concurrency=10):
|
||||
"""Shutdown idle single-user servers
|
||||
|
||||
If cull_users, inactive *users* will be deleted as well.
|
||||
"""
|
||||
auth_header = {
|
||||
'Authorization': 'token %s' % api_token
|
||||
}
|
||||
req = HTTPRequest(url=url + '/users',
|
||||
'Authorization': 'token %s' % api_token,
|
||||
}
|
||||
req = HTTPRequest(
|
||||
url=url + '/users',
|
||||
headers=auth_header,
|
||||
)
|
||||
now = datetime.datetime.utcnow()
|
||||
cull_limit = now - datetime.timedelta(seconds=timeout)
|
||||
now = datetime.now(timezone.utc)
|
||||
client = AsyncHTTPClient()
|
||||
resp = yield client.fetch(req)
|
||||
|
||||
if concurrency:
|
||||
semaphore = Semaphore(concurrency)
|
||||
@coroutine
|
||||
def fetch(req):
|
||||
"""client.fetch wrapped in a semaphore to limit concurrency"""
|
||||
yield semaphore.acquire()
|
||||
try:
|
||||
return (yield client.fetch(req))
|
||||
finally:
|
||||
yield semaphore.release()
|
||||
else:
|
||||
fetch = client.fetch
|
||||
|
||||
resp = yield fetch(req)
|
||||
users = json.loads(resp.body.decode('utf8', 'replace'))
|
||||
futures = []
|
||||
|
||||
@coroutine
|
||||
def cull_one(user, last_activity):
|
||||
"""cull one user"""
|
||||
def handle_server(user, server_name, server):
|
||||
"""Handle (maybe) culling a single server
|
||||
|
||||
# shutdown server first. Hub doesn't allow deleting users with running servers.
|
||||
if user['server']:
|
||||
app_log.info("Culling server for %s (inactive since %s)", user['name'], last_activity)
|
||||
req = HTTPRequest(url=url + '/users/%s/server' % user['name'],
|
||||
method='DELETE',
|
||||
headers=auth_header,
|
||||
Returns True if server is now stopped (user removable),
|
||||
False otherwise.
|
||||
"""
|
||||
log_name = user['name']
|
||||
if server_name:
|
||||
log_name = '%s/%s' % (user['name'], server_name)
|
||||
if server.get('pending'):
|
||||
app_log.warning(
|
||||
"Not culling server %s with pending %s",
|
||||
log_name, server['pending'])
|
||||
return False
|
||||
|
||||
# jupyterhub < 0.9 defined 'server.url' once the server was ready
|
||||
# as an *implicit* signal that the server was ready.
|
||||
# 0.9 adds a dedicated, explicit 'ready' field.
|
||||
# By current (0.9) definitions, servers that have no pending
|
||||
# events and are not ready shouldn't be in the model,
|
||||
# but let's check just to be safe.
|
||||
|
||||
if not server.get('ready', bool(server['url'])):
|
||||
app_log.warning(
|
||||
"Not culling not-ready not-pending server %s: %s",
|
||||
log_name, server)
|
||||
return False
|
||||
|
||||
if server.get('started'):
|
||||
age = now - parse_date(server['started'])
|
||||
else:
|
||||
# started may be undefined on jupyterhub < 0.9
|
||||
age = None
|
||||
|
||||
# check last activity
|
||||
# last_activity can be None in 0.9
|
||||
if server['last_activity']:
|
||||
inactive = now - parse_date(server['last_activity'])
|
||||
else:
|
||||
# no activity yet, use start date
|
||||
# last_activity may be None with jupyterhub 0.9,
|
||||
# which introduces the 'started' field which is never None
|
||||
# for running servers
|
||||
inactive = age
|
||||
|
||||
should_cull = (inactive is not None and
|
||||
inactive.total_seconds() >= inactive_limit)
|
||||
if should_cull:
|
||||
app_log.info(
|
||||
"Culling server %s (inactive for %s)",
|
||||
log_name, format_td(inactive))
|
||||
|
||||
if max_age and not should_cull:
|
||||
# only check started if max_age is specified
|
||||
# so that we can still be compatible with jupyterhub 0.8
|
||||
# which doesn't define the 'started' field
|
||||
if age is not None and age.total_seconds() >= max_age:
|
||||
app_log.info(
|
||||
"Culling server %s (age: %s, inactive for %s)",
|
||||
log_name, format_td(age), format_td(inactive))
|
||||
should_cull = True
|
||||
|
||||
if not should_cull:
|
||||
app_log.debug(
|
||||
"Not culling server %s (age: %s, inactive for %s)",
|
||||
log_name, format_td(age), format_td(inactive))
|
||||
return False
|
||||
|
||||
req = HTTPRequest(
|
||||
url=url + '/users/%s/server' % quote(user['name']),
|
||||
method='DELETE',
|
||||
headers=auth_header,
|
||||
)
|
||||
resp = yield fetch(req)
|
||||
if resp.code == 202:
|
||||
app_log.warning(
|
||||
"Server %s is slow to stop",
|
||||
log_name,
|
||||
)
|
||||
yield client.fetch(req)
|
||||
if cull_users:
|
||||
app_log.info("Culling user %s (inactive since %s)", user['name'], last_activity)
|
||||
req = HTTPRequest(url=url + '/users/%s' % user['name'],
|
||||
method='DELETE',
|
||||
headers=auth_header,
|
||||
)
|
||||
yield client.fetch(req)
|
||||
# return False to prevent culling user with pending shutdowns
|
||||
return False
|
||||
return True
|
||||
|
||||
@coroutine
|
||||
def handle_user(user):
|
||||
"""Handle one user.
|
||||
|
||||
Create a list of their servers, and async exec them. Wait for
|
||||
that to be done, and if all servers are stopped, possibly cull
|
||||
the user.
|
||||
"""
|
||||
# shutdown servers first.
|
||||
# Hub doesn't allow deleting users with running servers.
|
||||
# jupyterhub 0.9 always provides a 'servers' model.
|
||||
# 0.8 only does this when named servers are enabled.
|
||||
if 'servers' in user:
|
||||
servers = user['servers']
|
||||
else:
|
||||
# jupyterhub < 0.9 without named servers enabled.
|
||||
# create servers dict with one entry for the default server
|
||||
# from the user model.
|
||||
# only if the server is running.
|
||||
servers = {}
|
||||
if user['server']:
|
||||
servers[''] = {
|
||||
'last_activity': user['last_activity'],
|
||||
'pending': user['pending'],
|
||||
'url': user['server'],
|
||||
}
|
||||
server_futures = [
|
||||
handle_server(user, server_name, server)
|
||||
for server_name, server in servers.items()
|
||||
]
|
||||
results = yield multi(server_futures)
|
||||
if not cull_users:
|
||||
return
|
||||
# some servers are still running, cannot cull users
|
||||
still_alive = len(results) - sum(results)
|
||||
if still_alive:
|
||||
app_log.debug(
|
||||
"Not culling user %s with %i servers still alive",
|
||||
user['name'], still_alive)
|
||||
return False
|
||||
|
||||
should_cull = False
|
||||
if user.get('created'):
|
||||
age = now - parse_date(user['created'])
|
||||
else:
|
||||
# created may be undefined on jupyterhub < 0.9
|
||||
age = None
|
||||
|
||||
# check last activity
|
||||
# last_activity can be None in 0.9
|
||||
if user['last_activity']:
|
||||
inactive = now - parse_date(user['last_activity'])
|
||||
else:
|
||||
# no activity yet, use start date
|
||||
# last_activity may be None with jupyterhub 0.9,
|
||||
# which introduces the 'created' field which is never None
|
||||
inactive = age
|
||||
|
||||
should_cull = (inactive is not None and
|
||||
inactive.total_seconds() >= inactive_limit)
|
||||
if should_cull:
|
||||
app_log.info(
|
||||
"Culling user %s (inactive for %s)",
|
||||
user['name'], inactive)
|
||||
|
||||
if max_age and not should_cull:
|
||||
# only check created if max_age is specified
|
||||
# so that we can still be compatible with jupyterhub 0.8
|
||||
# which doesn't define the 'started' field
|
||||
if age is not None and age.total_seconds() >= max_age:
|
||||
app_log.info(
|
||||
"Culling user %s (age: %s, inactive for %s)",
|
||||
user['name'], format_td(age), format_td(inactive))
|
||||
should_cull = True
|
||||
|
||||
if not should_cull:
|
||||
app_log.debug(
|
||||
"Not culling user %s (created: %s, last active: %s)",
|
||||
user['name'], format_td(age), format_td(inactive))
|
||||
return False
|
||||
|
||||
req = HTTPRequest(
|
||||
url=url + '/users/%s' % user['name'],
|
||||
method='DELETE',
|
||||
headers=auth_header,
|
||||
)
|
||||
yield fetch(req)
|
||||
return True
|
||||
|
||||
for user in users:
|
||||
if not user['server'] and not cull_users:
|
||||
# server not running and not culling users, nothing to do
|
||||
continue
|
||||
last_activity = parse_date(user['last_activity'])
|
||||
if last_activity < cull_limit:
|
||||
futures.append((user['name'], cull_one(user, last_activity)))
|
||||
else:
|
||||
app_log.debug("Not culling %s (active since %s)", user['name'], last_activity)
|
||||
|
||||
futures.append((user['name'], handle_user(user)))
|
||||
|
||||
for (name, f) in futures:
|
||||
yield f
|
||||
app_log.debug("Finished culling %s", name)
|
||||
try:
|
||||
result = yield f
|
||||
except Exception:
|
||||
app_log.exception("Error processing %s", name)
|
||||
else:
|
||||
if result:
|
||||
app_log.debug("Finished culling %s", name)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
define('url', default=os.environ.get('JUPYTERHUB_API_URL'), help="The JupyterHub API URL")
|
||||
define('timeout', default=600, help="The idle timeout (in seconds)")
|
||||
define('cull_every', default=0, help="The interval (in seconds) for checking for idle servers to cull")
|
||||
define('cull_users', default=False,
|
||||
help="""Cull users in addition to servers.
|
||||
This is for use in temporary-user cases such as tmpnb.""",
|
||||
define(
|
||||
'url',
|
||||
default=os.environ.get('JUPYTERHUB_API_URL'),
|
||||
help="The JupyterHub API URL",
|
||||
)
|
||||
|
||||
define('timeout', default=600, help="The idle timeout (in seconds)")
|
||||
define('cull_every', default=0,
|
||||
help="The interval (in seconds) for checking for idle servers to cull")
|
||||
define('max_age', default=0,
|
||||
help="The maximum age (in seconds) of servers that should be culled even if they are active")
|
||||
define('cull_users', default=False,
|
||||
help="""Cull users in addition to servers.
|
||||
This is for use in temporary-user cases such as tmpnb.""",
|
||||
)
|
||||
define('concurrency', default=10,
|
||||
help="""Limit the number of concurrent requests made to the Hub.
|
||||
|
||||
Deleting a lot of users at the same time can slow down the Hub,
|
||||
so limit the number of API requests we have outstanding at any given time.
|
||||
"""
|
||||
)
|
||||
|
||||
parse_command_line()
|
||||
if not options.cull_every:
|
||||
options.cull_every = options.timeout // 2
|
||||
|
||||
api_token = os.environ['JUPYTERHUB_API_TOKEN']
|
||||
|
||||
|
||||
try:
|
||||
AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
|
||||
except ImportError as e:
|
||||
app_log.warning(
|
||||
"Could not load pycurl: %s\n"
|
||||
"pycurl is recommended if you have a large number of users.",
|
||||
e)
|
||||
|
||||
loop = IOLoop.current()
|
||||
cull = lambda : cull_idle(options.url, api_token, options.timeout, options.cull_users)
|
||||
# run once before scheduling periodic call
|
||||
loop.run_sync(cull)
|
||||
cull = partial(
|
||||
cull_idle,
|
||||
url=options.url,
|
||||
api_token=api_token,
|
||||
inactive_limit=options.timeout,
|
||||
cull_users=options.cull_users,
|
||||
max_age=options.max_age,
|
||||
concurrency=options.concurrency,
|
||||
)
|
||||
# schedule first cull immediately
|
||||
# because PeriodicCallback doesn't start until the end of the first interval
|
||||
loop.add_callback(cull)
|
||||
# schedule periodic cull
|
||||
pc = PeriodicCallback(cull, 1e3 * options.cull_every)
|
||||
pc.start()
|
||||
@@ -119,4 +359,3 @@ if __name__ == '__main__':
|
||||
loop.start()
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
|
@@ -3,6 +3,6 @@ c.JupyterHub.services = [
|
||||
{
|
||||
'name': 'cull-idle',
|
||||
'admin': True,
|
||||
'command': 'python cull_idle_servers.py --timeout=3600'.split(),
|
||||
'command': 'python3 cull_idle_servers.py --timeout=3600'.split(),
|
||||
}
|
||||
]
|
||||
|
90
examples/external-oauth/README.md
Normal file
90
examples/external-oauth/README.md
Normal file
@@ -0,0 +1,90 @@
|
||||
# Using JupyterHub as an OAuth provider
|
||||
|
||||
JupyterHub 0.9 introduces the ability to use JupyterHub as an OAuth provider
|
||||
for external services that may not be otherwise integrated with JupyterHub.
|
||||
The main feature this enables is using JupyterHub like a 'regular' OAuth 2
|
||||
provider for services running anywhere.
|
||||
|
||||
There are two examples here. `whoami-oauth` (in the service-whoami directory) uses `jupyterhub.services.HubOAuthenticated`
|
||||
to authenticate requests with the Hub for a service run on its own host.
|
||||
This is an implementation of OAuth 2.0 provided by the jupyterhub package,
|
||||
which configures all of the necessary URLs from environment variables.
|
||||
|
||||
The second is `whoami-oauth-basic`, which implements the full OAuth process
|
||||
without any inheritance, so it can be used as a reference for OAuth
|
||||
implementations in other web servers or languages.
|
||||
|
||||
## Run the example
|
||||
|
||||
1. generate an API token:
|
||||
|
||||
export JUPYTERHUB_API_TOKEN=`openssl rand -hex 32`
|
||||
|
||||
2. launch a version of the the whoami service.
|
||||
For `whoami-oauth`:
|
||||
|
||||
bash launch-service.sh &
|
||||
|
||||
or for `whoami-oauth-basic`:
|
||||
|
||||
bash launch-service-basic.sh &
|
||||
|
||||
3. Launch JupyterHub:
|
||||
|
||||
jupyterhub
|
||||
|
||||
4. Visit http://127.0.0.1:5555/
|
||||
|
||||
After logging in with your local-system credentials, you should see a JSON dump of your user info:
|
||||
|
||||
```json
|
||||
{
|
||||
"admin": false,
|
||||
"last_activity": "2016-05-27T14:05:18.016372",
|
||||
"name": "queequeg",
|
||||
"pending": null,
|
||||
"server": "/user/queequeg"
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
The essential pieces for using JupyterHub as an OAuth provider are:
|
||||
|
||||
1. registering your service with jupyterhub:
|
||||
|
||||
```python
|
||||
c.JupyterHub.services = [
|
||||
{
|
||||
# the name of your service
|
||||
# should be simple and unique.
|
||||
# mostly used to identify your service in logging
|
||||
"name": "my-service",
|
||||
# the oauth client id of your service
|
||||
# must be unique but isn't private
|
||||
# can be randomly generated or hand-written
|
||||
"oauth_client_id": "abc123",
|
||||
# the API token and client secret of the service
|
||||
# should be generated securely,
|
||||
# e.g. via `openssl rand -hex 32`
|
||||
"api_token": "abc123...",
|
||||
# the redirect target for jupyterhub to send users
|
||||
# after successful authentication
|
||||
"oauth_redirect_uri": "https://service-host/oauth_callback"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
2. Telling your service how to authenticate with JupyterHub.
|
||||
|
||||
The relevant OAuth URLs and keys for using JupyterHub as an OAuth provider are:
|
||||
|
||||
1. the client_id, used in oauth requests
|
||||
2. the api token registered with jupyterhub is the client_secret for oauth requests
|
||||
3. oauth url of the Hub, which is "/hub/api/oauth2/authorize", e.g. `https://myhub.horse/hub/api/oauth2/authorize`
|
||||
4. a redirect handler to receive the authenticated response
|
||||
(at `oauth_redirect_uri` registered in jupyterhub config)
|
||||
5. the token URL for completing the oauth process is "/hub/api/oauth2/token",
|
||||
e.g. `https://myhub.horse/hub/api/oauth2/token`.
|
||||
The reply is JSON and the token is in the field `access_token`.
|
||||
6. Users can be identified by oauth token by making a request to `/hub/api/user`
|
||||
with the new token in the `Authorization` header.
|
18
examples/external-oauth/jupyterhub_config.py
Normal file
18
examples/external-oauth/jupyterhub_config.py
Normal file
@@ -0,0 +1,18 @@
|
||||
import os
|
||||
|
||||
# get the oauth client's API token.
|
||||
# this could come from anywhere
|
||||
api_token = os.getenv("JUPYTERHUB_API_TOKEN")
|
||||
if not api_token:
|
||||
raise ValueError("Make sure to `export JUPYTERHUB_API_TOKEN=$(openssl rand -hex 32)`")
|
||||
|
||||
# tell JupyterHub to register the service as an external oauth client
|
||||
|
||||
c.JupyterHub.services = [
|
||||
{
|
||||
'name': 'external-oauth',
|
||||
'oauth_client_id': "whoami-oauth-client-test",
|
||||
'api_token': api_token,
|
||||
'oauth_redirect_uri': 'http://127.0.0.1:5555/oauth_callback',
|
||||
},
|
||||
]
|
20
examples/external-oauth/launch-service-basic.sh
Normal file
20
examples/external-oauth/launch-service-basic.sh
Normal file
@@ -0,0 +1,20 @@
|
||||
#!/usr/bin/env bash
|
||||
# script to launch whoami-oauth-basic service
|
||||
set -euo pipefail
|
||||
|
||||
# the service needs to know:
|
||||
# 1. API token
|
||||
if [[ -z "${JUPYTERHUB_API_TOKEN}" ]]; then
|
||||
echo 'set API token with export JUPYTERHUB_API_TOKEN=$(openssl rand -hex 32)'
|
||||
fi
|
||||
|
||||
# 2. oauth client ID
|
||||
export JUPYTERHUB_CLIENT_ID='whoami-oauth-client-test'
|
||||
# 3. where the Hub is
|
||||
export JUPYTERHUB_URL='http://127.0.0.1:8000'
|
||||
|
||||
# 4. where to run
|
||||
export JUPYTERHUB_SERVICE_URL='http://127.0.0.1:5555'
|
||||
|
||||
# launch the service
|
||||
exec python3 whoami-oauth-basic.py
|
21
examples/external-oauth/launch-service.sh
Normal file
21
examples/external-oauth/launch-service.sh
Normal file
@@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env bash
|
||||
# script to launch whoami-oauth service
|
||||
set -euo pipefail
|
||||
|
||||
# the service needs to know:
|
||||
# 1. API token
|
||||
if [[ -z "${JUPYTERHUB_API_TOKEN}" ]]; then
|
||||
echo 'set API token with export JUPYTERHUB_API_TOKEN=$(openssl rand -hex 32)'
|
||||
fi
|
||||
|
||||
# 2. oauth client ID
|
||||
export JUPYTERHUB_CLIENT_ID="whoami-oauth-client-test"
|
||||
# 3. what URL to run on
|
||||
export JUPYTERHUB_SERVICE_PREFIX='/'
|
||||
export JUPYTERHUB_SERVICE_URL='http://127.0.0.1:5555'
|
||||
export JUPYTERHUB_OAUTH_CALLBACK_URL="$JUPYTERHUB_SERVICE_URL/oauth_callback"
|
||||
# 4. where the Hub is
|
||||
export JUPYTERHUB_HOST='http://127.0.0.1:8000'
|
||||
|
||||
# launch the service
|
||||
exec python3 ../service-whoami/whoami-oauth.py
|
135
examples/external-oauth/whoami-oauth-basic.py
Normal file
135
examples/external-oauth/whoami-oauth-basic.py
Normal file
@@ -0,0 +1,135 @@
|
||||
"""Basic implementation of OAuth without any inheritance
|
||||
|
||||
Implements OAuth handshake manually
|
||||
so all URLs and requests necessary for OAuth with JupyterHub should be in one place
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from urllib.parse import urlencode, urlparse
|
||||
|
||||
from tornado.auth import OAuth2Mixin
|
||||
from tornado.httpclient import AsyncHTTPClient, HTTPRequest
|
||||
from tornado.httputil import url_concat
|
||||
from tornado.ioloop import IOLoop
|
||||
from tornado import log
|
||||
from tornado import web
|
||||
|
||||
|
||||
class JupyterHubLoginHandler(web.RequestHandler):
|
||||
"""Login Handler
|
||||
|
||||
this handler both begins and ends the OAuth process
|
||||
"""
|
||||
|
||||
async def token_for_code(self, code):
|
||||
"""Complete OAuth by requesting an access token for an oauth code"""
|
||||
params = dict(
|
||||
client_id=self.settings['client_id'],
|
||||
client_secret=self.settings['api_token'],
|
||||
grant_type='authorization_code',
|
||||
code=code,
|
||||
redirect_uri=self.settings['redirect_uri'],
|
||||
)
|
||||
req = HTTPRequest(self.settings['token_url'], method='POST',
|
||||
body=urlencode(params).encode('utf8'),
|
||||
headers={
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
},
|
||||
)
|
||||
response = await AsyncHTTPClient().fetch(req)
|
||||
data = json.loads(response.body.decode('utf8', 'replace'))
|
||||
return data['access_token']
|
||||
|
||||
async def get(self):
|
||||
code = self.get_argument('code', None)
|
||||
if code:
|
||||
# code is set, we are the oauth callback
|
||||
# complete oauth
|
||||
token = await self.token_for_code(code)
|
||||
# login successful, set cookie and redirect back to home
|
||||
self.set_secure_cookie('whoami-oauth-token', token)
|
||||
self.redirect('/')
|
||||
else:
|
||||
# we are the login handler,
|
||||
# begin oauth process which will come back later with an
|
||||
# authorization_code
|
||||
self.redirect(url_concat(
|
||||
self.settings['authorize_url'],
|
||||
dict(
|
||||
redirect_uri=self.settings['redirect_uri'],
|
||||
client_id=self.settings['client_id'],
|
||||
response_type='code',
|
||||
)
|
||||
))
|
||||
|
||||
|
||||
class WhoAmIHandler(web.RequestHandler):
|
||||
"""Serve the JSON model for the authenticated user"""
|
||||
|
||||
def get_current_user(self):
|
||||
"""The login handler stored a JupyterHub API token in a cookie
|
||||
|
||||
@web.authenticated calls this method.
|
||||
If a Falsy value is returned, the request is redirected to `login_url`.
|
||||
If a Truthy value is returned, the request is allowed to proceed.
|
||||
"""
|
||||
token = self.get_secure_cookie('whoami-oauth-token')
|
||||
|
||||
if token:
|
||||
# secure cookies are bytes, decode to str
|
||||
return token.decode('ascii', 'replace')
|
||||
|
||||
async def user_for_token(self, token):
|
||||
"""Retrieve the user for a given token, via /hub/api/user"""
|
||||
|
||||
req = HTTPRequest(
|
||||
self.settings['user_url'],
|
||||
headers={
|
||||
'Authorization': f'token {token}'
|
||||
},
|
||||
)
|
||||
response = await AsyncHTTPClient().fetch(req)
|
||||
return json.loads(response.body.decode('utf8', 'replace'))
|
||||
|
||||
@web.authenticated
|
||||
async def get(self):
|
||||
user_token = self.get_current_user()
|
||||
user_model = await self.user_for_token(user_token)
|
||||
self.set_header('content-type', 'application/json')
|
||||
self.write(json.dumps(user_model, indent=1, sort_keys=True))
|
||||
|
||||
|
||||
def main():
|
||||
log.enable_pretty_logging()
|
||||
|
||||
# construct OAuth URLs from jupyterhub base URL
|
||||
hub_api = os.environ['JUPYTERHUB_URL'].rstrip('/') + '/hub/api'
|
||||
authorize_url = hub_api + '/oauth2/authorize'
|
||||
token_url = hub_api + '/oauth2/token'
|
||||
user_url = hub_api + '/user'
|
||||
|
||||
app = web.Application([
|
||||
('/oauth_callback', JupyterHubLoginHandler),
|
||||
('/', WhoAmIHandler),
|
||||
],
|
||||
login_url='/oauth_callback',
|
||||
cookie_secret=os.urandom(32),
|
||||
api_token=os.environ['JUPYTERHUB_API_TOKEN'],
|
||||
client_id=os.environ['JUPYTERHUB_CLIENT_ID'],
|
||||
redirect_uri=os.environ['JUPYTERHUB_SERVICE_URL'].rstrip('/') + '/oauth_callback',
|
||||
authorize_url=authorize_url,
|
||||
token_url=token_url,
|
||||
user_url=user_url,
|
||||
)
|
||||
|
||||
url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL'])
|
||||
log.app_log.info("Running basic whoami service on %s",
|
||||
os.environ['JUPYTERHUB_SERVICE_URL'])
|
||||
app.listen(url.port, url.hostname)
|
||||
IOLoop.current().start()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
BIN
examples/external-oauth/whoami.png
Normal file
BIN
examples/external-oauth/whoami.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 35 KiB |
@@ -8,7 +8,7 @@ Uses `jupyterhub.services.HubAuth` to authenticate requests with the Hub in a [f
|
||||
|
||||
jupyterhub --ip=127.0.0.1
|
||||
|
||||
2. Visit http://127.0.0.1:8000/services/whoami or http://127.0.0.1:8000/services/whoami-oauth
|
||||
2. Visit http://127.0.0.1:8000/services/whoami/ or http://127.0.0.1:8000/services/whoami-oauth/
|
||||
|
||||
After logging in with your local-system credentials, you should see a JSON dump of your user info:
|
||||
|
||||
|
@@ -43,7 +43,7 @@ def authenticated(f):
|
||||
return decorated
|
||||
|
||||
|
||||
@app.route(prefix + '/')
|
||||
@app.route(prefix)
|
||||
@authenticated
|
||||
def whoami(user):
|
||||
return Response(
|
||||
|
@@ -26,6 +26,10 @@ After logging in with your local-system credentials, you should see a JSON dump
|
||||
|
||||
This relies on the Hub starting the whoami services, via config (see [jupyterhub_config.py](./jupyterhub_config.py)).
|
||||
|
||||
You may set the `hub_users` configuration in the service script
|
||||
to restrict access to the service to a whitelist of allowed users.
|
||||
By default, any authenticated user is allowed.
|
||||
|
||||
A similar service could be run externally, by setting the JupyterHub service environment variables:
|
||||
|
||||
JUPYTERHUB_API_TOKEN
|
||||
|
@@ -17,7 +17,11 @@ from jupyterhub.services.auth import HubOAuthenticated, HubOAuthCallbackHandler
|
||||
from jupyterhub.utils import url_path_join
|
||||
|
||||
class WhoAmIHandler(HubOAuthenticated, RequestHandler):
|
||||
hub_users = {getuser()} # the users allowed to access this service
|
||||
# hub_users can be a set of users who are allowed to access the service
|
||||
# `getuser()` here would mean only the user who started the service
|
||||
# can access the service:
|
||||
|
||||
# hub_users = {getuser()}
|
||||
|
||||
@authenticated
|
||||
def get(self):
|
||||
|
@@ -15,7 +15,11 @@ from jupyterhub.services.auth import HubAuthenticated
|
||||
|
||||
|
||||
class WhoAmIHandler(HubAuthenticated, RequestHandler):
|
||||
hub_users = {getuser()} # the users allowed to access me
|
||||
# hub_users can be a set of users who are allowed to access the service
|
||||
# `getuser()` here would mean only the user who started the service
|
||||
# can access the service:
|
||||
|
||||
# hub_users = {getuser()}
|
||||
|
||||
@authenticated
|
||||
def get(self):
|
||||
@@ -37,4 +41,4 @@ def main():
|
||||
IOLoop.current().start()
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
main()
|
||||
|
3
hooks/README.md
Normal file
3
hooks/README.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# Docker Cloud build hooks
|
||||
|
||||
These are the hooks
|
4
hooks/post_build
Executable file
4
hooks/post_build
Executable file
@@ -0,0 +1,4 @@
|
||||
#!/bin/bash
|
||||
set -exuo pipefail
|
||||
|
||||
docker build --build-arg BASE_IMAGE=$DOCKER_REPO:$DOCKER_TAG -t ${DOCKER_REPO}-onbuild:$DOCKER_TAG onbuild
|
29
hooks/post_push
Executable file
29
hooks/post_push
Executable file
@@ -0,0 +1,29 @@
|
||||
#!/bin/bash
|
||||
set -exuo pipefail
|
||||
|
||||
export ONBUILD=${DOCKER_REPO}-onbuild
|
||||
# push ONBUILD image
|
||||
docker push $ONBUILD:$DOCKER_TAG
|
||||
|
||||
function get_hub_version() {
|
||||
rm -f hub_version
|
||||
docker run --rm -v $PWD:/version -u $(id -u) -i $DOCKER_REPO:$DOCKER_TAG sh -c 'jupyterhub --version > /version/hub_version'
|
||||
hub_xyz=$(cat hub_version)
|
||||
split=( ${hub_xyz//./ } )
|
||||
hub_xy="${split[0]}.${split[1]}"
|
||||
}
|
||||
|
||||
|
||||
get_hub_version
|
||||
|
||||
# when building master, push 0.9.0 as well
|
||||
docker tag $DOCKER_REPO:$DOCKER_TAG $DOCKER_REPO:$hub_xyz
|
||||
docker push $DOCKER_REPO:$hub_xyz
|
||||
docker tag $ONBUILD:$DOCKER_TAG $ONBUILD:$hub_xyz
|
||||
docker push $ONBUILD:$hub_xyz
|
||||
|
||||
# when building 0.9.x, push 0.9 as well
|
||||
docker tag $DOCKER_REPO:$DOCKER_TAG $DOCKER_REPO:$hub_xy
|
||||
docker push $DOCKER_REPO:$hub_xy
|
||||
docker tag $ONBUILD:$DOCKER_TAG $ONBUILD:$hub_xy
|
||||
docker push $ONBUILD:$hub_xyz
|
@@ -2,7 +2,7 @@
|
||||
|
||||
|
||||
def get_data_files():
|
||||
"""Walk up until we find share/jupyter/hub"""
|
||||
"""Walk up until we find share/jupyterhub"""
|
||||
import sys
|
||||
from os.path import join, abspath, dirname, exists, split
|
||||
path = abspath(dirname(__file__))
|
||||
@@ -12,9 +12,10 @@ def get_data_files():
|
||||
for path in starting_points:
|
||||
# walk up, looking for prefix/share/jupyter
|
||||
while path != '/':
|
||||
share_jupyter = join(path, 'share', 'jupyter', 'hub')
|
||||
if exists(join(share_jupyter, 'static', 'components')):
|
||||
return share_jupyter
|
||||
share_jupyterhub = join(path, 'share', 'jupyterhub')
|
||||
static = join(share_jupyterhub, 'static')
|
||||
if all(exists(join(static, f)) for f in ['components', 'css']):
|
||||
return share_jupyterhub
|
||||
path, _ = split(path)
|
||||
# didn't find it, give up
|
||||
return ''
|
||||
|
@@ -5,11 +5,19 @@
|
||||
|
||||
version_info = (
|
||||
0,
|
||||
8,
|
||||
9,
|
||||
0,
|
||||
'b2', # release
|
||||
# 'dev', # dev
|
||||
)
|
||||
|
||||
__version__ = '.'.join(map(str, version_info))
|
||||
# pep 440 version: no dot before beta/rc, but before .dev
|
||||
# 0.1.0rc1
|
||||
# 0.1.0a1
|
||||
# 0.1.0b1.dev
|
||||
# 0.1.0.dev
|
||||
|
||||
__version__ = ".".join(map(str, version_info[:3])) + ".".join(version_info[3:])
|
||||
|
||||
|
||||
def _check_version(hub_version, singleuser_version, log):
|
||||
|
@@ -30,11 +30,9 @@ if 'jupyterhub' in sys.modules:
|
||||
else:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# add your model's MetaData object here
|
||||
# for 'autogenerate' support
|
||||
# from myapp import mymodel
|
||||
# target_metadata = mymodel.Base.metadata
|
||||
target_metadata = None
|
||||
# add your model's MetaData object here for 'autogenerate' support
|
||||
from jupyterhub import orm
|
||||
target_metadata = orm.Base.metadata
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
|
42
jupyterhub/alembic/versions/1cebaf56856c_session_id.py
Normal file
42
jupyterhub/alembic/versions/1cebaf56856c_session_id.py
Normal file
@@ -0,0 +1,42 @@
|
||||
"""Add session_id to auth tokens
|
||||
|
||||
Revision ID: 1cebaf56856c
|
||||
Revises: 3ec6993fe20c
|
||||
Create Date: 2017-12-07 14:43:51.500740
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '1cebaf56856c'
|
||||
down_revision = '3ec6993fe20c'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger('alembic')
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
tables = ('oauth_access_tokens', 'oauth_codes')
|
||||
|
||||
|
||||
def add_column_if_table_exists(table, column):
|
||||
engine = op.get_bind().engine
|
||||
if table not in engine.table_names():
|
||||
# table doesn't exist, no need to upgrade
|
||||
# because jupyterhub will create it on launch
|
||||
logger.warning("Skipping upgrade of absent table: %s", table)
|
||||
return
|
||||
op.add_column(table, column)
|
||||
|
||||
|
||||
def upgrade():
|
||||
for table in tables:
|
||||
add_column_if_table_exists(table, sa.Column('session_id', sa.Unicode(255)))
|
||||
|
||||
|
||||
def downgrade():
|
||||
# sqlite cannot downgrade because of limited ALTER TABLE support (no DROP COLUMN)
|
||||
for table in tables:
|
||||
op.drop_column(table, 'session_id')
|
44
jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py
Normal file
44
jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py
Normal file
@@ -0,0 +1,44 @@
|
||||
"""token tracking
|
||||
|
||||
Revision ID: 56cc5a70207e
|
||||
Revises: 1cebaf56856c
|
||||
Create Date: 2017-12-19 15:21:09.300513
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '56cc5a70207e'
|
||||
down_revision = '1cebaf56856c'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger('alembic')
|
||||
|
||||
|
||||
def upgrade():
|
||||
tables = op.get_bind().engine.table_names()
|
||||
op.add_column('api_tokens', sa.Column('created', sa.DateTime(), nullable=True))
|
||||
op.add_column('api_tokens', sa.Column('last_activity', sa.DateTime(), nullable=True))
|
||||
op.add_column('api_tokens', sa.Column('note', sa.Unicode(length=1023), nullable=True))
|
||||
if 'oauth_access_tokens' in tables:
|
||||
op.add_column('oauth_access_tokens', sa.Column('created', sa.DateTime(), nullable=True))
|
||||
op.add_column('oauth_access_tokens', sa.Column('last_activity', sa.DateTime(), nullable=True))
|
||||
if op.get_context().dialect.name == 'sqlite':
|
||||
logger.warning("sqlite cannot use ALTER TABLE to create foreign keys. Upgrade will be incomplete.")
|
||||
else:
|
||||
op.create_foreign_key(None, 'oauth_access_tokens', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')
|
||||
op.create_foreign_key(None, 'oauth_codes', 'oauth_clients', ['client_id'], ['identifier'], ondelete='CASCADE')
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_constraint(None, 'oauth_codes', type_='foreignkey')
|
||||
op.drop_constraint(None, 'oauth_access_tokens', type_='foreignkey')
|
||||
op.drop_column('oauth_access_tokens', 'last_activity')
|
||||
op.drop_column('oauth_access_tokens', 'created')
|
||||
op.drop_column('api_tokens', 'note')
|
||||
op.drop_column('api_tokens', 'last_activity')
|
||||
op.drop_column('api_tokens', 'created')
|
24
jupyterhub/alembic/versions/896818069c98_token_expires.py
Normal file
24
jupyterhub/alembic/versions/896818069c98_token_expires.py
Normal file
@@ -0,0 +1,24 @@
|
||||
"""Add APIToken.expires_at
|
||||
|
||||
Revision ID: 896818069c98
|
||||
Revises: d68c98b66cd4
|
||||
Create Date: 2018-05-07 11:35:58.050542
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '896818069c98'
|
||||
down_revision = 'd68c98b66cd4'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.add_column('api_tokens', sa.Column('expires_at', sa.DateTime(), nullable=True))
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_column('api_tokens', 'expires_at')
|
47
jupyterhub/alembic/versions/99a28a4418e1_user_created.py
Normal file
47
jupyterhub/alembic/versions/99a28a4418e1_user_created.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""user.created and spawner.started
|
||||
|
||||
Revision ID: 99a28a4418e1
|
||||
Revises: 56cc5a70207e
|
||||
Create Date: 2018-03-21 14:27:17.466841
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '99a28a4418e1'
|
||||
down_revision = '56cc5a70207e'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
def upgrade():
|
||||
op.add_column('users', sa.Column('created', sa.DateTime, nullable=True))
|
||||
c = op.get_bind()
|
||||
# fill created date with current time
|
||||
now = datetime.utcnow()
|
||||
c.execute("""
|
||||
UPDATE users
|
||||
SET created='%s'
|
||||
""" % (now,)
|
||||
)
|
||||
|
||||
tables = c.engine.table_names()
|
||||
|
||||
if 'spawners' in tables:
|
||||
op.add_column('spawners', sa.Column('started', sa.DateTime, nullable=True))
|
||||
# fill started value with now for running servers
|
||||
c.execute("""
|
||||
UPDATE spawners
|
||||
SET started='%s'
|
||||
WHERE server_id IS NOT NULL
|
||||
""" % (now,)
|
||||
)
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_column('users', 'created')
|
||||
op.drop_column('spawners', 'started')
|
@@ -0,0 +1,29 @@
|
||||
"""client-description
|
||||
|
||||
Revision ID: d68c98b66cd4
|
||||
Revises: 99a28a4418e1
|
||||
Create Date: 2018-04-13 10:50:17.968636
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'd68c98b66cd4'
|
||||
down_revision = '99a28a4418e1'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def upgrade():
|
||||
tables = op.get_bind().engine.table_names()
|
||||
if 'oauth_clients' in tables:
|
||||
op.add_column(
|
||||
'oauth_clients',
|
||||
sa.Column('description', sa.Unicode(length=1023))
|
||||
)
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_column('oauth_clients', 'description')
|
@@ -3,13 +3,15 @@
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from datetime import datetime
|
||||
import json
|
||||
from urllib.parse import quote
|
||||
|
||||
from oauth2.web.tornado import OAuth2Handler
|
||||
from tornado import web, gen
|
||||
from tornado import web
|
||||
|
||||
from .. import orm
|
||||
from ..user import User
|
||||
from ..utils import token_authenticated
|
||||
from .base import BaseHandler, APIHandler
|
||||
|
||||
@@ -22,7 +24,11 @@ class TokenAPIHandler(APIHandler):
|
||||
orm_token = orm.OAuthAccessToken.find(self.db, token)
|
||||
if orm_token is None:
|
||||
raise web.HTTPError(404)
|
||||
|
||||
# record activity whenever we see a token
|
||||
now = orm_token.last_activity = datetime.utcnow()
|
||||
if orm_token.user:
|
||||
orm_token.user.last_activity = now
|
||||
model = self.user_model(self.users[orm_token.user])
|
||||
elif orm_token.service:
|
||||
model = self.service_model(orm_token.service)
|
||||
@@ -31,17 +37,22 @@ class TokenAPIHandler(APIHandler):
|
||||
self.db.delete(orm_token)
|
||||
self.db.commit()
|
||||
raise web.HTTPError(404)
|
||||
self.db.commit()
|
||||
self.write(json.dumps(model))
|
||||
|
||||
@gen.coroutine
|
||||
def post(self):
|
||||
user = self.get_current_user()
|
||||
async def post(self):
|
||||
warn_msg = (
|
||||
"Using deprecated token creation endpoint %s."
|
||||
" Use /hub/api/users/:user/tokens instead."
|
||||
) % self.request.uri
|
||||
self.log.warning(warn_msg)
|
||||
requester = user = self.get_current_user()
|
||||
if user is None:
|
||||
# allow requesting a token with username and password
|
||||
# for authenticators where that's possible
|
||||
data = self.get_json_body()
|
||||
try:
|
||||
user = yield self.login_user(data)
|
||||
requester = user = await self.login_user(data)
|
||||
except Exception as e:
|
||||
self.log.error("Failure trying to authenticate with form data: %s" % e)
|
||||
user = None
|
||||
@@ -49,17 +60,25 @@ class TokenAPIHandler(APIHandler):
|
||||
raise web.HTTPError(403)
|
||||
else:
|
||||
data = self.get_json_body()
|
||||
# admin users can request
|
||||
if data and data.get('username') != user.name:
|
||||
if user.admin:
|
||||
user = self.find_user(data['username'])
|
||||
if user is None:
|
||||
raise web.HTTPError(400, "No such user '%s'" % data['username'])
|
||||
else:
|
||||
# admin users can request tokens for other users
|
||||
if data and data.get('username'):
|
||||
user = self.find_user(data['username'])
|
||||
if user is not requester and not requester.admin:
|
||||
raise web.HTTPError(403, "Only admins can request tokens for other users.")
|
||||
api_token = user.new_api_token()
|
||||
if requester.admin and user is None:
|
||||
raise web.HTTPError(400, "No such user '%s'" % data['username'])
|
||||
|
||||
note = (data or {}).get('note')
|
||||
if not note:
|
||||
note = "Requested via deprecated api"
|
||||
if requester is not user:
|
||||
kind = 'user' if isinstance(user, User) else 'service'
|
||||
note += " by %s %s" % (kind, requester.name)
|
||||
|
||||
api_token = user.new_api_token(note=note)
|
||||
self.write(json.dumps({
|
||||
'token': api_token,
|
||||
'warning': warn_msg,
|
||||
'user': self.user_model(user),
|
||||
}))
|
||||
|
||||
@@ -81,7 +100,7 @@ class CookieAPIHandler(APIHandler):
|
||||
|
||||
class OAuthHandler(BaseHandler, OAuth2Handler):
|
||||
"""Implement OAuth provider handlers
|
||||
|
||||
|
||||
OAuth2Handler sets `self.provider` in initialize,
|
||||
but we are already passing the Provider object via settings.
|
||||
"""
|
||||
|
@@ -8,8 +8,9 @@ from http.client import responses
|
||||
|
||||
from tornado import web
|
||||
|
||||
from .. import orm
|
||||
from ..handlers import BaseHandler
|
||||
from ..utils import url_path_join
|
||||
from ..utils import isoformat, url_path_join
|
||||
|
||||
class APIHandler(BaseHandler):
|
||||
|
||||
@@ -17,15 +18,11 @@ class APIHandler(BaseHandler):
|
||||
def content_security_policy(self):
|
||||
return '; '.join([super().content_security_policy, "default-src 'none'"])
|
||||
|
||||
def set_default_headers(self):
|
||||
self.set_header('Content-Type', 'application/json')
|
||||
super().set_default_headers()
|
||||
|
||||
def check_referer(self):
|
||||
"""Check Origin for cross-site API requests.
|
||||
|
||||
|
||||
Copied from WebSocket with changes:
|
||||
|
||||
|
||||
- allow unspecified host/referer (e.g. scripts)
|
||||
"""
|
||||
host = self.request.headers.get("Host")
|
||||
@@ -39,7 +36,7 @@ class APIHandler(BaseHandler):
|
||||
if not referer:
|
||||
self.log.warning("Blocking API request with no referer")
|
||||
return False
|
||||
|
||||
|
||||
host_path = url_path_join(host, self.hub.base_url)
|
||||
referer_path = referer.split('://', 1)[-1]
|
||||
if not (referer_path + '/').startswith(host_path):
|
||||
@@ -47,7 +44,7 @@ class APIHandler(BaseHandler):
|
||||
referer, host_path)
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def get_current_user_cookie(self):
|
||||
"""Override get_user_cookie to check Referer header"""
|
||||
cookie_user = super().get_current_user_cookie()
|
||||
@@ -70,11 +67,12 @@ class APIHandler(BaseHandler):
|
||||
self.log.error("Couldn't parse JSON", exc_info=True)
|
||||
raise web.HTTPError(400, 'Invalid JSON in body of request')
|
||||
return model
|
||||
|
||||
|
||||
def write_error(self, status_code, **kwargs):
|
||||
"""Write JSON errors instead of HTML"""
|
||||
exc_info = kwargs.get('exc_info')
|
||||
message = ''
|
||||
exception = None
|
||||
status_message = responses.get(status_code, 'Unknown Error')
|
||||
if exc_info:
|
||||
exception = exc_info[1]
|
||||
@@ -88,13 +86,77 @@ class APIHandler(BaseHandler):
|
||||
reason = getattr(exception, 'reason', '')
|
||||
if reason:
|
||||
status_message = reason
|
||||
|
||||
self.set_header('Content-Type', 'application/json')
|
||||
# allow setting headers from exceptions
|
||||
# since exception handler clears headers
|
||||
headers = getattr(exception, 'headers', None)
|
||||
if headers:
|
||||
for key, value in headers.items():
|
||||
self.set_header(key, value)
|
||||
|
||||
self.write(json.dumps({
|
||||
'status': status_code,
|
||||
'message': message or status_message,
|
||||
}))
|
||||
|
||||
def user_model(self, user):
|
||||
def server_model(self, spawner, include_state=False):
|
||||
"""Get the JSON model for a Spawner"""
|
||||
return {
|
||||
'name': spawner.name,
|
||||
'last_activity': isoformat(spawner.orm_spawner.last_activity),
|
||||
'started': isoformat(spawner.orm_spawner.started),
|
||||
'pending': spawner.pending,
|
||||
'ready': spawner.ready,
|
||||
'state': spawner.get_state() if include_state else None,
|
||||
'url': url_path_join(spawner.user.url, spawner.name, '/'),
|
||||
'progress_url': spawner._progress_url,
|
||||
}
|
||||
|
||||
def token_model(self, token):
|
||||
"""Get the JSON model for an APIToken"""
|
||||
expires_at = None
|
||||
if isinstance(token, orm.APIToken):
|
||||
kind = 'api_token'
|
||||
extra = {
|
||||
'note': token.note,
|
||||
}
|
||||
expires_at = token.expires_at
|
||||
elif isinstance(token, orm.OAuthAccessToken):
|
||||
kind = 'oauth'
|
||||
extra = {
|
||||
'oauth_client': token.client.description or token.client.client_id,
|
||||
}
|
||||
if token.expires_at:
|
||||
expires_at = datetime.fromtimestamp(token.expires_at)
|
||||
else:
|
||||
raise TypeError(
|
||||
"token must be an APIToken or OAuthAccessToken, not %s"
|
||||
% type(token))
|
||||
|
||||
if token.user:
|
||||
owner_key = 'user'
|
||||
owner = token.user.name
|
||||
|
||||
else:
|
||||
owner_key = 'service'
|
||||
owner = token.service.name
|
||||
|
||||
model = {
|
||||
owner_key: owner,
|
||||
'id': token.api_id,
|
||||
'kind': kind,
|
||||
'created': isoformat(token.created),
|
||||
'last_activity': isoformat(token.last_activity),
|
||||
}
|
||||
model.update(extra)
|
||||
return model
|
||||
|
||||
def user_model(self, user, include_servers=False, include_state=False):
|
||||
"""Get the JSON model for a User object"""
|
||||
if isinstance(user, orm.User):
|
||||
user = self.users[user.id]
|
||||
|
||||
model = {
|
||||
'kind': 'user',
|
||||
'name': user.name,
|
||||
@@ -102,19 +164,22 @@ class APIHandler(BaseHandler):
|
||||
'groups': [ g.name for g in user.groups ],
|
||||
'server': user.url if user.running else None,
|
||||
'pending': None,
|
||||
'last_activity': user.last_activity.isoformat(),
|
||||
'created': isoformat(user.created),
|
||||
'last_activity': isoformat(user.last_activity),
|
||||
}
|
||||
model['pending'] = user.spawners[''].pending or None
|
||||
if '' in user.spawners:
|
||||
model['pending'] = user.spawners[''].pending
|
||||
|
||||
if self.allow_named_servers:
|
||||
servers = model['servers'] = {}
|
||||
for name, spawner in user.spawners.items():
|
||||
if spawner.ready:
|
||||
servers[name] = s = {'name': name}
|
||||
if spawner.pending:
|
||||
s['pending'] = spawner.pending
|
||||
if spawner.server:
|
||||
s['url'] = url_path_join(user.url, name, '/')
|
||||
if not include_servers:
|
||||
model['servers'] = None
|
||||
return model
|
||||
|
||||
servers = model['servers'] = {}
|
||||
for name, spawner in user.spawners.items():
|
||||
# include 'active' servers, not just ready
|
||||
# (this includes pending events)
|
||||
if spawner.active:
|
||||
servers[name] = self.server_model(spawner, include_state=include_state)
|
||||
return model
|
||||
|
||||
def group_model(self, group):
|
||||
@@ -137,6 +202,7 @@ class APIHandler(BaseHandler):
|
||||
'name': str,
|
||||
'admin': bool,
|
||||
'groups': list,
|
||||
'auth_state': dict,
|
||||
}
|
||||
|
||||
_group_model_types = {
|
||||
@@ -146,7 +212,7 @@ class APIHandler(BaseHandler):
|
||||
|
||||
def _check_model(self, model, model_types, name):
|
||||
"""Check a model provided by a REST API request
|
||||
|
||||
|
||||
Args:
|
||||
model (dict): user-provided model
|
||||
model_types (dict): dict of key:type used to validate types and keys
|
||||
@@ -178,5 +244,4 @@ class APIHandler(BaseHandler):
|
||||
|
||||
|
||||
def options(self, *args, **kwargs):
|
||||
self.set_header('Access-Control-Allow-Headers', 'accept, content-type')
|
||||
self.finish()
|
||||
|
@@ -41,6 +41,37 @@ class GroupListAPIHandler(_GroupAPIHandler):
|
||||
data = [ self.group_model(g) for g in self.db.query(orm.Group) ]
|
||||
self.write(json.dumps(data))
|
||||
|
||||
@admin_only
|
||||
async def post(self):
|
||||
"""POST creates Multiple groups """
|
||||
model = self.get_json_body()
|
||||
if not model or not isinstance(model, dict) or not model.get('groups'):
|
||||
raise web.HTTPError(400, "Must specify at least one group to create")
|
||||
|
||||
groupnames = model.pop("groups",[])
|
||||
self._check_group_model(model)
|
||||
|
||||
created = []
|
||||
for name in groupnames:
|
||||
existing = orm.Group.find(self.db, name=name)
|
||||
if existing is not None:
|
||||
raise web.HTTPError(409, "Group %s already exists" % name)
|
||||
|
||||
usernames = model.get('users', [])
|
||||
# check that users exist
|
||||
users = self._usernames_to_users(usernames)
|
||||
# create the group
|
||||
self.log.info("Creating new group %s with %i users",
|
||||
name, len(users),
|
||||
)
|
||||
self.log.debug("Users: %s", usernames)
|
||||
group = orm.Group(name=name, users=users)
|
||||
self.db.add(group)
|
||||
self.db.commit()
|
||||
created.append(group)
|
||||
self.write(json.dumps([self.group_model(group) for group in created]))
|
||||
self.set_status(201)
|
||||
|
||||
|
||||
class GroupAPIHandler(_GroupAPIHandler):
|
||||
"""View and modify groups by name"""
|
||||
@@ -51,8 +82,7 @@ class GroupAPIHandler(_GroupAPIHandler):
|
||||
self.write(json.dumps(self.group_model(group)))
|
||||
|
||||
@admin_only
|
||||
@gen.coroutine
|
||||
def post(self, name):
|
||||
async def post(self, name):
|
||||
"""POST creates a group by name"""
|
||||
model = self.get_json_body()
|
||||
if model is None:
|
||||
@@ -62,7 +92,7 @@ class GroupAPIHandler(_GroupAPIHandler):
|
||||
|
||||
existing = orm.Group.find(self.db, name=name)
|
||||
if existing is not None:
|
||||
raise web.HTTPError(400, "Group %s already exists" % name)
|
||||
raise web.HTTPError(409, "Group %s already exists" % name)
|
||||
|
||||
usernames = model.get('users', [])
|
||||
# check that users exist
|
||||
@@ -109,9 +139,8 @@ class GroupUsersAPIHandler(_GroupAPIHandler):
|
||||
self.db.commit()
|
||||
self.write(json.dumps(self.group_model(group)))
|
||||
|
||||
@gen.coroutine
|
||||
@admin_only
|
||||
def delete(self, name):
|
||||
async def delete(self, name):
|
||||
"""DELETE removes users from a group"""
|
||||
group = self.find_group(name)
|
||||
data = self.get_json_body()
|
||||
|
@@ -14,52 +14,48 @@ from .base import APIHandler
|
||||
|
||||
|
||||
class ProxyAPIHandler(APIHandler):
|
||||
|
||||
|
||||
@admin_only
|
||||
@gen.coroutine
|
||||
def get(self):
|
||||
async def get(self):
|
||||
"""GET /api/proxy fetches the routing table
|
||||
|
||||
This is the same as fetching the routing table directly from the proxy,
|
||||
but without clients needing to maintain separate
|
||||
"""
|
||||
routes = yield self.proxy.get_all_routes()
|
||||
routes = await self.proxy.get_all_routes()
|
||||
self.write(json.dumps(routes))
|
||||
|
||||
@admin_only
|
||||
@gen.coroutine
|
||||
def post(self):
|
||||
async def post(self):
|
||||
"""POST checks the proxy to ensure that it's up to date.
|
||||
|
||||
Can be used to jumpstart a newly launched proxy
|
||||
without waiting for the check_routes interval.
|
||||
"""
|
||||
yield self.proxy.check_routes(self.users, self.services)
|
||||
|
||||
await self.proxy.check_routes(self.users, self.services)
|
||||
|
||||
@admin_only
|
||||
@gen.coroutine
|
||||
def patch(self):
|
||||
async def patch(self):
|
||||
"""PATCH updates the location of the proxy
|
||||
|
||||
|
||||
Can be used to notify the Hub that a new proxy is in charge
|
||||
"""
|
||||
if not self.request.body:
|
||||
raise web.HTTPError(400, "need JSON body")
|
||||
|
||||
|
||||
try:
|
||||
model = json.loads(self.request.body.decode('utf8', 'replace'))
|
||||
except ValueError:
|
||||
raise web.HTTPError(400, "Request body must be JSON dict")
|
||||
if not isinstance(model, dict):
|
||||
raise web.HTTPError(400, "Request body must be JSON dict")
|
||||
|
||||
|
||||
if 'api_url' in model:
|
||||
self.proxy.api_url = model['api_url']
|
||||
if 'auth_token' in model:
|
||||
self.proxy.auth_token = model['auth_token']
|
||||
self.log.info("Updated proxy at %s", self.proxy)
|
||||
yield self.proxy.check_routes(self.users, self.services)
|
||||
|
||||
await self.proxy.check_routes(self.users, self.services)
|
||||
|
||||
|
||||
default_handlers = [
|
||||
|
@@ -3,22 +3,27 @@
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
import json
|
||||
|
||||
from tornado import gen, web
|
||||
from async_generator import aclosing
|
||||
from tornado import web
|
||||
from tornado.iostream import StreamClosedError
|
||||
|
||||
from .. import orm
|
||||
from ..utils import admin_only
|
||||
from ..user import User
|
||||
from ..utils import admin_only, iterate_until, maybe_future, url_path_join
|
||||
from .base import APIHandler
|
||||
|
||||
|
||||
class SelfAPIHandler(APIHandler):
|
||||
"""Return the authenticated user's model
|
||||
|
||||
|
||||
Based on the authentication info. Acts as a 'whoami' for auth tokens.
|
||||
"""
|
||||
@web.authenticated
|
||||
def get(self):
|
||||
|
||||
async def get(self):
|
||||
user = self.get_current_user()
|
||||
if user is None:
|
||||
# whoami can be accessed via oauth token
|
||||
@@ -31,23 +36,24 @@ class SelfAPIHandler(APIHandler):
|
||||
class UserListAPIHandler(APIHandler):
|
||||
@admin_only
|
||||
def get(self):
|
||||
users = [ self._user_from_orm(u) for u in self.db.query(orm.User) ]
|
||||
data = [ self.user_model(u) for u in users ]
|
||||
data = [
|
||||
self.user_model(u, include_servers=True, include_state=True)
|
||||
for u in self.db.query(orm.User)
|
||||
]
|
||||
self.write(json.dumps(data))
|
||||
|
||||
|
||||
@admin_only
|
||||
@gen.coroutine
|
||||
def post(self):
|
||||
async def post(self):
|
||||
data = self.get_json_body()
|
||||
if not data or not isinstance(data, dict) or not data.get('usernames'):
|
||||
raise web.HTTPError(400, "Must specify at least one user to create")
|
||||
|
||||
|
||||
usernames = data.pop('usernames')
|
||||
self._check_user_model(data)
|
||||
# admin is set for all users
|
||||
# to create admin and non-admin users requires at least two API requests
|
||||
admin = data.get('admin', False)
|
||||
|
||||
|
||||
to_create = []
|
||||
invalid_names = []
|
||||
for name in usernames:
|
||||
@@ -60,17 +66,17 @@ class UserListAPIHandler(APIHandler):
|
||||
self.log.warning("User %s already exists" % name)
|
||||
else:
|
||||
to_create.append(name)
|
||||
|
||||
|
||||
if invalid_names:
|
||||
if len(invalid_names) == 1:
|
||||
msg = "Invalid username: %s" % invalid_names[0]
|
||||
else:
|
||||
msg = "Invalid usernames: %s" % ', '.join(invalid_names)
|
||||
raise web.HTTPError(400, msg)
|
||||
|
||||
|
||||
if not to_create:
|
||||
raise web.HTTPError(400, "All %i users already exist" % len(usernames))
|
||||
|
||||
raise web.HTTPError(409, "All %i users already exist" % len(usernames))
|
||||
|
||||
created = []
|
||||
for name in to_create:
|
||||
user = self.user_from_username(name)
|
||||
@@ -78,14 +84,14 @@ class UserListAPIHandler(APIHandler):
|
||||
user.admin = True
|
||||
self.db.commit()
|
||||
try:
|
||||
yield gen.maybe_future(self.authenticator.add_user(user))
|
||||
await maybe_future(self.authenticator.add_user(user))
|
||||
except Exception as e:
|
||||
self.log.error("Failed to create user: %s" % name, exc_info=True)
|
||||
del self.users[user]
|
||||
self.users.delete(user)
|
||||
raise web.HTTPError(400, "Failed to create user %s: %s" % (name, str(e)))
|
||||
else:
|
||||
created.append(user)
|
||||
|
||||
|
||||
self.write(json.dumps([ self.user_model(u) for u in created ]))
|
||||
self.set_status(201)
|
||||
|
||||
@@ -98,49 +104,56 @@ def admin_or_self(method):
|
||||
raise web.HTTPError(403)
|
||||
if not (current.name == name or current.admin):
|
||||
raise web.HTTPError(403)
|
||||
|
||||
|
||||
# raise 404 if not found
|
||||
if not self.find_user(name):
|
||||
raise web.HTTPError(404)
|
||||
return method(self, name, *args, **kwargs)
|
||||
return m
|
||||
|
||||
|
||||
class UserAPIHandler(APIHandler):
|
||||
|
||||
|
||||
@admin_or_self
|
||||
def get(self, name):
|
||||
async def get(self, name):
|
||||
user = self.find_user(name)
|
||||
self.write(json.dumps(self.user_model(user)))
|
||||
|
||||
model = self.user_model(user, include_servers=True, include_state=self.get_current_user().admin)
|
||||
# auth state will only be shown if the requestor is an admin
|
||||
# this means users can't see their own auth state unless they
|
||||
# are admins, Hub admins often are also marked as admins so they
|
||||
# will see their auth state but normal users won't
|
||||
requestor = self.get_current_user()
|
||||
if requestor.admin:
|
||||
model['auth_state'] = await user.get_auth_state()
|
||||
self.write(json.dumps(model))
|
||||
|
||||
@admin_only
|
||||
@gen.coroutine
|
||||
def post(self, name):
|
||||
async def post(self, name):
|
||||
data = self.get_json_body()
|
||||
user = self.find_user(name)
|
||||
if user is not None:
|
||||
raise web.HTTPError(400, "User %s already exists" % name)
|
||||
|
||||
raise web.HTTPError(409, "User %s already exists" % name)
|
||||
|
||||
user = self.user_from_username(name)
|
||||
if data:
|
||||
self._check_user_model(data)
|
||||
if 'admin' in data:
|
||||
user.admin = data['admin']
|
||||
self.db.commit()
|
||||
|
||||
|
||||
try:
|
||||
yield gen.maybe_future(self.authenticator.add_user(user))
|
||||
await maybe_future(self.authenticator.add_user(user))
|
||||
except Exception:
|
||||
self.log.error("Failed to create user: %s" % name, exc_info=True)
|
||||
# remove from registry
|
||||
del self.users[user]
|
||||
self.users.delete(user)
|
||||
raise web.HTTPError(400, "Failed to create user: %s" % name)
|
||||
|
||||
|
||||
self.write(json.dumps(self.user_model(user)))
|
||||
self.set_status(201)
|
||||
|
||||
|
||||
@admin_only
|
||||
@gen.coroutine
|
||||
def delete(self, name):
|
||||
async def delete(self, name):
|
||||
user = self.find_user(name)
|
||||
if user is None:
|
||||
raise web.HTTPError(404)
|
||||
@@ -149,18 +162,18 @@ class UserAPIHandler(APIHandler):
|
||||
if user.spawner._stop_pending:
|
||||
raise web.HTTPError(400, "%s's server is in the process of stopping, please wait." % name)
|
||||
if user.running:
|
||||
yield self.stop_single_user(user)
|
||||
await self.stop_single_user(user)
|
||||
if user.spawner._stop_pending:
|
||||
raise web.HTTPError(400, "%s's server is in the process of stopping, please wait." % name)
|
||||
|
||||
yield gen.maybe_future(self.authenticator.delete_user(user))
|
||||
|
||||
await maybe_future(self.authenticator.delete_user(user))
|
||||
# remove from registry
|
||||
del self.users[user]
|
||||
self.users.delete(user)
|
||||
|
||||
self.set_status(204)
|
||||
|
||||
@admin_only
|
||||
def patch(self, name):
|
||||
async def patch(self, name):
|
||||
user = self.find_user(name)
|
||||
if user is None:
|
||||
raise web.HTTPError(404)
|
||||
@@ -171,17 +184,168 @@ class UserAPIHandler(APIHandler):
|
||||
if self.find_user(data['name']):
|
||||
raise web.HTTPError(400, "User %s already exists, username must be unique" % data['name'])
|
||||
for key, value in data.items():
|
||||
setattr(user, key, value)
|
||||
if key == 'auth_state':
|
||||
await user.save_auth_state(value)
|
||||
else:
|
||||
setattr(user, key, value)
|
||||
self.db.commit()
|
||||
self.write(json.dumps(self.user_model(user)))
|
||||
user_ = self.user_model(user)
|
||||
user_['auth_state'] = await user.get_auth_state()
|
||||
self.write(json.dumps(user_))
|
||||
|
||||
|
||||
class UserTokenListAPIHandler(APIHandler):
|
||||
"""API endpoint for listing/creating tokens"""
|
||||
@admin_or_self
|
||||
def get(self, name):
|
||||
"""Get tokens for a given user"""
|
||||
user = self.find_user(name)
|
||||
if not user:
|
||||
raise web.HTTPError(404, "No such user: %s" % name)
|
||||
|
||||
now = datetime.utcnow()
|
||||
|
||||
api_tokens = []
|
||||
def sort_key(token):
|
||||
return token.last_activity or token.created
|
||||
|
||||
for token in sorted(user.api_tokens, key=sort_key):
|
||||
if token.expires_at and token.expires_at < now:
|
||||
# exclude expired tokens
|
||||
self.db.delete(token)
|
||||
self.db.commit()
|
||||
continue
|
||||
api_tokens.append(self.token_model(token))
|
||||
|
||||
oauth_tokens = []
|
||||
# OAuth tokens use integer timestamps
|
||||
now_timestamp = now.timestamp()
|
||||
for token in sorted(user.oauth_tokens, key=sort_key):
|
||||
if token.expires_at and token.expires_at < now_timestamp:
|
||||
# exclude expired tokens
|
||||
self.db.delete(token)
|
||||
self.db.commit()
|
||||
continue
|
||||
oauth_tokens.append(self.token_model(token))
|
||||
self.write(json.dumps({
|
||||
'api_tokens': api_tokens,
|
||||
'oauth_tokens': oauth_tokens,
|
||||
}))
|
||||
|
||||
async def post(self, name):
|
||||
body = self.get_json_body() or {}
|
||||
if not isinstance(body, dict):
|
||||
raise web.HTTPError(400, "Body must be a JSON dict or empty")
|
||||
|
||||
requester = self.get_current_user()
|
||||
if requester is None:
|
||||
# defer to Authenticator for identifying the user
|
||||
# can be username+password or an upstream auth token
|
||||
try:
|
||||
name = await self.authenticator.authenticate(self, body.get('auth'))
|
||||
except web.HTTPError as e:
|
||||
# turn any authentication error into 403
|
||||
raise web.HTTPError(403)
|
||||
except Exception as e:
|
||||
# suppress and log error here in case Authenticator
|
||||
# isn't prepared to handle auth via this data
|
||||
self.log.error("Error authenticating request for %s: %s",
|
||||
self.request.uri, e)
|
||||
raise web.HTTPError(403)
|
||||
requester = self.find_user(name)
|
||||
if requester is None:
|
||||
# couldn't identify requester
|
||||
raise web.HTTPError(403)
|
||||
user = self.find_user(name)
|
||||
if requester is not user and not requester.admin:
|
||||
raise web.HTTPError(403, "Only admins can request tokens for other users")
|
||||
if not user:
|
||||
raise web.HTTPError(404, "No such user: %s" % name)
|
||||
if requester is not user:
|
||||
kind = 'user' if isinstance(requester, User) else 'service'
|
||||
|
||||
note = body.get('note')
|
||||
if not note:
|
||||
note = "Requested via api"
|
||||
if requester is not user:
|
||||
note += " by %s %s" % (kind, requester.name)
|
||||
|
||||
api_token = user.new_api_token(note=note, expires_in=body.get('expires_in', None))
|
||||
if requester is not user:
|
||||
self.log.info("%s %s requested API token for %s", kind.title(), requester.name, user.name)
|
||||
else:
|
||||
user_kind = 'user' if isinstance(user, User) else 'service'
|
||||
self.log.info("%s %s requested new API token", user_kind.title(), user.name)
|
||||
# retrieve the model
|
||||
token_model = self.token_model(orm.APIToken.find(self.db, api_token))
|
||||
token_model['token'] = api_token
|
||||
self.write(json.dumps(token_model))
|
||||
|
||||
|
||||
class UserTokenAPIHandler(APIHandler):
|
||||
"""API endpoint for retrieving/deleting individual tokens"""
|
||||
|
||||
def find_token_by_id(self, user, token_id):
|
||||
"""Find a token object by token-id key
|
||||
|
||||
Raises 404 if not found for any reason
|
||||
(e.g. wrong owner, invalid key format, etc.)
|
||||
"""
|
||||
not_found = "No such token %s for user %s" % (token_id, user.name)
|
||||
prefix, id = token_id[0], token_id[1:]
|
||||
if prefix == 'a':
|
||||
Token = orm.APIToken
|
||||
elif prefix == 'o':
|
||||
Token = orm.OAuthAccessToken
|
||||
else:
|
||||
raise web.HTTPError(404, not_found)
|
||||
try:
|
||||
id = int(id)
|
||||
except ValueError:
|
||||
raise web.HTTPError(404, not_found)
|
||||
|
||||
orm_token = self.db.query(Token).filter(Token.id==id).first()
|
||||
if orm_token is None or orm_token.user is not user.orm_user:
|
||||
raise web.HTTPError(404, "Token not found %s", orm_token)
|
||||
return orm_token
|
||||
|
||||
@admin_or_self
|
||||
def get(self, name, token_id):
|
||||
""""""
|
||||
user = self.find_user(name)
|
||||
if not user:
|
||||
raise web.HTTPError(404, "No such user: %s" % name)
|
||||
token = self.find_token_by_id(user, token_id)
|
||||
self.write(json.dumps(self.token_model(token)))
|
||||
|
||||
@admin_or_self
|
||||
def delete(self, name, token_id):
|
||||
"""Delete a token"""
|
||||
user = self.find_user(name)
|
||||
if not user:
|
||||
raise web.HTTPError(404, "No such user: %s" % name)
|
||||
token = self.find_token_by_id(user, token_id)
|
||||
# deleting an oauth token deletes *all* oauth tokens for that client
|
||||
if isinstance(token, orm.OAuthAccessToken):
|
||||
client_id = token.client_id
|
||||
tokens = [
|
||||
token for token in user.oauth_tokens
|
||||
if token.client_id == client_id
|
||||
]
|
||||
else:
|
||||
tokens = [token]
|
||||
for token in tokens:
|
||||
self.db.delete(token)
|
||||
self.db.commit()
|
||||
self.set_header('Content-Type', 'text/plain')
|
||||
self.set_status(204)
|
||||
|
||||
|
||||
class UserServerAPIHandler(APIHandler):
|
||||
"""Start and stop single-user servers"""
|
||||
|
||||
@gen.coroutine
|
||||
@admin_or_self
|
||||
def post(self, name, server_name=''):
|
||||
async def post(self, name, server_name=''):
|
||||
user = self.find_user(name)
|
||||
if server_name and not self.allow_named_servers:
|
||||
raise web.HTTPError(400, "Named servers are not enabled.")
|
||||
@@ -199,21 +363,20 @@ class UserServerAPIHandler(APIHandler):
|
||||
# set _spawn_pending flag to prevent races while we wait
|
||||
spawner._spawn_pending = True
|
||||
try:
|
||||
state = yield spawner.poll_and_notify()
|
||||
state = await spawner.poll_and_notify()
|
||||
finally:
|
||||
spawner._spawn_pending = False
|
||||
if state is None:
|
||||
raise web.HTTPError(400, "%s is already running" % spawner._log_name)
|
||||
|
||||
options = self.get_json_body()
|
||||
yield self.spawn_single_user(user, server_name, options=options)
|
||||
await self.spawn_single_user(user, server_name, options=options)
|
||||
status = 202 if spawner.pending == 'spawn' else 201
|
||||
self.set_header('Content-Type', 'text/plain')
|
||||
self.set_status(status)
|
||||
|
||||
@gen.coroutine
|
||||
@admin_or_self
|
||||
def delete(self, name, server_name=''):
|
||||
async def delete(self, name, server_name=''):
|
||||
user = self.find_user(name)
|
||||
if server_name:
|
||||
if not self.allow_named_servers:
|
||||
@@ -234,10 +397,10 @@ class UserServerAPIHandler(APIHandler):
|
||||
(spawner._log_name, '(pending: %s)' % spawner.pending if spawner.pending else '')
|
||||
)
|
||||
# include notify, so that a server that died is noticed immediately
|
||||
status = yield spawner.poll_and_notify()
|
||||
status = await spawner.poll_and_notify()
|
||||
if status is not None:
|
||||
raise web.HTTPError(400, "%s is not running" % spawner._log_name)
|
||||
yield self.stop_single_user(user, server_name)
|
||||
await self.stop_single_user(user, server_name)
|
||||
status = 202 if spawner._stop_pending else 204
|
||||
self.set_header('Content-Type', 'text/plain')
|
||||
self.set_status(status)
|
||||
@@ -245,7 +408,7 @@ class UserServerAPIHandler(APIHandler):
|
||||
|
||||
class UserAdminAccessAPIHandler(APIHandler):
|
||||
"""Grant admins access to single-user servers
|
||||
|
||||
|
||||
This handler sets the necessary cookie for an admin to login to a single-user server.
|
||||
"""
|
||||
@admin_only
|
||||
@@ -263,11 +426,108 @@ class UserAdminAccessAPIHandler(APIHandler):
|
||||
raise web.HTTPError(404)
|
||||
|
||||
|
||||
class SpawnProgressAPIHandler(APIHandler):
|
||||
"""EventStream handler for pending spawns"""
|
||||
def get_content_type(self):
|
||||
return 'text/event-stream'
|
||||
|
||||
async def send_event(self, event):
|
||||
try:
|
||||
self.write('data: {}\n\n'.format(json.dumps(event)))
|
||||
await self.flush()
|
||||
except StreamClosedError:
|
||||
self.log.warning("Stream closed while handling %s", self.request.uri)
|
||||
# raise Finish to halt the handler
|
||||
raise web.Finish()
|
||||
|
||||
@admin_or_self
|
||||
async def get(self, username, server_name=''):
|
||||
self.set_header('Cache-Control', 'no-cache')
|
||||
if server_name is None:
|
||||
server_name = ''
|
||||
user = self.find_user(username)
|
||||
if user is None:
|
||||
# no such user
|
||||
raise web.HTTPError(404)
|
||||
if server_name not in user.spawners:
|
||||
# user has no such server
|
||||
raise web.HTTPError(404)
|
||||
spawner = user.spawners[server_name]
|
||||
# cases:
|
||||
# - spawner already started and ready
|
||||
# - spawner not running at all
|
||||
# - spawner failed
|
||||
# - spawner pending start (what we expect)
|
||||
url = url_path_join(user.url, server_name, '/')
|
||||
ready_event = {
|
||||
'progress': 100,
|
||||
'ready': True,
|
||||
'message': "Server ready at {}".format(url),
|
||||
'html_message': 'Server ready at <a href="{0}">{0}</a>'.format(url),
|
||||
'url': url,
|
||||
}
|
||||
failed_event = {
|
||||
'progress': 100,
|
||||
'failed': True,
|
||||
'message': "Spawn failed",
|
||||
}
|
||||
|
||||
if spawner.ready:
|
||||
# spawner already ready. Trigger progress-completion immediately
|
||||
self.log.info("Server %s is already started", spawner._log_name)
|
||||
await self.send_event(ready_event)
|
||||
return
|
||||
|
||||
spawn_future = spawner._spawn_future
|
||||
|
||||
if not spawner._spawn_pending:
|
||||
# not pending, no progress to fetch
|
||||
# check if spawner has just failed
|
||||
f = spawn_future
|
||||
if f and f.done() and f.exception():
|
||||
failed_event['message'] = "Spawn failed: %s" % f.exception()
|
||||
await self.send_event(failed_event)
|
||||
return
|
||||
else:
|
||||
raise web.HTTPError(400, "%s is not starting...", spawner._log_name)
|
||||
|
||||
# retrieve progress events from the Spawner
|
||||
async with aclosing(iterate_until(spawn_future, spawner._generate_progress())) as events:
|
||||
async for event in events:
|
||||
# don't allow events to sneakily set the 'ready' flag
|
||||
if 'ready' in event:
|
||||
event.pop('ready', None)
|
||||
await self.send_event(event)
|
||||
|
||||
# progress finished, wait for spawn to actually resolve,
|
||||
# in case progress finished early
|
||||
# (ignore errors, which will be logged elsewhere)
|
||||
await asyncio.wait([spawn_future])
|
||||
|
||||
# progress and spawn finished, check if spawn succeeded
|
||||
if spawner.ready:
|
||||
# spawner is ready, signal completion and redirect
|
||||
self.log.info("Server %s is ready", spawner._log_name)
|
||||
await self.send_event(ready_event)
|
||||
else:
|
||||
# what happened? Maybe spawn failed?
|
||||
f = spawn_future
|
||||
if f and f.done() and f.exception():
|
||||
failed_event['message'] = "Spawn failed: %s" % f.exception()
|
||||
else:
|
||||
self.log.warning("Server %s didn't start for unknown reason", spawner._log_name)
|
||||
await self.send_event(failed_event)
|
||||
|
||||
|
||||
default_handlers = [
|
||||
(r"/api/user", SelfAPIHandler),
|
||||
(r"/api/users", UserListAPIHandler),
|
||||
(r"/api/users/([^/]+)", UserAPIHandler),
|
||||
(r"/api/users/([^/]+)/server", UserServerAPIHandler),
|
||||
(r"/api/users/([^/]+)/server/progress", SpawnProgressAPIHandler),
|
||||
(r"/api/users/([^/]+)/tokens", UserTokenListAPIHandler),
|
||||
(r"/api/users/([^/]+)/tokens/([^/]*)", UserTokenAPIHandler),
|
||||
(r"/api/users/([^/]+)/servers/([^/]*)", UserServerAPIHandler),
|
||||
(r"/api/users/([^/]+)/servers/([^/]*)/progress", SpawnProgressAPIHandler),
|
||||
(r"/api/users/([^/]+)/admin-access", UserAdminAccessAPIHandler),
|
||||
]
|
||||
|
File diff suppressed because it is too large
Load Diff
@@ -3,30 +3,32 @@
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
import pipes
|
||||
import re
|
||||
from shutil import which
|
||||
import sys
|
||||
from subprocess import Popen, PIPE, STDOUT
|
||||
|
||||
from tornado import gen
|
||||
try:
|
||||
import pamela
|
||||
except Exception as e:
|
||||
pamela = None
|
||||
_pamela_error = e
|
||||
|
||||
from tornado.concurrent import run_on_executor
|
||||
from tornado import gen
|
||||
|
||||
from traitlets.config import LoggingConfigurable
|
||||
from traitlets import Bool, Set, Unicode, Dict, Any, default, observe
|
||||
|
||||
from .handlers.login import LoginHandler
|
||||
from .utils import url_path_join
|
||||
from .utils import maybe_future, url_path_join
|
||||
from .traitlets import Command
|
||||
|
||||
|
||||
|
||||
def getgrnam(name):
|
||||
"""Wrapper function to protect against `grp` not being available
|
||||
"""Wrapper function to protect against `grp` not being available
|
||||
on Windows
|
||||
"""
|
||||
import grp
|
||||
@@ -37,7 +39,7 @@ class Authenticator(LoggingConfigurable):
|
||||
"""Base class for implementing an authentication provider for JupyterHub"""
|
||||
|
||||
db = Any()
|
||||
|
||||
|
||||
enable_auth_state = Bool(False, config=True,
|
||||
help="""Enable persisting auth_state (if available).
|
||||
|
||||
@@ -86,6 +88,20 @@ class Authenticator(LoggingConfigurable):
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
blacklist = Set(
|
||||
help="""
|
||||
Blacklist of usernames that are not allowed to log in.
|
||||
|
||||
Use this with supported authenticators to restrict which users can not log in. This is an
|
||||
additional blacklist that further restricts users, beyond whatever restrictions the
|
||||
authenticator has in place.
|
||||
|
||||
If empty, does not perform any additional restriction.
|
||||
|
||||
.. versionadded: 0.9
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
@observe('whitelist')
|
||||
def _check_whitelist(self, change):
|
||||
short_names = [name for name in change['new'] if len(name) <= 1]
|
||||
@@ -144,6 +160,12 @@ class Authenticator(LoggingConfigurable):
|
||||
|
||||
Return True if username is valid, False otherwise.
|
||||
"""
|
||||
if '/' in username:
|
||||
# / is not allowed in usernames
|
||||
return False
|
||||
if not username:
|
||||
# empty usernames are not allowed
|
||||
return False
|
||||
if not self.username_regex:
|
||||
return True
|
||||
return bool(self.username_regex.match(username))
|
||||
@@ -197,8 +219,22 @@ class Authenticator(LoggingConfigurable):
|
||||
return True
|
||||
return username in self.whitelist
|
||||
|
||||
@gen.coroutine
|
||||
def get_authenticated_user(self, handler, data):
|
||||
def check_blacklist(self, username):
|
||||
"""Check if a username is blocked to authenticate based on blacklist configuration
|
||||
|
||||
Return True if username is allowed, False otherwise.
|
||||
No blacklist means any username is allowed.
|
||||
|
||||
Names are normalized *before* being checked against the blacklist.
|
||||
|
||||
.. versionadded: 0.9
|
||||
"""
|
||||
if not self.blacklist:
|
||||
# No blacklist means any name is allowed
|
||||
return True
|
||||
return username not in self.blacklist
|
||||
|
||||
async def get_authenticated_user(self, handler, data):
|
||||
"""Authenticate the user who is attempting to log in
|
||||
|
||||
Returns user dict if successful, None otherwise.
|
||||
@@ -214,11 +250,11 @@ class Authenticator(LoggingConfigurable):
|
||||
- `authenticate` turns formdata into a username
|
||||
- `normalize_username` normalizes the username
|
||||
- `check_whitelist` checks against the user whitelist
|
||||
|
||||
|
||||
.. versionchanged:: 0.8
|
||||
return dict instead of username
|
||||
"""
|
||||
authenticated = yield self.authenticate(handler, data)
|
||||
authenticated = await maybe_future(self.authenticate(handler, data))
|
||||
if authenticated is None:
|
||||
return
|
||||
if isinstance(authenticated, dict):
|
||||
@@ -229,6 +265,7 @@ class Authenticator(LoggingConfigurable):
|
||||
'name': authenticated,
|
||||
}
|
||||
authenticated.setdefault('auth_state', None)
|
||||
authenticated.setdefault('admin', None)
|
||||
|
||||
# normalize the username
|
||||
authenticated['name'] = username = self.normalize_username(authenticated['name'])
|
||||
@@ -236,15 +273,21 @@ class Authenticator(LoggingConfigurable):
|
||||
self.log.warning("Disallowing invalid username %r.", username)
|
||||
return
|
||||
|
||||
whitelist_pass = yield gen.maybe_future(self.check_whitelist(username))
|
||||
blacklist_pass = await maybe_future(self.check_blacklist(username))
|
||||
whitelist_pass = await maybe_future(self.check_whitelist(username))
|
||||
if blacklist_pass:
|
||||
pass
|
||||
else:
|
||||
self.log.warning("User %r in blacklist. Stop authentication", username)
|
||||
return
|
||||
|
||||
if whitelist_pass:
|
||||
return authenticated
|
||||
else:
|
||||
self.log.warning("User %r not in whitelist.", username)
|
||||
return
|
||||
|
||||
@gen.coroutine
|
||||
def authenticate(self, handler, data):
|
||||
async def authenticate(self, handler, data):
|
||||
"""Authenticate a user with login form data
|
||||
|
||||
This must be a tornado gen.coroutine.
|
||||
@@ -263,10 +306,10 @@ class Authenticator(LoggingConfigurable):
|
||||
Returns:
|
||||
user (str or dict or None): The username of the authenticated user,
|
||||
or None if Authentication failed.
|
||||
If the Authenticator has state associated with the user,
|
||||
it can return a dict with the keys 'name' and 'auth_state',
|
||||
where 'name' is the username and 'auth_state' is a dictionary
|
||||
of auth state that will be persisted.
|
||||
The Authenticator may return a dict instead, which MUST have a
|
||||
key 'name' holding the username, and may have two optional keys
|
||||
set - 'auth_state', a dictionary of of auth state that will be
|
||||
persisted; and 'admin', the admin setting value for the user.
|
||||
"""
|
||||
|
||||
def pre_spawn_start(self, user, spawner):
|
||||
@@ -469,20 +512,19 @@ class LocalAuthenticator(Authenticator):
|
||||
return True
|
||||
return False
|
||||
|
||||
@gen.coroutine
|
||||
def add_user(self, user):
|
||||
async def add_user(self, user):
|
||||
"""Hook called whenever a new user is added
|
||||
|
||||
If self.create_system_users, the user will attempt to be created if it doesn't exist.
|
||||
"""
|
||||
user_exists = yield gen.maybe_future(self.system_user_exists(user))
|
||||
user_exists = await maybe_future(self.system_user_exists(user))
|
||||
if not user_exists:
|
||||
if self.create_system_users:
|
||||
yield gen.maybe_future(self.add_system_user(user))
|
||||
await maybe_future(self.add_system_user(user))
|
||||
else:
|
||||
raise KeyError("User %s does not exist." % user.name)
|
||||
|
||||
yield gen.maybe_future(super().add_user(user))
|
||||
await maybe_future(super().add_user(user))
|
||||
|
||||
@staticmethod
|
||||
def system_user_exists(user):
|
||||
@@ -513,6 +555,12 @@ class LocalAuthenticator(Authenticator):
|
||||
class PAMAuthenticator(LocalAuthenticator):
|
||||
"""Authenticate local UNIX users with PAM"""
|
||||
|
||||
# run PAM in a thread, since it can be slow
|
||||
executor = Any()
|
||||
@default('executor')
|
||||
def _default_executor(self):
|
||||
return ThreadPoolExecutor(1)
|
||||
|
||||
encoding = Unicode('utf8',
|
||||
help="""
|
||||
The text encoding to use when communicating with PAM
|
||||
@@ -537,13 +585,26 @@ class PAMAuthenticator(LocalAuthenticator):
|
||||
this is automatically set to False.
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
|
||||
check_account = Bool(True,
|
||||
help="""
|
||||
Whether to check the user's account status via PAM during authentication.
|
||||
|
||||
The PAM account stack performs non-authentication based account
|
||||
management. It is typically used to restrict/permit access to a
|
||||
service and this step is needed to access the host's user access control.
|
||||
|
||||
Disabling this can be dangerous as authenticated but unauthorized users may
|
||||
be granted access and, therefore, arbitrary execution on the system.
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
if pamela is None:
|
||||
raise _pamela_error from None
|
||||
super().__init__(**kwargs)
|
||||
|
||||
@gen.coroutine
|
||||
@run_on_executor
|
||||
def authenticate(self, handler, data):
|
||||
"""Authenticate with PAM, and return the username if login is successful.
|
||||
|
||||
@@ -551,32 +612,44 @@ class PAMAuthenticator(LocalAuthenticator):
|
||||
"""
|
||||
username = data['username']
|
||||
try:
|
||||
pamela.authenticate(username, data['password'], service=self.service)
|
||||
pamela.authenticate(username, data['password'], service=self.service, encoding=self.encoding)
|
||||
except pamela.PAMError as e:
|
||||
if handler is not None:
|
||||
self.log.warning("PAM Authentication failed (%s@%s): %s", username, handler.request.remote_ip, e)
|
||||
else:
|
||||
self.log.warning("PAM Authentication failed: %s", e)
|
||||
else:
|
||||
return username
|
||||
if not self.check_account:
|
||||
return username
|
||||
try:
|
||||
pamela.check_account(username, service=self.service, encoding=self.encoding)
|
||||
except pamela.PAMError as e:
|
||||
if handler is not None:
|
||||
self.log.warning("PAM Account Check failed (%s@%s): %s", username, handler.request.remote_ip, e)
|
||||
else:
|
||||
self.log.warning("PAM Account Check failed: %s", e)
|
||||
else:
|
||||
return username
|
||||
|
||||
@run_on_executor
|
||||
def pre_spawn_start(self, user, spawner):
|
||||
"""Open PAM session for user if so configured"""
|
||||
if not self.open_sessions:
|
||||
return
|
||||
try:
|
||||
pamela.open_session(user.name, service=self.service)
|
||||
pamela.open_session(user.name, service=self.service, encoding=self.encoding)
|
||||
except pamela.PAMError as e:
|
||||
self.log.warning("Failed to open PAM session for %s: %s", user.name, e)
|
||||
self.log.warning("Disabling PAM sessions from now on.")
|
||||
self.open_sessions = False
|
||||
|
||||
@run_on_executor
|
||||
def post_spawn_stop(self, user, spawner):
|
||||
"""Close PAM session for user if we were configured to opened one"""
|
||||
if not self.open_sessions:
|
||||
return
|
||||
try:
|
||||
pamela.close_session(user.name, service=self.service)
|
||||
pamela.close_session(user.name, service=self.service, encoding=self.encoding)
|
||||
except pamela.PAMError as e:
|
||||
self.log.warning("Failed to close PAM session for %s: %s", user.name, e)
|
||||
self.log.warning("Disabling PAM sessions from now on.")
|
||||
|
@@ -19,6 +19,7 @@ except ImportError:
|
||||
class InvalidToken(Exception):
|
||||
pass
|
||||
|
||||
from .utils import maybe_future
|
||||
|
||||
KEY_ENV = 'JUPYTERHUB_CRYPT_KEY'
|
||||
|
||||
@@ -104,7 +105,7 @@ class CryptKeeper(SingletonConfigurable):
|
||||
def _ensure_bytes(self, proposal):
|
||||
# cast str to bytes
|
||||
return [ _validate_key(key) for key in proposal.value ]
|
||||
|
||||
|
||||
fernet = Any()
|
||||
def _fernet_default(self):
|
||||
if cryptography is None or not self.keys:
|
||||
@@ -123,7 +124,7 @@ class CryptKeeper(SingletonConfigurable):
|
||||
|
||||
def _encrypt(self, data):
|
||||
"""Actually do the encryption. Runs in a background thread.
|
||||
|
||||
|
||||
data is serialized to bytes with pickle.
|
||||
bytes are returned.
|
||||
"""
|
||||
@@ -132,7 +133,7 @@ class CryptKeeper(SingletonConfigurable):
|
||||
def encrypt(self, data):
|
||||
"""Encrypt an object with cryptography"""
|
||||
self.check_available()
|
||||
return self.executor.submit(self._encrypt, data)
|
||||
return maybe_future(self.executor.submit(self._encrypt, data))
|
||||
|
||||
def _decrypt(self, encrypted):
|
||||
decrypted = self.fernet.decrypt(encrypted)
|
||||
@@ -141,12 +142,12 @@ class CryptKeeper(SingletonConfigurable):
|
||||
def decrypt(self, encrypted):
|
||||
"""Decrypt an object with cryptography"""
|
||||
self.check_available()
|
||||
return self.executor.submit(self._decrypt, encrypted)
|
||||
return maybe_future(self.executor.submit(self._decrypt, encrypted))
|
||||
|
||||
|
||||
def encrypt(data):
|
||||
"""encrypt some data with the crypt keeper.
|
||||
|
||||
|
||||
data will be serialized with pickle.
|
||||
Returns a Future whose result will be bytes.
|
||||
"""
|
||||
@@ -158,4 +159,3 @@ def decrypt(data):
|
||||
Returns a Future whose result will be the decrypted, deserialized data.
|
||||
"""
|
||||
return CryptKeeper.instance().decrypt(data)
|
||||
|
@@ -5,11 +5,17 @@
|
||||
# Based on pgcontents.utils.migrate, used under the Apache license.
|
||||
|
||||
from contextlib import contextmanager
|
||||
from datetime import datetime
|
||||
import os
|
||||
import shutil
|
||||
from subprocess import check_call
|
||||
import sys
|
||||
from tempfile import TemporaryDirectory
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
|
||||
from . import orm
|
||||
|
||||
_here = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
ALEMBIC_INI_TEMPLATE_PATH = os.path.join(_here, 'alembic.ini')
|
||||
@@ -84,13 +90,91 @@ def upgrade(db_url, revision='head'):
|
||||
)
|
||||
|
||||
|
||||
def _alembic(*args):
|
||||
def backup_db_file(db_file, log=None):
|
||||
"""Backup a database file if it exists"""
|
||||
timestamp = datetime.now().strftime('.%Y-%m-%d-%H%M%S')
|
||||
backup_db_file = db_file + timestamp
|
||||
for i in range(1, 10):
|
||||
if not os.path.exists(backup_db_file):
|
||||
break
|
||||
backup_db_file = '{}.{}.{}'.format(db_file, timestamp, i)
|
||||
#
|
||||
if os.path.exists(backup_db_file):
|
||||
raise OSError("backup db file already exists: %s" % backup_db_file)
|
||||
if log:
|
||||
log.info("Backing up %s => %s", db_file, backup_db_file)
|
||||
shutil.copy(db_file, backup_db_file)
|
||||
|
||||
|
||||
def upgrade_if_needed(db_url, backup=True, log=None):
|
||||
"""Upgrade a database if needed
|
||||
|
||||
If the database is sqlite, a backup file will be created with a timestamp.
|
||||
Other database systems should perform their own backups prior to calling this.
|
||||
"""
|
||||
# run check-db-revision first
|
||||
engine = create_engine(db_url)
|
||||
try:
|
||||
orm.check_db_revision(engine)
|
||||
except orm.DatabaseSchemaMismatch:
|
||||
# ignore mismatch error because that's what we are here for!
|
||||
pass
|
||||
else:
|
||||
# nothing to do
|
||||
return
|
||||
log.info("Upgrading %s", db_url)
|
||||
# we need to upgrade, backup the database
|
||||
if backup and db_url.startswith('sqlite:///'):
|
||||
db_file = db_url.split(':///', 1)[1]
|
||||
backup_db_file(db_file, log=log)
|
||||
upgrade(db_url)
|
||||
|
||||
|
||||
def shell(args=None):
|
||||
"""Start an IPython shell hooked up to the jupyerhub database"""
|
||||
from .app import JupyterHub
|
||||
hub = JupyterHub()
|
||||
hub.load_config_file(hub.config_file)
|
||||
db_url = hub.db_url
|
||||
db = orm.new_session_factory(db_url, **hub.db_kwargs)()
|
||||
ns = {
|
||||
'db': db,
|
||||
'db_url': db_url,
|
||||
'orm': orm,
|
||||
}
|
||||
|
||||
import IPython
|
||||
IPython.start_ipython(args, user_ns=ns)
|
||||
|
||||
|
||||
def _alembic(args):
|
||||
"""Run an alembic command with a temporary alembic.ini"""
|
||||
with _temp_alembic_ini('sqlite:///jupyterhub.sqlite') as alembic_ini:
|
||||
from .app import JupyterHub
|
||||
hub = JupyterHub()
|
||||
hub.load_config_file(hub.config_file)
|
||||
db_url = hub.db_url
|
||||
with _temp_alembic_ini(db_url) as alembic_ini:
|
||||
check_call(
|
||||
['alembic', '-c', alembic_ini] + list(args)
|
||||
['alembic', '-c', alembic_ini] + args
|
||||
)
|
||||
|
||||
|
||||
def main(args=None):
|
||||
if args is None:
|
||||
args = sys.argv[1:]
|
||||
# dumb option parsing, since we want to pass things through
|
||||
# to subcommands
|
||||
choices = ['shell', 'alembic']
|
||||
if not args or args[0] not in choices:
|
||||
print("Select a command from: %s" % ', '.join(choices))
|
||||
return 1
|
||||
cmd, args = args[0], args[1:]
|
||||
|
||||
if cmd == 'shell':
|
||||
shell(args)
|
||||
elif cmd == 'alembic':
|
||||
_alembic(args)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
_alembic(*sys.argv[1:])
|
||||
sys.exit(main())
|
||||
|
@@ -1,8 +1,8 @@
|
||||
from .base import *
|
||||
from .login import *
|
||||
|
||||
from . import base, pages, login
|
||||
from . import base, pages, login, metrics
|
||||
|
||||
default_handlers = []
|
||||
for mod in (base, pages, login):
|
||||
for mod in (base, pages, login, metrics):
|
||||
default_handlers.extend(mod.default_handlers)
|
||||
|
@@ -3,15 +3,20 @@
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import re
|
||||
from datetime import timedelta
|
||||
import copy
|
||||
from datetime import datetime, timedelta
|
||||
from http.client import responses
|
||||
import math
|
||||
import random
|
||||
import re
|
||||
import time
|
||||
from urllib.parse import urlparse, urlunparse, parse_qs, urlencode
|
||||
import uuid
|
||||
|
||||
from jinja2 import TemplateNotFound
|
||||
|
||||
from tornado.log import app_log
|
||||
from tornado.httputil import url_concat
|
||||
from tornado.httputil import url_concat, HTTPHeaders
|
||||
from tornado.ioloop import IOLoop
|
||||
from tornado.web import RequestHandler
|
||||
from tornado import gen, web
|
||||
@@ -20,7 +25,11 @@ from .. import __version__
|
||||
from .. import orm
|
||||
from ..objects import Server
|
||||
from ..spawner import LocalProcessSpawner
|
||||
from ..utils import url_path_join
|
||||
from ..utils import maybe_future, url_path_join
|
||||
from ..metrics import (
|
||||
SERVER_SPAWN_DURATION_SECONDS, ServerSpawnStatus,
|
||||
PROXY_ADD_DURATION_SECONDS, ProxyAddStatus,
|
||||
)
|
||||
|
||||
# pattern for the authentication token header
|
||||
auth_header_pat = re.compile(r'^(?:token|bearer)\s+([^\s]+)$', flags=re.IGNORECASE)
|
||||
@@ -33,6 +42,9 @@ reasons = {
|
||||
'error': "Failed to start your server. Please contact admin.",
|
||||
}
|
||||
|
||||
# constant, not configurable
|
||||
SESSION_COOKIE_NAME = 'jupyterhub-session-id'
|
||||
|
||||
class BaseHandler(RequestHandler):
|
||||
"""Base Handler class with access to common methods and properties."""
|
||||
|
||||
@@ -49,6 +61,10 @@ class BaseHandler(RequestHandler):
|
||||
def base_url(self):
|
||||
return self.settings.get('base_url', '/')
|
||||
|
||||
@property
|
||||
def default_url(self):
|
||||
return self.settings.get('default_url', '')
|
||||
|
||||
@property
|
||||
def version_hash(self):
|
||||
return self.settings.get('version_hash', '')
|
||||
@@ -76,6 +92,7 @@ class BaseHandler(RequestHandler):
|
||||
@property
|
||||
def services(self):
|
||||
return self.settings.setdefault('services', {})
|
||||
|
||||
@property
|
||||
def hub(self):
|
||||
return self.settings['hub']
|
||||
@@ -124,20 +141,28 @@ class BaseHandler(RequestHandler):
|
||||
"report-uri " + self.csp_report_uri,
|
||||
])
|
||||
|
||||
def get_content_type(self):
|
||||
return 'text/html'
|
||||
|
||||
def set_default_headers(self):
|
||||
"""
|
||||
Set any headers passed as tornado_settings['headers'].
|
||||
|
||||
By default sets Content-Security-Policy of frame-ancestors 'self'.
|
||||
Also responsible for setting content-type header
|
||||
"""
|
||||
headers = self.settings.get('headers', {})
|
||||
# wrap in HTTPHeaders for case-insensitivity
|
||||
headers = HTTPHeaders(self.settings.get('headers', {}))
|
||||
headers.setdefault("X-JupyterHub-Version", __version__)
|
||||
|
||||
for header_name, header_content in headers.items():
|
||||
self.set_header(header_name, header_content)
|
||||
|
||||
if 'Access-Control-Allow-Headers' not in headers:
|
||||
self.set_header('Access-Control-Allow-Headers', 'accept, content-type, authorization')
|
||||
if 'Content-Security-Policy' not in headers:
|
||||
self.set_header('Content-Security-Policy', self.content_security_policy)
|
||||
self.set_header('Content-Type', self.get_content_type())
|
||||
|
||||
#---------------------------------------------------------------
|
||||
# Login and cookie-related
|
||||
@@ -151,6 +176,10 @@ class BaseHandler(RequestHandler):
|
||||
def cookie_max_age_days(self):
|
||||
return self.settings.get('cookie_max_age_days', None)
|
||||
|
||||
@property
|
||||
def redirect_to_server(self):
|
||||
return self.settings.get('redirect_to_server', True)
|
||||
|
||||
def get_auth_token(self):
|
||||
"""Get the authorization token from Authorization header"""
|
||||
auth_header = self.request.headers.get('Authorization', '')
|
||||
@@ -161,7 +190,7 @@ class BaseHandler(RequestHandler):
|
||||
|
||||
def get_current_user_oauth_token(self):
|
||||
"""Get the current user identified by OAuth access token
|
||||
|
||||
|
||||
Separate from API token because OAuth access tokens
|
||||
can only be used for identifying users,
|
||||
not using the API.
|
||||
@@ -172,9 +201,11 @@ class BaseHandler(RequestHandler):
|
||||
orm_token = orm.OAuthAccessToken.find(self.db, token)
|
||||
if orm_token is None:
|
||||
return None
|
||||
else:
|
||||
return self._user_from_orm(orm_token.user)
|
||||
|
||||
orm_token.last_activity = \
|
||||
orm_token.user.last_activity = datetime.utcnow()
|
||||
self.db.commit()
|
||||
return self._user_from_orm(orm_token.user)
|
||||
|
||||
def get_current_user_token(self):
|
||||
"""get_current_user from Authorization header token"""
|
||||
token = self.get_auth_token()
|
||||
@@ -184,6 +215,13 @@ class BaseHandler(RequestHandler):
|
||||
if orm_token is None:
|
||||
return None
|
||||
else:
|
||||
# record token activity
|
||||
now = datetime.utcnow()
|
||||
orm_token.last_activity = now
|
||||
if orm_token.user:
|
||||
orm_token.user.last_activity = now
|
||||
|
||||
self.db.commit()
|
||||
return orm_token.service or self._user_from_orm(orm_token.user)
|
||||
|
||||
def _user_for_cookie(self, cookie_name, cookie_value=None):
|
||||
@@ -208,6 +246,10 @@ class BaseHandler(RequestHandler):
|
||||
self.log.warning("Invalid cookie token")
|
||||
# have cookie, but it's not valid. Clear it and start over.
|
||||
clear()
|
||||
return
|
||||
# update user activity
|
||||
user.last_activity = datetime.utcnow()
|
||||
self.db.commit()
|
||||
return user
|
||||
|
||||
def _user_from_orm(self, orm_user):
|
||||
@@ -244,17 +286,46 @@ class BaseHandler(RequestHandler):
|
||||
self.db.add(u)
|
||||
self.db.commit()
|
||||
user = self._user_from_orm(u)
|
||||
self.authenticator.add_user(user)
|
||||
return user
|
||||
|
||||
def clear_login_cookie(self, name=None):
|
||||
kwargs = {}
|
||||
if self.subdomain_host:
|
||||
kwargs['domain'] = self.domain
|
||||
self.clear_cookie(self.hub.cookie_name, path=self.hub.base_url, **kwargs)
|
||||
self.clear_cookie('jupyterhub-services', path=url_path_join(self.base_url, 'services'))
|
||||
user = self.get_current_user_cookie()
|
||||
session_id = self.get_session_cookie()
|
||||
if session_id:
|
||||
# clear session id
|
||||
self.clear_cookie(SESSION_COOKIE_NAME, **kwargs)
|
||||
|
||||
def _set_user_cookie(self, user, server):
|
||||
if user:
|
||||
# user is logged in, clear any tokens associated with the current session
|
||||
# don't clear session tokens if not logged in,
|
||||
# because that could be a malicious logout request!
|
||||
count = 0
|
||||
for access_token in (
|
||||
self.db.query(orm.OAuthAccessToken)
|
||||
.filter(orm.OAuthAccessToken.user_id==user.id)
|
||||
.filter(orm.OAuthAccessToken.session_id==session_id)
|
||||
):
|
||||
self.db.delete(access_token)
|
||||
count += 1
|
||||
if count:
|
||||
self.log.debug("Deleted %s access tokens for %s", count, user.name)
|
||||
self.db.commit()
|
||||
|
||||
|
||||
# clear hub cookie
|
||||
self.clear_cookie(self.hub.cookie_name, path=self.hub.base_url, **kwargs)
|
||||
# clear services cookie
|
||||
self.clear_cookie('jupyterhub-services', path=url_path_join(self.base_url, 'services'), **kwargs)
|
||||
|
||||
def _set_cookie(self, key, value, encrypted=True, **overrides):
|
||||
"""Setting any cookie should go through here
|
||||
|
||||
if encrypted use tornado's set_secure_cookie,
|
||||
otherwise set plaintext cookies.
|
||||
"""
|
||||
# tornado <4.2 have a bug that consider secure==True as soon as
|
||||
# 'secure' kwarg is passed to set_secure_cookie
|
||||
kwargs = {
|
||||
@@ -264,14 +335,47 @@ class BaseHandler(RequestHandler):
|
||||
kwargs['secure'] = True
|
||||
if self.subdomain_host:
|
||||
kwargs['domain'] = self.domain
|
||||
self.log.debug("Setting cookie for %s: %s, %s", user.name, server.cookie_name, kwargs)
|
||||
self.set_secure_cookie(
|
||||
|
||||
kwargs.update(self.settings.get('cookie_options', {}))
|
||||
kwargs.update(overrides)
|
||||
|
||||
if encrypted:
|
||||
set_cookie = self.set_secure_cookie
|
||||
else:
|
||||
set_cookie = self.set_cookie
|
||||
|
||||
self.log.debug("Setting cookie %s: %s", key, kwargs)
|
||||
set_cookie(key, value, **kwargs)
|
||||
|
||||
|
||||
def _set_user_cookie(self, user, server):
|
||||
self.log.debug("Setting cookie for %s: %s", user.name, server.cookie_name)
|
||||
self._set_cookie(
|
||||
server.cookie_name,
|
||||
user.cookie_id,
|
||||
encrypted=True,
|
||||
path=server.base_url,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
def get_session_cookie(self):
|
||||
"""Get the session id from a cookie
|
||||
|
||||
Returns None if no session id is stored
|
||||
"""
|
||||
return self.get_cookie(SESSION_COOKIE_NAME, None)
|
||||
|
||||
def set_session_cookie(self):
|
||||
"""Set a new session id cookie
|
||||
|
||||
new session id is returned
|
||||
|
||||
Session id cookie is *not* encrypted,
|
||||
so other services on this domain can read it.
|
||||
"""
|
||||
session_id = uuid.uuid4().hex
|
||||
self._set_cookie(SESSION_COOKIE_NAME, session_id, encrypted=False)
|
||||
return session_id
|
||||
|
||||
def set_service_cookie(self, user):
|
||||
"""set the login cookie for services"""
|
||||
self._set_user_cookie(user, orm.Server(
|
||||
@@ -294,50 +398,80 @@ class BaseHandler(RequestHandler):
|
||||
if self.db.query(orm.Service).filter(orm.Service.server != None).first():
|
||||
self.set_service_cookie(user)
|
||||
|
||||
if not self.get_session_cookie():
|
||||
self.set_session_cookie()
|
||||
|
||||
# create and set a new cookie token for the hub
|
||||
if not self.get_current_user_cookie():
|
||||
self.set_hub_cookie(user)
|
||||
|
||||
def authenticate(self, data):
|
||||
return gen.maybe_future(self.authenticator.get_authenticated_user(self, data))
|
||||
return maybe_future(self.authenticator.get_authenticated_user(self, data))
|
||||
|
||||
def get_next_url(self, user=None):
|
||||
"""Get the next_url for login redirect
|
||||
|
||||
Defaults to hub base_url /hub/ if user is not running,
|
||||
otherwise user.url.
|
||||
|
||||
Default URL after login:
|
||||
|
||||
- if redirect_to_server (default): send to user's own server
|
||||
- else: /hub/home
|
||||
"""
|
||||
next_url = self.get_argument('next', default='')
|
||||
if (next_url + '/').startswith('%s://%s/' % (self.request.protocol, self.request.host)):
|
||||
# treat absolute URLs for our host as absolute paths:
|
||||
next_url = urlparse(next_url).path
|
||||
if not next_url.startswith('/'):
|
||||
if next_url and not next_url.startswith('/'):
|
||||
self.log.warning("Disallowing redirect outside JupyterHub: %r", next_url)
|
||||
next_url = ''
|
||||
if next_url and next_url.startswith(url_path_join(self.base_url, 'user/')):
|
||||
# add /hub/ prefix, to ensure we redirect to the right user's server.
|
||||
# The next request will be handled by SpawnHandler,
|
||||
# ultimately redirecting to the logged-in user's server.
|
||||
without_prefix = next_url[len(self.base_url):]
|
||||
next_url = url_path_join(self.hub.base_url, without_prefix)
|
||||
self.log.warning("Redirecting %s to %s. For sharing public links, use /user-redirect/",
|
||||
self.request.uri, next_url,
|
||||
)
|
||||
|
||||
if not next_url:
|
||||
if user and user.running:
|
||||
# custom default URL
|
||||
next_url = self.default_url
|
||||
|
||||
if not next_url:
|
||||
# default URL after login
|
||||
# if self.redirect_to_server, default login URL initiates spawn,
|
||||
# otherwise send to Hub home page (control panel)
|
||||
if user and self.redirect_to_server:
|
||||
next_url = user.url
|
||||
else:
|
||||
next_url = self.hub.base_url
|
||||
next_url = url_path_join(self.hub.base_url, 'home')
|
||||
return next_url
|
||||
|
||||
@gen.coroutine
|
||||
def login_user(self, data=None):
|
||||
async def login_user(self, data=None):
|
||||
"""Login a user"""
|
||||
auth_timer = self.statsd.timer('login.authenticate').start()
|
||||
authenticated = yield self.authenticate(data)
|
||||
authenticated = await self.authenticate(data)
|
||||
auth_timer.stop(send=False)
|
||||
|
||||
if authenticated:
|
||||
username = authenticated['name']
|
||||
auth_state = authenticated.get('auth_state')
|
||||
admin = authenticated.get('admin')
|
||||
new_user = username not in self.users
|
||||
user = self.user_from_username(username)
|
||||
if new_user:
|
||||
await maybe_future(self.authenticator.add_user(user))
|
||||
# Only set `admin` if the authenticator returned an explicit value.
|
||||
if admin is not None and admin != user.admin:
|
||||
user.admin = admin
|
||||
self.db.commit()
|
||||
# always set auth_state and commit,
|
||||
# because there could be key-rotation or clearing of previous values
|
||||
# going on.
|
||||
if not self.authenticator.enable_auth_state:
|
||||
# auth_state is not enabled. Force None.
|
||||
auth_state = None
|
||||
yield user.save_auth_state(auth_state)
|
||||
await user.save_auth_state(auth_state)
|
||||
self.db.commit()
|
||||
self.set_login_cookie(user)
|
||||
self.statsd.incr('login.success')
|
||||
@@ -374,9 +508,9 @@ class BaseHandler(RequestHandler):
|
||||
def active_server_limit(self):
|
||||
return self.settings.get('active_server_limit', 0)
|
||||
|
||||
@gen.coroutine
|
||||
def spawn_single_user(self, user, server_name='', options=None):
|
||||
async def spawn_single_user(self, user, server_name='', options=None):
|
||||
# in case of error, include 'try again from /hub/home' message
|
||||
spawn_start_time = time.perf_counter()
|
||||
self.extra_error_html = self.spawn_home_error
|
||||
|
||||
user_server_name = user.name
|
||||
@@ -386,6 +520,9 @@ class BaseHandler(RequestHandler):
|
||||
|
||||
if server_name in user.spawners and user.spawners[server_name].pending:
|
||||
pending = user.spawners[server_name].pending
|
||||
SERVER_SPAWN_DURATION_SECONDS.labels(
|
||||
status=ServerSpawnStatus.already_pending
|
||||
).observe(time.perf_counter() - spawn_start_time)
|
||||
raise RuntimeError("%s pending %s" % (user_server_name, pending))
|
||||
|
||||
# count active servers and pending spawns
|
||||
@@ -400,19 +537,45 @@ class BaseHandler(RequestHandler):
|
||||
active_server_limit = self.active_server_limit
|
||||
|
||||
if concurrent_spawn_limit and spawn_pending_count >= concurrent_spawn_limit:
|
||||
self.log.info(
|
||||
'%s pending spawns, throttling',
|
||||
spawn_pending_count,
|
||||
SERVER_SPAWN_DURATION_SECONDS.labels(
|
||||
status=ServerSpawnStatus.throttled
|
||||
).observe(time.perf_counter() - spawn_start_time)
|
||||
# Suggest number of seconds client should wait before retrying
|
||||
# This helps prevent thundering herd problems, where users simply
|
||||
# immediately retry when we are overloaded.
|
||||
retry_range = self.settings['spawn_throttle_retry_range']
|
||||
retry_time = int(random.uniform(*retry_range))
|
||||
|
||||
# round suggestion to nicer human value (nearest 10 seconds or minute)
|
||||
if retry_time <= 90:
|
||||
# round human seconds up to nearest 10
|
||||
human_retry_time = "%i0 seconds" % math.ceil(retry_time / 10.)
|
||||
else:
|
||||
# round number of minutes
|
||||
human_retry_time = "%i minutes" % math.round(retry_time / 60.)
|
||||
|
||||
self.log.warning(
|
||||
'%s pending spawns, throttling. Suggested retry in %s seconds.',
|
||||
spawn_pending_count, retry_time,
|
||||
)
|
||||
raise web.HTTPError(
|
||||
err = web.HTTPError(
|
||||
429,
|
||||
"User startup rate limit exceeded. Try again in a few minutes.",
|
||||
"Too many users trying to log in right now. Try again in {}.".format(human_retry_time)
|
||||
)
|
||||
# can't call set_header directly here because it gets ignored
|
||||
# when errors are raised
|
||||
# we handle err.headers ourselves in Handler.write_error
|
||||
err.headers = {'Retry-After': retry_time}
|
||||
raise err
|
||||
|
||||
if active_server_limit and active_count >= active_server_limit:
|
||||
self.log.info(
|
||||
'%s servers active, no space available',
|
||||
active_count,
|
||||
)
|
||||
SERVER_SPAWN_DURATION_SECONDS.labels(
|
||||
status=ServerSpawnStatus.too_many_users
|
||||
).observe(time.perf_counter() - spawn_start_time)
|
||||
raise web.HTTPError(429, "Active user limit exceeded. Try again in a few minutes.")
|
||||
|
||||
tic = IOLoop.current().time()
|
||||
@@ -433,25 +596,39 @@ class BaseHandler(RequestHandler):
|
||||
# while we are waiting for _proxy_pending to be set
|
||||
spawner._spawn_pending = True
|
||||
|
||||
@gen.coroutine
|
||||
def finish_user_spawn():
|
||||
async def finish_user_spawn():
|
||||
"""Finish the user spawn by registering listeners and notifying the proxy.
|
||||
|
||||
If the spawner is slow to start, this is passed as an async callback,
|
||||
otherwise it is called immediately.
|
||||
"""
|
||||
# wait for spawn Future
|
||||
yield spawn_future
|
||||
await spawn_future
|
||||
toc = IOLoop.current().time()
|
||||
self.log.info("User %s took %.3f seconds to start", user_server_name, toc-tic)
|
||||
self.statsd.timing('spawner.success', (toc - tic) * 1000)
|
||||
SERVER_SPAWN_DURATION_SECONDS.labels(
|
||||
status=ServerSpawnStatus.success
|
||||
).observe(time.perf_counter() - spawn_start_time)
|
||||
proxy_add_start_time = time.perf_counter()
|
||||
spawner._proxy_pending = True
|
||||
try:
|
||||
yield self.proxy.add_user(user, server_name)
|
||||
await self.proxy.add_user(user, server_name)
|
||||
|
||||
PROXY_ADD_DURATION_SECONDS.labels(
|
||||
status='success'
|
||||
).observe(
|
||||
time.perf_counter() - proxy_add_start_time
|
||||
)
|
||||
except Exception:
|
||||
self.log.exception("Failed to add %s to proxy!", user_server_name)
|
||||
self.log.error("Stopping %s to avoid inconsistent state", user_server_name)
|
||||
yield user.stop()
|
||||
await user.stop()
|
||||
PROXY_ADD_DURATION_SECONDS.labels(
|
||||
status='failure'
|
||||
).observe(
|
||||
time.perf_counter() - proxy_add_start_time
|
||||
)
|
||||
else:
|
||||
spawner.add_poll_callback(self.user_stopped, user, server_name)
|
||||
finally:
|
||||
@@ -459,7 +636,7 @@ class BaseHandler(RequestHandler):
|
||||
|
||||
# hook up spawner._spawn_future so that other requests can await
|
||||
# this result
|
||||
finish_spawn_future = spawner._spawn_future = finish_user_spawn()
|
||||
finish_spawn_future = spawner._spawn_future = maybe_future(finish_user_spawn())
|
||||
def _clear_spawn_future(f):
|
||||
# clear spawner._spawn_future when it's done
|
||||
# keep an exception around, though, to prevent repeated implicit spawns
|
||||
@@ -471,7 +648,7 @@ class BaseHandler(RequestHandler):
|
||||
finish_spawn_future.add_done_callback(_clear_spawn_future)
|
||||
|
||||
try:
|
||||
yield gen.with_timeout(timedelta(seconds=self.slow_spawn_timeout), finish_spawn_future)
|
||||
await gen.with_timeout(timedelta(seconds=self.slow_spawn_timeout), finish_spawn_future)
|
||||
except gen.TimeoutError:
|
||||
# waiting_for_response indicates server process has started,
|
||||
# but is yet to become responsive.
|
||||
@@ -484,10 +661,13 @@ class BaseHandler(RequestHandler):
|
||||
|
||||
# start has finished, but the server hasn't come up
|
||||
# check if the server died while we were waiting
|
||||
status = yield spawner.poll()
|
||||
status = await spawner.poll()
|
||||
if status is not None:
|
||||
toc = IOLoop.current().time()
|
||||
self.statsd.timing('spawner.failure', (toc - tic) * 1000)
|
||||
SERVER_SPAWN_DURATION_SECONDS.labels(
|
||||
status=ServerSpawnStatus.failure
|
||||
).observe(time.perf_counter() - spawn_start_time)
|
||||
raise web.HTTPError(500, "Spawner failed to start [status=%s]. The logs for %s may contain details." % (
|
||||
status, spawner._log_name))
|
||||
|
||||
@@ -505,21 +685,19 @@ class BaseHandler(RequestHandler):
|
||||
self.log.warning("User %s is slow to be added to the proxy (timeout=%s)",
|
||||
user_server_name, self.slow_spawn_timeout)
|
||||
|
||||
@gen.coroutine
|
||||
def user_stopped(self, user, server_name):
|
||||
async def user_stopped(self, user, server_name):
|
||||
"""Callback that fires when the spawner has stopped"""
|
||||
spawner = user.spawners[server_name]
|
||||
status = yield spawner.poll()
|
||||
status = await spawner.poll()
|
||||
if status is None:
|
||||
status = 'unknown'
|
||||
self.log.warning("User %s server stopped, with exit code: %s",
|
||||
user.name, status,
|
||||
)
|
||||
yield self.proxy.delete_user(user, server_name)
|
||||
yield user.stop(server_name)
|
||||
await self.proxy.delete_user(user, server_name)
|
||||
await user.stop(server_name)
|
||||
|
||||
@gen.coroutine
|
||||
def stop_single_user(self, user, name=''):
|
||||
async def stop_single_user(self, user, name=''):
|
||||
if name not in user.spawners:
|
||||
raise KeyError("User %s has no such spawner %r", user.name, name)
|
||||
spawner = user.spawners[name]
|
||||
@@ -529,8 +707,7 @@ class BaseHandler(RequestHandler):
|
||||
# to avoid races
|
||||
spawner._stop_pending = True
|
||||
|
||||
@gen.coroutine
|
||||
def stop():
|
||||
async def stop():
|
||||
"""Stop the server
|
||||
|
||||
1. remove it from the proxy
|
||||
@@ -539,8 +716,8 @@ class BaseHandler(RequestHandler):
|
||||
"""
|
||||
tic = IOLoop.current().time()
|
||||
try:
|
||||
yield self.proxy.delete_user(user, name)
|
||||
yield user.stop(name)
|
||||
await self.proxy.delete_user(user, name)
|
||||
await user.stop(name)
|
||||
finally:
|
||||
spawner._stop_pending = False
|
||||
toc = IOLoop.current().time()
|
||||
@@ -548,13 +725,10 @@ class BaseHandler(RequestHandler):
|
||||
self.statsd.timing('spawner.stop', (toc - tic) * 1000)
|
||||
|
||||
try:
|
||||
yield gen.with_timeout(timedelta(seconds=self.slow_stop_timeout), stop())
|
||||
await gen.with_timeout(timedelta(seconds=self.slow_stop_timeout), stop())
|
||||
except gen.TimeoutError:
|
||||
if spawner._stop_pending:
|
||||
# hit timeout, but stop is still pending
|
||||
self.log.warning("User %s:%s server is slow to stop", user.name, name)
|
||||
else:
|
||||
raise
|
||||
# hit timeout, but stop is still pending
|
||||
self.log.warning("User %s:%s server is slow to stop", user.name, name)
|
||||
|
||||
#---------------------------------------------------------------
|
||||
# template rendering
|
||||
@@ -578,14 +752,16 @@ class BaseHandler(RequestHandler):
|
||||
return self.settings['jinja2_env'].get_template(name)
|
||||
|
||||
def render_template(self, name, **ns):
|
||||
ns.update(self.template_namespace)
|
||||
template_ns = {}
|
||||
template_ns.update(self.template_namespace)
|
||||
template_ns.update(ns)
|
||||
template = self.get_template(name)
|
||||
return template.render(**ns)
|
||||
return template.render(**template_ns)
|
||||
|
||||
@property
|
||||
def template_namespace(self):
|
||||
user = self.get_current_user()
|
||||
return dict(
|
||||
ns = dict(
|
||||
base_url=self.hub.base_url,
|
||||
prefix=self.base_url,
|
||||
user=user,
|
||||
@@ -595,6 +771,9 @@ class BaseHandler(RequestHandler):
|
||||
static_url=self.static_url,
|
||||
version_hash=self.version_hash,
|
||||
)
|
||||
if self.settings['template_vars']:
|
||||
ns.update(self.settings['template_vars'])
|
||||
return ns
|
||||
|
||||
def write_error(self, status_code, **kwargs):
|
||||
"""render custom error pages"""
|
||||
@@ -625,6 +804,13 @@ class BaseHandler(RequestHandler):
|
||||
)
|
||||
|
||||
self.set_header('Content-Type', 'text/html')
|
||||
# allow setting headers from exceptions
|
||||
# since exception handler clears headers
|
||||
headers = getattr(exception, 'headers', None)
|
||||
if headers:
|
||||
for key, value in headers.items():
|
||||
self.set_header(key, value)
|
||||
|
||||
# render the template
|
||||
try:
|
||||
html = self.render_template('%s.html' % status_code, **ns)
|
||||
@@ -667,13 +853,34 @@ class UserSpawnHandler(BaseHandler):
|
||||
which will in turn send her to /user/alice/notebooks/mynotebook.ipynb.
|
||||
"""
|
||||
|
||||
@gen.coroutine
|
||||
def get(self, name, user_path):
|
||||
async def get(self, name, user_path):
|
||||
if not user_path:
|
||||
user_path = '/'
|
||||
current_user = self.get_current_user()
|
||||
if (
|
||||
current_user
|
||||
and current_user.name != name
|
||||
and current_user.admin
|
||||
and self.settings.get('admin_access', False)
|
||||
):
|
||||
# allow admins to spawn on behalf of users
|
||||
user = self.find_user(name)
|
||||
if user is None:
|
||||
# no such user
|
||||
raise web.HTTPError(404, "No such user %s" % name)
|
||||
self.log.info("Admin %s requesting spawn on behalf of %s",
|
||||
current_user.name, user.name)
|
||||
admin_spawn = True
|
||||
should_spawn = True
|
||||
else:
|
||||
user = current_user
|
||||
admin_spawn = False
|
||||
# For non-admins, we should spawn if the user matches
|
||||
# otherwise redirect users to their own server
|
||||
should_spawn = (current_user and current_user.name == name)
|
||||
|
||||
if current_user and current_user.name == name:
|
||||
|
||||
if should_spawn:
|
||||
# if spawning fails for any reason, point users to /hub/home to retry
|
||||
self.extra_error_html = self.spawn_home_error
|
||||
|
||||
@@ -692,8 +899,8 @@ class UserSpawnHandler(BaseHandler):
|
||||
Make sure to connect to the proxied public URL %s
|
||||
""", self.request.full_url(), self.proxy.public_url)
|
||||
|
||||
# logged in as correct user, check for pending spawn
|
||||
spawner = current_user.spawner
|
||||
# logged in as valid user, check for pending spawn
|
||||
spawner = user.spawner
|
||||
|
||||
# First, check for previous failure.
|
||||
if (
|
||||
@@ -705,16 +912,18 @@ class UserSpawnHandler(BaseHandler):
|
||||
# Condition: spawner not active and _spawn_future exists and contains an Exception
|
||||
# Implicit spawn on /user/:name is not allowed if the user's last spawn failed.
|
||||
# We should point the user to Home if the most recent spawn failed.
|
||||
exc = spawner._spawn_future.exception()
|
||||
self.log.error("Preventing implicit spawn for %s because last spawn failed: %s",
|
||||
spawner._log_name, spawner._spawn_future.exception())
|
||||
raise spawner._spawn_future.exception()
|
||||
spawner._log_name, exc)
|
||||
# raise a copy because each time an Exception object is re-raised, its traceback grows
|
||||
raise copy.copy(exc).with_traceback(exc.__traceback__)
|
||||
|
||||
# check for pending spawn
|
||||
if spawner.pending and spawner._spawn_future:
|
||||
# wait on the pending spawn
|
||||
self.log.debug("Waiting for %s pending %s", spawner._log_name, spawner.pending)
|
||||
try:
|
||||
yield gen.with_timeout(timedelta(seconds=self.slow_spawn_timeout), spawner._spawn_future)
|
||||
await gen.with_timeout(timedelta(seconds=self.slow_spawn_timeout), spawner._spawn_future)
|
||||
except gen.TimeoutError:
|
||||
self.log.info("Pending spawn for %s didn't finish in %.1f seconds", spawner._log_name, self.slow_spawn_timeout)
|
||||
pass
|
||||
@@ -724,37 +933,50 @@ class UserSpawnHandler(BaseHandler):
|
||||
self.log.info("%s is pending %s", spawner._log_name, spawner.pending)
|
||||
# spawn has started, but not finished
|
||||
self.statsd.incr('redirects.user_spawn_pending', 1)
|
||||
html = self.render_template("spawn_pending.html", user=current_user)
|
||||
url_parts = []
|
||||
html = self.render_template(
|
||||
"spawn_pending.html",
|
||||
user=user,
|
||||
progress_url=spawner._progress_url,
|
||||
)
|
||||
self.finish(html)
|
||||
return
|
||||
|
||||
# spawn has supposedly finished, check on the status
|
||||
if spawner.ready:
|
||||
status = yield spawner.poll()
|
||||
status = await spawner.poll()
|
||||
else:
|
||||
status = 0
|
||||
|
||||
# server is not running, trigger spawn
|
||||
if status is not None:
|
||||
if spawner.options_form:
|
||||
self.redirect(url_concat(url_path_join(self.hub.base_url, 'spawn'),
|
||||
url_parts = [self.hub.base_url, 'spawn']
|
||||
if current_user.name != user.name:
|
||||
# spawning on behalf of another user
|
||||
url_parts.append(user.name)
|
||||
self.redirect(url_concat(url_path_join(*url_parts),
|
||||
{'next': self.request.uri}))
|
||||
return
|
||||
else:
|
||||
yield self.spawn_single_user(current_user)
|
||||
await self.spawn_single_user(user)
|
||||
|
||||
# spawn didn't finish, show pending page
|
||||
if spawner.pending:
|
||||
self.log.info("%s is pending %s", spawner._log_name, spawner.pending)
|
||||
# spawn has started, but not finished
|
||||
self.statsd.incr('redirects.user_spawn_pending', 1)
|
||||
html = self.render_template("spawn_pending.html", user=current_user)
|
||||
html = self.render_template(
|
||||
"spawn_pending.html",
|
||||
user=user,
|
||||
progress_url=spawner._progress_url,
|
||||
)
|
||||
self.finish(html)
|
||||
return
|
||||
|
||||
# We do exponential backoff here - since otherwise we can get stuck in a redirect loop!
|
||||
# This is important in many distributed proxy implementations - those are often eventually
|
||||
# consistent and can take upto a couple of seconds to actually apply throughout the cluster.
|
||||
# consistent and can take up to a couple of seconds to actually apply throughout the cluster.
|
||||
try:
|
||||
redirects = int(self.get_argument('redirects', 0))
|
||||
except ValueError:
|
||||
@@ -779,18 +1001,16 @@ class UserSpawnHandler(BaseHandler):
|
||||
)
|
||||
raise web.HTTPError(500, msg)
|
||||
|
||||
# set login cookie anew
|
||||
self.set_login_cookie(current_user)
|
||||
without_prefix = self.request.uri[len(self.hub.base_url):]
|
||||
target = url_path_join(self.base_url, without_prefix)
|
||||
if self.subdomain_host:
|
||||
target = current_user.host + target
|
||||
target = user.host + target
|
||||
|
||||
# record redirect count in query parameter
|
||||
if redirects:
|
||||
self.log.warning("Redirect loop detected on %s", self.request.uri)
|
||||
# add capped exponential backoff where cap is 10s
|
||||
yield gen.sleep(min(1 * (2 ** redirects), 10))
|
||||
await gen.sleep(min(1 * (2 ** redirects), 10))
|
||||
# rewrite target url with new `redirects` query value
|
||||
url_parts = urlparse(target)
|
||||
query_parts = parse_qs(url_parts.query)
|
||||
@@ -806,6 +1026,9 @@ class UserSpawnHandler(BaseHandler):
|
||||
# logged in as a different user, redirect
|
||||
self.statsd.incr('redirects.user_to_user', 1)
|
||||
target = url_path_join(current_user.url, user_path or '')
|
||||
if self.request.query:
|
||||
# FIXME: use urlunparse instead?
|
||||
target += '?' + self.request.query
|
||||
self.redirect(target)
|
||||
else:
|
||||
# not logged in, clear any cookies and reload
|
||||
@@ -819,13 +1042,13 @@ class UserSpawnHandler(BaseHandler):
|
||||
|
||||
class UserRedirectHandler(BaseHandler):
|
||||
"""Redirect requests to user servers.
|
||||
|
||||
|
||||
Allows public linking to "this file on your server".
|
||||
|
||||
|
||||
/user-redirect/path/to/foo will redirect to /user/:name/path/to/foo
|
||||
|
||||
|
||||
If the user is not logged in, send to login URL, redirecting back here.
|
||||
|
||||
|
||||
.. versionadded:: 0.7
|
||||
"""
|
||||
@web.authenticated
|
||||
|
@@ -20,7 +20,8 @@ class LogoutHandler(BaseHandler):
|
||||
self.clear_login_cookie()
|
||||
self.statsd.incr('logout')
|
||||
if self.authenticator.auto_login:
|
||||
self.render_template('logout.html')
|
||||
html = self.render_template('logout.html')
|
||||
self.finish(html)
|
||||
else:
|
||||
self.redirect(self.settings['login_url'], permanent=False)
|
||||
|
||||
@@ -41,8 +42,7 @@ class LoginHandler(BaseHandler):
|
||||
),
|
||||
)
|
||||
|
||||
@gen.coroutine
|
||||
def get(self):
|
||||
async def get(self):
|
||||
self.statsd.incr('login.request')
|
||||
user = self.get_current_user()
|
||||
if user:
|
||||
@@ -57,7 +57,7 @@ class LoginHandler(BaseHandler):
|
||||
# auto_login without a custom login handler
|
||||
# means that auth info is already in the request
|
||||
# (e.g. REMOTE_USER header)
|
||||
user = yield self.login_user()
|
||||
user = await self.login_user()
|
||||
if user is None:
|
||||
# auto_login failed, just 403
|
||||
raise web.HTTPError(403)
|
||||
@@ -71,27 +71,20 @@ class LoginHandler(BaseHandler):
|
||||
username = self.get_argument('username', default='')
|
||||
self.finish(self._render(username=username))
|
||||
|
||||
@gen.coroutine
|
||||
def post(self):
|
||||
async def post(self):
|
||||
# parse the arguments dict
|
||||
data = {}
|
||||
for arg in self.request.arguments:
|
||||
data[arg] = self.get_argument(arg, strip=False)
|
||||
|
||||
auth_timer = self.statsd.timer('login.authenticate').start()
|
||||
user = yield self.login_user(data)
|
||||
user = await self.login_user(data)
|
||||
auth_timer.stop(send=False)
|
||||
|
||||
if user:
|
||||
already_running = False
|
||||
if user.spawner.ready:
|
||||
status = yield user.spawner.poll()
|
||||
already_running = (status is None)
|
||||
if not already_running and not user.spawner.options_form \
|
||||
and not user.spawner.pending:
|
||||
# logging in triggers spawn
|
||||
yield self.spawn_single_user(user)
|
||||
self.redirect(self.get_next_url())
|
||||
# register current user for subsequent requests to user (e.g. logging the request)
|
||||
self.get_current_user = lambda: user
|
||||
self.redirect(self.get_next_url(user))
|
||||
else:
|
||||
html = self._render(
|
||||
login_error='Invalid username or password',
|
||||
|
16
jupyterhub/handlers/metrics.py
Normal file
16
jupyterhub/handlers/metrics.py
Normal file
@@ -0,0 +1,16 @@
|
||||
from prometheus_client import REGISTRY, CONTENT_TYPE_LATEST, generate_latest
|
||||
from tornado import gen
|
||||
|
||||
from .base import BaseHandler
|
||||
|
||||
class MetricsHandler(BaseHandler):
|
||||
"""
|
||||
Handler to serve Prometheus metrics
|
||||
"""
|
||||
async def get(self):
|
||||
self.set_header('Content-Type', CONTENT_TYPE_LATEST)
|
||||
self.write(generate_latest(REGISTRY))
|
||||
|
||||
default_handlers = [
|
||||
(r'/metrics$', MetricsHandler)
|
||||
]
|
@@ -3,6 +3,8 @@
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from collections import defaultdict
|
||||
from datetime import datetime
|
||||
from http.client import responses
|
||||
|
||||
from jinja2 import TemplateNotFound
|
||||
@@ -22,36 +24,17 @@ class RootHandler(BaseHandler):
|
||||
|
||||
If logged in, redirects to:
|
||||
|
||||
- single-user server if running
|
||||
- single-user server if settings.redirect_to_server (default)
|
||||
- hub home, otherwise
|
||||
|
||||
Otherwise, renders login page.
|
||||
"""
|
||||
def get(self):
|
||||
next_url = self.get_argument('next', '')
|
||||
if next_url and not next_url.startswith('/'):
|
||||
self.log.warning("Disallowing redirect outside JupyterHub: %r", next_url)
|
||||
next_url = ''
|
||||
if next_url and next_url.startswith(url_path_join(self.base_url, 'user/')):
|
||||
# add /hub/ prefix, to ensure we redirect to the right user's server.
|
||||
# The next request will be handled by UserSpawnHandler,
|
||||
# ultimately redirecting to the logged-in user's server.
|
||||
without_prefix = next_url[len(self.base_url):]
|
||||
next_url = url_path_join(self.hub.base_url, without_prefix)
|
||||
self.log.warning("Redirecting %s to %s. For sharing public links, use /user-redirect/",
|
||||
self.request.uri, next_url,
|
||||
)
|
||||
self.redirect(next_url)
|
||||
return
|
||||
user = self.get_current_user()
|
||||
if user:
|
||||
if user.running:
|
||||
url = user.url
|
||||
self.log.debug("User is running: %s", url)
|
||||
self.set_login_cookie(user) # set cookie
|
||||
else:
|
||||
url = url_path_join(self.hub.base_url, 'home')
|
||||
self.log.debug("User is not running: %s", url)
|
||||
if self.default_url:
|
||||
url = self.default_url
|
||||
elif user:
|
||||
url = self.get_next_url(user)
|
||||
else:
|
||||
url = self.settings['login_url']
|
||||
self.redirect(url)
|
||||
@@ -61,16 +44,16 @@ class HomeHandler(BaseHandler):
|
||||
"""Render the user's home page."""
|
||||
|
||||
@web.authenticated
|
||||
@gen.coroutine
|
||||
def get(self):
|
||||
async def get(self):
|
||||
user = self.get_current_user()
|
||||
if user.running:
|
||||
# trigger poll_and_notify event in case of a server that died
|
||||
yield user.spawner.poll_and_notify()
|
||||
# send the user to /spawn if they aren't running,
|
||||
await user.spawner.poll_and_notify()
|
||||
|
||||
# send the user to /spawn if they aren't running or pending a spawn,
|
||||
# to establish that this is an explicit spawn request rather
|
||||
# than an implicit one, which can be caused by any link to `/user/:name`
|
||||
url = user.url if user.running else url_path_join(self.hub.base_url, 'spawn')
|
||||
url = user.url if user.spawner.active else url_path_join(self.hub.base_url, 'spawn')
|
||||
html = self.render_template('home.html',
|
||||
user=user,
|
||||
url=url,
|
||||
@@ -85,29 +68,42 @@ class SpawnHandler(BaseHandler):
|
||||
|
||||
Only enabled when Spawner.options_form is defined.
|
||||
"""
|
||||
def _render_form(self, message=''):
|
||||
user = self.get_current_user()
|
||||
async def _render_form(self, message='', for_user=None):
|
||||
# Note that 'user' is the authenticated user making the request and
|
||||
# 'for_user' is the user whose server is being spawned.
|
||||
user = for_user or self.get_current_user()
|
||||
spawner_options_form = await user.spawner.get_options_form()
|
||||
return self.render_template('spawn.html',
|
||||
user=user,
|
||||
spawner_options_form=user.spawner.options_form,
|
||||
for_user=for_user,
|
||||
spawner_options_form=spawner_options_form,
|
||||
error_message=message,
|
||||
url=self.request.uri,
|
||||
spawner=for_user.spawner
|
||||
)
|
||||
|
||||
@web.authenticated
|
||||
def get(self):
|
||||
async def get(self, for_user=None):
|
||||
"""GET renders form for spawning with user-specified options
|
||||
|
||||
or triggers spawn via redirect if there is no form.
|
||||
"""
|
||||
user = self.get_current_user()
|
||||
user = current_user = self.get_current_user()
|
||||
if for_user is not None and for_user != user.name:
|
||||
if not user.admin:
|
||||
raise web.HTTPError(403, "Only admins can spawn on behalf of other users")
|
||||
|
||||
user = self.find_user(for_user)
|
||||
if user is None:
|
||||
raise web.HTTPError(404, "No such user: %s" % for_user)
|
||||
|
||||
if not self.allow_named_servers and user.running:
|
||||
url = user.url
|
||||
self.log.debug("User is running: %s", url)
|
||||
self.redirect(url)
|
||||
return
|
||||
if user.spawner.options_form:
|
||||
self.finish(self._render_form())
|
||||
form = await self._render_form(for_user=user)
|
||||
self.finish(form)
|
||||
else:
|
||||
# Explicit spawn request: clear _spawn_future
|
||||
# which may have been saved to prevent implicit spawns
|
||||
@@ -118,10 +114,15 @@ class SpawnHandler(BaseHandler):
|
||||
self.redirect(user.url)
|
||||
|
||||
@web.authenticated
|
||||
@gen.coroutine
|
||||
def post(self):
|
||||
async def post(self, for_user=None):
|
||||
"""POST spawns with user-specified options"""
|
||||
user = self.get_current_user()
|
||||
user = current_user = self.get_current_user()
|
||||
if for_user is not None and for_user != user.name:
|
||||
if not user.admin:
|
||||
raise web.HTTPError(403, "Only admins can spawn on behalf of other users")
|
||||
user = self.find_user(for_user)
|
||||
if user is None:
|
||||
raise web.HTTPError(404, "No such user: %s" % for_user)
|
||||
if not self.allow_named_servers and user.running:
|
||||
url = user.url
|
||||
self.log.warning("User is already running: %s", url)
|
||||
@@ -138,12 +139,14 @@ class SpawnHandler(BaseHandler):
|
||||
form_options["%s_file"%key] = byte_list
|
||||
try:
|
||||
options = user.spawner.options_from_form(form_options)
|
||||
yield self.spawn_single_user(user, options=options)
|
||||
await self.spawn_single_user(user, options=options)
|
||||
except Exception as e:
|
||||
self.log.error("Failed to spawn single-user server with form", exc_info=True)
|
||||
self.finish(self._render_form(str(e)))
|
||||
form = await self._render_form(message=str(e), for_user=user)
|
||||
self.finish(form)
|
||||
return
|
||||
self.set_login_cookie(user)
|
||||
if current_user is user:
|
||||
self.set_login_cookie(user)
|
||||
url = user.url
|
||||
|
||||
next_url = self.get_argument('next', '')
|
||||
@@ -154,6 +157,7 @@ class SpawnHandler(BaseHandler):
|
||||
|
||||
self.redirect(url)
|
||||
|
||||
|
||||
class AdminHandler(BaseHandler):
|
||||
"""Render the admin page."""
|
||||
|
||||
@@ -201,7 +205,7 @@ class AdminHandler(BaseHandler):
|
||||
# get User.col.desc() order objects
|
||||
ordered = [ getattr(c, o)() for c, o in zip(cols, orders) ]
|
||||
|
||||
users = self.db.query(orm.User).join(orm.Spawner).order_by(*ordered)
|
||||
users = self.db.query(orm.User).outerjoin(orm.Spawner).order_by(*ordered)
|
||||
users = [ self._user_from_orm(u) for u in users ]
|
||||
running = [ u for u in users if u.running ]
|
||||
|
||||
@@ -220,18 +224,92 @@ class TokenPageHandler(BaseHandler):
|
||||
|
||||
@web.authenticated
|
||||
def get(self):
|
||||
html = self.render_template('token.html')
|
||||
never = datetime(1900, 1, 1)
|
||||
|
||||
user = self.get_current_user()
|
||||
def sort_key(token):
|
||||
return (
|
||||
token.last_activity or never,
|
||||
token.created or never,
|
||||
)
|
||||
|
||||
now = datetime.utcnow()
|
||||
api_tokens = []
|
||||
for token in sorted(user.api_tokens, key=sort_key, reverse=True):
|
||||
if token.expires_at and token.expires_at < now:
|
||||
self.db.delete(token)
|
||||
self.db.commit()
|
||||
continue
|
||||
api_tokens.append(token)
|
||||
|
||||
# group oauth client tokens by client id
|
||||
oauth_tokens = defaultdict(list)
|
||||
for token in user.oauth_tokens:
|
||||
if token.expires_at and token.expires_at < now:
|
||||
self.log.warning("Deleting expired token")
|
||||
self.db.delete(token)
|
||||
self.db.commit()
|
||||
continue
|
||||
if not token.client_id:
|
||||
# token should have been deleted when client was deleted
|
||||
self.log.warning("Deleting stale oauth token for %s", user.name)
|
||||
self.db.delete(token)
|
||||
self.db.commit()
|
||||
continue
|
||||
oauth_tokens[token.client_id].append(token)
|
||||
|
||||
# get the earliest created and latest last_activity
|
||||
# timestamp for a given oauth client
|
||||
oauth_clients = []
|
||||
for client_id, tokens in oauth_tokens.items():
|
||||
created = tokens[0].created
|
||||
last_activity = tokens[0].last_activity
|
||||
for token in tokens[1:]:
|
||||
if token.created < created:
|
||||
created = token.created
|
||||
if (
|
||||
last_activity is None or
|
||||
(token.last_activity and token.last_activity > last_activity)
|
||||
):
|
||||
last_activity = token.last_activity
|
||||
token = tokens[0]
|
||||
oauth_clients.append({
|
||||
'client': token.client,
|
||||
'description': token.client.description or token.client.identifier,
|
||||
'created': created,
|
||||
'last_activity': last_activity,
|
||||
'tokens': tokens,
|
||||
# only need one token id because
|
||||
# revoking one oauth token revokes all oauth tokens for that client
|
||||
'token_id': tokens[0].api_id,
|
||||
'token_count': len(tokens),
|
||||
})
|
||||
|
||||
# sort oauth clients by last activity, created
|
||||
def sort_key(client):
|
||||
return (
|
||||
client['last_activity'] or never,
|
||||
client['created'] or never,
|
||||
)
|
||||
|
||||
oauth_clients = sorted(oauth_clients, key=sort_key, reverse=True)
|
||||
|
||||
html = self.render_template(
|
||||
'token.html',
|
||||
api_tokens=api_tokens,
|
||||
oauth_clients=oauth_clients,
|
||||
)
|
||||
self.finish(html)
|
||||
|
||||
|
||||
class ProxyErrorHandler(BaseHandler):
|
||||
"""Handler for rendering proxy error pages"""
|
||||
|
||||
|
||||
def get(self, status_code_s):
|
||||
status_code = int(status_code_s)
|
||||
status_message = responses.get(status_code, 'Unknown HTTP Error')
|
||||
# build template namespace
|
||||
|
||||
|
||||
hub_home = url_path_join(self.hub.base_url, 'home')
|
||||
message_html = ''
|
||||
if status_code == 503:
|
||||
@@ -262,6 +340,7 @@ default_handlers = [
|
||||
(r'/home', HomeHandler),
|
||||
(r'/admin', AdminHandler),
|
||||
(r'/spawn', SpawnHandler),
|
||||
(r'/spawn/([^/]+)', SpawnHandler),
|
||||
(r'/token', TokenPageHandler),
|
||||
(r'/error/(\d+)', ProxyErrorHandler),
|
||||
]
|
||||
|
@@ -4,10 +4,32 @@
|
||||
|
||||
import json
|
||||
import traceback
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
|
||||
from tornado.log import LogFormatter, access_log
|
||||
from tornado.web import StaticFileHandler, HTTPError
|
||||
|
||||
from .metrics import prometheus_log_method
|
||||
|
||||
|
||||
def coroutine_frames(all_frames):
|
||||
"""Extract coroutine boilerplate frames from a frame list
|
||||
|
||||
for better stack/traceback printing of coroutines
|
||||
"""
|
||||
useful_frames = []
|
||||
for frame in all_frames:
|
||||
if frame[0] == '<string>' and frame[2] == 'raise_exc_info':
|
||||
continue
|
||||
# start out conservative with filename + function matching
|
||||
# maybe just filename matching would be sufficient
|
||||
elif frame[0].endswith('tornado/gen.py') and frame[2] in {'run', 'wrapper', '__init__'}:
|
||||
continue
|
||||
elif frame[0].endswith('tornado/concurrent.py') and frame[2] == 'result':
|
||||
continue
|
||||
useful_frames.append(frame)
|
||||
return useful_frames
|
||||
|
||||
|
||||
def coroutine_traceback(typ, value, tb):
|
||||
"""Scrub coroutine frames from a traceback
|
||||
@@ -19,17 +41,8 @@ def coroutine_traceback(typ, value, tb):
|
||||
Returns a list of strings (like traceback.format_tb)
|
||||
"""
|
||||
all_frames = traceback.extract_tb(tb)
|
||||
useful_frames = []
|
||||
for frame in all_frames:
|
||||
if frame[0] == '<string>' and frame[2] == 'raise_exc_info':
|
||||
continue
|
||||
# start out conservative with filename + function matching
|
||||
# maybe just filename matching would be sufficient
|
||||
elif frame[0].endswith('tornado/gen.py') and frame[2] in {'run', 'wrapper'}:
|
||||
continue
|
||||
elif frame[0].endswith('tornado/concurrent.py') and frame[2] == 'result':
|
||||
continue
|
||||
useful_frames.append(frame)
|
||||
useful_frames = coroutine_frames(all_frames)
|
||||
|
||||
tb_list = ['Traceback (most recent call last):\n']
|
||||
tb_list.extend(traceback.format_list(useful_frames))
|
||||
tb_list.extend(traceback.format_exception_only(typ, value))
|
||||
@@ -41,11 +54,33 @@ class CoroutineLogFormatter(LogFormatter):
|
||||
def formatException(self, exc_info):
|
||||
return ''.join(coroutine_traceback(*exc_info))
|
||||
|
||||
# url params to be scrubbed if seen
|
||||
# any url param that *contains* one of these
|
||||
# will be scrubbed from logs
|
||||
SCRUB_PARAM_KEYS = ('token', 'auth', 'key', 'code', 'state')
|
||||
|
||||
|
||||
def _scrub_uri(uri):
|
||||
"""scrub auth info from uri"""
|
||||
if '/api/authorizations/cookie/' in uri or '/api/authorizations/token/' in uri:
|
||||
uri = uri.rsplit('/', 1)[0] + '/[secret]'
|
||||
parsed = urlparse(uri)
|
||||
if parsed.query:
|
||||
# check for potentially sensitive url params
|
||||
# use manual list + split rather than parsing
|
||||
# to minimally perturb original
|
||||
parts = parsed.query.split('&')
|
||||
changed = False
|
||||
for i, s in enumerate(parts):
|
||||
if '=' in s:
|
||||
key, value = s.split('=', 1)
|
||||
for substring in SCRUB_PARAM_KEYS:
|
||||
if substring in key:
|
||||
parts[i] = key + '=[secret]'
|
||||
changed = True
|
||||
if changed:
|
||||
parsed = parsed._replace(query='&'.join(parts))
|
||||
return urlunparse(parsed)
|
||||
return uri
|
||||
|
||||
|
||||
@@ -68,6 +103,7 @@ def log_request(handler):
|
||||
- get proxied IP instead of proxy IP
|
||||
- log referer for redirect and failed requests
|
||||
- log user-agent for failed requests
|
||||
- record per-request metrics in prometheus
|
||||
"""
|
||||
status = handler.get_status()
|
||||
request = handler.request
|
||||
@@ -118,5 +154,6 @@ def log_request(handler):
|
||||
# to get headers from tornado
|
||||
location = handler._headers.get('Location')
|
||||
if location:
|
||||
ns['location'] = ' → {}'.format(location)
|
||||
ns['location'] = ' -> {}'.format(_scrub_uri(location))
|
||||
log_method(msg.format(**ns))
|
||||
prometheus_log_method(handler)
|
||||
|
94
jupyterhub/metrics.py
Normal file
94
jupyterhub/metrics.py
Normal file
@@ -0,0 +1,94 @@
|
||||
"""
|
||||
Prometheus metrics exported by JupyterHub
|
||||
|
||||
Read https://prometheus.io/docs/practices/naming/ for naming
|
||||
conventions for metrics & labels. We generally prefer naming them
|
||||
`<noun>_<verb>_<type_suffix>`. So a histogram that's tracking
|
||||
the duration (in seconds) of servers spawning would be called
|
||||
SERVER_SPAWN_DURATION_SECONDS.
|
||||
|
||||
We also create an Enum for each 'status' type label in every metric
|
||||
we collect. This is to make sure that the metrics exist regardless
|
||||
of the condition happening or not. For example, if we don't explicitly
|
||||
create them, the metric spawn_duration_seconds{status="failure"}
|
||||
will not actually exist until the first failure. This makes dashboarding
|
||||
and alerting difficult, so we explicitly list statuses and create
|
||||
them manually here.
|
||||
"""
|
||||
from enum import Enum
|
||||
|
||||
from prometheus_client import Histogram
|
||||
|
||||
REQUEST_DURATION_SECONDS = Histogram(
|
||||
'request_duration_seconds',
|
||||
'request duration for all HTTP requests',
|
||||
['method', 'handler', 'code']
|
||||
)
|
||||
|
||||
SERVER_SPAWN_DURATION_SECONDS = Histogram(
|
||||
'server_spawn_duration_seconds',
|
||||
'time taken for server spawning operation',
|
||||
['status'],
|
||||
# Use custom bucket sizes, since the default bucket ranges
|
||||
# are meant for quick running processes. Spawns can take a while!
|
||||
buckets=[0.5, 1, 2.5, 5, 10, 15, 30, 60, 120, float("inf")]
|
||||
)
|
||||
|
||||
class ServerSpawnStatus(Enum):
|
||||
"""
|
||||
Possible values for 'status' label of SERVER_SPAWN_DURATION_SECONDS
|
||||
"""
|
||||
success = 'success'
|
||||
failure = 'failure'
|
||||
already_pending = 'already-pending'
|
||||
throttled = 'throttled'
|
||||
too_many_users = 'too-many-users'
|
||||
|
||||
def __str__(self):
|
||||
return self.value
|
||||
|
||||
for s in ServerSpawnStatus:
|
||||
# Create empty metrics with the given status
|
||||
SERVER_SPAWN_DURATION_SECONDS.labels(status=s)
|
||||
|
||||
|
||||
PROXY_ADD_DURATION_SECONDS = Histogram(
|
||||
'proxy_add_duration_seconds',
|
||||
'duration for adding user routes to proxy',
|
||||
['status']
|
||||
)
|
||||
|
||||
class ProxyAddStatus(Enum):
|
||||
"""
|
||||
Possible values for 'status' label of PROXY_ADD_DURATION_SECONDS
|
||||
"""
|
||||
success = 'success'
|
||||
failure = 'failure'
|
||||
|
||||
def __str__(self):
|
||||
return self.value
|
||||
|
||||
for s in ProxyAddStatus:
|
||||
PROXY_ADD_DURATION_SECONDS.labels(status=s)
|
||||
|
||||
def prometheus_log_method(handler):
|
||||
"""
|
||||
Tornado log handler for recording RED metrics.
|
||||
|
||||
We record the following metrics:
|
||||
Rate – the number of requests, per second, your services are serving.
|
||||
Errors – the number of failed requests per second.
|
||||
Duration – The amount of time each request takes expressed as a time interval.
|
||||
|
||||
We use a fully qualified name of the handler as a label,
|
||||
rather than every url path to reduce cardinality.
|
||||
|
||||
This function should be either the value of or called from a function
|
||||
that is the 'log_function' tornado setting. This makes it get called
|
||||
at the end of every request, allowing us to record the metrics we need.
|
||||
"""
|
||||
REQUEST_DURATION_SECONDS.labels(
|
||||
method=handler.request.method,
|
||||
handler='{}.{}'.format(handler.__class__.__module__, type(handler).__name__),
|
||||
code=handler.get_status()
|
||||
).observe(handler.request.request_time())
|
@@ -39,15 +39,19 @@ class JupyterHubSiteAdapter(AuthorizationCodeGrantSiteAdapter):
|
||||
def authenticate(self, request, environ, scopes, client):
|
||||
handler = request.handler
|
||||
user = handler.get_current_user()
|
||||
# ensure session_id is set
|
||||
session_id = handler.get_session_cookie()
|
||||
if session_id is None:
|
||||
session_id = handler.set_session_cookie()
|
||||
if user:
|
||||
return {}, user.id
|
||||
return {'session_id': session_id}, user.id
|
||||
else:
|
||||
raise UserNotAuthenticated()
|
||||
|
||||
def user_has_denied_access(self, request):
|
||||
# user can't deny access
|
||||
return False
|
||||
|
||||
|
||||
|
||||
class HubDBMixin(object):
|
||||
"""Mixin for connecting to the hub database"""
|
||||
@@ -65,17 +69,19 @@ class AccessTokenStore(HubDBMixin, oauth2.store.AccessTokenStore):
|
||||
:param access_token: An instance of :class:`oauth2.datatype.AccessToken`.
|
||||
|
||||
"""
|
||||
|
||||
user = self.db.query(orm.User).filter(orm.User.id == access_token.user_id).first()
|
||||
|
||||
user = self.db.query(orm.User).filter_by(id=access_token.user_id).first()
|
||||
if user is None:
|
||||
raise ValueError("No user for access token: %s" % access_token.user_id)
|
||||
client = self.db.query(orm.OAuthClient).filter_by(identifier=access_token.client_id).first()
|
||||
orm_access_token = orm.OAuthAccessToken(
|
||||
client_id=access_token.client_id,
|
||||
client=client,
|
||||
grant_type=access_token.grant_type,
|
||||
expires_at=access_token.expires_at,
|
||||
refresh_token=access_token.refresh_token,
|
||||
refresh_expires_at=access_token.refresh_expires_at,
|
||||
token=access_token.token,
|
||||
session_id=access_token.data['session_id'],
|
||||
user=user,
|
||||
)
|
||||
self.db.add(orm_access_token)
|
||||
@@ -96,10 +102,12 @@ class AuthCodeStore(HubDBMixin, oauth2.store.AuthCodeStore):
|
||||
given code.
|
||||
|
||||
"""
|
||||
orm_code = self.db\
|
||||
.query(orm.OAuthCode)\
|
||||
.filter(orm.OAuthCode.code == code)\
|
||||
orm_code = (
|
||||
self.db
|
||||
.query(orm.OAuthCode)
|
||||
.filter_by(code=code)
|
||||
.first()
|
||||
)
|
||||
if orm_code is None:
|
||||
raise AuthCodeNotFound()
|
||||
else:
|
||||
@@ -110,9 +118,9 @@ class AuthCodeStore(HubDBMixin, oauth2.store.AuthCodeStore):
|
||||
redirect_uri=orm_code.redirect_uri,
|
||||
scopes=[],
|
||||
user_id=orm_code.user_id,
|
||||
data={'session_id': orm_code.session_id},
|
||||
)
|
||||
|
||||
|
||||
def save_code(self, authorization_code):
|
||||
"""
|
||||
Stores the data belonging to an authorization code token.
|
||||
@@ -120,12 +128,31 @@ class AuthCodeStore(HubDBMixin, oauth2.store.AuthCodeStore):
|
||||
:param authorization_code: An instance of
|
||||
:class:`oauth2.datatype.AuthorizationCode`.
|
||||
"""
|
||||
orm_client = (
|
||||
self.db
|
||||
.query(orm.OAuthClient)
|
||||
.filter_by(identifier=authorization_code.client_id)
|
||||
.first()
|
||||
)
|
||||
if orm_client is None:
|
||||
raise ValueError("No such client: %s" % authorization_code.client_id)
|
||||
|
||||
orm_user = (
|
||||
self.db
|
||||
.query(orm.User)
|
||||
.filter_by(id=authorization_code.user_id)
|
||||
.first()
|
||||
)
|
||||
if orm_user is None:
|
||||
raise ValueError("No such user: %s" % authorization_code.user_id)
|
||||
|
||||
orm_code = orm.OAuthCode(
|
||||
client_id=authorization_code.client_id,
|
||||
client=orm_client,
|
||||
code=authorization_code.code,
|
||||
expires_at=authorization_code.expires_at,
|
||||
user_id=authorization_code.user_id,
|
||||
user=orm_user,
|
||||
redirect_uri=authorization_code.redirect_uri,
|
||||
session_id=authorization_code.data.get('session_id', ''),
|
||||
)
|
||||
self.db.add(orm_code)
|
||||
self.db.commit()
|
||||
@@ -139,7 +166,7 @@ class AuthCodeStore(HubDBMixin, oauth2.store.AuthCodeStore):
|
||||
|
||||
:param code: The authorization code.
|
||||
"""
|
||||
orm_code = self.db.query(orm.OAuthCode).filter(orm.OAuthCode.code == code).first()
|
||||
orm_code = self.db.query(orm.OAuthCode).filter_by(code=code).first()
|
||||
if orm_code is not None:
|
||||
self.db.delete(orm_code)
|
||||
self.db.commit()
|
||||
@@ -159,7 +186,7 @@ class HashComparable:
|
||||
"""
|
||||
def __init__(self, hashed_token):
|
||||
self.hashed_token = hashed_token
|
||||
|
||||
|
||||
def __repr__(self):
|
||||
return "<{} '{}'>".format(self.__class__.__name__, self.hashed_token)
|
||||
|
||||
@@ -178,33 +205,38 @@ class ClientStore(HubDBMixin, oauth2.store.ClientStore):
|
||||
:raises: :class:`oauth2.error.ClientNotFoundError` if no data could be retrieved for
|
||||
given client_id.
|
||||
"""
|
||||
orm_client = self.db\
|
||||
.query(orm.OAuthClient)\
|
||||
.filter(orm.OAuthClient.identifier == client_id)\
|
||||
orm_client = (
|
||||
self.db
|
||||
.query(orm.OAuthClient)
|
||||
.filter_by(identifier=client_id)
|
||||
.first()
|
||||
)
|
||||
if orm_client is None:
|
||||
raise ClientNotFoundError()
|
||||
return Client(identifier=client_id,
|
||||
redirect_uris=[orm_client.redirect_uri],
|
||||
secret=HashComparable(orm_client.secret),
|
||||
)
|
||||
|
||||
def add_client(self, client_id, client_secret, redirect_uri):
|
||||
|
||||
def add_client(self, client_id, client_secret, redirect_uri, description=''):
|
||||
"""Add a client
|
||||
|
||||
|
||||
hash its client_secret before putting it in the database.
|
||||
"""
|
||||
# clear existing clients with same ID
|
||||
for client in self.db\
|
||||
.query(orm.OAuthClient)\
|
||||
.filter(orm.OAuthClient.identifier == client_id):
|
||||
self.db.delete(client)
|
||||
for orm_client in (
|
||||
self.db
|
||||
.query(orm.OAuthClient)\
|
||||
.filter_by(identifier=client_id)
|
||||
):
|
||||
self.db.delete(orm_client)
|
||||
self.db.commit()
|
||||
|
||||
orm_client = orm.OAuthClient(
|
||||
identifier=client_id,
|
||||
secret=hash_token(client_secret),
|
||||
redirect_uri=redirect_uri,
|
||||
description=description,
|
||||
)
|
||||
self.db.add(orm_client)
|
||||
self.db.commit()
|
||||
@@ -215,7 +247,7 @@ def make_provider(session_factory, url_prefix, login_url):
|
||||
token_store = AccessTokenStore(session_factory)
|
||||
code_store = AuthCodeStore(session_factory)
|
||||
client_store = ClientStore(session_factory)
|
||||
|
||||
|
||||
provider = Provider(
|
||||
access_token_store=token_store,
|
||||
auth_code_store=code_store,
|
||||
|
@@ -4,12 +4,12 @@
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import socket
|
||||
from urllib.parse import urlparse
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
import warnings
|
||||
|
||||
from traitlets import (
|
||||
HasTraits, Instance, Integer, Unicode,
|
||||
default, observe,
|
||||
default, observe, validate,
|
||||
)
|
||||
from .traitlets import URLPrefix
|
||||
from . import orm
|
||||
@@ -33,6 +33,41 @@ class Server(HasTraits):
|
||||
port = Integer()
|
||||
base_url = URLPrefix('/')
|
||||
cookie_name = Unicode('')
|
||||
connect_url = Unicode('')
|
||||
bind_url = Unicode('')
|
||||
|
||||
@default('bind_url')
|
||||
def bind_url_default(self):
|
||||
"""representation of URL used for binding
|
||||
|
||||
Never used in APIs, only logging,
|
||||
since it can be non-connectable value, such as '', meaning all interfaces.
|
||||
"""
|
||||
if self.ip in {'', '0.0.0.0'}:
|
||||
return self.url.replace(self._connect_ip, self.ip or '*', 1)
|
||||
return self.url
|
||||
|
||||
@observe('bind_url')
|
||||
def _bind_url_changed(self, change):
|
||||
urlinfo = urlparse(change.new)
|
||||
self.proto = urlinfo.scheme
|
||||
self.ip = urlinfo.hostname or ''
|
||||
port = urlinfo.port
|
||||
if port is None:
|
||||
if self.proto == 'https':
|
||||
port = 443
|
||||
else:
|
||||
port = 80
|
||||
self.port = port
|
||||
|
||||
@validate('connect_url')
|
||||
def _connect_url_add_prefix(self, proposal):
|
||||
"""Ensure connect_url includes base_url"""
|
||||
urlinfo = urlparse(proposal.value)
|
||||
if not urlinfo.path.startswith(self.base_url):
|
||||
urlinfo = urlinfo._replace(path=self.base_url)
|
||||
return urlunparse(urlinfo)
|
||||
return proposal.value
|
||||
|
||||
@property
|
||||
def _connect_ip(self):
|
||||
@@ -70,16 +105,7 @@ class Server(HasTraits):
|
||||
@classmethod
|
||||
def from_url(cls, url):
|
||||
"""Create a Server from a given URL"""
|
||||
urlinfo = urlparse(url)
|
||||
proto = urlinfo.scheme
|
||||
ip = urlinfo.hostname or ''
|
||||
port = urlinfo.port
|
||||
if not port:
|
||||
if proto == 'https':
|
||||
port = 443
|
||||
else:
|
||||
port = 80
|
||||
return cls(proto=proto, ip=ip, port=port, base_url=urlinfo.path)
|
||||
return cls(bind_url=url, base_url=urlparse(url).path)
|
||||
|
||||
@default('port')
|
||||
def _default_port(self):
|
||||
@@ -107,6 +133,12 @@ class Server(HasTraits):
|
||||
|
||||
@property
|
||||
def host(self):
|
||||
if self.connect_url:
|
||||
parsed = urlparse(self.connect_url)
|
||||
return "{proto}://{host}".format(
|
||||
proto=parsed.scheme,
|
||||
host=parsed.netloc,
|
||||
)
|
||||
return "{proto}://{ip}:{port}".format(
|
||||
proto=self.proto,
|
||||
ip=self._connect_ip,
|
||||
@@ -115,22 +147,13 @@ class Server(HasTraits):
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
if self.connect_url:
|
||||
return self.connect_url
|
||||
return "{host}{uri}".format(
|
||||
host=self.host,
|
||||
uri=self.base_url,
|
||||
)
|
||||
|
||||
@property
|
||||
def bind_url(self):
|
||||
"""representation of URL used for binding
|
||||
|
||||
Never used in APIs, only logging,
|
||||
since it can be non-connectable value, such as '', meaning all interfaces.
|
||||
"""
|
||||
if self.ip in {'', '0.0.0.0'}:
|
||||
return self.url.replace(self._connect_ip, self.ip or '*', 1)
|
||||
return self.url
|
||||
|
||||
def wait_up(self, timeout=10, http=False):
|
||||
"""Wait for this server to come up"""
|
||||
if http:
|
||||
@@ -152,7 +175,7 @@ class Hub(Server):
|
||||
of the server base_url.
|
||||
"""
|
||||
|
||||
cookie_name = 'jupyter-hub-token'
|
||||
cookie_name = 'jupyterhub-hub-login'
|
||||
|
||||
@property
|
||||
def server(self):
|
||||
|
@@ -3,7 +3,7 @@
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timedelta
|
||||
import enum
|
||||
import json
|
||||
|
||||
@@ -14,22 +14,28 @@ from tornado.log import app_log
|
||||
|
||||
from sqlalchemy.types import TypeDecorator, TEXT, LargeBinary
|
||||
from sqlalchemy import (
|
||||
inspect,
|
||||
create_engine, event, inspect, or_,
|
||||
Column, Integer, ForeignKey, Unicode, Boolean,
|
||||
DateTime, Enum
|
||||
DateTime, Enum, Table,
|
||||
)
|
||||
from sqlalchemy.ext.declarative import declarative_base, declared_attr
|
||||
from sqlalchemy.orm import sessionmaker, relationship
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy.interfaces import PoolListener
|
||||
from sqlalchemy.orm import (
|
||||
Session,
|
||||
interfaces, object_session, relationship, sessionmaker,
|
||||
)
|
||||
|
||||
from sqlalchemy.pool import StaticPool
|
||||
from sqlalchemy.sql.expression import bindparam
|
||||
from sqlalchemy import create_engine, Table
|
||||
|
||||
from .dbutil import _temp_alembic_ini
|
||||
from .utils import (
|
||||
random_port,
|
||||
new_token, hash_token, compare_token,
|
||||
)
|
||||
|
||||
# top-level variable for easier mocking in tests
|
||||
utcnow = datetime.utcnow
|
||||
|
||||
|
||||
class JSONDict(TypeDecorator):
|
||||
"""Represents an immutable structure as a json-encoded string.
|
||||
@@ -78,8 +84,8 @@ class Server(Base):
|
||||
|
||||
# user:group many:many mapping table
|
||||
user_group_map = Table('user_group_map', Base.metadata,
|
||||
Column('user_id', ForeignKey('users.id'), primary_key=True),
|
||||
Column('group_id', ForeignKey('groups.id'), primary_key=True),
|
||||
Column('user_id', ForeignKey('users.id', ondelete='CASCADE'), primary_key=True),
|
||||
Column('group_id', ForeignKey('groups.id', ondelete='CASCADE'), primary_key=True),
|
||||
)
|
||||
|
||||
|
||||
@@ -88,7 +94,7 @@ class Group(Base):
|
||||
__tablename__ = 'groups'
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
name = Column(Unicode(255), unique=True)
|
||||
users = relationship('User', secondary='user_group_map', back_populates='groups')
|
||||
users = relationship('User', secondary='user_group_map', backref='groups')
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s %s (%i users)>" % (
|
||||
@@ -129,15 +135,34 @@ class User(Base):
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
name = Column(Unicode(255), unique=True)
|
||||
|
||||
_orm_spawners = relationship("Spawner", backref="user")
|
||||
_orm_spawners = relationship(
|
||||
"Spawner",
|
||||
backref="user",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
@property
|
||||
def orm_spawners(self):
|
||||
return {s.name: s for s in self._orm_spawners}
|
||||
|
||||
admin = Column(Boolean, default=False)
|
||||
last_activity = Column(DateTime, default=datetime.utcnow)
|
||||
created = Column(DateTime, default=datetime.utcnow)
|
||||
last_activity = Column(DateTime, nullable=True)
|
||||
|
||||
api_tokens = relationship("APIToken", backref="user")
|
||||
api_tokens = relationship(
|
||||
"APIToken",
|
||||
backref="user",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
oauth_tokens = relationship(
|
||||
"OAuthAccessToken",
|
||||
backref="user",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
oauth_codes = relationship(
|
||||
"OAuthCode",
|
||||
backref="user",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
cookie_id = Column(Unicode(255), default=new_token, nullable=False, unique=True)
|
||||
# User.state is actually Spawner state
|
||||
# We will need to figure something else out if/when we have multiple spawners per user
|
||||
@@ -145,8 +170,6 @@ class User(Base):
|
||||
# Authenticators can store their state here:
|
||||
# Encryption is handled elsewhere
|
||||
encrypted_auth_state = Column(LargeBinary)
|
||||
# group mapping
|
||||
groups = relationship('Group', secondary='user_group_map', back_populates='users')
|
||||
|
||||
def __repr__(self):
|
||||
return "<{cls}({name} {running}/{total} running)>".format(
|
||||
@@ -156,12 +179,12 @@ class User(Base):
|
||||
running=sum(bool(s.server) for s in self._orm_spawners),
|
||||
)
|
||||
|
||||
def new_api_token(self, token=None, generated=True):
|
||||
def new_api_token(self, token=None, **kwargs):
|
||||
"""Create a new API token
|
||||
|
||||
If `token` is given, load that token.
|
||||
"""
|
||||
return APIToken.new(token=token, user=self, generated=generated)
|
||||
return APIToken.new(token=token, user=self, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def find(cls, db, name):
|
||||
@@ -173,17 +196,18 @@ class User(Base):
|
||||
class Spawner(Base):
|
||||
""""State about a Spawner"""
|
||||
__tablename__ = 'spawners'
|
||||
|
||||
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
user_id = Column(Integer, ForeignKey('users.id', ondelete='CASCADE'))
|
||||
|
||||
server_id = Column(Integer, ForeignKey('servers.id', ondelete='SET NULL'))
|
||||
server = relationship(Server)
|
||||
server = relationship(Server, cascade="all")
|
||||
|
||||
state = Column(JSONDict)
|
||||
name = Column(Unicode(255))
|
||||
|
||||
last_activity = Column(DateTime, default=datetime.utcnow)
|
||||
started = Column(DateTime)
|
||||
last_activity = Column(DateTime, nullable=True)
|
||||
|
||||
|
||||
class Service(Base):
|
||||
@@ -210,18 +234,22 @@ class Service(Base):
|
||||
name = Column(Unicode(255), unique=True)
|
||||
admin = Column(Boolean, default=False)
|
||||
|
||||
api_tokens = relationship("APIToken", backref="service")
|
||||
api_tokens = relationship(
|
||||
"APIToken",
|
||||
backref="service",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
|
||||
# service-specific interface
|
||||
_server_id = Column(Integer, ForeignKey('servers.id', ondelete='SET NULL'))
|
||||
server = relationship(Server, primaryjoin=_server_id == Server.id)
|
||||
server = relationship(Server, cascade='all')
|
||||
pid = Column(Integer)
|
||||
|
||||
def new_api_token(self, token=None, generated=True):
|
||||
def new_api_token(self, token=None, **kwargs):
|
||||
"""Create a new API token
|
||||
If `token` is given, load that token.
|
||||
"""
|
||||
return APIToken.new(token=token, service=self, generated=generated)
|
||||
return APIToken.new(token=token, service=self, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def find(cls, db, name):
|
||||
@@ -231,6 +259,7 @@ class Service(Base):
|
||||
"""
|
||||
return db.query(cls).filter(cls.name == name).first()
|
||||
|
||||
|
||||
class Hashed(object):
|
||||
"""Mixin for tables with hashed tokens"""
|
||||
prefix_length = 4
|
||||
@@ -267,7 +296,7 @@ class Hashed(object):
|
||||
def match(self, token):
|
||||
"""Is this my token?"""
|
||||
return compare_token(self.hashed, token)
|
||||
|
||||
|
||||
@classmethod
|
||||
def check_token(cls, db, token):
|
||||
"""Check if a token is acceptable"""
|
||||
@@ -282,7 +311,7 @@ class Hashed(object):
|
||||
@classmethod
|
||||
def find_prefix(cls, db, token):
|
||||
"""Start the query for matching token.
|
||||
|
||||
|
||||
Returns an SQLAlchemy query already filtered by prefix-matches.
|
||||
"""
|
||||
prefix = token[:cls.prefix_length]
|
||||
@@ -304,22 +333,28 @@ class Hashed(object):
|
||||
if orm_token.match(token):
|
||||
return orm_token
|
||||
|
||||
|
||||
class APIToken(Hashed, Base):
|
||||
"""An API token"""
|
||||
__tablename__ = 'api_tokens'
|
||||
|
||||
@declared_attr
|
||||
def user_id(cls):
|
||||
return Column(Integer, ForeignKey('users.id', ondelete="CASCADE"), nullable=True)
|
||||
|
||||
@declared_attr
|
||||
def service_id(cls):
|
||||
return Column(Integer, ForeignKey('services.id', ondelete="CASCADE"), nullable=True)
|
||||
user_id = Column(Integer, ForeignKey('users.id', ondelete="CASCADE"), nullable=True)
|
||||
service_id = Column(Integer, ForeignKey('services.id', ondelete="CASCADE"), nullable=True)
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
hashed = Column(Unicode(255), unique=True)
|
||||
prefix = Column(Unicode(16), index=True)
|
||||
|
||||
@property
|
||||
def api_id(self):
|
||||
return 'a%i' % self.id
|
||||
|
||||
# token metadata for bookkeeping
|
||||
created = Column(DateTime, default=datetime.utcnow)
|
||||
expires_at = Column(DateTime, default=None, nullable=True)
|
||||
last_activity = Column(DateTime)
|
||||
note = Column(Unicode(1023))
|
||||
|
||||
def __repr__(self):
|
||||
if self.user is not None:
|
||||
kind = 'user'
|
||||
@@ -338,6 +373,22 @@ class APIToken(Hashed, Base):
|
||||
name=name,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def purge_expired(cls, db):
|
||||
"""Purge expired API Tokens from the database"""
|
||||
now = utcnow()
|
||||
deleted = False
|
||||
for token in (
|
||||
db.query(cls)
|
||||
.filter(cls.expires_at != None)
|
||||
.filter(cls.expires_at < now)
|
||||
):
|
||||
app_log.debug("Purging expired %s", token)
|
||||
deleted = True
|
||||
db.delete(token)
|
||||
if deleted:
|
||||
db.commit()
|
||||
|
||||
@classmethod
|
||||
def find(cls, db, token, *, kind=None):
|
||||
"""Find a token object by value.
|
||||
@@ -348,6 +399,9 @@ class APIToken(Hashed, Base):
|
||||
`kind='service'` only returns API tokens for services
|
||||
"""
|
||||
prefix_match = cls.find_prefix(db, token)
|
||||
prefix_match = prefix_match.filter(
|
||||
or_(cls.expires_at == None, cls.expires_at >= utcnow())
|
||||
)
|
||||
if kind == 'user':
|
||||
prefix_match = prefix_match.filter(cls.user_id != None)
|
||||
elif kind == 'service':
|
||||
@@ -359,7 +413,8 @@ class APIToken(Hashed, Base):
|
||||
return orm_token
|
||||
|
||||
@classmethod
|
||||
def new(cls, token=None, user=None, service=None, generated=True):
|
||||
def new(cls, token=None, user=None, service=None, note='', generated=True,
|
||||
expires_in=None):
|
||||
"""Generate a new API token for a user or service"""
|
||||
assert user or service
|
||||
assert not (user and service)
|
||||
@@ -373,14 +428,16 @@ class APIToken(Hashed, Base):
|
||||
cls.check_token(db, token)
|
||||
# two stages to ensure orm_token.generated has been set
|
||||
# before token setter is called
|
||||
orm_token = cls(generated=generated)
|
||||
orm_token = cls(generated=generated, note=note or '')
|
||||
orm_token.token = token
|
||||
if user:
|
||||
assert user.id is not None
|
||||
orm_token.user_id = user.id
|
||||
orm_token.user = user
|
||||
else:
|
||||
assert service.id is not None
|
||||
orm_token.service_id = service.id
|
||||
orm_token.service = service
|
||||
if expires_in is not None:
|
||||
orm_token.expires_at = utcnow() + timedelta(seconds=expires_in)
|
||||
db.add(orm_token)
|
||||
db.commit()
|
||||
return token
|
||||
@@ -404,34 +461,58 @@ class OAuthAccessToken(Hashed, Base):
|
||||
__tablename__ = 'oauth_access_tokens'
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
|
||||
client_id = Column(Unicode(255))
|
||||
@property
|
||||
def api_id(self):
|
||||
return 'o%i' % self.id
|
||||
|
||||
client_id = Column(Unicode(255), ForeignKey('oauth_clients.identifier', ondelete='CASCADE'))
|
||||
grant_type = Column(Enum(GrantType), nullable=False)
|
||||
expires_at = Column(Integer)
|
||||
refresh_token = Column(Unicode(255))
|
||||
refresh_expires_at = Column(Integer)
|
||||
user_id = Column(Integer, ForeignKey('users.id', ondelete='CASCADE'))
|
||||
user = relationship(User)
|
||||
service = None # for API-equivalence with APIToken
|
||||
|
||||
# the browser session id associated with a given token
|
||||
session_id = Column(Unicode(255))
|
||||
|
||||
# from Hashed
|
||||
hashed = Column(Unicode(255), unique=True)
|
||||
prefix = Column(Unicode(16), index=True)
|
||||
|
||||
|
||||
created = Column(DateTime, default=datetime.utcnow)
|
||||
last_activity = Column(DateTime, nullable=True)
|
||||
|
||||
def __repr__(self):
|
||||
return "<{cls}('{prefix}...', user='{user}'>".format(
|
||||
return "<{cls}('{prefix}...', client_id={client_id!r}, user={user!r}>".format(
|
||||
cls=self.__class__.__name__,
|
||||
client_id=self.client_id,
|
||||
user=self.user and self.user.name,
|
||||
prefix=self.prefix,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def find(cls, db, token):
|
||||
orm_token = super().find(db, token)
|
||||
if orm_token and not orm_token.client_id:
|
||||
app_log.warning(
|
||||
"Deleting stale oauth token for %s with no client",
|
||||
orm_token.user and orm_token.user.name,
|
||||
)
|
||||
db.delete(orm_token)
|
||||
db.commit()
|
||||
return
|
||||
return orm_token
|
||||
|
||||
|
||||
class OAuthCode(Base):
|
||||
__tablename__ = 'oauth_codes'
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
client_id = Column(Unicode(255))
|
||||
client_id = Column(Unicode(255), ForeignKey('oauth_clients.identifier', ondelete='CASCADE'))
|
||||
code = Column(Unicode(36))
|
||||
expires_at = Column(Integer)
|
||||
redirect_uri = Column(Unicode(1023))
|
||||
session_id = Column(Unicode(255))
|
||||
user_id = Column(Integer, ForeignKey('users.id', ondelete='CASCADE'))
|
||||
|
||||
|
||||
@@ -439,9 +520,22 @@ class OAuthClient(Base):
|
||||
__tablename__ = 'oauth_clients'
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
identifier = Column(Unicode(255), unique=True)
|
||||
description = Column(Unicode(1023))
|
||||
secret = Column(Unicode(255))
|
||||
redirect_uri = Column(Unicode(1023))
|
||||
|
||||
access_tokens = relationship(
|
||||
OAuthAccessToken,
|
||||
backref='client',
|
||||
cascade='all, delete-orphan',
|
||||
)
|
||||
codes = relationship(
|
||||
OAuthCode,
|
||||
backref='client',
|
||||
cascade='all, delete-orphan',
|
||||
)
|
||||
|
||||
# General database utilities
|
||||
|
||||
class DatabaseSchemaMismatch(Exception):
|
||||
"""Exception raised when the database schema version does not match
|
||||
@@ -449,6 +543,46 @@ class DatabaseSchemaMismatch(Exception):
|
||||
the current version of JupyterHub.
|
||||
"""
|
||||
|
||||
|
||||
class ForeignKeysListener(PoolListener):
|
||||
"""Enable foreign keys on sqlite"""
|
||||
def connect(self, dbapi_con, con_record):
|
||||
dbapi_con.execute('pragma foreign_keys=ON')
|
||||
|
||||
|
||||
def _expire_relationship(target, relationship_prop):
|
||||
"""Expire relationship backrefs
|
||||
|
||||
used when an object with relationships is deleted
|
||||
"""
|
||||
|
||||
session = object_session(target)
|
||||
# get peer objects to be expired
|
||||
peers = getattr(target, relationship_prop.key)
|
||||
if peers is None:
|
||||
# no peer to clear
|
||||
return
|
||||
# many-to-many and one-to-many have a list of peers
|
||||
# many-to-one has only one
|
||||
if relationship_prop.direction is interfaces.MANYTOONE:
|
||||
peers = [peers]
|
||||
for obj in peers:
|
||||
if inspect(obj).persistent:
|
||||
session.expire(obj, [relationship_prop.back_populates])
|
||||
|
||||
|
||||
@event.listens_for(Session, "persistent_to_deleted")
|
||||
def _notify_deleted_relationships(session, obj):
|
||||
"""Expire relationships when an object becomes deleted
|
||||
|
||||
Needed for
|
||||
"""
|
||||
mapper = inspect(obj).mapper
|
||||
for prop in mapper.relationships:
|
||||
if prop.back_populates:
|
||||
_expire_relationship(obj, prop)
|
||||
|
||||
|
||||
def check_db_revision(engine):
|
||||
"""Check the JupyterHub database revision
|
||||
|
||||
@@ -463,6 +597,8 @@ def check_db_revision(engine):
|
||||
current_table_names = set(engine.table_names())
|
||||
my_table_names = set(Base.metadata.tables.keys())
|
||||
|
||||
from .dbutil import _temp_alembic_ini
|
||||
|
||||
with _temp_alembic_ini(engine.url) as ini:
|
||||
cfg = alembic.config.Config(ini)
|
||||
scripts = ScriptDirectory.from_config(cfg)
|
||||
@@ -510,10 +646,35 @@ def check_db_revision(engine):
|
||||
head=head,
|
||||
))
|
||||
|
||||
def new_session_factory(url="sqlite:///:memory:", reset=False, **kwargs):
|
||||
|
||||
def mysql_large_prefix_check(engine):
|
||||
"""Check mysql has innodb_large_prefix set"""
|
||||
if not str(engine.url).startswith('mysql'):
|
||||
return False
|
||||
variables = dict(engine.execute(
|
||||
'show variables where variable_name like '
|
||||
'"innodb_large_prefix" or '
|
||||
'variable_name like "innodb_file_format";').fetchall())
|
||||
if (variables['innodb_file_format'] == 'Barracuda' and
|
||||
variables['innodb_large_prefix'] == 'ON'):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def add_row_format(base):
|
||||
for t in base.metadata.tables.values():
|
||||
t.dialect_kwargs['mysql_ROW_FORMAT'] = 'DYNAMIC'
|
||||
|
||||
def new_session_factory(url="sqlite:///:memory:",
|
||||
reset=False,
|
||||
expire_on_commit=False,
|
||||
**kwargs):
|
||||
"""Create a new session at url"""
|
||||
if url.startswith('sqlite'):
|
||||
kwargs.setdefault('connect_args', {'check_same_thread': False})
|
||||
listeners = kwargs.setdefault('listeners', [])
|
||||
listeners.append(ForeignKeysListener())
|
||||
|
||||
elif url.startswith('mysql'):
|
||||
kwargs.setdefault('pool_recycle', 60)
|
||||
|
||||
@@ -526,9 +687,18 @@ def new_session_factory(url="sqlite:///:memory:", reset=False, **kwargs):
|
||||
if reset:
|
||||
Base.metadata.drop_all(engine)
|
||||
|
||||
if mysql_large_prefix_check(engine): # if mysql is allows large indexes
|
||||
add_row_format(Base) # set format on the tables
|
||||
# check the db revision (will raise, pointing to `upgrade-db` if version doesn't match)
|
||||
check_db_revision(engine)
|
||||
|
||||
Base.metadata.create_all(engine)
|
||||
|
||||
session_factory = sessionmaker(bind=engine)
|
||||
# We set expire_on_commit=False, since we don't actually need
|
||||
# SQLAlchemy to expire objects after commiting - we don't expect
|
||||
# concurrent runs of the hub talking to the same db. Turning
|
||||
# this off gives us a major performance boost
|
||||
session_factory = sessionmaker(bind=engine,
|
||||
expire_on_commit=expire_on_commit,
|
||||
)
|
||||
return session_factory
|
||||
|
@@ -15,32 +15,48 @@ Route Specification:
|
||||
- Route paths should be normalized to always start and end with '/'
|
||||
"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import asyncio
|
||||
from functools import wraps
|
||||
import json
|
||||
import os
|
||||
from subprocess import Popen
|
||||
from urllib.parse import quote
|
||||
|
||||
from tornado import gen
|
||||
from tornado.httpclient import AsyncHTTPClient, HTTPRequest
|
||||
from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPError
|
||||
from tornado.ioloop import PeriodicCallback
|
||||
|
||||
|
||||
from traitlets import (
|
||||
Any, Bool, Instance, Integer, Unicode,
|
||||
default,
|
||||
default, observe,
|
||||
)
|
||||
from jupyterhub.traitlets import Command
|
||||
|
||||
from traitlets.config import LoggingConfigurable
|
||||
from .objects import Server
|
||||
from .orm import Service, User
|
||||
from . import utils
|
||||
from .utils import url_path_join
|
||||
|
||||
|
||||
def _one_at_a_time(method):
|
||||
"""decorator to limit an async method to be called only once
|
||||
|
||||
If multiple concurrent calls to this method are made,
|
||||
queue them instead of allowing them to be concurrently outstanding.
|
||||
"""
|
||||
method._lock = asyncio.Lock()
|
||||
@wraps(method)
|
||||
async def locked_method(*args, **kwargs):
|
||||
async with method._lock:
|
||||
return await method(*args, **kwargs)
|
||||
|
||||
return locked_method
|
||||
|
||||
|
||||
class Proxy(LoggingConfigurable):
|
||||
"""Base class for configurable proxies that JupyterHub can use.
|
||||
|
||||
@@ -99,13 +115,13 @@ class Proxy(LoggingConfigurable):
|
||||
|
||||
Will be called during teardown if should_start is True.
|
||||
|
||||
**Subclasses must define this method**
|
||||
**Subclasses must define this method**
|
||||
if the proxy is to be started by the Hub
|
||||
"""
|
||||
|
||||
|
||||
def validate_routespec(self, routespec):
|
||||
"""Validate a routespec
|
||||
|
||||
|
||||
- Checks host value vs host-based routing.
|
||||
- Ensures trailing slash on path.
|
||||
"""
|
||||
@@ -125,8 +141,7 @@ class Proxy(LoggingConfigurable):
|
||||
else:
|
||||
return routespec
|
||||
|
||||
@gen.coroutine
|
||||
def add_route(self, routespec, target, data):
|
||||
async def add_route(self, routespec, target, data):
|
||||
"""Add a route to the proxy.
|
||||
|
||||
**Subclasses must define this method**
|
||||
@@ -146,16 +161,14 @@ class Proxy(LoggingConfigurable):
|
||||
"""
|
||||
pass
|
||||
|
||||
@gen.coroutine
|
||||
def delete_route(self, routespec):
|
||||
async def delete_route(self, routespec):
|
||||
"""Delete a route with a given routespec if it exists.
|
||||
|
||||
|
||||
**Subclasses must define this method**
|
||||
"""
|
||||
pass
|
||||
|
||||
@gen.coroutine
|
||||
def get_all_routes(self):
|
||||
async def get_all_routes(self):
|
||||
"""Fetch and return all the routes associated by JupyterHub from the
|
||||
proxy.
|
||||
|
||||
@@ -172,8 +185,7 @@ class Proxy(LoggingConfigurable):
|
||||
"""
|
||||
pass
|
||||
|
||||
@gen.coroutine
|
||||
def get_route(self, routespec):
|
||||
async def get_route(self, routespec):
|
||||
"""Return the route info for a given routespec.
|
||||
|
||||
Args:
|
||||
@@ -184,7 +196,7 @@ class Proxy(LoggingConfigurable):
|
||||
Returns:
|
||||
result (dict):
|
||||
dict with the following keys::
|
||||
|
||||
|
||||
'routespec': The normalized route specification passed in to add_route
|
||||
([host]/path/)
|
||||
'target': The target host for this route (proto://host)
|
||||
@@ -195,13 +207,12 @@ class Proxy(LoggingConfigurable):
|
||||
"""
|
||||
# default implementation relies on get_all_routes
|
||||
routespec = self.validate_routespec(routespec)
|
||||
routes = yield self.get_all_routes()
|
||||
routes = await self.get_all_routes()
|
||||
return routes.get(routespec)
|
||||
|
||||
# Most basic implementers must only implement above methods
|
||||
|
||||
@gen.coroutine
|
||||
def add_service(self, service, client=None):
|
||||
async def add_service(self, service, client=None):
|
||||
"""Add a service's server to the proxy table."""
|
||||
if not service.server:
|
||||
raise RuntimeError(
|
||||
@@ -211,20 +222,18 @@ class Proxy(LoggingConfigurable):
|
||||
service.name, service.proxy_spec, service.server.host,
|
||||
)
|
||||
|
||||
yield self.add_route(
|
||||
await self.add_route(
|
||||
service.proxy_spec,
|
||||
service.server.host,
|
||||
{'service': service.name}
|
||||
)
|
||||
|
||||
@gen.coroutine
|
||||
def delete_service(self, service, client=None):
|
||||
async def delete_service(self, service, client=None):
|
||||
"""Remove a service's server from the proxy table."""
|
||||
self.log.info("Removing service %s from proxy", service.name)
|
||||
yield self.delete_route(service.proxy_spec)
|
||||
await self.delete_route(service.proxy_spec)
|
||||
|
||||
@gen.coroutine
|
||||
def add_user(self, user, server_name='', client=None):
|
||||
async def add_user(self, user, server_name='', client=None):
|
||||
"""Add a user's server to the proxy table."""
|
||||
spawner = user.spawners[server_name]
|
||||
self.log.info("Adding user %s to proxy %s => %s",
|
||||
@@ -236,7 +245,7 @@ class Proxy(LoggingConfigurable):
|
||||
"%s is pending %s, shouldn't be added to the proxy yet!" % (spawner._log_name, spawner.pending)
|
||||
)
|
||||
|
||||
yield self.add_route(
|
||||
await self.add_route(
|
||||
spawner.proxy_spec,
|
||||
spawner.server.host,
|
||||
{
|
||||
@@ -245,63 +254,57 @@ class Proxy(LoggingConfigurable):
|
||||
}
|
||||
)
|
||||
|
||||
@gen.coroutine
|
||||
def delete_user(self, user, server_name=''):
|
||||
async def delete_user(self, user, server_name=''):
|
||||
"""Remove a user's server from the proxy table."""
|
||||
routespec = user.proxy_spec
|
||||
if server_name:
|
||||
routespec = url_path_join(user.proxy_spec, server_name, '/')
|
||||
self.log.info("Removing user %s from proxy (%s)", user.name, routespec)
|
||||
yield self.delete_route(routespec)
|
||||
await self.delete_route(routespec)
|
||||
|
||||
@gen.coroutine
|
||||
def add_all_services(self, service_dict):
|
||||
async def add_all_services(self, service_dict):
|
||||
"""Update the proxy table from the database.
|
||||
|
||||
Used when loading up a new proxy.
|
||||
"""
|
||||
db = self.db
|
||||
futures = []
|
||||
for orm_service in db.query(Service):
|
||||
service = service_dict[orm_service.name]
|
||||
for service in service_dict.values():
|
||||
if service.server:
|
||||
futures.append(self.add_service(service))
|
||||
# wait after submitting them all
|
||||
for f in futures:
|
||||
yield f
|
||||
await gen.multi(futures)
|
||||
|
||||
@gen.coroutine
|
||||
def add_all_users(self, user_dict):
|
||||
async def add_all_users(self, user_dict):
|
||||
"""Update the proxy table from the database.
|
||||
|
||||
Used when loading up a new proxy.
|
||||
"""
|
||||
db = self.db
|
||||
futures = []
|
||||
for orm_user in db.query(User):
|
||||
user = user_dict[orm_user]
|
||||
for user in user_dict.values():
|
||||
for name, spawner in user.spawners.items():
|
||||
if spawner.ready:
|
||||
futures.append(self.add_user(user, name))
|
||||
# wait after submitting them all
|
||||
for f in futures:
|
||||
yield f
|
||||
await gen.multi(futures)
|
||||
|
||||
@gen.coroutine
|
||||
def check_routes(self, user_dict, service_dict, routes=None):
|
||||
@_one_at_a_time
|
||||
async def check_routes(self, user_dict, service_dict, routes=None):
|
||||
"""Check that all users are properly routed on the proxy."""
|
||||
if not routes:
|
||||
routes = yield self.get_all_routes()
|
||||
self.log.debug("Fetching routes to check")
|
||||
routes = await self.get_all_routes()
|
||||
# log info-level that we are starting the route-checking
|
||||
# this may help diagnose performance issues,
|
||||
# as we are about
|
||||
self.log.info("Checking routes")
|
||||
|
||||
user_routes = {path for path, r in routes.items() if 'user' in r['data']}
|
||||
futures = []
|
||||
db = self.db
|
||||
|
||||
good_routes = {'/'}
|
||||
|
||||
hub = self.app.hub
|
||||
if '/' not in routes:
|
||||
self.log.warning("Adding missing default route")
|
||||
futures.append(self.add_hub_route(hub))
|
||||
else:
|
||||
route = routes['/']
|
||||
@@ -309,8 +312,7 @@ class Proxy(LoggingConfigurable):
|
||||
self.log.warning("Updating default route %s → %s", route['target'], hub.host)
|
||||
futures.append(self.add_hub_route(hub))
|
||||
|
||||
for orm_user in db.query(User):
|
||||
user = user_dict[orm_user]
|
||||
for user in user_dict.values():
|
||||
for name, spawner in user.spawners.items():
|
||||
if spawner.ready:
|
||||
spec = spawner.proxy_spec
|
||||
@@ -327,20 +329,17 @@ class Proxy(LoggingConfigurable):
|
||||
spec, route['target'], spawner.server,
|
||||
)
|
||||
futures.append(self.add_user(user, name))
|
||||
elif spawner._spawn_pending:
|
||||
elif spawner.pending:
|
||||
# don't consider routes stale if the spawner is in any pending event
|
||||
# wait until after the pending state clears before taking any actions
|
||||
# they could be pending deletion from the proxy!
|
||||
good_routes.add(spawner.proxy_spec)
|
||||
|
||||
# check service routes
|
||||
service_routes = {r['data']['service']: r
|
||||
for r in routes.values() if 'service' in r['data']}
|
||||
for orm_service in db.query(Service).filter(Service.server != None):
|
||||
service = service_dict[orm_service.name]
|
||||
for service in service_dict.values():
|
||||
if service.server is None:
|
||||
# This should never be True, but seems to be on rare occasion.
|
||||
# catch filter bug, either in sqlalchemy or my understanding of
|
||||
# its behavior
|
||||
self.log.error(
|
||||
"Service %s has no server, but wasn't filtered out.", service)
|
||||
continue
|
||||
good_routes.add(service.proxy_spec)
|
||||
if service.name not in service_routes:
|
||||
@@ -352,7 +351,7 @@ class Proxy(LoggingConfigurable):
|
||||
if route['target'] != service.server.host:
|
||||
self.log.warning(
|
||||
"Updating route for %s (%s → %s)",
|
||||
route['routespec'], route['target'], spawner.server.host,
|
||||
route['routespec'], route['target'], service.server.host,
|
||||
)
|
||||
futures.append(self.add_service(service))
|
||||
|
||||
@@ -362,20 +361,18 @@ class Proxy(LoggingConfigurable):
|
||||
self.log.warning("Deleting stale route %s", routespec)
|
||||
futures.append(self.delete_route(routespec))
|
||||
|
||||
for f in futures:
|
||||
yield f
|
||||
await gen.multi(futures)
|
||||
|
||||
def add_hub_route(self, hub):
|
||||
"""Add the default route for the Hub"""
|
||||
self.log.info("Adding default route for Hub: / => %s", hub.host)
|
||||
return self.add_route('/', self.hub.host, {'hub': True})
|
||||
|
||||
@gen.coroutine
|
||||
def restore_routes(self):
|
||||
async def restore_routes(self):
|
||||
self.log.info("Setting up routes on new proxy")
|
||||
yield self.add_hub_route(self.app.hub)
|
||||
yield self.add_all_users(self.app.users)
|
||||
yield self.add_all_services(self.app._service_map)
|
||||
await self.add_hub_route(self.app.hub)
|
||||
await self.add_all_users(self.app.users)
|
||||
await self.add_all_services(self.app._service_map)
|
||||
self.log.info("New proxy back up and good to go")
|
||||
|
||||
|
||||
@@ -388,13 +385,33 @@ class ConfigurableHTTPProxy(Proxy):
|
||||
If the proxy should not be run as a subprocess of the Hub,
|
||||
(e.g. in a separate container),
|
||||
set::
|
||||
|
||||
|
||||
c.ConfigurableHTTPProxy.should_start = False
|
||||
"""
|
||||
|
||||
proxy_process = Any()
|
||||
client = Instance(AsyncHTTPClient, ())
|
||||
|
||||
concurrency = Integer(
|
||||
10,
|
||||
config=True,
|
||||
help="""
|
||||
The number of requests allowed to be concurrently outstanding to the proxy
|
||||
|
||||
Limiting this number avoids potential timeout errors
|
||||
by sending too many requests to update the proxy at once
|
||||
""",
|
||||
)
|
||||
semaphore = Any()
|
||||
|
||||
@default('semaphore')
|
||||
def _default_semaphore(self):
|
||||
return asyncio.BoundedSemaphore(self.concurrency)
|
||||
|
||||
@observe('concurrency')
|
||||
def _concurrency_changed(self, change):
|
||||
self.semaphore = asyncio.BoundedSemaphore(change.new)
|
||||
|
||||
debug = Bool(False, help="Add debug-level logging to the Proxy.", config=True)
|
||||
auth_token = Unicode(
|
||||
help="""The Proxy auth token
|
||||
@@ -406,14 +423,10 @@ class ConfigurableHTTPProxy(Proxy):
|
||||
|
||||
@default('auth_token')
|
||||
def _auth_token_default(self):
|
||||
token = os.environ.get('CONFIGPROXY_AUTH_TOKEN', None)
|
||||
if not token:
|
||||
self.log.warning('\n'.join([
|
||||
"",
|
||||
"Generating CONFIGPROXY_AUTH_TOKEN. Restarting the Hub will require restarting the proxy.",
|
||||
"Set CONFIGPROXY_AUTH_TOKEN env or JupyterHub.proxy_auth_token config to avoid this message.",
|
||||
"",
|
||||
]))
|
||||
token = os.environ.get('CONFIGPROXY_AUTH_TOKEN', '')
|
||||
if self.should_start and not token:
|
||||
# generating tokens is fine if the Hub is starting the proxy
|
||||
self.log.info("Generating new CONFIGPROXY_AUTH_TOKEN")
|
||||
token = utils.new_token()
|
||||
return token
|
||||
|
||||
@@ -424,8 +437,20 @@ class ConfigurableHTTPProxy(Proxy):
|
||||
help="""The command to start the proxy"""
|
||||
)
|
||||
|
||||
@gen.coroutine
|
||||
def start(self):
|
||||
_check_running_callback = Any(help="PeriodicCallback to check if the proxy is running")
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
# check for required token if proxy is external
|
||||
if not self.auth_token and not self.should_start:
|
||||
raise ValueError(
|
||||
"%s.auth_token or CONFIGPROXY_AUTH_TOKEN env is required"
|
||||
" if Proxy.should_start is False" % self.__class__.__name__
|
||||
)
|
||||
|
||||
|
||||
|
||||
async def start(self):
|
||||
public_server = Server.from_url(self.public_url)
|
||||
api_server = Server.from_url(self.api_url)
|
||||
env = os.environ.copy()
|
||||
@@ -457,7 +482,7 @@ class ConfigurableHTTPProxy(Proxy):
|
||||
" I hope there is SSL termination happening somewhere else...")
|
||||
self.log.info("Starting proxy @ %s", public_server.bind_url)
|
||||
self.log.debug("Proxy cmd: %s", cmd)
|
||||
shell = os.name == 'nt'
|
||||
shell = os.name == 'nt'
|
||||
try:
|
||||
self.proxy_process = Popen(cmd, env=env, start_new_session=True, shell=shell)
|
||||
except FileNotFoundError as e:
|
||||
@@ -473,43 +498,43 @@ class ConfigurableHTTPProxy(Proxy):
|
||||
if status is not None:
|
||||
e = RuntimeError(
|
||||
"Proxy failed to start with exit code %i" % status)
|
||||
# py2-compatible `raise e from None`
|
||||
e.__cause__ = None
|
||||
raise e
|
||||
raise e from None
|
||||
|
||||
for server in (public_server, api_server):
|
||||
for i in range(10):
|
||||
_check_process()
|
||||
try:
|
||||
yield server.wait_up(1)
|
||||
await server.wait_up(1)
|
||||
except TimeoutError:
|
||||
continue
|
||||
else:
|
||||
break
|
||||
yield server.wait_up(1)
|
||||
await server.wait_up(1)
|
||||
_check_process()
|
||||
self.log.debug("Proxy started and appears to be up")
|
||||
pc = PeriodicCallback(self.check_running, 1e3 * self.check_running_interval)
|
||||
self._check_running_callback = pc
|
||||
pc.start()
|
||||
|
||||
def stop(self):
|
||||
self.log.info("Cleaning up proxy[%i]...", self.proxy_process.pid)
|
||||
if self._check_running_callback is not None:
|
||||
self._check_running_callback.stop()
|
||||
if self.proxy_process.poll() is None:
|
||||
try:
|
||||
self.proxy_process.terminate()
|
||||
except Exception as e:
|
||||
self.log.error("Failed to terminate proxy process: %s", e)
|
||||
|
||||
@gen.coroutine
|
||||
def check_running(self):
|
||||
async def check_running(self):
|
||||
"""Check if the proxy is still running"""
|
||||
if self.proxy_process.poll() is None:
|
||||
return
|
||||
self.log.error("Proxy stopped with exit code %r",
|
||||
'unknown' if self.proxy_process is None else self.proxy_process.poll()
|
||||
)
|
||||
yield self.start()
|
||||
yield self.restore_routes()
|
||||
await self.start()
|
||||
await self.restore_routes()
|
||||
|
||||
def _routespec_to_chp_path(self, routespec):
|
||||
"""Turn a routespec into a CHP API path
|
||||
@@ -527,7 +552,7 @@ class ConfigurableHTTPProxy(Proxy):
|
||||
|
||||
def _routespec_from_chp_path(self, chp_path):
|
||||
"""Turn a CHP route into a route spec
|
||||
|
||||
|
||||
In the JSON API, CHP route keys are unescaped,
|
||||
so re-escape them to raw URLs and ensure slashes are in the right places.
|
||||
"""
|
||||
@@ -542,7 +567,7 @@ class ConfigurableHTTPProxy(Proxy):
|
||||
routespec = routespec + '/'
|
||||
return routespec
|
||||
|
||||
def api_request(self, path, method='GET', body=None, client=None):
|
||||
async def api_request(self, path, method='GET', body=None, client=None):
|
||||
"""Make an authenticated API request of the proxy."""
|
||||
client = client or AsyncHTTPClient()
|
||||
url = url_path_join(self.api_url, 'api/routes', path)
|
||||
@@ -556,22 +581,33 @@ class ConfigurableHTTPProxy(Proxy):
|
||||
self.auth_token)},
|
||||
body=body,
|
||||
)
|
||||
async with self.semaphore:
|
||||
result = await client.fetch(req)
|
||||
return result
|
||||
|
||||
return client.fetch(req)
|
||||
|
||||
def add_route(self, routespec, target, data):
|
||||
async def add_route(self, routespec, target, data):
|
||||
body = data or {}
|
||||
body['target'] = target
|
||||
body['jupyterhub'] = True
|
||||
path = self._routespec_to_chp_path(routespec)
|
||||
return self.api_request(path,
|
||||
method='POST',
|
||||
body=body,
|
||||
)
|
||||
await self.api_request(
|
||||
path,
|
||||
method='POST',
|
||||
body=body,
|
||||
)
|
||||
|
||||
def delete_route(self, routespec):
|
||||
async def delete_route(self, routespec):
|
||||
path = self._routespec_to_chp_path(routespec)
|
||||
return self.api_request(path, method='DELETE')
|
||||
try:
|
||||
await self.api_request(path, method='DELETE')
|
||||
except HTTPError as e:
|
||||
if e.code == 404:
|
||||
# Warn about 404s because something might be wrong
|
||||
# but don't raise because the route is gone,
|
||||
# which is the goal.
|
||||
self.log.warning("Route %s already deleted", routespec)
|
||||
else:
|
||||
raise
|
||||
|
||||
def _reformat_routespec(self, routespec, chp_data):
|
||||
"""Reformat CHP data format to JupyterHub's proxy API."""
|
||||
@@ -582,11 +618,10 @@ class ConfigurableHTTPProxy(Proxy):
|
||||
'target': target,
|
||||
'data': chp_data,
|
||||
}
|
||||
|
||||
@gen.coroutine
|
||||
def get_all_routes(self, client=None):
|
||||
|
||||
async def get_all_routes(self, client=None):
|
||||
"""Fetch the proxy's routes."""
|
||||
resp = yield self.api_request('', client=client)
|
||||
resp = await self.api_request('', client=client)
|
||||
chp_routes = json.loads(resp.body.decode('utf8', 'replace'))
|
||||
all_routes = {}
|
||||
for chp_path, chp_data in chp_routes.items():
|
||||
|
@@ -29,8 +29,11 @@ from tornado.log import app_log
|
||||
from tornado.httputil import url_concat
|
||||
from tornado.web import HTTPError, RequestHandler
|
||||
|
||||
from traitlets.config import Configurable
|
||||
from traitlets import Unicode, Integer, Instance, default, observe, validate
|
||||
from traitlets.config import SingletonConfigurable
|
||||
from traitlets import (
|
||||
Unicode, Integer, Instance, Dict,
|
||||
default, observe, validate,
|
||||
)
|
||||
|
||||
from ..utils import url_path_join
|
||||
|
||||
@@ -57,6 +60,17 @@ class _ExpiringDict(dict):
|
||||
self.timestamps[key] = time.monotonic()
|
||||
self.values[key] = value
|
||||
|
||||
def __repr__(self):
|
||||
"""include values and timestamps in repr"""
|
||||
now = time.monotonic()
|
||||
return repr({
|
||||
key: '{value} (age={age:.0f}s)'.format(
|
||||
value=repr(value)[:16] + '...',
|
||||
age=now-self.timestamps[key],
|
||||
)
|
||||
for key, value in self.values.items()
|
||||
})
|
||||
|
||||
def _check_age(self, key):
|
||||
"""Check timestamp for a key"""
|
||||
if key not in self.values:
|
||||
@@ -86,7 +100,7 @@ class _ExpiringDict(dict):
|
||||
return default
|
||||
|
||||
|
||||
class HubAuth(Configurable):
|
||||
class HubAuth(SingletonConfigurable):
|
||||
"""A class for authenticating with JupyterHub
|
||||
|
||||
This can be used by any application.
|
||||
@@ -164,7 +178,7 @@ class HubAuth(Configurable):
|
||||
|
||||
hub_prefix = Unicode('/hub/',
|
||||
help="""The URL prefix for the Hub itself.
|
||||
|
||||
|
||||
Typically /hub/
|
||||
"""
|
||||
).tag(config=True)
|
||||
@@ -174,7 +188,7 @@ class HubAuth(Configurable):
|
||||
|
||||
login_url = Unicode('/hub/login',
|
||||
help="""The login URL to use
|
||||
|
||||
|
||||
Typically /hub/login
|
||||
"""
|
||||
).tag(config=True)
|
||||
@@ -186,6 +200,24 @@ class HubAuth(Configurable):
|
||||
help="""The name of the cookie I should be looking for"""
|
||||
).tag(config=True)
|
||||
|
||||
cookie_options = Dict(
|
||||
help="""Additional options to pass when setting cookies.
|
||||
|
||||
Can include things like `expires_days=None` for session-expiry
|
||||
or `secure=True` if served on HTTPS and default HTTPS discovery fails
|
||||
(e.g. behind some proxies).
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
@default('cookie_options')
|
||||
def _default_cookie_options(self):
|
||||
# load default from env
|
||||
options_env = os.environ.get('JUPYTERHUB_COOKIE_OPTIONS')
|
||||
if options_env:
|
||||
return json.loads(options_env)
|
||||
else:
|
||||
return {}
|
||||
|
||||
cookie_cache_max_age = Integer(help="DEPRECATED. Use cache_max_age")
|
||||
@observe('cookie_cache_max_age')
|
||||
def _deprecated_cookie_cache(self, change):
|
||||
@@ -227,6 +259,8 @@ class HubAuth(Configurable):
|
||||
cached = self.cache.get(cache_key)
|
||||
if cached is not None:
|
||||
return cached
|
||||
else:
|
||||
app_log.debug("Cache miss: %s" % cache_key)
|
||||
|
||||
data = self._api_request('GET', url, allow_404=True)
|
||||
if data is None:
|
||||
@@ -274,7 +308,7 @@ class HubAuth(Configurable):
|
||||
|
||||
return data
|
||||
|
||||
def user_for_cookie(self, encrypted_cookie, use_cache=True):
|
||||
def user_for_cookie(self, encrypted_cookie, use_cache=True, session_id=''):
|
||||
"""Ask the Hub to identify the user for a given cookie.
|
||||
|
||||
Args:
|
||||
@@ -291,11 +325,11 @@ class HubAuth(Configurable):
|
||||
"authorizations/cookie",
|
||||
self.cookie_name,
|
||||
quote(encrypted_cookie, safe='')),
|
||||
cache_key='cookie:%s' % encrypted_cookie,
|
||||
cache_key='cookie:{}:{}'.format(session_id, encrypted_cookie),
|
||||
use_cache=use_cache,
|
||||
)
|
||||
|
||||
def user_for_token(self, token, use_cache=True):
|
||||
def user_for_token(self, token, use_cache=True, session_id=''):
|
||||
"""Ask the Hub to identify the user for a given token.
|
||||
|
||||
Args:
|
||||
@@ -311,10 +345,10 @@ class HubAuth(Configurable):
|
||||
url=url_path_join(self.api_url,
|
||||
"authorizations/token",
|
||||
quote(token, safe='')),
|
||||
cache_key='token:%s' % token,
|
||||
cache_key='token:{}:{}'.format(session_id, token),
|
||||
use_cache=use_cache,
|
||||
)
|
||||
|
||||
|
||||
auth_header_name = 'Authorization'
|
||||
auth_header_pat = re.compile('token\s+(.+)', re.IGNORECASE)
|
||||
|
||||
@@ -336,8 +370,16 @@ class HubAuth(Configurable):
|
||||
def _get_user_cookie(self, handler):
|
||||
"""Get the user model from a cookie"""
|
||||
encrypted_cookie = handler.get_cookie(self.cookie_name)
|
||||
session_id = self.get_session_id(handler)
|
||||
if encrypted_cookie:
|
||||
return self.user_for_cookie(encrypted_cookie)
|
||||
return self.user_for_cookie(encrypted_cookie, session_id=session_id)
|
||||
|
||||
def get_session_id(self, handler):
|
||||
"""Get the jupyterhub session id
|
||||
|
||||
from the jupyterhub-session-id cookie.
|
||||
"""
|
||||
return handler.get_cookie('jupyterhub-session-id', '')
|
||||
|
||||
def get_user(self, handler):
|
||||
"""Get the Hub user for a given tornado handler.
|
||||
@@ -360,11 +402,12 @@ class HubAuth(Configurable):
|
||||
return handler._cached_hub_user
|
||||
|
||||
handler._cached_hub_user = user_model = None
|
||||
session_id = self.get_session_id(handler)
|
||||
|
||||
# check token first
|
||||
token = self.get_token(handler)
|
||||
if token:
|
||||
user_model = self.user_for_token(token)
|
||||
user_model = self.user_for_token(token, session_id=session_id)
|
||||
if user_model:
|
||||
handler._token_authenticated = True
|
||||
|
||||
@@ -414,8 +457,10 @@ class HubOAuth(HubAuth):
|
||||
|
||||
def _get_user_cookie(self, handler):
|
||||
token = handler.get_secure_cookie(self.cookie_name)
|
||||
session_id = self.get_session_id(handler)
|
||||
if token:
|
||||
user_model = self.user_for_token(token)
|
||||
token = token.decode('ascii', 'replace')
|
||||
user_model = self.user_for_token(token, session_id=session_id)
|
||||
if user_model is None:
|
||||
app_log.warning("Token stored in cookie may have expired")
|
||||
handler.clear_cookie(self.cookie_name)
|
||||
@@ -493,7 +538,7 @@ class HubOAuth(HubAuth):
|
||||
|
||||
def _encode_state(self, state):
|
||||
"""Encode a state dict as url-safe base64"""
|
||||
# trim trailing `=` because
|
||||
# trim trailing `=` because = is itself not url-safe!
|
||||
json_state = json.dumps(state)
|
||||
return base64.urlsafe_b64encode(
|
||||
json_state.encode('utf8')
|
||||
@@ -556,6 +601,8 @@ class HubOAuth(HubAuth):
|
||||
}
|
||||
if handler.request.protocol == 'https':
|
||||
kwargs['secure'] = True
|
||||
# load user cookie overrides
|
||||
kwargs.update(self.cookie_options)
|
||||
handler.set_secure_cookie(
|
||||
cookie_name,
|
||||
b64_state,
|
||||
@@ -603,6 +650,8 @@ class HubOAuth(HubAuth):
|
||||
}
|
||||
if handler.request.protocol == 'https':
|
||||
kwargs['secure'] = True
|
||||
# load user cookie overrides
|
||||
kwargs.update(self.cookie_options)
|
||||
app_log.debug("Setting oauth cookie for %s: %s, %s",
|
||||
handler.request.remote_ip, self.cookie_name, kwargs)
|
||||
handler.set_secure_cookie(
|
||||
@@ -675,7 +724,7 @@ class HubAuthenticated(object):
|
||||
@property
|
||||
def hub_auth(self):
|
||||
if self._hub_auth is None:
|
||||
self._hub_auth = self.hub_auth_class()
|
||||
self._hub_auth = self.hub_auth_class.instance()
|
||||
return self._hub_auth
|
||||
|
||||
@hub_auth.setter
|
||||
@@ -754,7 +803,14 @@ class HubAuthenticated(object):
|
||||
except UserNotAllowed as e:
|
||||
# cache None, in case get_user is called again while processing the error
|
||||
self._hub_auth_user_cache = None
|
||||
raise HTTPError(403, "{kind} {name} is not allowed.".format(**e.model))
|
||||
# Override redirect so if/when tornado @web.authenticated
|
||||
# tries to redirect to login URL, 403 will be raised instead.
|
||||
# This is not the best, but avoids problems that can be caused
|
||||
# when get_current_user is allowed to raise.
|
||||
def raise_on_redirect(*args, **kwargs):
|
||||
raise HTTPError(403, "{kind} {name} is not allowed.".format(**user_model))
|
||||
self.redirect = raise_on_redirect
|
||||
return
|
||||
except Exception:
|
||||
self._hub_auth_user_cache = None
|
||||
raise
|
||||
@@ -812,7 +868,8 @@ class HubOAuthCallbackHandler(HubOAuthenticated, RequestHandler):
|
||||
next_url = self.hub_auth.get_next_url(cookie_state)
|
||||
# TODO: make async (in a Thread?)
|
||||
token = self.hub_auth.token_for_code(code)
|
||||
user_model = self.hub_auth.user_for_token(token)
|
||||
session_id = self.hub_auth.get_session_id(self)
|
||||
user_model = self.hub_auth.user_for_token(token, session_id=session_id)
|
||||
if user_model is None:
|
||||
raise HTTPError(500, "oauth callback failed to identify a user")
|
||||
app_log.info("Logged-in user %s", user_model)
|
||||
|
@@ -56,6 +56,7 @@ from ..traitlets import Command
|
||||
from ..spawner import LocalProcessSpawner, set_user_setuid
|
||||
from ..utils import url_path_join
|
||||
|
||||
|
||||
class _MockUser(HasTraits):
|
||||
name = Unicode()
|
||||
server = Instance(orm.Server, allow_none=True)
|
||||
@@ -71,7 +72,7 @@ class _MockUser(HasTraits):
|
||||
return self.host + self.server.base_url
|
||||
else:
|
||||
return self.server.base_url
|
||||
|
||||
|
||||
@property
|
||||
def base_url(self):
|
||||
if not self.server:
|
||||
@@ -123,6 +124,7 @@ class _ServiceSpawner(LocalProcessSpawner):
|
||||
|
||||
self.pid = self.proc.pid
|
||||
|
||||
|
||||
class Service(LoggingConfigurable):
|
||||
"""An object wrapping a service specification for Hub API consumers.
|
||||
|
||||
@@ -218,12 +220,13 @@ class Service(LoggingConfigurable):
|
||||
base_url = Unicode()
|
||||
db = Any()
|
||||
orm = Any()
|
||||
cookie_options = Dict()
|
||||
|
||||
oauth_provider = Any()
|
||||
|
||||
oauth_client_id = Unicode(
|
||||
help="""OAuth client ID for this service.
|
||||
|
||||
|
||||
You shouldn't generally need to change this.
|
||||
Default: `service-<name>`
|
||||
"""
|
||||
@@ -232,6 +235,28 @@ class Service(LoggingConfigurable):
|
||||
def _default_client_id(self):
|
||||
return 'service-%s' % self.name
|
||||
|
||||
oauth_redirect_uri = Unicode(
|
||||
help="""OAuth redirect URI for this service.
|
||||
|
||||
You shouldn't generally need to change this.
|
||||
Default: `/services/:name/oauth_callback`
|
||||
"""
|
||||
).tag(input=True)
|
||||
@default('oauth_redirect_uri')
|
||||
def _default_redirect_uri(self):
|
||||
if self.server is None:
|
||||
return ''
|
||||
print(self.domain, self.host, self.server)
|
||||
return self.host + url_path_join(self.prefix, 'oauth_callback')
|
||||
|
||||
@property
|
||||
def oauth_available(self):
|
||||
"""Is OAuth available for this client?
|
||||
|
||||
Returns True if a server is defined or oauth_redirect_uri is specified manually
|
||||
"""
|
||||
return bool(self.server is not None or self.oauth_redirect_uri)
|
||||
|
||||
@property
|
||||
def server(self):
|
||||
if self.orm.server:
|
||||
@@ -277,6 +302,7 @@ class Service(LoggingConfigurable):
|
||||
environment=env,
|
||||
api_token=self.api_token,
|
||||
oauth_client_id=self.oauth_client_id,
|
||||
cookie_options=self.cookie_options,
|
||||
cwd=self.cwd,
|
||||
hub=self.hub,
|
||||
user=_MockUser(
|
||||
@@ -296,8 +322,9 @@ class Service(LoggingConfigurable):
|
||||
self.log.error("Service %s exited with status %i", self.name, self.proc.returncode)
|
||||
self.start()
|
||||
|
||||
def stop(self):
|
||||
async def stop(self):
|
||||
"""Stop a managed service"""
|
||||
self.log.debug("Stopping service %s", self.name)
|
||||
if not self.managed:
|
||||
raise RuntimeError("Cannot stop unmanaged service %s" % self)
|
||||
if self.spawner:
|
||||
@@ -305,4 +332,4 @@ class Service(LoggingConfigurable):
|
||||
self.db.delete(self.orm.server)
|
||||
self.db.commit()
|
||||
self.spawner.stop_polling()
|
||||
return self.spawner.stop()
|
||||
return (await self.spawner.stop())
|
||||
|
@@ -152,9 +152,41 @@ page_template = """
|
||||
</a>
|
||||
</span>
|
||||
{% endblock %}
|
||||
|
||||
{% block logo %}
|
||||
<img src='{{logo_url}}' alt='Jupyter Notebook'/>
|
||||
{% endblock logo %}
|
||||
|
||||
{% block script %}
|
||||
{{ super() }}
|
||||
<script type='text/javascript'>
|
||||
function _remove_redirects_param() {
|
||||
// remove ?redirects= param from URL so that
|
||||
// successful page loads don't increment the redirect loop counter
|
||||
if (window.location.search.length <= 1) {
|
||||
return;
|
||||
}
|
||||
var search_parameters = window.location.search.slice(1).split('&');
|
||||
for (var i = 0; i < search_parameters.length; i++) {
|
||||
if (search_parameters[i].split('=')[0] === 'redirects') {
|
||||
// remote token from search parameters
|
||||
search_parameters.splice(i, 1);
|
||||
var new_search = '';
|
||||
if (search_parameters.length) {
|
||||
new_search = '?' + search_parameters.join('&');
|
||||
}
|
||||
var new_url = window.location.origin +
|
||||
window.location.pathname +
|
||||
new_search +
|
||||
window.location.hash;
|
||||
window.history.replaceState({}, "", new_url);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
_remove_redirects_param();
|
||||
</script>
|
||||
{% endblock script %}
|
||||
"""
|
||||
|
||||
|
||||
@@ -304,7 +336,7 @@ class SingleUserNotebookApp(NotebookApp):
|
||||
|
||||
def _confirm_exit(self):
|
||||
# disable the exit confirmation for background notebook processes
|
||||
ioloop.IOLoop.instance().stop()
|
||||
self.io_loop.add_callback_from_signal(self.io_loop.stop)
|
||||
|
||||
def migrate_config(self):
|
||||
if self.disable_user_config:
|
||||
@@ -337,10 +369,9 @@ class SingleUserNotebookApp(NotebookApp):
|
||||
path = list(_exclude_home(path))
|
||||
return path
|
||||
|
||||
@gen.coroutine
|
||||
def check_hub_version(self):
|
||||
async def check_hub_version(self):
|
||||
"""Test a connection to my Hub
|
||||
|
||||
|
||||
- exit if I can't connect at all
|
||||
- check version and warn on sufficient mismatch
|
||||
"""
|
||||
@@ -348,19 +379,25 @@ class SingleUserNotebookApp(NotebookApp):
|
||||
RETRIES = 5
|
||||
for i in range(1, RETRIES+1):
|
||||
try:
|
||||
resp = yield client.fetch(self.hub_api_url)
|
||||
resp = await client.fetch(self.hub_api_url)
|
||||
except Exception:
|
||||
self.log.exception("Failed to connect to my Hub at %s (attempt %i/%i). Is it running?",
|
||||
self.hub_api_url, i, RETRIES)
|
||||
yield gen.sleep(min(2**i, 16))
|
||||
await gen.sleep(min(2**i, 16))
|
||||
else:
|
||||
break
|
||||
else:
|
||||
self.exit(1)
|
||||
|
||||
|
||||
hub_version = resp.headers.get('X-JupyterHub-Version')
|
||||
_check_version(hub_version, __version__, self.log)
|
||||
|
||||
def initialize(self, argv=None):
|
||||
# disable trash by default
|
||||
# this can be re-enabled by config
|
||||
self.config.FileContentsManager.delete_to_trash = False
|
||||
return super().initialize(argv)
|
||||
|
||||
def start(self):
|
||||
self.log.info("Starting jupyterhub-singleuser server version %s", __version__)
|
||||
# start by hitting Hub to check version
|
||||
|
@@ -5,7 +5,9 @@ Contains base Spawner class & default implementation
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import asyncio
|
||||
import errno
|
||||
import json
|
||||
import os
|
||||
import pipes
|
||||
import shutil
|
||||
@@ -15,20 +17,22 @@ import warnings
|
||||
from subprocess import Popen
|
||||
from tempfile import mkdtemp
|
||||
|
||||
# FIXME: remove when we drop Python 3.5 support
|
||||
from async_generator import async_generator, yield_
|
||||
|
||||
from sqlalchemy import inspect
|
||||
|
||||
from tornado import gen
|
||||
from tornado.ioloop import PeriodicCallback
|
||||
|
||||
from traitlets.config import LoggingConfigurable
|
||||
from traitlets import (
|
||||
Any, Bool, Dict, Instance, Integer, Float, List, Unicode,
|
||||
Any, Bool, Dict, Instance, Integer, Float, List, Unicode, Union,
|
||||
observe, validate,
|
||||
)
|
||||
|
||||
from .objects import Server
|
||||
from .traitlets import Command, ByteSpecification
|
||||
from .utils import random_port, url_path_join, exponential_backoff
|
||||
from .traitlets import Command, ByteSpecification, Callable
|
||||
from .utils import iterate_until, maybe_future, random_port, url_path_join, exponential_backoff
|
||||
|
||||
|
||||
class Spawner(LoggingConfigurable):
|
||||
@@ -46,7 +50,7 @@ class Spawner(LoggingConfigurable):
|
||||
is created for each user. If there are 20 JupyterHub users, there will be 20
|
||||
instances of the subclass.
|
||||
"""
|
||||
|
||||
|
||||
# private attributes for tracking status
|
||||
_spawn_pending = False
|
||||
_start_pending = False
|
||||
@@ -77,7 +81,7 @@ class Spawner(LoggingConfigurable):
|
||||
return 'spawn'
|
||||
elif self._stop_pending:
|
||||
return 'stop'
|
||||
return False
|
||||
return None
|
||||
|
||||
@property
|
||||
def ready(self):
|
||||
@@ -99,11 +103,12 @@ class Spawner(LoggingConfigurable):
|
||||
"""
|
||||
return bool(self.pending or self.ready)
|
||||
|
||||
|
||||
# options passed by constructor
|
||||
authenticator = Any()
|
||||
hub = Any()
|
||||
orm_spawner = Any()
|
||||
db = Any()
|
||||
cookie_options = Dict()
|
||||
|
||||
@observe('orm_spawner')
|
||||
def _orm_spawner_changed(self, change):
|
||||
@@ -125,7 +130,7 @@ class Spawner(LoggingConfigurable):
|
||||
if missing:
|
||||
raise NotImplementedError("class `{}` needs to redefine the `start`,"
|
||||
"`stop` and `poll` methods. `{}` not redefined.".format(cls.__name__, '`, `'.join(missing)))
|
||||
|
||||
|
||||
proxy_spec = Unicode()
|
||||
|
||||
@property
|
||||
@@ -227,7 +232,10 @@ class Spawner(LoggingConfigurable):
|
||||
help="Enable debug-logging of the single-user server"
|
||||
).tag(config=True)
|
||||
|
||||
options_form = Unicode(
|
||||
options_form = Union([
|
||||
Unicode(),
|
||||
Callable()
|
||||
],
|
||||
help="""
|
||||
An HTML form for options a user can specify on launching their server.
|
||||
|
||||
@@ -247,8 +255,30 @@ class Spawner(LoggingConfigurable):
|
||||
</select>
|
||||
|
||||
The data from this form submission will be passed on to your spawner in `self.user_options`
|
||||
|
||||
Instead of a form snippet string, this could also be a callable that takes as one
|
||||
parameter the current spawner instance and returns a string. The callable will
|
||||
be called asynchronously if it returns a future, rather than a str. Note that
|
||||
the interface of the spawner class is not deemed stable across versions,
|
||||
so using this functionality might cause your JupyterHub upgrades to break.
|
||||
""").tag(config=True)
|
||||
|
||||
async def get_options_form(self):
|
||||
"""Get the options form
|
||||
|
||||
Returns:
|
||||
Future (str): the content of the options form presented to the user
|
||||
prior to starting a Spawner.
|
||||
|
||||
.. versionadded:: 0.9
|
||||
"""
|
||||
if callable(self.options_form):
|
||||
options_form = await maybe_future(self.options_form(self))
|
||||
else:
|
||||
options_form = self.options_form
|
||||
|
||||
return options_form
|
||||
|
||||
def options_from_form(self, form_data):
|
||||
"""Interpret HTTP form data
|
||||
|
||||
@@ -303,7 +333,7 @@ class Spawner(LoggingConfigurable):
|
||||
- The JupyterHub process' environment variables that are whitelisted in `env_keep`
|
||||
- Variables to establish contact between the single-user notebook and the hub (such as JUPYTERHUB_API_TOKEN)
|
||||
|
||||
The `enviornment` configurable should be set by JupyterHub administrators to add
|
||||
The `environment` configurable should be set by JupyterHub administrators to add
|
||||
installation specific environment variables. It is a dict where the key is the name of the environment
|
||||
variable, and the value can be a string or a callable. If it is a callable, it will be called
|
||||
with one parameter (the spawner instance), and should return a string fairly quickly (no blocking
|
||||
@@ -408,7 +438,10 @@ class Spawner(LoggingConfigurable):
|
||||
will be able to allocate this much memory - only that it can not
|
||||
allocate more than this.
|
||||
|
||||
This needs to be supported by your spawner for it to work.
|
||||
**This is a configuration setting. Your spawner must implement support
|
||||
for the limit to work.** The default spawner, `LocalProcessSpawner`,
|
||||
does **not** implement this support. A custom spawner **must** add
|
||||
support for this setting for it to be enforced.
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
@@ -424,7 +457,10 @@ class Spawner(LoggingConfigurable):
|
||||
use more cpu-cores than this. There is no guarantee that it can
|
||||
access this many cpu-cores.
|
||||
|
||||
This needs to be supported by your spawner for it to work.
|
||||
**This is a configuration setting. Your spawner must implement support
|
||||
for the limit to work.** The default spawner, `LocalProcessSpawner`,
|
||||
does **not** implement this support. A custom spawner **must** add
|
||||
support for this setting for it to be enforced.
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
@@ -438,7 +474,10 @@ class Spawner(LoggingConfigurable):
|
||||
- G -> Gigabytes
|
||||
- T -> Terabytes
|
||||
|
||||
This needs to be supported by your spawner for it to work.
|
||||
**This is a configuration setting. Your spawner must implement support
|
||||
for the limit to work.** The default spawner, `LocalProcessSpawner`,
|
||||
does **not** implement this support. A custom spawner **must** add
|
||||
support for this setting for it to be enforced.
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
@@ -450,7 +489,10 @@ class Spawner(LoggingConfigurable):
|
||||
If this value is set to 0.5, allows use of 50% of one CPU.
|
||||
If this value is set to 2, allows use of up to 2 CPUs.
|
||||
|
||||
Note that this needs to be supported by your spawner for it to work.
|
||||
**This is a configuration setting. Your spawner must implement support
|
||||
for the limit to work.** The default spawner, `LocalProcessSpawner`,
|
||||
does **not** implement this support. A custom spawner **must** add
|
||||
support for this setting for it to be enforced.
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
@@ -473,6 +515,15 @@ class Spawner(LoggingConfigurable):
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
post_stop_hook = Any(
|
||||
help="""
|
||||
An optional hook function that you can implement to do work after
|
||||
the spawner stops.
|
||||
|
||||
This can be set independent of any concrete spawner implementation.
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
def load_state(self, state):
|
||||
"""Restore state of spawner from database.
|
||||
|
||||
@@ -549,6 +600,8 @@ class Spawner(LoggingConfigurable):
|
||||
env['JUPYTERHUB_ADMIN_ACCESS'] = '1'
|
||||
# OAuth settings
|
||||
env['JUPYTERHUB_CLIENT_ID'] = self.oauth_client_id
|
||||
if self.cookie_options:
|
||||
env['JUPYTERHUB_COOKIE_OPTIONS'] = json.dumps(self.cookie_options)
|
||||
env['JUPYTERHUB_HOST'] = self.hub.public_host
|
||||
env['JUPYTERHUB_OAUTH_CALLBACK_URL'] = \
|
||||
url_path_join(self.user.url, self.name, 'oauth_callback')
|
||||
@@ -625,7 +678,7 @@ class Spawner(LoggingConfigurable):
|
||||
|
||||
if self.port:
|
||||
args.append('--port=%i' % self.port)
|
||||
elif self.server.port:
|
||||
elif self.server and self.server.port:
|
||||
self.log.warning("Setting port from user.server is deprecated as of JupyterHub 0.7.")
|
||||
args.append('--port=%i' % self.server.port)
|
||||
|
||||
@@ -648,8 +701,67 @@ class Spawner(LoggingConfigurable):
|
||||
if self.pre_spawn_hook:
|
||||
return self.pre_spawn_hook(self)
|
||||
|
||||
@gen.coroutine
|
||||
def start(self):
|
||||
def run_post_stop_hook(self):
|
||||
"""Run the post_stop_hook if defined"""
|
||||
if self.post_stop_hook is not None:
|
||||
try:
|
||||
return self.post_stop_hook(self)
|
||||
except Exception:
|
||||
self.log.exception("post_stop_hook failed with exception: %s", self)
|
||||
|
||||
@property
|
||||
def _progress_url(self):
|
||||
return self.user.progress_url(self.name)
|
||||
|
||||
@async_generator
|
||||
async def _generate_progress(self):
|
||||
"""Private wrapper of progress generator
|
||||
|
||||
This method is always an async generator and will always yield at least one event.
|
||||
"""
|
||||
if not self._spawn_pending:
|
||||
raise RuntimeError("Spawn not pending, can't generate progress")
|
||||
|
||||
await yield_({
|
||||
"progress": 0,
|
||||
"message": "Server requested",
|
||||
})
|
||||
from async_generator import aclosing
|
||||
|
||||
async with aclosing(self.progress()) as progress:
|
||||
async for event in progress:
|
||||
await yield_(event)
|
||||
|
||||
@async_generator
|
||||
async def progress(self):
|
||||
"""Async generator for progress events
|
||||
|
||||
Must be an async generator
|
||||
|
||||
For Python 3.5-compatibility, use the async_generator package
|
||||
|
||||
Should yield messages of the form:
|
||||
|
||||
::
|
||||
|
||||
{
|
||||
"progress": 80, # integer, out of 100
|
||||
"message": text, # text message (will be escaped for HTML)
|
||||
"html_message": html_text, # optional html-formatted message (may have links)
|
||||
}
|
||||
|
||||
In HTML contexts, html_message will be displayed instead of message if present.
|
||||
Progress will be updated if defined.
|
||||
To update messages without progress omit the progress field.
|
||||
|
||||
.. versionadded:: 0.9
|
||||
"""
|
||||
await yield_({
|
||||
"progress": 50,
|
||||
"message": "Spawning server...",
|
||||
})
|
||||
|
||||
async def start(self):
|
||||
"""Start the single-user server
|
||||
|
||||
Returns:
|
||||
@@ -660,8 +772,7 @@ class Spawner(LoggingConfigurable):
|
||||
"""
|
||||
raise NotImplementedError("Override in subclass. Must be a Tornado gen.coroutine.")
|
||||
|
||||
@gen.coroutine
|
||||
def stop(self, now=False):
|
||||
async def stop(self, now=False):
|
||||
"""Stop the single-user server
|
||||
|
||||
If `now` is False (default), shutdown the server as gracefully as possible,
|
||||
@@ -674,8 +785,7 @@ class Spawner(LoggingConfigurable):
|
||||
"""
|
||||
raise NotImplementedError("Override in subclass. Must be a Tornado gen.coroutine.")
|
||||
|
||||
@gen.coroutine
|
||||
def poll(self):
|
||||
async def poll(self):
|
||||
"""Check if the single-user process is running
|
||||
|
||||
Returns:
|
||||
@@ -734,10 +844,9 @@ class Spawner(LoggingConfigurable):
|
||||
)
|
||||
self._poll_callback.start()
|
||||
|
||||
@gen.coroutine
|
||||
def poll_and_notify(self):
|
||||
async def poll_and_notify(self):
|
||||
"""Used as a callback to periodically poll the process and notify any watchers"""
|
||||
status = yield self.poll()
|
||||
status = await self.poll()
|
||||
if status is None:
|
||||
# still running, nothing to do here
|
||||
return
|
||||
@@ -749,22 +858,20 @@ class Spawner(LoggingConfigurable):
|
||||
|
||||
for callback in callbacks:
|
||||
try:
|
||||
yield gen.maybe_future(callback())
|
||||
await maybe_future(callback())
|
||||
except Exception:
|
||||
self.log.exception("Unhandled error in poll callback for %s", self)
|
||||
return status
|
||||
|
||||
death_interval = Float(0.1)
|
||||
@gen.coroutine
|
||||
def wait_for_death(self, timeout=10):
|
||||
async def wait_for_death(self, timeout=10):
|
||||
"""Wait for the single-user server to die, up to timeout seconds"""
|
||||
@gen.coroutine
|
||||
def _wait_for_death():
|
||||
status = yield self.poll()
|
||||
async def _wait_for_death():
|
||||
status = await self.poll()
|
||||
return status is not None
|
||||
|
||||
try:
|
||||
r = yield exponential_backoff(
|
||||
r = await exponential_backoff(
|
||||
_wait_for_death,
|
||||
'Process did not die in {timeout} seconds'.format(timeout=timeout),
|
||||
start_wait=self.death_interval,
|
||||
@@ -837,6 +944,8 @@ class LocalProcessSpawner(Spawner):
|
||||
Does not work on Windows.
|
||||
|
||||
This is the default spawner for JupyterHub.
|
||||
|
||||
Note: This spawner does not implement CPU / memory guarantees and limits.
|
||||
"""
|
||||
|
||||
interrupt_timeout = Integer(10,
|
||||
@@ -960,8 +1069,7 @@ class LocalProcessSpawner(Spawner):
|
||||
env = self.user_env(env)
|
||||
return env
|
||||
|
||||
@gen.coroutine
|
||||
def start(self):
|
||||
async def start(self):
|
||||
"""Start the single-user server."""
|
||||
self.port = random_port()
|
||||
cmd = []
|
||||
@@ -1005,10 +1113,10 @@ class LocalProcessSpawner(Spawner):
|
||||
if self.ip:
|
||||
self.server.ip = self.ip
|
||||
self.server.port = self.port
|
||||
self.db.commit()
|
||||
return (self.ip or '127.0.0.1', self.port)
|
||||
|
||||
@gen.coroutine
|
||||
def poll(self):
|
||||
async def poll(self):
|
||||
"""Poll the spawned process to see if it is still running.
|
||||
|
||||
If the process is still running, we return None. If it is not running,
|
||||
@@ -1031,15 +1139,14 @@ class LocalProcessSpawner(Spawner):
|
||||
|
||||
# send signal 0 to check if PID exists
|
||||
# this doesn't work on Windows, but that's okay because we don't support Windows.
|
||||
alive = yield self._signal(0)
|
||||
alive = await self._signal(0)
|
||||
if not alive:
|
||||
self.clear_state()
|
||||
return 0
|
||||
else:
|
||||
return None
|
||||
|
||||
@gen.coroutine
|
||||
def _signal(self, sig):
|
||||
async def _signal(self, sig):
|
||||
"""Send given signal to a single-user server's process.
|
||||
|
||||
Returns True if the process still exists, False otherwise.
|
||||
@@ -1055,8 +1162,7 @@ class LocalProcessSpawner(Spawner):
|
||||
raise
|
||||
return True # process exists
|
||||
|
||||
@gen.coroutine
|
||||
def stop(self, now=False):
|
||||
async def stop(self, now=False):
|
||||
"""Stop the single-user server process for the current user.
|
||||
|
||||
If `now` is False (default), shutdown the server as gracefully as possible,
|
||||
@@ -1066,30 +1172,30 @@ class LocalProcessSpawner(Spawner):
|
||||
The coroutine should return when the process is no longer running.
|
||||
"""
|
||||
if not now:
|
||||
status = yield self.poll()
|
||||
status = await self.poll()
|
||||
if status is not None:
|
||||
return
|
||||
self.log.debug("Interrupting %i", self.pid)
|
||||
yield self._signal(signal.SIGINT)
|
||||
yield self.wait_for_death(self.interrupt_timeout)
|
||||
await self._signal(signal.SIGINT)
|
||||
await self.wait_for_death(self.interrupt_timeout)
|
||||
|
||||
# clean shutdown failed, use TERM
|
||||
status = yield self.poll()
|
||||
status = await self.poll()
|
||||
if status is not None:
|
||||
return
|
||||
self.log.debug("Terminating %i", self.pid)
|
||||
yield self._signal(signal.SIGTERM)
|
||||
yield self.wait_for_death(self.term_timeout)
|
||||
await self._signal(signal.SIGTERM)
|
||||
await self.wait_for_death(self.term_timeout)
|
||||
|
||||
# TERM failed, use KILL
|
||||
status = yield self.poll()
|
||||
status = await self.poll()
|
||||
if status is not None:
|
||||
return
|
||||
self.log.debug("Killing %i", self.pid)
|
||||
yield self._signal(signal.SIGKILL)
|
||||
yield self.wait_for_death(self.kill_timeout)
|
||||
await self._signal(signal.SIGKILL)
|
||||
await self.wait_for_death(self.kill_timeout)
|
||||
|
||||
status = yield self.poll()
|
||||
status = await self.poll()
|
||||
if status is None:
|
||||
# it all failed, zombie process
|
||||
self.log.warning("Process %i never died", self.pid)
|
||||
|
@@ -1,17 +1,43 @@
|
||||
"""py.test fixtures"""
|
||||
"""py.test fixtures
|
||||
|
||||
Fixtures for jupyterhub components
|
||||
----------------------------------
|
||||
- `app`
|
||||
- `auth_state_enabled`
|
||||
- `db`
|
||||
- `io_loop`
|
||||
- single user servers
|
||||
- `cleanup_after`: allows cleanup of single user servers between tests
|
||||
- mocked service
|
||||
- `MockServiceSpawner`
|
||||
- `mockservice`: mocked service with no external service url
|
||||
- `mockservice_url`: mocked service with a url to test external services
|
||||
|
||||
Fixtures to add functionality or spawning behavior
|
||||
--------------------------------------------------
|
||||
- `admin_access`
|
||||
- `no_patience`
|
||||
- `slow_spawn`
|
||||
- `never_spawn`
|
||||
- `bad_spawn`
|
||||
- `slow_bad_spawn`
|
||||
|
||||
"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import logging
|
||||
from getpass import getuser
|
||||
from subprocess import TimeoutExpired
|
||||
import time
|
||||
from unittest import mock
|
||||
import logging
|
||||
import os
|
||||
from pytest import fixture, raises
|
||||
from subprocess import TimeoutExpired
|
||||
from tornado import ioloop, gen
|
||||
from tornado.httpclient import HTTPError
|
||||
from unittest import mock
|
||||
|
||||
from .. import orm
|
||||
from .. import crypto
|
||||
from ..utils import random_port
|
||||
|
||||
from . import mocking
|
||||
@@ -23,6 +49,46 @@ import jupyterhub.services.service
|
||||
# global db session object
|
||||
_db = None
|
||||
|
||||
|
||||
@fixture(scope='module')
|
||||
def app(request, io_loop):
|
||||
"""Mock a jupyterhub app for testing"""
|
||||
mocked_app = MockHub.instance(log_level=logging.DEBUG)
|
||||
|
||||
@gen.coroutine
|
||||
def make_app():
|
||||
yield mocked_app.initialize([])
|
||||
yield mocked_app.start()
|
||||
|
||||
io_loop.run_sync(make_app)
|
||||
|
||||
def fin():
|
||||
# disconnect logging during cleanup because pytest closes captured FDs prematurely
|
||||
mocked_app.log.handlers = []
|
||||
MockHub.clear_instance()
|
||||
mocked_app.stop()
|
||||
|
||||
request.addfinalizer(fin)
|
||||
return mocked_app
|
||||
|
||||
|
||||
@fixture
|
||||
def auth_state_enabled(app):
|
||||
app.authenticator.auth_state = {
|
||||
'who': 'cares',
|
||||
}
|
||||
app.authenticator.enable_auth_state = True
|
||||
ck = crypto.CryptKeeper.instance()
|
||||
before_keys = ck.keys
|
||||
ck.keys = [os.urandom(32)]
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
ck.keys = before_keys
|
||||
app.authenticator.enable_auth_state = False
|
||||
app.authenticator.auth_state = None
|
||||
|
||||
|
||||
@fixture
|
||||
def db():
|
||||
"""Get a db session"""
|
||||
@@ -45,39 +111,47 @@ def io_loop(request):
|
||||
|
||||
def _close():
|
||||
io_loop.clear_current()
|
||||
if (not ioloop.IOLoop.initialized() or
|
||||
io_loop is not ioloop.IOLoop.instance()):
|
||||
io_loop.close(all_fds=True)
|
||||
io_loop.close(all_fds=True)
|
||||
|
||||
request.addfinalizer(_close)
|
||||
return io_loop
|
||||
|
||||
@fixture(scope='module')
|
||||
def app(request, io_loop):
|
||||
"""Mock a jupyterhub app for testing"""
|
||||
mocked_app = MockHub.instance(log_level=logging.DEBUG)
|
||||
@gen.coroutine
|
||||
def make_app():
|
||||
yield mocked_app.initialize([])
|
||||
yield mocked_app.start()
|
||||
io_loop.run_sync(make_app)
|
||||
|
||||
def fin():
|
||||
# disconnect logging during cleanup because pytest closes captured FDs prematurely
|
||||
mocked_app.log.handlers = []
|
||||
MockHub.clear_instance()
|
||||
mocked_app.stop()
|
||||
request.addfinalizer(fin)
|
||||
return mocked_app
|
||||
@fixture(autouse=True)
|
||||
def cleanup_after(request, io_loop):
|
||||
"""function-scoped fixture to shutdown user servers
|
||||
|
||||
allows cleanup of servers between tests
|
||||
without having to launch a whole new app
|
||||
"""
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
if not MockHub.initialized():
|
||||
return
|
||||
app = MockHub.instance()
|
||||
for uid, user in app.users.items():
|
||||
for name, spawner in list(user.spawners.items()):
|
||||
if spawner.active:
|
||||
try:
|
||||
io_loop.run_sync(lambda: app.proxy.delete_user(user, name))
|
||||
except HTTPError:
|
||||
pass
|
||||
io_loop.run_sync(lambda: user.stop(name))
|
||||
app.db.commit()
|
||||
|
||||
|
||||
# mock services for testing.
|
||||
# Shorter intervals, etc.
|
||||
class MockServiceSpawner(jupyterhub.services.service._ServiceSpawner):
|
||||
"""mock services for testing.
|
||||
|
||||
Shorter intervals, etc.
|
||||
"""
|
||||
poll_interval = 1
|
||||
|
||||
|
||||
_mock_service_counter = 0
|
||||
|
||||
|
||||
def _mockservice(request, app, url=False):
|
||||
global _mock_service_counter
|
||||
_mock_service_counter += 1
|
||||
@@ -104,7 +178,8 @@ def _mockservice(request, app, url=False):
|
||||
service.start()
|
||||
io_loop.run_sync(start)
|
||||
def cleanup():
|
||||
service.stop()
|
||||
import asyncio
|
||||
asyncio.get_event_loop().run_until_complete(service.stop())
|
||||
app.services[:] = []
|
||||
app._service_map.clear()
|
||||
request.addfinalizer(cleanup)
|
||||
@@ -128,12 +203,20 @@ def mockservice_url(request, app):
|
||||
yield _mockservice(request, app, url=True)
|
||||
|
||||
|
||||
@fixture
|
||||
def admin_access(app):
|
||||
"""Grant admin-access with this fixture"""
|
||||
with mock.patch.dict(app.tornado_settings,
|
||||
{'admin_access': True}):
|
||||
yield
|
||||
|
||||
|
||||
@fixture
|
||||
def no_patience(app):
|
||||
"""Set slow-spawning timeouts to zero"""
|
||||
with mock.patch.dict(app.tornado_application.settings,
|
||||
{'slow_spawn_timeout': 0,
|
||||
'slow_stop_timeout': 0}):
|
||||
with mock.patch.dict(app.tornado_settings,
|
||||
{'slow_spawn_timeout': 0.1,
|
||||
'slow_stop_timeout': 0.1}):
|
||||
yield
|
||||
|
||||
|
||||
@@ -167,4 +250,3 @@ def slow_bad_spawn(app):
|
||||
with mock.patch.dict(app.tornado_settings,
|
||||
{'spawner_class': mocking.SlowBadSpawner}):
|
||||
yield
|
||||
|
||||
|
@@ -1,5 +1,33 @@
|
||||
"""mock utilities for testing"""
|
||||
"""mock utilities for testing
|
||||
|
||||
Functions
|
||||
---------
|
||||
- mock_authenticate
|
||||
- mock_check_account
|
||||
- mock_open_session
|
||||
|
||||
Spawners
|
||||
--------
|
||||
- MockSpawner: based on LocalProcessSpawner
|
||||
- SlowSpawner:
|
||||
- NeverSpawner:
|
||||
- BadSpawner:
|
||||
- SlowBadSpawner
|
||||
- FormSpawner
|
||||
|
||||
Other components
|
||||
----------------
|
||||
- MockPAMAuthenticator
|
||||
- MockHub
|
||||
- MockSingleUserServer
|
||||
- StubSingleUserSpawner
|
||||
|
||||
- public_host
|
||||
- public_url
|
||||
|
||||
"""
|
||||
import asyncio
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
import os
|
||||
import sys
|
||||
from tempfile import NamedTemporaryFile
|
||||
@@ -24,7 +52,8 @@ from .utils import async_requests
|
||||
|
||||
from pamela import PAMError
|
||||
|
||||
def mock_authenticate(username, password, service='login'):
|
||||
|
||||
def mock_authenticate(username, password, service, encoding):
|
||||
# just use equality for testing
|
||||
if password == username:
|
||||
return True
|
||||
@@ -32,7 +61,14 @@ def mock_authenticate(username, password, service='login'):
|
||||
raise PAMError("Fake")
|
||||
|
||||
|
||||
def mock_open_session(username, service):
|
||||
def mock_check_account(username, service, encoding):
|
||||
if username.startswith('notallowed'):
|
||||
raise PAMError("Fake")
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
def mock_open_session(username, service, encoding):
|
||||
pass
|
||||
|
||||
|
||||
@@ -40,7 +76,7 @@ class MockSpawner(LocalProcessSpawner):
|
||||
"""Base mock spawner
|
||||
|
||||
- disables user-switching that we need root permissions to do
|
||||
- spawns jupyterhub.tests.mocksu instead of a full single-user server
|
||||
- spawns `jupyterhub.tests.mocksu` instead of a full single-user server
|
||||
"""
|
||||
def make_preexec_fn(self, *a, **kw):
|
||||
# skip the setuid stuff
|
||||
@@ -64,6 +100,7 @@ class MockSpawner(LocalProcessSpawner):
|
||||
self.use_this_api_token = self.api_token
|
||||
return super().start()
|
||||
|
||||
|
||||
class SlowSpawner(MockSpawner):
|
||||
"""A spawner that takes a few seconds to start"""
|
||||
|
||||
@@ -119,7 +156,6 @@ class SlowBadSpawner(MockSpawner):
|
||||
raise RuntimeError("I don't work!")
|
||||
|
||||
|
||||
|
||||
class FormSpawner(MockSpawner):
|
||||
"""A spawner that has an options form defined"""
|
||||
options_form = "IMAFORM"
|
||||
@@ -138,6 +174,8 @@ class FormSpawner(MockSpawner):
|
||||
|
||||
class MockPAMAuthenticator(PAMAuthenticator):
|
||||
auth_state = None
|
||||
# If true, return admin users marked as admin.
|
||||
return_admin = False
|
||||
@default('admin_users')
|
||||
def _admin_users_default(self):
|
||||
return {'admin'}
|
||||
@@ -152,7 +190,8 @@ class MockPAMAuthenticator(PAMAuthenticator):
|
||||
authenticate=mock_authenticate,
|
||||
open_session=mock_open_session,
|
||||
close_session=mock_open_session,
|
||||
):
|
||||
check_account=mock_check_account,
|
||||
):
|
||||
username = yield super(MockPAMAuthenticator, self).authenticate(*args, **kwargs)
|
||||
if username is None:
|
||||
return
|
||||
@@ -161,6 +200,11 @@ class MockPAMAuthenticator(PAMAuthenticator):
|
||||
'name': username,
|
||||
'auth_state': self.auth_state,
|
||||
}
|
||||
elif self.return_admin:
|
||||
return {
|
||||
'name': username,
|
||||
'admin': username in self.admin_users,
|
||||
}
|
||||
else:
|
||||
return username
|
||||
|
||||
@@ -169,17 +213,21 @@ class MockHub(JupyterHub):
|
||||
"""Hub with various mock bits"""
|
||||
|
||||
db_file = None
|
||||
|
||||
last_activity_interval = 2
|
||||
|
||||
base_url = '/@/space%20word/'
|
||||
|
||||
log_datefmt = '%M:%S'
|
||||
|
||||
|
||||
@default('subdomain_host')
|
||||
def _subdomain_host_default(self):
|
||||
return os.environ.get('JUPYTERHUB_TEST_SUBDOMAIN_HOST', '')
|
||||
|
||||
|
||||
@default('bind_url')
|
||||
def _default_bind_url(self):
|
||||
if self.subdomain_host:
|
||||
port = urlparse(self.subdomain_host).port
|
||||
else:
|
||||
port = random_port()
|
||||
return 'http://127.0.0.1:%i/@/space%%20word/' % port
|
||||
|
||||
@default('ip')
|
||||
def _ip_default(self):
|
||||
return '127.0.0.1'
|
||||
@@ -195,17 +243,31 @@ class MockHub(JupyterHub):
|
||||
@default('authenticator_class')
|
||||
def _authenticator_class_default(self):
|
||||
return MockPAMAuthenticator
|
||||
|
||||
|
||||
@default('spawner_class')
|
||||
def _spawner_class_default(self):
|
||||
return MockSpawner
|
||||
|
||||
|
||||
def init_signal(self):
|
||||
pass
|
||||
|
||||
def load_config_file(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def init_tornado_application(self):
|
||||
"""Instantiate the tornado Application object"""
|
||||
super().init_tornado_application()
|
||||
# reconnect tornado_settings so that mocks can update the real thing
|
||||
self.tornado_settings = self.users.settings = self.tornado_application.settings
|
||||
|
||||
def init_services(self):
|
||||
# explicitly expire services before reinitializing
|
||||
# this only happens in tests because re-initialize
|
||||
# does not occur in a real instance
|
||||
for service in self.db.query(orm.Service):
|
||||
self.db.expire(service)
|
||||
return super().init_services()
|
||||
|
||||
@gen.coroutine
|
||||
def initialize(self, argv=None):
|
||||
self.pid_file = NamedTemporaryFile(delete=False).name
|
||||
@@ -222,11 +284,26 @@ class MockHub(JupyterHub):
|
||||
|
||||
def stop(self):
|
||||
super().stop()
|
||||
IOLoop().run_sync(self.cleanup)
|
||||
|
||||
# run cleanup in a background thread
|
||||
# to avoid multiple eventloops in the same thread errors from asyncio
|
||||
|
||||
def cleanup():
|
||||
asyncio.set_event_loop(asyncio.new_event_loop())
|
||||
loop = IOLoop.current()
|
||||
loop.run_sync(self.cleanup)
|
||||
loop.close()
|
||||
|
||||
pool = ThreadPoolExecutor(1)
|
||||
f = pool.submit(cleanup)
|
||||
# wait for cleanup to finish
|
||||
f.result()
|
||||
pool.shutdown()
|
||||
|
||||
# ignore the call that will fire in atexit
|
||||
self.cleanup = lambda : None
|
||||
self.db_file.close()
|
||||
|
||||
|
||||
@gen.coroutine
|
||||
def login_user(self, name):
|
||||
"""Login a user by name, returning her cookies."""
|
||||
@@ -297,25 +374,26 @@ class StubSingleUserSpawner(MockSpawner):
|
||||
io_loop = IOLoop()
|
||||
io_loop.make_current()
|
||||
io_loop.add_callback(lambda : evt.set())
|
||||
|
||||
|
||||
with mock.patch.dict(os.environ, env):
|
||||
app = self._app = MockSingleUserServer()
|
||||
app.initialize(args)
|
||||
assert app.hub_auth.oauth_client_id
|
||||
assert app.hub_auth.api_token
|
||||
app.start()
|
||||
|
||||
|
||||
self._thread = threading.Thread(target=_run)
|
||||
self._thread.start()
|
||||
ready = evt.wait(timeout=3)
|
||||
assert ready
|
||||
return (ip, port)
|
||||
|
||||
|
||||
@gen.coroutine
|
||||
def stop(self):
|
||||
self._app.stop()
|
||||
self._thread.join()
|
||||
|
||||
self._thread.join(timeout=30)
|
||||
assert not self._thread.is_alive()
|
||||
|
||||
@gen.coroutine
|
||||
def poll(self):
|
||||
if self._thread is None:
|
||||
|
@@ -1,13 +1,16 @@
|
||||
"""Mock service for testing Service integration
|
||||
|
||||
A JupyterHub service running a basic HTTP server.
|
||||
Used by the mockservice fixtures.
|
||||
|
||||
Handlers allow:
|
||||
Used by the `mockservice` fixtures found in `conftest.py` file.
|
||||
|
||||
- echoing proxied URLs back
|
||||
- retrieving service's environment variables
|
||||
- testing service's API access to the Hub retrieval of sys.argv.
|
||||
Handlers and their purpose include:
|
||||
|
||||
- EchoHandler: echoing proxied URLs back
|
||||
- EnvHandler: retrieving service's environment variables
|
||||
- APIHandler: testing service's API access to the Hub retrieval of `sys.argv`.
|
||||
- WhoAmIHandler: returns name of user making a request (deprecated cookie login)
|
||||
- OWhoAmIHandler: returns name of user making a request (OAuth login)
|
||||
"""
|
||||
|
||||
import json
|
||||
@@ -51,12 +54,13 @@ class APIHandler(web.RequestHandler):
|
||||
class WhoAmIHandler(HubAuthenticated, web.RequestHandler):
|
||||
"""Reply with the name of the user who made the request.
|
||||
|
||||
Uses deprecated cookie login
|
||||
Uses "deprecated" cookie login
|
||||
"""
|
||||
@web.authenticated
|
||||
def get(self):
|
||||
self.write(self.get_current_user())
|
||||
|
||||
|
||||
class OWhoAmIHandler(HubOAuthenticated, web.RequestHandler):
|
||||
"""Reply with the name of the user who made the request.
|
||||
|
||||
|
@@ -2,8 +2,15 @@
|
||||
|
||||
basic HTTP Server that echos URLs back,
|
||||
and allow retrieval of sys.argv.
|
||||
"""
|
||||
|
||||
Used by the mock spawners found in `mocking.py` file.
|
||||
|
||||
Handlers and their purpose include:
|
||||
|
||||
- EchoHandler: echoing URLs back
|
||||
- ArgsHandler: allowing retrieval of `sys.argv`.
|
||||
|
||||
"""
|
||||
import argparse
|
||||
import json
|
||||
import sys
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user