satisfy updated ruff rules

mostly f-strings, manual fixes
Min RK
2024-12-03 09:13:31 +01:00
parent def928f1b7
commit a2877c7be2
16 changed files with 44 additions and 45 deletions
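
Most hunks below apply the same mechanical rewrite that ruff's pyupgrade-style checks flag: printf-style '%' formatting replaced with f-strings (or str() where no formatting is needed). A minimal, self-contained sketch of the pattern, using throwaway values rather than lines from this commit:

# illustrative sketch only -- not code from this commit
port = 8000
name = 'hub'

# before: printf-style formatting
old = 'http://127.0.0.1:%i/%s/' % (port, name)

# after: equivalent f-string
new = f'http://127.0.0.1:{port}/{name}/'

assert old == new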

View File

@@ -9,7 +9,6 @@ import secrets
 from functools import wraps
 from flask import Flask, Response, make_response, redirect, request, session
 from jupyterhub.services.auth import HubOAuth
 prefix = os.environ.get('JUPYTERHUB_SERVICE_PREFIX', '/')

View File

@@ -260,7 +260,7 @@ class UserListAPIHandler(APIHandler):
                 raise web.HTTPError(400, msg)
         if not to_create:
-            raise web.HTTPError(409, "All %i users already exist" % len(usernames))
+            raise web.HTTPError(409, f"All {len(usernames)} users already exist")
         created = []
         for name in to_create:

View File

@@ -3320,7 +3320,7 @@ class JupyterHub(Application):
         if self.pid_file:
             self.log.debug("Writing PID %i to %s", pid, self.pid_file)
             with open(self.pid_file, 'w') as f:
-                f.write('%i' % pid)
+                f.write(str(pid))

     @catch_config_error
     async def initialize(self, *args, **kwargs):

View File

@@ -1225,7 +1225,7 @@ class LocalAuthenticator(Authenticator):
         cmd = [arg.replace('USERNAME', name) for arg in self.add_user_cmd]
         try:
             uid = self.uids[name]
-            cmd += ['--uid', '%d' % uid]
+            cmd += ['--uid', str(uid)]
         except KeyError:
             self.log.debug(f"No UID for user {name}")
         cmd += [name]

View File

@@ -1061,10 +1061,12 @@ class BaseHandler(RequestHandler):
             # round suggestion to nicer human value (nearest 10 seconds or minute)
             if retry_time <= 90:
                 # round human seconds up to nearest 10
-                human_retry_time = "%i0 seconds" % math.ceil(retry_time / 10.0)
+                delay = math.ceil(retry_time / 10.0)
+                human_retry_time = f"{delay}0 seconds"
             else:
                 # round number of minutes
-                human_retry_time = "%i minutes" % round(retry_time / 60.0)
+                delay = round(retry_time / 60.0)
+                human_retry_time = f"{delay} minutes"

             self.log.warning(
                 '%s pending spawns, throttling. Suggested retry in %s seconds.',

@@ -1099,12 +1101,12 @@
         self.log.debug(
             "%i%s concurrent spawns",
             spawn_pending_count,
-            '/%i' % concurrent_spawn_limit if concurrent_spawn_limit else '',
+            f'/{concurrent_spawn_limit}' if concurrent_spawn_limit else '',
         )
         self.log.debug(
             "%i%s active servers",
             active_count,
-            '/%i' % active_server_limit if active_server_limit else '',
+            f'/{active_server_limit}' if active_server_limit else '',
         )

         spawner = user.spawners[server_name]

View File

@@ -1050,7 +1050,7 @@ class APIToken(Hashed, Base):
     @property
     def api_id(self):
-        return 'a%i' % self.id
+        return f"a{self.id}"

     @property
     def owner(self):

View File

@@ -358,14 +358,15 @@ async def _mockservice(request, app, name, external=False, url=False):
     (as opposed to headless, API-only).
     """
     spec = {'name': name, 'command': mockservice_cmd, 'admin': True}
+    port = random_port()
     if url:
         if app.internal_ssl:
-            spec['url'] = 'https://127.0.0.1:%i' % random_port()
+            spec['url'] = f'https://127.0.0.1:{port}'
         else:
-            spec['url'] = 'http://127.0.0.1:%i' % random_port()
+            spec['url'] = f'http://127.0.0.1:{port}'
     if external:
-        spec['oauth_redirect_uri'] = 'http://127.0.0.1:%i' % random_port()
+        spec['oauth_redirect_uri'] = f'http://127.0.0.1:{port}'

     event_loop = asyncio.get_running_loop()

View File

@@ -256,7 +256,7 @@ class MockHub(JupyterHub):
             port = urlparse(self.subdomain_host).port
         else:
             port = random_port()
-        return 'http://127.0.0.1:%i/@/space%%20word/' % (port,)
+        return f'http://127.0.0.1:{port}/@/space%20word/'

     @default('ip')
     def _ip_default(self):

View File

@@ -7,7 +7,7 @@ import pytest
 from .. import crypto
 from ..crypto import decrypt, encrypt

-keys = [('%i' % i).encode('ascii') * 32 for i in range(3)]
+keys = [str(i).encode('ascii') * 32 for i in range(3)]
 hex_keys = [b2a_hex(key).decode('ascii') for key in keys]
 b64_keys = [b2a_base64(key).decode('ascii').strip() for key in keys]

@@ -36,7 +36,7 @@ def test_env_constructor(key_env, keys):
     "key",
     [
         'a' * 44,  # base64, not 32 bytes
-        ('%44s' % 'notbase64'),  # not base64
+        f"{'notbase64':44}",  # not base64
         b'x' * 64,  # not hex
         b'short',  # not 32 bytes
     ],

View File

@@ -33,19 +33,19 @@ def test_server(db):
     # test wrapper
     server = objects.Server(orm_server=server)
-    assert server.host == 'http://%s:%i' % (socket.gethostname(), server.port)
+    assert server.host == f'http://{socket.gethostname()}:{server.port}'
     assert server.url == server.host + '/'
-    assert server.bind_url == 'http://*:%i/' % server.port
+    assert server.bind_url == f'http://*:{server.port}/'
     server.ip = '127.0.0.1'
-    assert server.host == 'http://127.0.0.1:%i' % server.port
+    assert server.host == f'http://127.0.0.1:{server.port}'
     assert server.url == server.host + '/'
     server.connect_ip = 'hub'
-    assert server.host == 'http://hub:%i' % server.port
+    assert server.host == f'http://hub:{server.port}'
     assert server.url == server.host + '/'
-    server.connect_url = 'http://hub-url:%i/connect' % server.port
-    assert server.host == 'http://hub-url:%i' % server.port
+    server.connect_url = f'http://hub-url:{server.port}/connect'
+    assert server.host == f'http://hub-url:{server.port}'
     server.bind_url = 'http://127.0.0.1/'
     assert server.port == 80

View File

@@ -1060,7 +1060,7 @@ async def test_oauth_token_page(app):

 @pytest.mark.parametrize("error_status", [503, 404])
 async def test_proxy_error(app, error_status):
-    r = await get_page('/error/%i' % error_status, app)
+    r = await get_page(f'/error/{error_status}', app)
     assert r.status_code == 200

View File

@@ -35,7 +35,7 @@ async def test_external_proxy(request):
     proxy_port = random_port()
     cfg = Config()
     cfg.ConfigurableHTTPProxy.auth_token = auth_token
-    cfg.ConfigurableHTTPProxy.api_url = 'http://%s:%i' % (proxy_ip, proxy_port)
+    cfg.ConfigurableHTTPProxy.api_url = f'http://{proxy_ip}:{proxy_port}'
     cfg.ConfigurableHTTPProxy.should_start = False

     app = MockHub.instance(config=cfg)

@@ -76,7 +76,7 @@ async def test_external_proxy(request):
     request.addfinalizer(_cleanup_proxy)

     def wait_for_proxy():
-        return wait_for_http_server('http://%s:%i' % (proxy_ip, proxy_port))
+        return wait_for_http_server(f'http://{proxy_ip}:{proxy_port}')

     await wait_for_proxy()

@@ -141,7 +141,7 @@ async def test_external_proxy(request):
         '--api-port',
         str(proxy_port),
         '--default-target',
-        'http://%s:%i' % (app.hub_ip, app.hub_port),
+        f'http://{app.hub_ip}:{app.hub_port}',
     ]
     if app.subdomain_host:
         cmd.append('--host-routing')

View File

@@ -27,7 +27,7 @@ async def external_service(app, name='mockservice'):
         'JUPYTERHUB_API_TOKEN': hexlify(os.urandom(5)),
         'JUPYTERHUB_SERVICE_NAME': name,
         'JUPYTERHUB_API_URL': url_path_join(app.hub.url, 'api/'),
-        'JUPYTERHUB_SERVICE_URL': 'http://127.0.0.1:%i' % random_port(),
+        'JUPYTERHUB_SERVICE_URL': f'http://127.0.0.1:{random_port()}',
     }
     proc = Popen(mockservice_cmd, env=env)
     try:

View File

@@ -260,7 +260,7 @@ async def test_shell_cmd(db, tmpdir, request):
     s.server.port = port
     db.commit()
     await wait_for_spawner(s)
-    r = await async_requests.get('http://%s:%i/env' % (ip, port))
+    r = await async_requests.get(f'http://{ip}:{port}/env')
     r.raise_for_status()
     env = r.json()
     assert env['TESTVAR'] == 'foo'

View File

@@ -920,19 +920,16 @@ class User:
             await asyncio.wait_for(f, timeout=spawner.start_timeout)
             url = f.result()
             if url:
-                # get ip, port info from return value of start()
-                if isinstance(url, str):
-                    # >= 0.9 can return a full URL string
-                    pass
-                else:
-                    # >= 0.7 returns (ip, port)
+                # get url from return value of start()
+                if not isinstance(url, str):
+                    # older Spawners return (ip, port)
                     proto = 'https' if self.settings['internal_ssl'] else 'http'
+                    ip, port = url
                     # check if spawner returned an IPv6 address
-                    if ':' in url[0]:
-                        url = '%s://[%s]:%i' % ((proto,) + url)
-                    else:
-                        url = '%s://%s:%i' % ((proto,) + url)
+                    if ':' in ip:
+                        # ipv6 needs [::] in url
+                        ip = f'[{ip}]'
+                    url = f'{proto}://{ip}:{int(port)}'
                 urlinfo = urlparse(url)
                 server.proto = urlinfo.scheme
                 server.ip = urlinfo.hostname
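
The user.py hunk above is the main manual fix: instead of special-casing string returns with a pass branch, it only rewrites tuple returns, unpacking (ip, port) and bracketing IPv6 literals. A standalone sketch of the same branch, under the assumption of a hypothetical helper name (not JupyterHub API):

# hypothetical helper mirroring the branch above; not part of JupyterHub
def url_from_start_result(url, internal_ssl=False):
    if not isinstance(url, str):
        # older Spawners return (ip, port)
        proto = 'https' if internal_ssl else 'http'
        ip, port = url
        if ':' in ip:
            # IPv6 literals need brackets inside a URL
            ip = f'[{ip}]'
        url = f'{proto}://{ip}:{int(port)}'
    return url

assert url_from_start_result('http://example.test:8888') == 'http://example.test:8888'
assert url_from_start_result(('127.0.0.1', 8081)) == 'http://127.0.0.1:8081'
assert url_from_start_result(('::1', 8081), internal_ssl=True) == 'https://[::1]:8081'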

View File

@@ -502,20 +502,20 @@ def print_ps_info(file=sys.stderr):
         # format CPU percentage
         cpu = p.cpu_percent(0.1)
         if cpu >= 10:
-            cpu_s = "%i" % cpu
+            cpu_s = str(int(cpu))
         else:
             cpu_s = f"{cpu:.1f}"

         # format memory (only resident set)
         rss = p.memory_info().rss
         if rss >= 1e9:
-            mem_s = '%.1fG' % (rss / 1e9)
+            mem_s = f'{rss / 1e9:.1f}G'
         elif rss >= 1e7:
-            mem_s = '%.0fM' % (rss / 1e6)
+            mem_s = f'{rss / 1e6:.0f}M'
         elif rss >= 1e6:
-            mem_s = '%.1fM' % (rss / 1e6)
+            mem_s = f'{rss / 1e6:.1f}M'
         else:
-            mem_s = '%.0fk' % (rss / 1e3)
+            mem_s = f'{rss / 1e3:.0f}k'

         # left-justify and shrink-to-fit columns
         cpulen = max(len(cpu_s), 4)
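
For reference, the memory column above keeps its thresholds (G above 1e9 bytes, M down to 1e6, k below); only the formatting syntax changes. A quick standalone restatement with made-up values, not taken from the repository:

# hypothetical restatement of the tiered formatting above, for a single value
def human_rss(rss):
    if rss >= 1e9:
        return f'{rss / 1e9:.1f}G'
    elif rss >= 1e7:
        return f'{rss / 1e6:.0f}M'
    elif rss >= 1e6:
        return f'{rss / 1e6:.1f}M'
    else:
        return f'{rss / 1e3:.0f}k'

assert human_rss(2.5e9) == '2.5G'
assert human_rss(5e7) == '50M'
assert human_rss(2.5e6) == '2.5M'
assert human_rss(1.2e4) == '12k'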
@@ -560,7 +560,7 @@ def print_stacks(file=sys.stderr):
     from .log import coroutine_frames

-    print("Active threads: %i" % threading.active_count(), file=file)
+    print(f"Active threads: {threading.active_count()}", file=file)
     for thread in threading.enumerate():
         print(f"Thread {thread.name}:", end='', file=file)
         frame = sys._current_frames()[thread.ident]

@@ -592,7 +592,7 @@ def print_stacks(file=sys.stderr):
     # coroutines to native `async def`
     tasks = asyncio_all_tasks()
     if tasks:
-        print("AsyncIO tasks: %i" % len(tasks))
+        print(f"AsyncIO tasks: {len(tasks)}")
         for task in tasks:
             task.print_stack(file=file)