
Commit 63c6bdc

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 479dcce commit 63c6bdc

4 files changed: +40, -53 lines
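The fixes in this commit are mechanical style rewrites. The hook set that produced them is not shown here, but the patterns visible in the diff are import sorting and merging, zero-argument super() calls, f-strings in place of str.format(), and dropping unused names (the job variable and the unused exception binding). A minimal, self-contained Python sketch of the super()/f-string rewrites, using hypothetical classes rather than batchspawner code:

# Sketch only: Base, Child, and describe() are illustrative, not from this repository.
class Base:
    def describe(self, cmd, code, err):
        # pre-fix spelling, kept for comparison
        return "{} exit status {}: {}".format(cmd, code, err)


class Child(Base):
    def describe(self, cmd, code, err):
        # zero-argument super(); was super(Child, self).describe(...) before the fix
        old = super().describe(cmd, code, err)
        # f-string form the hooks rewrite to
        new = f"{cmd} exit status {code}: {err}"
        assert old == new  # behaviour is unchanged, only the spelling
        return new


print(Child().describe("sbatch", 1, "permission denied"))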


batchspawner/api.py

Lines changed: 2 additions & 1 deletion
@@ -1,6 +1,7 @@
 import json
-from tornado import web
+
 from jupyterhub.apihandlers import APIHandler, default_handlers
+from tornado import web


 class BatchSpawnerAPIHandler(APIHandler):

batchspawner/batchspawner.py

Lines changed: 17 additions & 29 deletions
@@ -16,20 +16,15 @@
 * job names instead of PIDs
 """
 import asyncio
-import pwd
 import os
+import pwd
 import re
-
 import xml.etree.ElementTree as ET
-
 from enum import Enum

 from jinja2 import Template
-
-from jupyterhub.spawner import Spawner
-from traitlets import Integer, Unicode, Float, Dict, default
-
-from jupyterhub.spawner import set_user_setuid
+from jupyterhub.spawner import Spawner, set_user_setuid
+from traitlets import Dict, Float, Integer, Unicode, default


 def format_template(template, *args, **kwargs):
@@ -240,9 +235,7 @@ async def run_command(self, cmd, input=None, env=None):
 self.log.error(out)
 self.log.error("Stderr:")
 self.log.error(eout)
-raise RuntimeError(
-"{} exit status {}: {}".format(cmd, proc.returncode, eout)
-)
+raise RuntimeError(f"{cmd} exit status {proc.returncode}: {eout}")
 except asyncio.TimeoutError:
 self.log.error(
 "Encountered timeout trying to clean up command, process probably killed already: %s"
@@ -322,7 +315,7 @@ async def query_job_status(self):
 except RuntimeError as e:
 # e.args[0] is stderr from the process
 self.job_status = e.args[0]
-except Exception as e:
+except Exception:
 self.log.error("Error querying job " + self.job_id)
 self.job_status = ""

@@ -354,13 +347,13 @@ async def cancel_batch_job(self):

 def load_state(self, state):
 """load job_id from state"""
-super(BatchSpawnerBase, self).load_state(state)
+super().load_state(state)
 self.job_id = state.get("job_id", "")
 self.job_status = state.get("job_status", "")

 def get_state(self):
 """add job_id to state"""
-state = super(BatchSpawnerBase, self).get_state()
+state = super().get_state()
 if self.job_id:
 state["job_id"] = self.job_id
 if self.job_status:
@@ -369,7 +362,7 @@ def get_state(self):

 def clear_state(self):
 """clear job_id state"""
-super(BatchSpawnerBase, self).clear_state()
+super().clear_state()
 self.job_id = ""
 self.job_status = ""

@@ -415,7 +408,7 @@ async def start(self):
 if self.server:
 self.server.port = self.port

-job = await self.submit_batch_script()
+await self.submit_batch_script()

 # We are called with a timeout, and if the timeout expires this function will
 # be interrupted at the next yield, and self.stop() will be called.
@@ -458,7 +451,7 @@ async def start(self):

 self.db.commit()
 self.log.info(
-"Notebook server job {0} started at {1}:{2}".format(
+"Notebook server job {} started at {}:{}".format(
 self.job_id, self.ip, self.port
 )
 )
@@ -482,7 +475,7 @@ async def stop(self, now=False):
 await asyncio.sleep(1)
 if self.job_id:
 self.log.warning(
-"Notebook server job {0} at {1}:{2} possibly failed to terminate".format(
+"Notebook server job {} at {}:{} possibly failed to terminate".format(
 self.job_id, self.ip, self.port
 )
 )
@@ -799,7 +792,7 @@ def parse_job_id(self, output):
 def state_ispending(self):
 if self.job_status:
 job_info = ET.fromstring(self.job_status).find(
-".//job_list[JB_job_number='{0}']".format(self.job_id)
+f".//job_list[JB_job_number='{self.job_id}']"
 )
 if job_info is not None:
 return job_info.attrib.get("state") == "pending"
@@ -808,7 +801,7 @@ def state_ispending(self):
 def state_isrunning(self):
 if self.job_status:
 job_info = ET.fromstring(self.job_status).find(
-".//job_list[JB_job_number='{0}']".format(self.job_id)
+f".//job_list[JB_job_number='{self.job_id}']"
 )
 if job_info is not None:
 return job_info.attrib.get("state") == "running"
@@ -817,13 +810,13 @@ def state_isrunning(self):
 def state_gethost(self):
 if self.job_status:
 queue_name = ET.fromstring(self.job_status).find(
-".//job_list[JB_job_number='{0}']/queue_name".format(self.job_id)
+f".//job_list[JB_job_number='{self.job_id}']/queue_name"
 )
 if queue_name is not None and queue_name.text:
 return queue_name.text.split("@")[1]

 self.log.error(
-"Spawner unable to match host addr in job {0} with status {1}".format(
+"Spawner unable to match host addr in job {} with status {}".format(
 self.job_id, self.job_status
 )
 )
@@ -887,12 +880,7 @@ def parse_job_id(self, output):
 raise Exception(error_msg)

 def cmd_formatted_for_batch(self):
-return (
-super(CondorSpawner, self)
-.cmd_formatted_for_batch()
-.replace('"', '""')
-.replace("'", "''")
-)
+return super().cmd_formatted_for_batch().replace('"', '""').replace("'", "''")


 class LsfSpawner(BatchSpawnerBase):
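The CondorSpawner.cmd_formatted_for_batch rewrite above is formatting only: the same chain of .replace() calls still doubles embedded quote characters, which (stated as background, not something this diff spells out) matches how HTCondor submit-file arguments escape literal quotes. A standalone sketch of that escaping, with a hypothetical helper name:

def condor_escape(cmd):
    # mirror of the .replace() chain in the diff: double both quote characters
    return cmd.replace('"', '""').replace("'", "''")


print(condor_escape('bash -c \'echo "hi"\''))
# prints: bash -c ''echo ""hi""''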
@@ -957,7 +945,7 @@ def state_gethost(self):
 return self.job_status.split(" ")[1].strip().split(":")[0]

 self.log.error(
-"Spawner unable to match host addr in job {0} with status {1}".format(
+"Spawner unable to match host addr in job {} with status {}".format(
 self.job_id, self.job_status
 )
 )

batchspawner/singleuser.py

Lines changed: 3 additions & 5 deletions
@@ -1,13 +1,11 @@
 import os
 import sys
-
 from runpy import run_path
 from shutil import which

-from jupyterhub.utils import random_port, url_path_join
-from jupyterhub.services.auth import HubAuth
-
 import requests
+from jupyterhub.services.auth import HubAuth
+from jupyterhub.utils import random_port, url_path_join


 def main(argv=None):
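For context on the hunk that follows: main() in singleuser.py resolves the wrapped command on PATH, appends a --port option (now built with an f-string), and re-executes the target script in-process. A minimal sketch of that pattern; the helper name and argument handling below are illustrative, not the module's actual code:

import sys
from runpy import run_path
from shutil import which


def exec_with_port(argv, port):
    # resolve e.g. "jupyterhub-singleuser" to an absolute path on PATH
    cmd_path = which(argv[0])
    # hand the target script its own argv plus the injected port flag
    sys.argv = argv + [f"--port={port}"]
    # execute the resolved script as if it were run directly
    run_path(cmd_path, run_name="__main__")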
@@ -35,7 +33,7 @@ def main(argv=None):
 )

 cmd_path = which(sys.argv[1])
-sys.argv = sys.argv[1:] + ["--port={}".format(port)]
+sys.argv = sys.argv[1:] + [f"--port={port}"]
 run_path(cmd_path, run_name="__main__")

batchspawner/tests/test_spawners.py

Lines changed: 18 additions & 18 deletions
@@ -4,11 +4,11 @@
 import re
 import time

+import pytest
 from jupyterhub import orm
 from jupyterhub.objects import Hub, Server
 from jupyterhub.user import User
 from traitlets import Unicode
-import pytest

 from .. import BatchSpawnerRegexStates, JobStatus

@@ -41,7 +41,7 @@ async def run_command(self, *args, **kwargs):
 print("run:", run_re)
 assert (
 run_re.search(cmd) is not None
-), "Failed test: re={0} cmd={1}".format(run_re, cmd)
+), f"Failed test: re={run_re} cmd={cmd}"
 # Run command normally
 out = await super().run_command(*args, **kwargs)
 # Test that the command matches the expectations
@@ -51,7 +51,7 @@ async def run_command(self, *args, **kwargs):
 print("out:", out_re)
 assert (
 out_re.search(cmd) is not None
-), "Failed output: re={0} cmd={1} out={2}".format(out_re, cmd, out)
+), f"Failed output: re={out_re} cmd={cmd} out={out}"
 return out


@@ -282,10 +282,10 @@ async def run_command(self, cmd, input=None, env=None):
 # Test the input
 run_re = cmd_expectlist.pop(0)
 if run_re:
-print('run: "{}" [{}]'.format(cmd, run_re))
+print(f'run: "{cmd}" [{run_re}]')
 assert (
 run_re.search(cmd) is not None
-), "Failed test: re={0} cmd={1}".format(run_re, cmd)
+), f"Failed test: re={run_re} cmd={cmd}"
 # Test the stdin - will only be the batch script. For
 # each regular expression in batch_script_re_list, assert that
 # each re in that list matches the batch script.
@@ -294,7 +294,7 @@ async def run_command(self, cmd, input=None, env=None):
 for match_re in batch_script_re_list:
 assert (
 match_re.search(batch_script) is not None
-), "Batch script does not match {}".format(match_re)
+), f"Batch script does not match {match_re}"
 # Return expected output.
 out = out_list.pop(0)
 print(" --> " + out)
@@ -345,11 +345,11 @@ async def test_torque(db, event_loop):
 ), # pending
 (
 re.compile(r"sudo.*qstat"),
-"<job_state>R</job_state><exec_host>{}/1</exec_host>".format(testhost),
+f"<job_state>R</job_state><exec_host>{testhost}/1</exec_host>",
 ), # running
 (
 re.compile(r"sudo.*qstat"),
-"<job_state>R</job_state><exec_host>{}/1</exec_host>".format(testhost),
+f"<job_state>R</job_state><exec_host>{testhost}/1</exec_host>",
 ), # running
 (re.compile(r"sudo.*qdel"), "STOP"),
 (re.compile(r"sudo.*qstat"), ""),
@@ -387,11 +387,11 @@ async def test_moab(db, event_loop):
 (re.compile(r"sudo.*mdiag"), 'State="Idle"'), # pending
 (
 re.compile(r"sudo.*mdiag"),
-'State="Running" AllocNodeList="{}"'.format(testhost),
+f'State="Running" AllocNodeList="{testhost}"',
 ), # running
 (
 re.compile(r"sudo.*mdiag"),
-'State="Running" AllocNodeList="{}"'.format(testhost),
+f'State="Running" AllocNodeList="{testhost}"',
 ), # running
 (re.compile(r"sudo.*mjobctl.*-c"), "STOP"),
 (re.compile(r"sudo.*mdiag"), ""),
@@ -429,11 +429,11 @@ async def test_pbs(db, event_loop):
 (re.compile(r"sudo.*qstat"), "job_state = Q"), # pending
 (
 re.compile(r"sudo.*qstat"),
-"job_state = R\nexec_host = {}/2*1".format(testhost),
+f"job_state = R\nexec_host = {testhost}/2*1",
 ), # running
 (
 re.compile(r"sudo.*qstat"),
-"job_state = R\nexec_host = {}/2*1".format(testhost),
+f"job_state = R\nexec_host = {testhost}/2*1",
 ), # running
 (re.compile(r"sudo.*qdel"), "STOP"),
 (re.compile(r"sudo.*qstat"), ""),
@@ -556,11 +556,11 @@ async def test_condor(db, event_loop):
 script = [
 (
 re.compile(r"sudo.*condor_submit"),
-"submitted to cluster {}".format(str(testjob)),
+f"submitted to cluster {str(testjob)}",
 ),
 (re.compile(r"sudo.*condor_q"), "1,"), # pending
-(re.compile(r"sudo.*condor_q"), "2, @{}".format(testhost)), # runing
-(re.compile(r"sudo.*condor_q"), "2, @{}".format(testhost)),
+(re.compile(r"sudo.*condor_q"), f"2, @{testhost}"), # runing
+(re.compile(r"sudo.*condor_q"), f"2, @{testhost}"),
 (re.compile(r"sudo.*condor_rm"), "STOP"),
 (re.compile(r"sudo.*condor_q"), ""),
 ]
@@ -594,11 +594,11 @@ async def test_lfs(db, event_loop):
 script = [
 (
 re.compile(r"sudo.*bsub"),
-"Job <{}> is submitted to default queue <normal>".format(str(testjob)),
+f"Job <{str(testjob)}> is submitted to default queue <normal>",
 ),
 (re.compile(r"sudo.*bjobs"), "PEND "), # pending
-(re.compile(r"sudo.*bjobs"), "RUN {}".format(testhost)), # running
-(re.compile(r"sudo.*bjobs"), "RUN {}".format(testhost)),
+(re.compile(r"sudo.*bjobs"), f"RUN {testhost}"), # running
+(re.compile(r"sudo.*bjobs"), f"RUN {testhost}"),
 (re.compile(r"sudo.*bkill"), "STOP"),
 (re.compile(r"sudo.*bjobs"), ""),
 ]
