Commit f26e8ab

Alan Christie authored and committed
fix: Use of logging.Level rather than Level
1 parent 9c74317 commit f26e8ab

File tree

1 file changed: +45 −83 lines


viewer/target_loader.py

+45 −83 lines changed
@@ -1,3 +1,4 @@
+import contextlib
 import functools
 import hashlib
 import logging
@@ -76,12 +77,6 @@ class UploadState(str, Enum):
     CANCELED = "CANCELED"


-class Level(str, Enum):
-    INFO = "INFO"
-    WARNING = "WARNING"
-    FATAL = "FATAL"
-
-
 @dataclass
 class MetadataObject:
     """Data structure to store freshly created model instances.
@@ -121,10 +116,12 @@ class ProcessedObject:
 @dataclass
 class UploadReportEntry:
     message: str
-    level: Level | None = None
+    level: int | None = None

     def __str__(self):
-        return ": ".join([k for k in (self.level, self.message) if k])
+        if self.level is None:
+            return self.message
+        return f"{logging.getLevelName(self.level)}: {self.message}"


 @dataclass
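Note: the __str__ rewrite above and the log() rewrite in the next hunk rely only on the standard logging module. A minimal illustrative sketch of the stdlib behaviour being relied on (the logger name below is made up for the example, not taken from the repository):

import logging

logger = logging.getLogger("demo")  # illustrative logger, not from the repo
logging.basicConfig(level=logging.INFO)

# Standard levels are plain ints, so a field typed "int | None" can hold them
# and they can be compared directly.
assert logging.INFO == 20 and logging.WARNING == 30 and logging.ERROR == 40

# getLevelName() recovers the label that UploadReportEntry.__str__ prepends.
assert logging.getLevelName(logging.ERROR) == "ERROR"

# logger.log(level, msg) dispatches at the given severity, which is what lets
# the old if/elif chain over Level.FATAL/WARNING/INFO collapse to one call.
logger.log(logging.WARNING, "Multiple upload directories in archive")

With this in place, the only special case left in UploadReport.log() is mapping logging.ERROR onto the failed/REPORTING upload state.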
@@ -137,18 +134,12 @@ class UploadReport:
     def __post_init__(self) -> None:
         self.task_id = f"task {self.task.request.id}: " if self.task else ""

-    def log(self, level: Level, message: str) -> None:
+    def log(self, level: int, message: str) -> None:
         msg = f"{self.task_id}{message}"
-        if level == Level.FATAL:
+        if level == logging.ERROR:
             self.failed = True
             self.upload_state = UploadState.REPORTING
-            logger.error(msg)
-        elif level == Level.WARNING:
-            logger.warning(msg)
-        else:
-            # must be info
-            logger.info(msg)
-
+        logger.log(level, msg)
         self.stack.append(UploadReportEntry(level=level, message=message))
         self._update_task(self.json())

@@ -169,17 +160,14 @@ def json(self):
     def _update_task(self, message: str | list) -> None:
         if not self.task:
             return
-        try:
+        with contextlib.suppress(AttributeError):
             logger.debug("taskstuff %s", dir(self.task))
             self.task.update_state(
                 state=self.upload_state,
                 meta={
                     "description": message,
                 },
             )
-        except AttributeError:
-            # no task passed to method, nothing to do
-            pass


 def _validate_bundle_against_mode(config_yaml: Dict[str, Any]) -> Optional[str]:
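Note: contextlib.suppress, used here and again in the process_site_observation and process_bundle hunks below, is the stdlib shorthand for a try/except block whose handler does nothing. A small sketch for illustration only (none of these names come from the repository):

import contextlib

task = None  # stand-in for the "no task passed to method" case the old comment described

# Equivalent to: try: task.update_state(...) except AttributeError: pass
with contextlib.suppress(AttributeError):
    task.update_state(state="REPORTING")  # AttributeError on None, silently ignored

# Execution continues here; the exception never propagates.
print("continued after the suppressed AttributeError")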
@@ -343,14 +331,14 @@ def wrapper_create_objects(
                     instance_data.key,
                     instance_data.fields,
                 )
-                self.report.log(Level.FATAL, msg)
+                self.report.log(logging.ERROR, msg)
                 failed = failed + 1
             except IntegrityError:
                 msg = "{} object {} failed to save".format(
                     instance_data.model_class._meta.object_name,  # pylint: disable=protected-access
                     instance_data.key,
                 )
-                self.report.log(Level.FATAL, msg)
+                self.report.log(logging.ERROR, msg)
                 failed = failed + 1

             if not obj:
@@ -377,7 +365,7 @@ def wrapper_create_objects(
             created,
             existing,
         )  # pylint: disable=protected-access
-        self.report.log(Level.INFO, msg)
+        self.report.log(logging.INFO, msg)

         return result

@@ -455,7 +443,7 @@ def __init__(
         # Initial (reassuring message)
         bundle_filename = os.path.basename(self.bundle_path)
         self.report.log(
-            Level.INFO,
+            logging.INFO,
             f"Created TargetLoader for '{bundle_filename}' proposal_ref='{proposal_ref}'",
         )

@@ -480,7 +468,7 @@ def validate_map_files(
         """

         def logfunc(_, message):
-            self.report.log(Level.WARNING, message)
+            self.report.log(logging.WARNING, message)

         result = []
         for item in file_struct:
@@ -531,9 +519,9 @@ def validate_files(

         def logfunc(key, message):
             if key in required:
-                self.report.log(Level.FATAL, message)
+                self.report.log(logging.ERROR, message)
             else:
-                self.report.log(Level.WARNING, message)
+                self.report.log(logging.WARNING, message)

         result = {}
         for key, value in file_struct.items():
@@ -598,16 +586,13 @@ def _check_file(
         key: str,
         logfunc: Callable,
     ) -> Tuple[str | None, str | None]:
-        file_hash = value.get("sha256", None)
+        file_hash = value.get("sha256")
         try:
             filename = value["file"]
         except KeyError:
             # this is rather unexpected, haven't seen it yet
             filename = None
-            logfunc(
-                key,
-                "{}: malformed dict, key 'file' missing".format(obj_identifier),
-            )
+            logfunc(key, f"{obj_identifier}: malformed dict, key 'file' missing")
         return filename, file_hash

     def _check_file_hash(
@@ -621,15 +606,11 @@ def _check_file_hash(
         file_path = self.raw_data.joinpath(filename)
         if file_path.is_file():
             if file_hash and file_hash != calculate_sha256(file_path):
-                logfunc(key, "Invalid hash for file {}".format(filename))
+                logfunc(key, f"Invalid hash for file {filename}")
         else:
             logfunc(
                 key,
-                "{} referenced in {}: {} but not found in archive".format(
-                    key,
-                    METADATA_FILE,
-                    obj_identifier,
-                ),
+                f"{key} referenced in {METADATA_FILE}: {obj_identifier} but not found in archive",
             )

     @create_objects(depth=1)
@@ -714,7 +695,8 @@ def process_experiment(
         else:
             exp_type = -1
             self.report.log(
-                Level.FATAL, f"Unexpected 'type' '{dtype}' value for {experiment_name}"
+                logging.ERROR,
+                f"Unexpected 'type' '{dtype}' value for {experiment_name}",
             )

         dstatus = extract(key="status")
@@ -731,7 +713,7 @@ def process_experiment(
         except KeyError:
             status = -1
             self.report.log(
-                Level.FATAL, f"Unexpected status '{dstatus}' for {experiment_name}"
+                logging.ERROR, f"Unexpected status '{dstatus}' for {experiment_name}"
             )

         # TODO: unhandled atm
@@ -811,10 +793,8 @@ def process_compound(
                 else "ligand_cif"
             )
             self.report.log(
-                Level.WARNING,
-                "{} missing from {} in '{}' experiment section".format(
-                    exc, smiles, protein_name
-                ),
+                logging.WARNING,
+                f"{exc} missing from {smiles} in '{protein_name}' experiment section",
             )
             return None

@@ -907,7 +887,7 @@ def process_quat_assembly(
             item_name=assembly_name,
         )

-        chains = extract(key="chains", level=Level.WARNING)
+        chains = extract(key="chains", level=logging.WARNING)

         fields = {
             "name": assembly_name,
@@ -1180,7 +1160,7 @@ def process_site_observation(
             data=data,
             section_name="crystals",
             item_name=experiment_id,
-            level=Level.WARNING,
+            level=logging.WARNING,
         )

         experiment = experiments[experiment_id].instance
@@ -1228,18 +1208,13 @@ def process_site_observation(
         logger.debug('looking for ligand_mol: %s', ligand_mol)
         mol_data = None
         if ligand_mol:
-            try:
+            with contextlib.suppress(TypeError, FileNotFoundError):
                 with open(
                     self.raw_data.joinpath(ligand_mol),
                     "r",
                     encoding="utf-8",
                 ) as f:
                     mol_data = f.read()
-            except (TypeError, FileNotFoundError):
-                # this site observation doesn't have a ligand. perfectly
-                # legitimate case
-                pass
-
         smiles = extract(key="ligand_smiles")

         fields = {
@@ -1287,19 +1262,15 @@ def process_bundle(self):
             upload_dir = next(up_iter)
         except StopIteration as exc:
             msg = "Upload directory missing from uploaded file"
-            self.report.log(Level.FATAL, msg)
+            self.report.log(logging.ERROR, msg)
             # what do you mean unused?!
             raise StopIteration(
                 msg
             ) from exc  # pylint: disable=# pylint: disable=protected-access

-        try:
+        with contextlib.suppress(StopIteration):
             upload_dir = next(up_iter)
-            self.report.log(Level.WARNING, "Multiple upload directories in archive")
-        except StopIteration:
-            # just a warning, ignoring the second one
-            pass
-
+            self.report.log(logging.WARNING, "Multiple upload directories in archive")
         # now that target name is not included in path, I don't need
         # it here, need it just before creating target object. Also,
         # there's probably no need to throw a fatal here, I can
@@ -1309,7 +1280,7 @@ def process_bundle(self):
             config_file = next(config_it)
         except StopIteration as exc:
             msg = f"config file missing from {str(upload_dir)}"
-            self.report.log(Level.FATAL, msg)
+            self.report.log(logging.ERROR, msg)
             raise StopIteration() from exc

         # load necessary files
@@ -1325,15 +1296,15 @@ def process_bundle(self):
         # Validate the upload's XCA version information against any MODE-based conditions.
         # An error message is returned if the bundle is not supported.
         if vb_err_msg := _validate_bundle_against_mode(config):
-            self.report.log(Level.FATAL, vb_err_msg)
+            self.report.log(logging.ERROR, vb_err_msg)
             raise AssertionError(vb_err_msg)

         # Target (very least) is required
         try:
             self.target_name = config["target_name"]
         except KeyError as exc:
             msg = "target_name missing in config file"
-            self.report.log(Level.FATAL, msg)
+            self.report.log(logging.ERROR, msg)
             raise KeyError(msg) from exc

         # moved this bit from init
@@ -1358,7 +1329,7 @@ def process_bundle(self):
            # remove uploaded file
            Path(self.bundle_path).unlink()
            msg = f"{self.bundle_name} already uploaded"
-           self.report.log(Level.FATAL, msg)
+           self.report.log(logging.ERROR, msg)
            raise FileExistsError(msg)

        if project_created and committer.pk == settings.ANONYMOUS_USER:
@@ -1543,7 +1514,7 @@ def _load_yaml(self, yaml_file: Path) -> dict | None:
             contents = yaml.safe_load(file)
         except FileNotFoundError:
             self.report.log(
-                Level.FATAL, f"File {yaml_file.name} not found in data archive"
+                logging.ERROR, f"File {yaml_file.name} not found in data archive"
             )

         return contents
@@ -1558,7 +1529,7 @@ def _get_yaml_blocks(self, yaml_data: dict, blocks: Iterable) -> list[dict]:
                 result.append(yaml_data[block])
             except KeyError:
                 msg = error_text.format(block)
-                self.report.log(Level.FATAL, msg)
+                self.report.log(logging.ERROR, msg)

         return result

@@ -1568,26 +1539,18 @@ def _extract(
         key: str | int,
         section_name: str,
         item_name: str,
-        level: Level = Level.FATAL,
+        level: int = logging.ERROR,
         return_type: type = str,
     ) -> Any:
         try:
             result = data[key]
         except KeyError as exc:
-            if level == Level.INFO:
-                result = ""
-            else:
-                result = "missing"
+            result = "" if level == logging.INFO else "missing"
             if return_type == list:
                 result = [result]

             self.report.log(
-                level,
-                "{} missing from {}: {} section".format(
-                    exc,
-                    section_name,
-                    item_name,
-                ),
+                level, f"{exc} missing from {section_name}: {item_name} section"
             )

         return result
@@ -1631,11 +1594,10 @@ def _tag_site_observations(self, site_observation_objects, category):
             ]

             for tag in tags:
-                if tag not in groups.keys():
-                    groups[tag] = [obj.instance]
-                else:
+                if tag in groups:
                     groups[tag].append(obj.instance)
-
+                else:
+                    groups[tag] = [obj.instance]
         # I suspect I need to group them by site..
         for tag, so_list in groups.items():
             try:
@@ -1727,7 +1689,7 @@ def load_target(

     # Decompression can take some time, so we want to report progress
     bundle_filename = os.path.basename(data_bundle)
-    target_loader.report.log(Level.INFO, f"Decompressing '{bundle_filename}'")
+    target_loader.report.log(logging.INFO, f"Decompressing '{bundle_filename}'")

     try:
         # archive is first extracted to temporary dir and moved later
@@ -1743,7 +1705,7 @@ def load_target(
         target_loader.experiment_upload.message = exc.args[0]
         raise FileNotFoundError(msg) from exc

-    target_loader.report.log(Level.INFO, f"Decompressed '{bundle_filename}'")
+    target_loader.report.log(logging.INFO, f"Decompressed '{bundle_filename}'")

     try:
         with transaction.atomic():
