Commit 4c3bb47

Make it work with multiple directories

1 parent c3cf4d3

1 file changed: web/client/codechecker_client/cmd/store.py (+26 -25 lines)

@@ -426,31 +426,31 @@ def assemble_zip(inputs,
     contanining analysis related information into a zip file which
     will be sent to the server.
     """
-    files_to_compress = set()
+    files_to_compress: Dict[str, set] = defaultdict(set)
     analyzer_result_file_paths = []
     stats = StorageZipStatistics()
 
-    # resultfile_to_metadata = dict()
-
     for dir_path, file_paths in report_file.analyzer_result_files(inputs):
         analyzer_result_file_paths.extend(file_paths)
 
         metadata_file_path = os.path.join(dir_path, 'metadata.json')
         if os.path.exists(metadata_file_path):
-            files_to_compress.add(metadata_file_path)
-            # for src_dir_path, mip in MetadataInfoParser(metadata_file_path):
+            files_to_compress[os.path.dirname(metadata_file_path)] \
+                .add(metadata_file_path)
 
         skip_file_path = os.path.join(dir_path, 'skip_file')
         if os.path.exists(skip_file_path):
             with open(skip_file_path, 'r') as f:
                 LOG.info("Found skip file %s with the following content:\n%s",
                          skip_file_path, f.read())
 
-            files_to_compress.add(skip_file_path)
+            files_to_compress[os.path.dirname(skip_file_path)] \
+                .add(skip_file_path)
 
         review_status_file_path = os.path.join(dir_path, 'review_status.yaml')
         if os.path.exists(review_status_file_path):
-            files_to_compress.add(review_status_file_path)
+            files_to_compress[os.path.dirname(review_status_file_path)]\
+                .add(review_status_file_path)
 
     LOG.debug(f"Processing {len(analyzer_result_file_paths)} report files ...")
 
@@ -463,7 +463,7 @@ def assemble_zip(inputs,
     changed_files = set()
     file_paths = set()
     file_report_positions: FileReportPositions = defaultdict(set)
-    unique_reports = dict()
+    unique_reports: Dict[str, Dict[str, List[Report]]] = defaultdict(dict)
 
     unique_report_hashes = set()
     for file_path, reports in analyzer_result_file_reports.items():
@@ -477,7 +477,8 @@ def assemble_zip(inputs,
             report_path_hash = get_report_path_hash(report)
             if report_path_hash not in unique_report_hashes:
                 unique_report_hashes.add(report_path_hash)
-                unique_reports.setdefault(report.analyzer_name, []) \
+                unique_reports[os.path.dirname(file_path)]\
+                    .setdefault(report.analyzer_name, []) \
                     .append(report)
                 stats.add_report(report)
 
@@ -486,15 +487,17 @@ def assemble_zip(inputs,
 
     # TODO: Doesn't support storing multiple report dirs.
     if unique_reports:
-        for analyzer_name, reports in unique_reports.items():
-            if not analyzer_name:
-                analyzer_name = 'unknown'
-            _, tmpfile = tempfile.mkstemp(f'-{analyzer_name}.plist')
-
-            report_file.create(tmpfile, reports, checker_labels,
-                               AnalyzerInfo(analyzer_name))
-            LOG.debug(f"Stored '{analyzer_name}' unique reports in {tmpfile}.")
-            files_to_compress.add(tmpfile)
+        for dirname, analyzer_reports in unique_reports.items():
+            for analyzer_name, reports in analyzer_reports.items():
+                if not analyzer_name:
+                    analyzer_name = 'unknown'
+                _, tmpfile = tempfile.mkstemp(f'-{analyzer_name}.plist')
+
+                report_file.create(tmpfile, reports, checker_labels,
+                                   AnalyzerInfo(analyzer_name))
+                LOG.debug(f"Stored '{analyzer_name}' unique reports "
+                          f"in {tmpfile}.")
+                files_to_compress[dirname].add(tmpfile)
 
     if changed_files:
         reports_helper.dump_changed_files(changed_files)
@@ -552,15 +555,13 @@ def assemble_zip(inputs,
     with zipfile.ZipFile(zip_file, 'a', allowZip64=True) as zipf:
         # Add the files to the zip which will be sent to the server.
 
-        assert len(inputs) > 0
-        if len(files_to_compress) > 0:
-            # Create a unique report directory name.
-            report_dir_name = hashlib.md5(os.path.dirname(
-                list(files_to_compress)[0]).encode('utf-8')).hexdigest()
-
-            for file_path in files_to_compress:
+        for dirname, files in files_to_compress.items():
+            for file_path in files:
                 _, file_name = os.path.split(file_path)
 
+                # Create a unique report directory name.
+                report_dir_name = \
+                    hashlib.md5(dirname.encode('utf-8')).hexdigest()
                 zip_target = \
                     os.path.join('reports', report_dir_name, file_name)
                 zipf.write(file_path, zip_target)
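
The heart of the change is the last hunk: instead of hashing the parent directory of whichever file happened to come first in a flat set, every input report directory now gets its own reports/<md5(dirname)>/ subtree in the archive. Below is a minimal self-contained sketch of that layout; collect and write_zip are illustrative names, not CodeChecker's actual helpers.

# Sketch (assumed helper names) of the per-directory zip layout this
# commit introduces: files are grouped by the report directory they came
# from, and each directory is written under a stable md5-derived name, so
# multiple inputs no longer collide on files like metadata.json.
import hashlib
import os
import zipfile
from collections import defaultdict
from typing import Dict, Set

files_to_compress: Dict[str, Set[str]] = defaultdict(set)

def collect(file_path: str) -> None:
    # Key by the directory the file came from, mirroring the patch.
    files_to_compress[os.path.dirname(file_path)].add(file_path)

def write_zip(zip_path: str) -> None:
    with zipfile.ZipFile(zip_path, 'a', allowZip64=True) as zipf:
        for dirname, files in files_to_compress.items():
            # One unique report directory name per input directory.
            report_dir_name = hashlib.md5(dirname.encode('utf-8')).hexdigest()
            for file_path in files:
                file_name = os.path.basename(file_path)
                zipf.write(file_path,
                           os.path.join('reports', report_dir_name, file_name))

For two inputs such as reports_a and reports_b, the archive then contains reports/<md5 of reports_a>/metadata.json and reports/<md5 of reports_b>/metadata.json side by side, whereas the old code computed a single hash from list(files_to_compress)[0] and flattened every file into that one directory.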

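The unique_reports change follows the same pattern: the flat analyzer-name-to-reports dict becomes a two-level directory-to-analyzer-to-reports mapping, so the merged per-analyzer plist written via tempfile.mkstemp can be filed under the right directory bucket. A rough sketch of just that data structure, where Report is a hypothetical stand-in for the real report class:

# Two-level grouping as in the hunks above; Report is an illustrative
# stand-in, not the real CodeChecker report type.
import os
from collections import defaultdict
from typing import Dict, List, NamedTuple

class Report(NamedTuple):  # illustrative stand-in
    analyzer_name: str
    message: str

unique_reports: Dict[str, Dict[str, List[Report]]] = defaultdict(dict)

def remember(file_path: str, report: Report) -> None:
    # Directory first, then analyzer name, matching the patched loop.
    unique_reports[os.path.dirname(file_path)] \
        .setdefault(report.analyzer_name, []) \
        .append(report)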