2626import shutil
2727
2828from collections import defaultdict , namedtuple
29- from contextlib import contextmanager
29+ from contextlib import contextmanager , nullcontext
3030from datetime import timedelta
3131from threading import Timer
3232from typing import Dict , Iterable , List , Set , Tuple
@@ -55,7 +55,7 @@ def assemble_blame_info(_, __) -> int:
5555from codechecker_client .task_client import await_task_termination
5656from codechecker_common import arg , logger , cmd_config
5757from codechecker_common .checker_labels import CheckerLabels
58- from codechecker_common .compatibility .multiprocessing import Pool
58+ from codechecker_common .compatibility .multiprocessing import Pool , cpu_count
5959from codechecker_common .source_code_comment_handler import \
6060 SourceCodeCommentHandler
6161from codechecker_common .util import format_size , load_json , strtobool
@@ -257,6 +257,16 @@ def add_arguments_to_parser(parser):
257257 "match will be removed. You may also use Unix "
258258 "shell-like wildcards (e.g. '/*/jsmith/')." )
259259
260+ parser .add_argument ('-j' , '--jobs' ,
261+ type = int ,
262+ dest = "jobs" ,
263+ default = cpu_count (),
264+ help = "Number of parallel jobs that process the "
265+ "input directory to upload. If the directory is "
266+ "located on NFS drive, then the storage may hang "
267+ "indefinitely in case of parallel processing. "
268+ "Choosing value 1 doesn't use sub-processes." )
269+
260270 parser .add_argument ('--zip-loc' ,
261271 type = str ,
262272 metavar = 'PATH' ,
@@ -424,18 +434,23 @@ def get_reports(
def parse_analyzer_result_files(
    analyzer_result_files: Iterable[str],
    checker_labels: CheckerLabels,
    jobs: int = cpu_count()
) -> AnalyzerResultFileReports:
    """Get reports from the given analyzer result files.

    Parses each analyzer result file into its list of reports and returns a
    mapping of file path -> reports.

    analyzer_result_files -- Paths of the analyzer result files to parse.
    checker_labels -- Checker label provider forwarded to the parser.
    jobs -- Number of parallel worker processes. Any value <= 1 parses the
            files sequentially in-process, without spawning a pool (useful
            e.g. for report directories on NFS, where parallel processing
            may hang indefinitely).
    """
    analyzer_result_file_reports: AnalyzerResultFileReports = defaultdict(list)

    # Treat any non-positive job count as sequential too: Pool() would raise
    # on max_workers <= 0, and "no parallelism" is the sane interpretation.
    sequential = jobs <= 1

    # nullcontext() yields None, so 'executor' is only usable (and used) on
    # the parallel branch; the sequential branch falls back to built-in map.
    ctx = nullcontext() if sequential else Pool(max_workers=jobs)

    with ctx as executor:
        map_fn = map if sequential else executor.map

        parse = functools.partial(get_reports, checker_labels=checker_labels)
        # Hoist the loop-invariant total out of the per-file debug message.
        total = len(analyzer_result_files)

        # enumerate from 1 so progress reads "[1/N]" .. "[N/N]" instead of
        # "[0/N]" .. "[N-1/N]".
        for idx, (file_path, reports) in enumerate(
                zip(analyzer_result_files,
                    map_fn(parse, analyzer_result_files)),
                start=1):
            LOG.debug(f"[{idx}/{total}] Parsed '{file_path}' ...")
            analyzer_result_file_reports[file_path] = reports

    return analyzer_result_file_reports
441456
@@ -454,7 +469,8 @@ def assemble_zip(inputs,
454469 client ,
455470 prod_client ,
456471 checker_labels : CheckerLabels ,
457- tmp_dir : str ):
472+ tmp_dir : str ,
473+ jobs : int ):
458474 """Collect and compress report and source files, together with files
459475 contanining analysis related information into a zip file which
460476 will be sent to the server.
@@ -491,9 +507,8 @@ def assemble_zip(inputs,
491507
492508 LOG .debug (f"Processing { len (analyzer_result_file_paths )} report files ..." )
493509
494- with Pool () as executor :
495- analyzer_result_file_reports = parse_analyzer_result_files (
496- analyzer_result_file_paths , checker_labels , executor .map )
510+ analyzer_result_file_reports = parse_analyzer_result_files (
511+ analyzer_result_file_paths , checker_labels , jobs )
497512
498513 LOG .info ("Processing report files done." )
499514
@@ -959,7 +974,8 @@ def main(args):
959974 client ,
960975 prod_client ,
961976 context .checker_labels ,
962- temp_dir_path )
977+ temp_dir_path ,
978+ args .jobs )
963979 except ReportLimitExceedError :
964980 sys .exit (1 )
965981 except Exception as ex :
0 commit comments