feat(explorer): add -j argument
lukasrothenberger committed Oct 23, 2024
1 parent 2692d5a commit 2f216c2
Showing 7 changed files with 37 additions and 21 deletions.
6 changes: 4 additions & 2 deletions discopop_explorer/__main__.py
@@ -69,12 +69,13 @@ def parse_args() -> ExplorerArguments:
         help="Dump DetectionResult object to JSON file. If a path is given, the DetectionResult object is written to the given file, otherwise to detection_result_dump.json. Contents are equivalent to the json output. NOTE: This dump contains a dump of the PET Graph!",
     )
     parser.add_argument(
-        "--enable-patterns", type=str, nargs="?", default="reduction,doall,pipeline,geodec",
-        help="Specify comma-separated list of pattern types to be identified. Options: reduction,doall,pipeline,geodec,simplegpu. Default: reduction,doall,pipeline,geodec",
+        "--enable-patterns", type=str, nargs="?", default="reduction,doall",
+        help="Specify comma-separated list of pattern types to be identified. Options: reduction,doall,pipeline,geodec,simplegpu. Default: reduction,doall",
     )
     parser.add_argument("--load-existing-doall-and-reduction-patterns", action="store_true", help="Skip pattern detection and insert existing patterns.json contents into the created detection_result.json")
     parser.add_argument("--log", type=str, default="WARNING", help="Specify log level: DEBUG, INFO, WARNING, ERROR, CRITICAL")
     parser.add_argument("--write-log", action="store_true", help="Create Logfile.")
+    parser.add_argument("-j", "--jobs", type=int, help="Allow the use of N threads. Use 0 or 1 to disable threading. Default: Unlimited", default=None)

     # EXPERIMENTAL FLAGS:
     # temporary flag for microbenchmark file
@@ -160,6 +161,7 @@ def parse_args() -> ExplorerArguments:
         write_log=arguments.write_log,
         load_existing_doall_and_reduction_patterns=arguments.load_existing_doall_and_reduction_patterns,
         collect_statistics=arguments.enable_statistics,
+        jobs=arguments.jobs,
     )
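For context, a minimal standalone sketch of how the new flag parses, assuming only the argparse call shown in the hunk above (all other parser options omitted):

    import argparse

    parser = argparse.ArgumentParser()
    # Mirrors the added option: an optional int that defaults to None,
    # where None is documented as "unlimited" and 0/1 disable threading.
    parser.add_argument(
        "-j", "--jobs", type=int, default=None,
        help="Allow the use of N threads. Use 0 or 1 to disable threading. Default: Unlimited",
    )

    print(parser.parse_args([]).jobs)               # None -> threading unrestricted
    print(parser.parse_args(["-j", "4"]).jobs)      # 4    -> up to 4 worker processes
    print(parser.parse_args(["--jobs", "1"]).jobs)  # 1    -> serial execution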
4 changes: 4 additions & 0 deletions discopop_explorer/discopop_explorer.py
@@ -58,6 +58,7 @@ class ExplorerArguments(GeneralArguments):
     reduction_file: str
     file_mapping_file: str
     plugins: List[str]
+    jobs: Optional[int]
     # output and formatting
     enable_json_file: Optional[str]
     enable_profiling_dump_file: Optional[str] # None means no dump, otherwise the path
@@ -122,6 +123,7 @@ def __run(
     enable_detection_of_scheduling_clauses: bool = False,
     hotspot_functions: Optional[Dict[HotspotType, List[Tuple[int, int, HotspotNodeType, str, float]]]] = None,
     load_existing_doall_and_reduction_patterns: bool = False,
+    jobs: Optional[int] = None,
 ) -> DetectionResult:
     pet = PEGraphX.from_parsed_input(*parse_inputs(cu_xml, dep_file, reduction_file, file_mapping))  # type: ignore
     print("PET CREATION FINISHED.")
@@ -170,6 +172,7 @@ def __run(
         enable_task_pattern,
         enable_detection_of_scheduling_clauses,
         hotspot_functions,
+        jobs,
     )

     for plugin_name in plugins:
@@ -240,6 +243,7 @@ def run(arguments: ExplorerArguments) -> None:
         enable_detection_of_scheduling_clauses=arguments.detect_scheduling_clauses,
         hotspot_functions=hotspots,
         load_existing_doall_and_reduction_patterns=arguments.load_existing_doall_and_reduction_patterns,
+        jobs=arguments.jobs,
     )

     end = time.time()
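The plumbing above follows a simple pattern: the CLI value lands in ExplorerArguments, is forwarded to __run, and from there to detect_patterns, unchanged at every step. A reduced sketch of that flow with hypothetical stand-in names (Args, run, detect_patterns below are not the real classes and functions):

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class Args:  # hypothetical stand-in for ExplorerArguments
        jobs: Optional[int] = None  # None == unlimited, matching the CLI default

    def detect_patterns(jobs: Optional[int]) -> None:  # stand-in for the detector entry point
        print("detectors received jobs =", jobs)

    def run(arguments: Args) -> None:  # stand-in for __run
        detect_patterns(jobs=arguments.jobs)  # value is forwarded unchanged

    run(Args(jobs=4))  # -> detectors received jobs = 4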
5 changes: 3 additions & 2 deletions discopop_explorer/pattern_detection.py
@@ -76,6 +76,7 @@ def detect_patterns(
         enable_task_pattern: bool,
         enable_detection_of_scheduling_clauses: bool,
         hotspots: Optional[Dict[HotspotType, List[Tuple[int, int, HotspotNodeType, str, float]]]],
+        jobs: Optional[int],
     ) -> DetectionResult:
         """Runs pattern discovery on the CU graph"""
         self.__merge(False, True)
@@ -88,11 +89,11 @@

         if "*" in enable_patterns or "reduction" in enable_patterns:
             print("REDUCTIONS...")
-            res.patterns.reduction = detect_reduction(self.pet, hotspots)
+            res.patterns.reduction = detect_reduction(self.pet, hotspots, jobs)
             print("\tDONE.")
         if "*" in enable_patterns or "doall" in enable_patterns:
             print("DOALL...")
-            res.patterns.do_all = detect_do_all(self.pet, hotspots, res.patterns.reduction)
+            res.patterns.do_all = detect_do_all(self.pet, hotspots, res.patterns.reduction, jobs)
             print("\tDONE.")
         if "*" in enable_patterns or "pipeline" in enable_patterns:
             print("PIPELINE...")
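Note that only the reduction and do-all calls receive the new argument; the pipeline branch is unchanged. A reduced sketch of the dispatch (detector calls replaced by prints, and enable_patterns modeled as a list here rather than the comma-separated string the real code uses):

    from typing import List, Optional

    def detect_patterns(enable_patterns: List[str], jobs: Optional[int]) -> None:
        # After this commit, only the reduction and do-all branches are jobs-aware.
        if "*" in enable_patterns or "reduction" in enable_patterns:
            print("REDUCTIONS with jobs =", jobs)
        if "*" in enable_patterns or "doall" in enable_patterns:
            print("DOALL with jobs =", jobs)
        if "*" in enable_patterns or "pipeline" in enable_patterns:
            print("PIPELINE (unchanged, no jobs argument)")

    detect_patterns(["reduction", "doall"], jobs=8)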
13 changes: 9 additions & 4 deletions discopop_explorer/pattern_detectors/do_all_detector.py
@@ -84,6 +84,7 @@ def run_detection(
     pet: PEGraphX,
     hotspots: Optional[Dict[HotspotType, List[Tuple[int, int, HotspotNodeType, str, float]]]],
     reduction_info: List[ReductionInfo],
+    jobs: Optional[int] = None,
 ) -> List[DoAllInfo]:
     """Search for do-all loop pattern
@@ -107,10 +108,14 @@
     nodes = cast(List[LoopNode], filter_for_hotspots(pet, cast(List[Node], nodes), hotspots))

     param_list = [(node) for node in nodes]
-    with Pool(initializer=__initialize_worker, initargs=(pet,)) as pool:
-        tmp_result = list(tqdm.tqdm(pool.imap_unordered(__check_node, param_list), total=len(param_list)))
-        for local_result in tmp_result:
-            result += local_result
+    if jobs is None or jobs > 1:
+        with Pool(initializer=__initialize_worker, initargs=(pet,)) as pool:
+            tmp_result = list(tqdm.tqdm(pool.imap_unordered(__check_node, param_list), total=len(param_list)))
+            for local_result in tmp_result:
+                result += local_result
+    else:
+        for param_tpl in param_list:
+            result += __check_node(param_tpl)
     print("GLOBAL RES: ", [r.start_line for r in result])

     for pattern in result:
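The gating idiom introduced here is worth spelling out: `jobs is None or jobs > 1` selects multiprocessing, while 0 or 1 falls back to a plain loop in the current process. A self-contained sketch of the same pattern follows; note that the do-all hunk above creates its Pool without a processes argument, so the explicit cap below is an illustrative assumption (the reduction detector further down does pass one):

    from multiprocessing import Pool
    from typing import List, Optional

    def check_node(node: int) -> List[int]:  # stand-in for __check_node
        return [node * node]

    def run_detection(nodes: List[int], jobs: Optional[int] = None) -> List[int]:
        result: List[int] = []
        if jobs is None or jobs > 1:
            # processes=None lets multiprocessing spawn os.cpu_count() workers.
            with Pool(processes=jobs) as pool:
                for local_result in pool.imap_unordered(check_node, nodes):
                    result += local_result
        else:
            # jobs == 0 or jobs == 1: stay in the current process.
            for node in nodes:
                result += check_node(node)
        return result

    if __name__ == "__main__":
        print(run_detection([1, 2, 3], jobs=1))  # serial path
        print(run_detection([1, 2, 3], jobs=2))  # pooled path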
7 changes: 3 additions & 4 deletions (file name missing from capture; counts derived from the commit totals)
@@ -111,10 +111,9 @@ def run_detection(
     nodes = cast(List[FunctionNode], filter_for_hotspots(pet, cast(List[Node], nodes), hotspots))

     param_list = [(node) for node in nodes]
-    with Pool(initializer=__initialize_worker, initargs=(pet,)) as pool:
-        tmp_result = list(tqdm.tqdm(pool.imap_unordered(__check_node, param_list), total=len(param_list)))
-        for local_result in tmp_result:
-            result += local_result
+
+    for param_tpl in param_list:
+        result += __check_node(param_tpl)
     print("GLOBAL RES: ", result)

     for pattern in result:
7 changes: 3 additions & 4 deletions discopop_explorer/pattern_detectors/pipeline_detector.py
@@ -180,10 +180,9 @@ def run_detection(
     nodes = cast(List[LoopNode], filter_for_hotspots(pet, cast(List[Node], nodes), hotspots))

     param_list = [(node) for node in nodes]
-    with Pool(initializer=__initialize_worker, initargs=(pet,)) as pool:
-        tmp_result = list(tqdm.tqdm(pool.imap_unordered(__check_node, param_list), total=len(param_list)))
-        for local_result in tmp_result:
-            result += local_result
+
+    for param_tpl in param_list:
+        result += __check_node(param_tpl)
     print("GLOBAL RES: ", result)

     for pattern in result:
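This hunk, like the similar one just before it, removes the multiprocessing pool outright rather than gating it on jobs: these detectors always run serially after this commit. A self-contained sketch of the replacement loop, with a hypothetical check_node standing in for __check_node:

    from typing import List

    def check_node(node: str) -> List[str]:  # stand-in for __check_node
        return [f"pattern@{node}"]

    param_list = ["loop1", "loop2"]
    result: List[str] = []
    for param_tpl in param_list:  # one node at a time, in the current process
        result += check_node(param_tpl)
    print(result)  # ['pattern@loop1', 'pattern@loop2']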
16 changes: 11 additions & 5 deletions discopop_explorer/pattern_detectors/reduction_detector.py
@@ -75,7 +75,9 @@ def __str__(self) -> str:


 def run_detection(
-    pet: PEGraphX, hotspots: Optional[Dict[HotspotType, List[Tuple[int, int, HotspotNodeType, str, float]]]]
+    pet: PEGraphX,
+    hotspots: Optional[Dict[HotspotType, List[Tuple[int, int, HotspotNodeType, str, float]]]],
+    jobs: Optional[int] = None,
 ) -> List[ReductionInfo]:
     """Search for reduction pattern
@@ -92,10 +94,14 @@
     nodes = cast(List[LoopNode], filter_for_hotspots(pet, cast(List[Node], nodes), hotspots))

     param_list = [(node) for node in nodes]
-    with Pool(initializer=__initialize_worker, initargs=(pet,)) as pool:
-        tmp_result = list(tqdm.tqdm(pool.imap_unordered(__check_node, param_list), total=len(param_list)))
-        for local_result in tmp_result:
-            result += local_result
+    if jobs is None or jobs > 1:
+        with Pool(processes=jobs, initializer=__initialize_worker, initargs=(pet,)) as pool:
+            tmp_result = list(tqdm.tqdm(pool.imap_unordered(__check_node, param_list), total=len(param_list)))
+            for local_result in tmp_result:
+                result += local_result
+    else:
+        for param_tpl in param_list:
+            result += __check_node(param_tpl)
     print("GLOBAL RES: ", [r.start_line for r in result])

     for pattern in result:
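The reduction detector is the one place that actually caps the pool size: Pool(processes=jobs) limits the worker count to jobs, and processes=None (the default when -j is omitted) means os.cpu_count() workers. A small runnable sketch demonstrating the cap, with a hypothetical worker in place of __check_node:

    import os
    from multiprocessing import Pool

    def worker(x: int) -> int:  # stand-in for __check_node; reports its process id
        return os.getpid()

    if __name__ == "__main__":
        with Pool(processes=2) as pool:     # like -j 2: at most 2 workers
            pids = set(pool.map(worker, range(100)))
        print(len(pids) <= 2)               # True: worker count is capped

        with Pool(processes=None) as pool:  # like omitting -j: os.cpu_count() workers
            pids = set(pool.map(worker, range(100)))
        print(len(pids) <= (os.cpu_count() or 1))  # True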
