Skip to content

Commit

Permalink
chore(mypy): add type annotations
Browse files Browse the repository at this point in the history
  • Loading branch information
lukasrothenberger committed Jul 11, 2024
1 parent 7dbe684 commit 8989478
Show file tree
Hide file tree
Showing 101 changed files with 1,398 additions and 413 deletions.
20 changes: 10 additions & 10 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -47,23 +47,23 @@ jobs:
- name: "Run MyPy Type Checker - DiscoPoP Library"
run: python -m mypy --config-file=mypy.ini -p discopop_library

- name: "Run MyPy Type Checker - DiscoPoP Profiler - DEPRECATED"
run: python -m mypy --config-file=mypy.ini -p DEPRECATED_discopop_profiler

- name: "Run MyPy Type Checker - DiscoPoP Wizard - DEPRECATED"
run: python -m mypy --config-file=mypy.ini -p DEPRECATED_discopop_wizard
# - name: "Run MyPy Type Checker - DiscoPoP Profiler - DEPRECATED"
# run: python -m mypy --config-file=mypy.ini -p DEPRECATED_discopop_profiler
#
# - name: "Run MyPy Type Checker - DiscoPoP Wizard - DEPRECATED"
# run: python -m mypy --config-file=mypy.ini -p DEPRECATED_discopop_wizard

- name: "Check formatting of DiscoPoP Explorer"
run: python -m black -l 120 --check discopop_explorer

- name: "Check formatting of DiscoPoP Library"
run: python -m black -l 120 --check discopop_library

- name: "Check formatting of DiscoPoP Profiler - DEPRECATED"
run: python -m black -l 120 --check DEPRECATED_discopop_profiler

- name: "Check formatting of DiscoPoP Wizard - DEPRECATED"
run: python -m black -l 120 --check DEPRECATED_discopop_wizard
# - name: "Check formatting of DiscoPoP Profiler - DEPRECATED"
# run: python -m black -l 120 --check DEPRECATED_discopop_profiler
#
# - name: "Check formatting of DiscoPoP Wizard - DEPRECATED"
# run: python -m black -l 120 --check DEPRECATED_discopop_wizard

execute_cxx_unit_tests:
runs-on: ubuntu-20.04
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def __init__(self, parent_frame):
self.text_container["xscrollcommand"] = x_scrollbar.set
self.text_container.config(state=tk.DISABLED)

def set_text(self, content: str):
def set_text(self, content: str) -> None:
self.text_container.config(state=tk.NORMAL)
self.text_container.delete("1.0", tk.END)
self.text_container.insert("1.0", content)
Expand Down
33 changes: 19 additions & 14 deletions discopop_explorer/PEGraphX.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,8 @@
from alive_progress import alive_bar # type: ignore
from lxml.objectify import ObjectifiedElement # type: ignore

from discopop_library.HostpotLoader.HotspotNodeType import HotspotNodeType # type:ignore
from discopop_library.HostpotLoader.HotspotNodeType import HotspotNodeType
from discopop_library.HostpotLoader.HotspotType import HotspotType # type:ignore

from .parser import LoopData, readlineToCUIdMap, writelineToCUIdMap, DependenceItem
from .variable import Variable
Expand Down Expand Up @@ -195,12 +196,12 @@ def __init__(self, node_id: NodeID):
self.id = node_id
self.file_id, self.node_id = parse_id(node_id)

@classmethod
def from_kwargs(cls, node_id: NodeID, **kwargs):
node = cls(node_id)
for key, value in kwargs.items():
setattr(node, key, value)
return node
# @classmethod
# def from_kwargs(cls, node_id: NodeID, **kwargs) -> Node:
# node = cls(node_id)
# for key, value in kwargs.items():
# setattr(node, key, value)
# return node

def start_position(self) -> LineID:
"""Start position file_id:line
Expand All @@ -219,7 +220,7 @@ def end_position(self) -> LineID:
"""
return LineID(f"{self.file_id}:{self.end_line}")

def contains_line(self, other_line) -> bool:
def contains_line(self, other_line: str) -> bool:
if other_line == "GlobalVar" or other_line == "LineNotFound":
return False
if not ":" in other_line:
Expand Down Expand Up @@ -380,7 +381,7 @@ def get_exit_cu_ids(self, pet: PEGraphX) -> Set[NodeID]:
exit_cu_ids.add(child_cu_id)
return exit_cu_ids

def calculate_reachability_pairs(self, pet: PEGraphX):
def calculate_reachability_pairs(self, pet: PEGraphX) -> Dict[NodeID, Set[NodeID]]:
reachability_pairs: Dict[NodeID, Set[NodeID]] = dict()
# create graph copy and remove all but successor edges
copied_graph = pet.g.copy()
Expand Down Expand Up @@ -614,7 +615,7 @@ class PEGraphX(object):
main: Node
pos: Dict[Any, Any]

def __init__(self, g: nx.MultiDiGraph, reduction_vars: List[Dict[str, str]], pos):
def __init__(self, g: nx.MultiDiGraph, reduction_vars: List[Dict[str, str]], pos: Dict[Any, Any]):
self.g = g
self.reduction_vars = reduction_vars
for _, node in g.nodes(data="data"):
Expand All @@ -629,7 +630,7 @@ def from_parsed_input(
dependencies_list: List[DependenceItem],
loop_data: Dict[str, LoopData],
reduction_vars: List[Dict[str, str]],
):
) -> PEGraphX:
"""Constructor for making a PETGraphX from the output of parser.parse_inputs()"""
g = nx.MultiDiGraph()
print("\tCreating graph...")
Expand Down Expand Up @@ -766,7 +767,11 @@ def map_static_and_dynamic_dependencies(self):

print("Done.")

def calculateFunctionMetadata(self, hotspot_information=None, func_nodes=None) -> None:
def calculateFunctionMetadata(
self,
hotspot_information: Optional[Dict[HotspotType, List[Tuple[int, int, HotspotNodeType, str]]]] = None,
func_nodes: Optional[List[FunctionNode]] = None,
) -> None:
# store id of parent function in each node
# and store in each function node a list of all children ids
if func_nodes is None:
Expand Down Expand Up @@ -1088,7 +1093,7 @@ def subtree_of_type_rec(self, root, visited, type=Node):

return res

def __cu_equal__(self, cu_1: Node, cu_2: Node):
def __cu_equal__(self, cu_1: Node, cu_2: Node) -> bool:
"""Alternative to CUNode.__eq__, bypasses the isinstance-check and relies on MyPy for type safety.
:param cu_1: CUNode 1
:param cu_2: CUNode 2
Expand Down Expand Up @@ -1358,7 +1363,7 @@ def get_undefined_variables_inside_loop(

return vars

def unused_is_first_written_in_loop(self, dep: Dependency, root_loop: Node):
def unused_is_first_written_in_loop(self, dep: Dependency, root_loop: Node) -> bool:
"""Checks whether a variable is first written inside the current node
:param var:
Expand Down
2 changes: 1 addition & 1 deletion discopop_explorer/discopop_explorer.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,7 @@ def __run(
return res


def run(arguments: ExplorerArguments):
def run(arguments: ExplorerArguments) -> None:
"""Run the discopop_explorer with the given arguments"""
logger = logging.getLogger("Explorer")

Expand Down
11 changes: 6 additions & 5 deletions discopop_explorer/generate_Data_CUInst.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
# the 3-Clause BSD License. See the LICENSE file in the package base
# directory for details.

from io import TextIOWrapper
from typing import List, cast, TextIO

from .PEGraphX import (
Expand All @@ -22,7 +23,7 @@
from .parser import parse_inputs


def __collect_children_ids(pet: PEGraphX, parent_id: NodeID, children_ids: List[NodeID]):
def __collect_children_ids(pet: PEGraphX, parent_id: NodeID, children_ids: List[NodeID]) -> List[NodeID]:
if parent_id in children_ids:
# this id has already been processed. No need to go through it again
return children_ids
Expand Down Expand Up @@ -104,7 +105,7 @@ def __output_dependencies_of_type(
output_file: TextIO,
dep_type: DepType,
dep_identifier: str,
):
) -> None:
"""check for and output dependencies of the given type
:param pet: PET Graph
:param child_id: specific node id, taken from children_ids
Expand All @@ -131,7 +132,7 @@ def __output_dependencies_of_type(
)


def __search_recursive_calls(pet: PEGraphX, output_file, node: Node):
def __search_recursive_calls(pet: PEGraphX, output_file: TextIOWrapper, node: Node) -> None:
if not isinstance(node, CUNode):
return
for recursive_function_call in node.recursive_function_calls:
Expand Down Expand Up @@ -165,7 +166,7 @@ def __search_recursive_calls(pet: PEGraphX, output_file, node: Node):
output_file.write("\n")


def cu_instantiation_input_cpp(pet: PEGraphX, output_file: str):
def cu_instantiation_input_cpp(pet: PEGraphX, output_file: str) -> None:
"""translation of CUInstantiationInput.cpp, previously contained in discopop-analyzer/analyzer/src.
Wrapper to gather information on recursive function calls for CU Instantiation.
:param pet: PET Graph
Expand All @@ -175,7 +176,7 @@ def cu_instantiation_input_cpp(pet: PEGraphX, output_file: str):
__search_recursive_calls(pet, data_cu_inst_file, node)


def wrapper(cu_xml, dep_file, loop_counter_file, reduction_file, output_file):
def wrapper(cu_xml: str, dep_file: str, loop_counter_file: str, reduction_file: str, output_file: str) -> None:
"""Wrapper to generate the Data_CUInst.txt file, required for the generation of CUInstResult.txt"""
# 1. generate PET Graph
pet = PEGraphX.from_parsed_input(*parse_inputs(cu_xml, dep_file, loop_counter_file, reduction_file))
Expand Down
3 changes: 2 additions & 1 deletion discopop_explorer/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
# the 3-Clause BSD License. See the LICENSE file in the package base
# directory for details.

from io import TextIOWrapper
import os
import re
import warnings
Expand Down Expand Up @@ -110,7 +111,7 @@ def __map_dummy_nodes(cu_dict):
return cu_dict


def __parse_dep_file(dep_fd, output_path: str) -> Tuple[List[DependenceItem], List[LoopData]]:
def __parse_dep_file(dep_fd: TextIOWrapper, output_path: str) -> Tuple[List[DependenceItem], List[LoopData]]:
dependencies_list: List[DependenceItem] = []
loop_data_list: List[LoopData] = []

Expand Down
38 changes: 22 additions & 16 deletions discopop_explorer/pattern_detection.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,14 +8,16 @@
import json
import os
import sys
from typing import cast
from typing import Dict, List, Optional, Tuple, cast

from alive_progress import alive_bar # type: ignore

from discopop_explorer.pattern_detectors.task_parallelism.task_parallelism_detector import (
build_preprocessed_graph_and_run_detection as detect_tp,
)
from discopop_explorer.variable import Variable
from discopop_library.HostpotLoader.HotspotNodeType import HotspotNodeType
from discopop_library.HostpotLoader.HotspotType import HotspotType
from discopop_library.JSONHandler.JSONHandler import read_patterns_from_json_to_json
from discopop_library.discopop_optimizer.OptimizationGraph import OptimizationGraph
from discopop_library.discopop_optimizer.Variables.Experiment import Experiment
Expand Down Expand Up @@ -43,7 +45,7 @@ def __init__(self, pet_graph: PEGraphX) -> None:
"""
self.pet = pet_graph

def __merge(self, loop_type: bool, remove_dummies: bool):
def __merge(self, loop_type: bool, remove_dummies: bool) -> None:
"""Removes dummy nodes
:param loop_type: loops only
Expand All @@ -63,20 +65,20 @@ def __merge(self, loop_type: bool, remove_dummies: bool):

def detect_patterns(
self,
project_path,
cu_dict,
dependencies,
loop_data,
reduction_vars,
file_mapping,
cu_inst_result_file,
llvm_cxxfilt_path,
discopop_build_path,
enable_patterns,
enable_task_pattern,
enable_detection_of_scheduling_clauses,
hotspots,
):
project_path: str,
cu_dict: str,
dependencies: str,
loop_data: str,
reduction_vars: str,
file_mapping: Optional[str],
cu_inst_result_file: Optional[str],
llvm_cxxfilt_path: Optional[str],
discopop_build_path: Optional[str],
enable_patterns: str,
enable_task_pattern: bool,
enable_detection_of_scheduling_clauses: bool,
hotspots: Optional[Dict[HotspotType, List[Tuple[int, int, HotspotNodeType, str]]]],
) -> DetectionResult:
"""Runs pattern discovery on the CU graph"""
self.__merge(False, True)
self.pet.map_static_and_dynamic_dependencies()
Expand Down Expand Up @@ -105,6 +107,10 @@ def detect_patterns(

# check if task pattern should be enabled
if enable_task_pattern:
if cu_inst_result_file is None:
raise ValueError("cu_inst_result_file not specified.")
if file_mapping is None:
raise ValueError("file_mapping not specified.")
res.patterns.task = detect_tp(
cu_dict,
dependencies,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,21 +6,24 @@
# the 3-Clause BSD License. See the LICENSE file in the package base
# directory for details.
import sys
from typing import List, Tuple, Dict, Set
from typing import List, Tuple, Dict, Set, cast

from discopop_explorer.PEGraphX import EdgeType, CUNode, PEGraphX, NodeID, MemoryRegion
from discopop_explorer.pattern_detectors.PatternInfo import PatternInfo
from discopop_explorer.pattern_detectors.combined_gpu_patterns.classes.Aliases import (
VarName,
)
from discopop_explorer.pattern_detectors.combined_gpu_patterns.classes.Dependency import Dependency
from discopop_explorer.pattern_detectors.combined_gpu_patterns.classes.EntryPoint import EntryPoint
from discopop_explorer.pattern_detectors.combined_gpu_patterns.classes.Enums import (
ExitPointPositioning,
EntryPointPositioning,
ExitPointType,
EntryPointType,
UpdateType,
)
from discopop_explorer.pattern_detectors.combined_gpu_patterns.classes.ExitPoint import ExitPoint
from discopop_explorer.pattern_detectors.combined_gpu_patterns.classes.Update import Update
from discopop_explorer.pattern_detectors.combined_gpu_patterns.prepare_metadata import (
get_dependencies_as_metadata,
)
Expand Down Expand Up @@ -68,10 +71,10 @@ class CombinedGPURegion(PatternInfo):
target_data_regions: Dict[str, List[Tuple[List[NodeID], NodeID, NodeID, str, str]]]
# {var: ([contained cu_s], entry_cu, exit_after_cu, meta_entry_line_num, meta_exit_line_num)}
data_region_entry_points: List[
Tuple[VarName, NodeID, EntryPointType, str, EntryPointPositioning]
Tuple[str, NodeID, NodeID, EntryPointType, str, EntryPointPositioning]
] # [(var, cu_id, entry_point_type, meta_line_num, positioning)]
data_region_exit_points: List[
Tuple[VarName, NodeID, ExitPointType, str, ExitPointPositioning]
Tuple[str, NodeID, NodeID, ExitPointType, str, ExitPointPositioning]
] # [(var, cu_id, exit_point_type, meta_line_num, positioning)]
data_region_depend_in: List[Tuple[VarName, NodeID, str]] # [(var, cu_id, meta_line_num)]
data_region_depend_out: List[Tuple[VarName, NodeID, str]] # [(var, cu_id, meta_line_num)]
Expand Down Expand Up @@ -302,9 +305,9 @@ def __init__(self, pet: PEGraphX, contained_regions: List[GPURegionInfo], projec
)

# remove duplicates
updates = remove_duplicates(updates)
entry_points = remove_duplicates(entry_points)
exit_points = remove_duplicates(exit_points)
updates = cast(Set[Update], remove_duplicates(updates))
entry_points = cast(Set[EntryPoint], remove_duplicates(entry_points))
exit_points = cast(Set[ExitPoint], remove_duplicates(exit_points))

# join entries
updates = join_elements(updates)
Expand Down Expand Up @@ -354,7 +357,7 @@ def __init__(self, pet: PEGraphX, contained_regions: List[GPURegionInfo], projec
def __str__(self):
raise NotImplementedError() # used to identify necessity to call to_string() instead

def to_string(self, pet: PEGraphX):
def to_string(self, pet: PEGraphX) -> str:
contained_regions_str = "\n" if len(self.contained_regions) > 0 else ""
for region in self.contained_regions:
region_str = region.to_string(pet)
Expand Down Expand Up @@ -453,7 +456,7 @@ def find_combinations_within_function_body(


def find_true_successor_combinations(
pet, intra_function_combinations: List[Tuple[CombinedGPURegion, CombinedGPURegion]]
pet: PEGraphX, intra_function_combinations: List[Tuple[CombinedGPURegion, CombinedGPURegion]]
) -> List[Tuple[CombinedGPURegion, CombinedGPURegion]]:
"""Check for combinations options without branching inbetween.
As a result, both regions will always be executed in succession."""
Expand All @@ -462,7 +465,9 @@ def find_true_successor_combinations(
# a true successor relation exists, if every successor path outgoing from any child of region_1 arrives at region_2
for region_1, region_2 in intra_function_combinations:
true_successors = True
queue: List[CUNode] = pet.direct_children(pet.node_at(region_1.contained_regions[0].node_id))
queue: List[CUNode] = cast(
List[CUNode], pet.direct_children(pet.node_at(region_1.contained_regions[0].node_id))
)
visited: List[CUNode] = []
while queue:
current_node: CUNode = queue.pop()
Expand All @@ -479,7 +484,7 @@ def find_true_successor_combinations(
else:
# end of the function's body not yet reached, continue searching
# add successors to queue
queue += [succ for succ in successors if succ not in visited]
queue += [cast(CUNode, succ) for succ in successors if succ not in visited]
if true_successors:
result.append((region_1, region_2))
return result
Expand Down
Loading

0 comments on commit 8989478

Please sign in to comment.