diff --git a/rr/experiment/algorithm.py b/rr/experiment/algorithm.py
index b6759eb..8173a02 100644
--- a/rr/experiment/algorithm.py
+++ b/rr/experiment/algorithm.py
@@ -9,14 +9,14 @@ class Model:
     def __init__(self, nb_tree_per_forest=50, max_depth=10):
         """Create a new ML model (Random forest classifier from scikitlearn)
-    Args:
-        nb_tree_per_forest (int): number of decision trees in the forest
-        max_depth (int): max depth of the trees
-    Returns:
-        None
-    Raises:
-        None
-    """
+        Args:
+            nb_tree_per_forest (int): number of decision trees in the forest
+            max_depth (int): max depth of the trees
+        Returns:
+            None
+        Raises:
+            None
+        """
         self.model = RandomForestClassifier(
             n_estimators=nb_tree_per_forest, max_depth=max_depth, random_state=0
         )
@@ -24,28 +24,28 @@ def __init__(self, nb_tree_per_forest=50, max_depth=10):
     def train(self, X, y):
         """Train the model using the given data
-    Args:
-        X (numpy.ndarray):A NxM 2D-array where each row corresponds to a sample and each column to a feature
-        y (numpy.ndarray): A 1D-array of length N, where each element corresponds to a sample label
-    Returns:
-        None
-    Raises:
-        None
-    """
+        Args:
+            X (numpy.ndarray):A NxM 2D-array where each row corresponds to a sample and each column to a feature
+            y (numpy.ndarray): A 1D-array of length N, where each element corresponds to a sample label
+        Returns:
+            None
+        Raises:
+            None
+        """
         self.model.fit(X, y)
 
     def predict(self, X):
         """Make a prediction on the data using the trained model
-    Args:
-        X (numpy.ndarray):A NxM 2D-array where each row corresponds to a sample and each column to a feature
-    Returns:
-        numpy.ndarray: A 1D array (with a dtype of int) containing the predicted
-        label for each sample
-
-    Raises:
-        None
-    """
+        Args:
+            X (numpy.ndarray):A NxM 2D-array where each row corresponds to a sample and each column to a feature
+        Returns:
+            numpy.ndarray: A 1D array (with a dtype of int) containing the predicted
+            label for each sample
+
+        Raises:
+            None
+        """
         prediction = self.model.predict(X)
 
         return prediction
diff --git a/rr/experiment/analysis.py b/rr/experiment/analysis.py
index e1d6172..d42abe7 100644
--- a/rr/experiment/analysis.py
+++ b/rr/experiment/analysis.py
@@ -8,7 +8,7 @@ def get_confusion_matrix(prediction_label, true_label):
 
     Args:
         prediction_label (list): Estimated targets as returned by a classifier.
-        true_label (list): Ground truth (correct) target values. 
+        true_label (list): Ground truth (correct) target values.
     Returns:
         numpy.ndarray: A 2D array (with a dtype of int) containing the confusion matrix.
     Raises:
@@ -25,7 +25,7 @@ def plot_confusion_matrix(
     Args:
         cm (numpy.ndarray): A 2D array (with a dtype of int) containing the confusion matrix.
         classes (numpy.ndarray): A 1D array (with a dtype of str) containing the lable name for each class
-        normalize (boolean): Flag to normalize the data 
+        normalize (boolean): Flag to normalize the data
         title (str): The title of the plot
         file_name (str): File name to export the graph
     Returns:
diff --git a/rr/experiment/experiments.py b/rr/experiment/experiments.py
index 0b1caaf..c9aeb68 100644
--- a/rr/experiment/experiments.py
+++ b/rr/experiment/experiments.py
@@ -69,7 +69,9 @@ def experiment_impact_nb_trees(tabnum, filepath, nb_trees, max_depth, plot_path)
         for m, nb_tree_per_forest in enumerate(nb_trees):
             num = (n * len(nb_trees)) + m + tabnum
             result += "\nTable {table_number}: Confusion matrix with {nb_trees} tree(s) for Protocol `{protocol}`".format(
-                table_number=num, protocol=p, nb_trees=nb_tree_per_forest,
+                table_number=num,
+                protocol=p,
+                nb_trees=nb_tree_per_forest,
             )
             cm = base_experiment(
                 p,
@@ -109,7 +111,9 @@ def experiment_impact_tree_depth(tabnum, filepath, nb_trees, max_depths, plot_pa
         for m, max_depth in enumerate(max_depths):
            num = (n * len(max_depths)) + m + tabnum
            result += "\nTable {table_number}: Confusion matrix with trees maximum depth of {max_depth} for Protocol `{protocol}`".format(
-                table_number=num, protocol=p, max_depth=max_depth,
+                table_number=num,
+                protocol=p,
+                max_depth=max_depth,
             )
             cm = base_experiment(
                 p,
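For quick reference, the sketch below exercises the API documented in the diff above. It is not part of the change: it assumes the package layout implied by the file paths (rr.experiment.algorithm.Model, rr.experiment.analysis.get_confusion_matrix) and substitutes synthetic data for the project's real dataset.

import numpy

from rr.experiment.algorithm import Model
from rr.experiment.analysis import get_confusion_matrix

# Synthetic two-class data: 100 samples, 4 features (illustration only).
rng = numpy.random.RandomState(0)
X = rng.rand(100, 4)
y = (X[:, 0] > 0.5).astype(int)

# Forest of 50 trees with maximum depth 10, matching the defaults above.
model = Model(nb_tree_per_forest=50, max_depth=10)
model.train(X, y)

# predict() returns a 1D int array with one label per sample.
prediction = model.predict(X)

# get_confusion_matrix(prediction_label, true_label) documents list inputs
# and returns a 2D int array.
cm = get_confusion_matrix(prediction.tolist(), y.tolist())
print(cm)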