Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Patchwork PR: GenerateDocstring #2

Open
wants to merge 4 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions K-Means/KMeans.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,16 @@ class KMeans:
"""

def __init__(self, K=5, max_iters=100, plot_steps=False):
    """Initialize the KMeans clustering algorithm.

    Args:
        K (int): The number of clusters to form. Default is 5.
        max_iters (int): The maximum number of iterations for the algorithm. Default is 100.
        plot_steps (bool): Whether to plot the clustering steps. Default is False.
    """
    # Hyperparameters are stored as-is; clustering state is built later (in fit).
    self.K = K
    self.max_iters = max_iters
    self.plot_steps = plot_steps
Expand Down
9 changes: 9 additions & 0 deletions Linear Regression/LinearRegression.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,15 @@ class LinearRegression:
"""

def __init__(self, learning_rate=0.001, n_iters=1000) -> None:
    """Initialize the linear regression model.

    Args:
        learning_rate (float): The learning rate for gradient descent optimization. Default is 0.001.
        n_iters (int): The number of iterations for the optimization process. Default is 1000.
    """
    self.learning_rate = learning_rate
    self.n_iters = n_iters
    # Model parameters; populated during training (presumably by fit — not visible here).
    self.weights = None
Expand Down
17 changes: 17 additions & 0 deletions Logistic Regression/LogisticRegression.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,14 @@
import numpy as np

def sigmoid(x):
    """Compute the logistic sigmoid of the input.

    Args:
        x (numpy.ndarray or float): The input value or array of values.

    Returns:
        numpy.ndarray or float: The sigmoid of the input, with values in the
        range (0, 1); reaches exactly 0.0 or 1.0 only through float rounding
        at extreme inputs.
    """
    # Clamp very negative inputs so np.exp(-x) cannot overflow float64
    # (exp overflows just above 709 and would emit a RuntimeWarning and
    # produce inf); sigmoid is indistinguishable from 0 there anyway.
    z = np.clip(x, -709.0, None)
    return 1 / (1 + np.exp(-z))

class LogisticRegression:
Expand All @@ -13,6 +21,15 @@ class LogisticRegression:
n_iters : int, optional (default=1000)
The number of iterations to run gradient descent.

"""Initialize the logistic regression model.

Args:
    learning_rate (float): The step size for gradient descent optimization. Default is 0.001.
    n_iters (int): The number of iterations for the optimization process. Default is 1000.
"""
Attributes:
weights : ndarray of shape (n_features,)
Coefficients of the model.
Expand Down
10 changes: 10 additions & 0 deletions Perceptron/Perceptron.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,16 @@ class Perceptron:
"""

def __init__(self, learning_rate=0.01, n_iters=1000):
"""
Initializes the Perceptron model with specified learning rate and number of iterations.

Args:
learning_rate (float): The learning rate for weight updates. Default is 0.01.
n_iters (int): The number of iterations for training. Default is 1000.

Returns:
None
"""
self.lr = learning_rate
self.n_iters = n_iters
self.activation_func = unit_step_func
Expand Down