@@ -43,7 +43,7 @@ def __init__(self, learning_rate: float = 0.01, epochs: int = 1000) -> None:
4343 self .bias = 0.0
4444 self .errors : list [int ] = []
4545
46- def fit (self , samples : np .ndarray , y : np .ndarray ) -> "Perceptron" :
46+ def fit (self , samples : np .ndarray , targets : np .ndarray ) -> "Perceptron" :
4747 """
4848 Fit training data.
4949
@@ -52,7 +52,7 @@ def fit(self, samples: np.ndarray, y: np.ndarray) -> "Perceptron":
5252 samples : shape = [n_samples, n_features]
5353 Training vectors, where n_samples is the number of samples
5454 and n_features is the number of features.
55- y : shape = [n_samples]
55+ targets : shape = [n_samples]
5656 Target values.
5757
5858 Returns:
@@ -74,7 +74,7 @@ def fit(self, samples: np.ndarray, y: np.ndarray) -> "Perceptron":
7474
7575 for _ in range (self .epochs ):
7676 errors = 0
77- for xi , target in zip (samples , y ):
77+ for xi , target in zip (samples , targets ):
7878 # Calculate update
7979 update = self .learning_rate * (target - self .predict (xi ))
8080 self .weights += update * xi
@@ -100,9 +100,9 @@ def predict(self, samples: np.ndarray) -> np.ndarray:
100100 linear_output = np .dot (samples , self .weights ) + self .bias
101101 return self .activation_function (linear_output )
102102
def activation_function(self, values: np.ndarray) -> np.ndarray:
    """
    Apply the unit-step activation element-wise.

    Every non-negative entry maps to 1; every negative entry maps to 0.

    Examples:
    ---------
    >>> import numpy as np
    >>> perceptron = Perceptron()
    >>> perceptron.activation_function(np.array([0.5, -0.5, 0])).tolist()
    [1, 0, 1]
    """
    # Boolean mask first, then select 1/0 — identical to the one-liner form.
    non_negative = values >= 0
    return np.where(non_negative, 1, 0)
115115
116116
117117if __name__ == "__main__" :
0 commit comments