Commit 3d0c4f2

Fixing Pylint errors.
trekhleb committed Dec 24, 2018
Parent: 3b89d57
Showing 4 changed files with 83 additions and 20 deletions.
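
The diffs below silence three of Pylint's design-checker messages at specific points in the code rather than globally: R0902 (too-many-instance-attributes, default limit 7), R0913 (too-many-arguments, default limit 5), and R0914 (too-many-locals, default limit 15). A # pylint: disable=... comment placed inside a function body suppresses the message for that scope only, leaving the check active everywhere else. A minimal sketch of the pattern, using a hypothetical function that is not part of this repository:

def fit(data, labels, layers, epsilon, alpha, max_iterations):
    # pylint: disable=R0913
    # R0913 (too-many-arguments) would fire here because the function takes
    # six parameters, one more than Pylint's default max-args of 5; the
    # disable comment above silences it for this function's scope only.
    return data, labels, layers, epsilon, alpha, max_iterations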
homemade/anomaly_detection/gaussian_anomaly_detection.py (1 addition, 0 deletions)

@@ -62,6 +62,7 @@ def estimate_gaussian(data):

     @staticmethod
     def select_threshold(labels, probabilities):
+        # pylint: disable=R0914
         """Finds the best threshold (epsilon) to use for selecting outliers"""
 
         best_epsilon = 0
homemade/logistic_regression/logistic_regression.py (2 additions, 0 deletions)

@@ -7,9 +7,11 @@


 class LogisticRegression:
+    # pylint: disable=R0902
     """Logistic Regression Class"""
 
     def __init__(self, data, labels, polynomial_degree=0, sinusoid_degree=0, normalize_data=False):
+        # pylint: disable=R0913
         """Logistic regression constructor.
 
         :param data: training set.
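
In the hunk above, the R0902 disable sits at class-body level rather than inside a method, so the suppression covers the whole class scope. A hypothetical minimal case showing why the message would otherwise fire (the class and attribute names are illustrative only, not from this repo):

class Settings:
    # pylint: disable=R0902
    # R0902 (too-many-instance-attributes) fires when a class defines more
    # than max-attributes (7 by default) instance attributes; this class
    # defines eight, so the check is silenced for the class scope.
    def __init__(self):
        self.a = self.b = self.c = self.d = 0
        self.e = self.f = self.g = self.h = 0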
homemade/neural_network/multilayer_perceptron.py (4 additions, 0 deletions)

@@ -8,6 +8,7 @@
 class MultilayerPerceptron:
     """Multilayer Perceptron Class"""
 
+    # pylint: disable=R0913
     def __init__(self, data, labels, layers, epsilon, normalize_data=False):
         """Multilayer perceptron constructor.

@@ -71,6 +72,7 @@ def predict(self, data):
     def gradient_descent(
             data, labels, unrolled_theta, layers, regularization_param, max_iteration, alpha
     ):
+        # pylint: disable=R0913
         """Gradient descent function.
 
         Iteratively optimizes theta model parameters.

@@ -138,6 +140,7 @@ def gradient_step(data, labels, unrolled_thetas, layers, regularization_param):

         return thetas_unrolled_gradients
 
+    # pylint: disable=R0914
     @staticmethod
     def cost_function(data, labels, thetas, layers, regularization_param):
         """Cost function.

@@ -213,6 +216,7 @@ def feedforward_propagation(data, thetas, layers):
         # Output layer should not contain bias units.
         return in_layer_activation[:, 1:]
 
+    # pylint: disable=R0914
     @staticmethod
     def back_propagation(data, labels, thetas, layers, regularization_param):
         """Backpropagation function"""
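
These inline comments keep each suppression local to the offending scope. An alternative, which this commit does not use, would be to relax or disable the checks project-wide in a Pylint configuration file. A sketch of a hypothetical .pylintrc (the thresholds shown are illustrative, not this project's settings):

[MESSAGES CONTROL]
disable=R0902

[DESIGN]
max-args=10
max-locals=25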