
Commit

Fixing Pylint errors.
trekhleb committed Dec 23, 2018
1 parent d8a8241 commit d727046
Showing 12 changed files with 39 additions and 11 deletions.
2 changes: 2 additions & 0 deletions homemade/anomaly_detection/__init__.py
@@ -1 +1,3 @@
"""Anomaly Detection Module"""

from .gaussian_anomaly_detection import GaussianAnomalyDetection
14 changes: 8 additions & 6 deletions homemade/anomaly_detection/gaussian_anomaly_detection.py
@@ -1,3 +1,5 @@
"""Anomaly Detection Module"""

import numpy as np
import math

@@ -81,25 +83,25 @@ def select_threshold(labels, probabilities):

 # The number of false positives: the ground truth label says it’s not
 # an anomaly, but our algorithm incorrectly classified it as an anomaly.
-fp = np.sum((predictions == 1) & (labels == 0))
+false_positives = np.sum((predictions == 1) & (labels == 0))

 # The number of false negatives: the ground truth label says it’s an anomaly,
 # but our algorithm incorrectly classified it as not being anomalous.
-fn = np.sum((predictions == 0) & (labels == 1))
+false_negatives = np.sum((predictions == 0) & (labels == 1))

 # The number of true positives: the ground truth label says it’s an
 # anomaly and our algorithm correctly classified it as an anomaly.
-tp = np.sum((predictions == 1) & (labels == 1))
+true_positives = np.sum((predictions == 1) & (labels == 1))

 # Prevent division by zero.
-if (tp + fp) == 0 or (tp + fn) == 0:
+if (true_positives + false_positives) == 0 or (true_positives + false_negatives) == 0:
     continue

 # Precision.
-precision = tp / (tp + fp)
+precision = true_positives / (true_positives + false_positives)

 # Recall.
-recall = tp / (tp + fn)
+recall = true_positives / (true_positives + false_negatives)

 # F1.
 f1 = 2 * precision * recall / (precision + recall)
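As a sanity check on the renamed metrics, here is a minimal, self-contained sketch of the precision/recall/F1 computation this hunk touches. The toy labels and predictions arrays are hypothetical, and the threshold loop that surrounds this code in select_threshold is omitted:

import numpy as np

# Hypothetical toy data: 1 marks an anomaly, 0 marks a normal example.
labels = np.array([0, 0, 1, 1, 0, 1])
predictions = np.array([0, 1, 1, 0, 0, 1])

false_positives = np.sum((predictions == 1) & (labels == 0))  # 1
false_negatives = np.sum((predictions == 0) & (labels == 1))  # 1
true_positives = np.sum((predictions == 1) & (labels == 1))   # 2

precision = true_positives / (true_positives + false_positives)  # 2/3
recall = true_positives / (true_positives + false_negatives)     # 2/3
f1 = 2 * precision * recall / (precision + recall)               # 2/3

The rename changes nothing at runtime; it is for readability, and likely to satisfy Pylint's default minimum variable-name length, which rejects two-letter names like fp, fn, and tp.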
2 changes: 2 additions & 0 deletions homemade/k_means/__init__.py
@@ -1 +1,3 @@
"""KMeans Module"""

from .k_means import KMeans
2 changes: 2 additions & 0 deletions homemade/k_means/k_means.py
@@ -1,3 +1,5 @@
"""KMeans Module"""

import numpy as np


2 changes: 2 additions & 0 deletions homemade/linear_regression/__init__.py
@@ -1 +1,3 @@
"""Linear Regression Module"""

from .linear_regression import LinearRegression
2 changes: 2 additions & 0 deletions homemade/logistic_regression/__init__.py
@@ -1 +1,3 @@
"""Logistic Regression Module"""

from .logistic_regression import LogisticRegression
4 changes: 4 additions & 0 deletions homemade/logistic_regression/logistic_regression.py
@@ -1,3 +1,5 @@
"""Logistic Regression Module"""

import numpy as np
from scipy.optimize import minimize
from ..utils.features import prepare_for_training
@@ -75,6 +77,8 @@ def train(self, lambda_param=0, max_iterations=1000):
     return self.thetas, cost_histories

 def predict(self, data):
+    """Prediction function"""
+
     num_examples = data.shape[0]

     data_processed = prepare_for_training(
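The rest of predict is collapsed in this view. Purely as an illustration of where the new docstring sits, a one-vs-all logistic prediction step usually reduces to the sketch below; the shapes and the predict_sketch helper are assumptions for illustration, not the repository's exact code:

import numpy as np

def sigmoid(z):
    return 1 / (1 + np.exp(-z))

def predict_sketch(data_processed, thetas):
    # data_processed: (num_examples, num_features), with the bias column
    # already added by prepare_for_training (an assumption here).
    # thetas: (num_features, num_classes), one weight column per class.
    probabilities = sigmoid(data_processed @ thetas)
    # Choose the most probable class for each example.
    return np.argmax(probabilities, axis=1).reshape((-1, 1))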
2 changes: 2 additions & 0 deletions homemade/neural_network/__init__.py
@@ -1 +1,3 @@
"""Neural Network Module"""

from .multilayer_perceptron import MultilayerPerceptron
8 changes: 7 additions & 1 deletion homemade/neural_network/multilayer_perceptron.py
@@ -1,3 +1,5 @@
"""Neural Network Module"""

import numpy as np
from ..utils.features import prepare_for_training
from ..utils.hypothesis import sigmoid, sigmoid_gradient
@@ -29,6 +31,8 @@ def __init__(self, data, labels, layers, epsilon, normalize_data=False):
     self.thetas = MultilayerPerceptron.thetas_init(layers, epsilon)

 def train(self, regularization_param=0, max_iterations=1000, alpha=1):
+    """Train the model"""
+
     # Flatten model thetas for gradient descent.
     unrolled_thetas = MultilayerPerceptron.thetas_unroll(self.thetas)

@@ -65,7 +69,7 @@ def predict(self, data):

 @staticmethod
 def gradient_descent(
-    data, labels, unrolled_theta, layers, regularization_param, max_iteration, alpha
+        data, labels, unrolled_theta, layers, regularization_param, max_iteration, alpha
 ):
     """Gradient descent function.
@@ -187,6 +191,8 @@ def cost_function(data, labels, thetas, layers, regularization_param):

 @staticmethod
 def feedforward_propagation(data, thetas, layers):
+    """Feedforward propagation function"""
+
     # Calculate the total number of layers.
     num_layers = len(layers)

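Since only the signature of feedforward_propagation is visible here, a hedged sketch of what such a pass typically does may help. The bias-column convention and theta shapes below are assumptions for illustration, not the repository's exact code:

import numpy as np

def sigmoid(z):
    return 1 / (1 + np.exp(-z))

def feedforward_sketch(data, thetas):
    # data: (num_examples, layers[0]) inputs.
    # thetas[i]: (layers[i + 1], layers[i] + 1) weights; the extra
    # column multiplies the bias unit.
    num_examples = data.shape[0]
    activations = data
    for layer_theta in thetas:
        # Prepend a bias column of ones, then squash through sigmoid.
        with_bias = np.hstack((np.ones((num_examples, 1)), activations))
        activations = sigmoid(with_bias @ layer_theta.T)
    return activations  # (num_examples, layers[-1]) output activations.

# Hypothetical usage: a 2-3-1 network with random weights.
thetas = [np.random.randn(3, 3), np.random.randn(1, 4)]
outputs = feedforward_sketch(np.random.randn(5, 2), thetas)
print(outputs.shape)  # (5, 1)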
2 changes: 2 additions & 0 deletions homemade/utils/features/__init__.py
@@ -1,3 +1,5 @@
"""Dataset Features Related Utils"""

from .normalize import normalize
from .add_polynomials import add_polynomials
from .add_sinusoids import add_sinusoids
2 changes: 2 additions & 0 deletions homemade/utils/hypothesis/__init__.py
@@ -1,2 +1,4 @@
"""Dataset Hypothesis Related Utils"""

from .sigmoid import sigmoid
from .sigmoid_gradient import sigmoid_gradient
@@ -377,7 +377,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 11,
+"execution_count": 8,
 "metadata": {},
 "outputs": [
 {
@@ -426,7 +426,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 19,
+"execution_count": 9,
 "metadata": {},
 "outputs": [
 {
@@ -482,7 +482,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 25,
+"execution_count": 10,
 "metadata": {},
 "outputs": [
 {
@@ -491,7 +491,7 @@
"[]"
]
},
"execution_count": 25,
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
},
