Commit 17bcde1 (parent d8616d0): style fixes

File tree: 1 file changed (+7, -12 lines)

1 file changed

+7
-12
lines changed

learning.py

Lines changed: 7 additions & 12 deletions
@@ -23,7 +23,7 @@ def euclidean_distance(X, Y):
     return math.sqrt(sum((x - y)**2 for x, y in zip(X, Y)))
 
 
-def cross_entropy_loss(X,Y):
+def cross_entropy_loss(X, Y):
     n=len(X)
     return (-1.0/n)*sum(x*math.log(y) + (1-x)*math.log(1-y) for x, y in zip(X, Y))

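Note: only the spacing in the signature changed; the loss itself is untouched. For context, cross_entropy_loss computes the mean binary cross-entropy, with X as the true labels and Y as the predicted probabilities. A quick standalone sanity check (the inputs below are made up for illustration):

import math

def cross_entropy_loss(X, Y):
    n = len(X)
    return (-1.0/n)*sum(x*math.log(y) + (1-x)*math.log(1-y) for x, y in zip(X, Y))

# Confident correct predictions give a loss near 0 ...
print(cross_entropy_loss([1, 0], [0.9999, 0.0001]))  # ~0.0001
# ... while confident wrong predictions blow the loss up.
print(cross_entropy_loss([1, 0], [0.0001, 0.9999]))  # ~9.21
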
@@ -180,7 +180,7 @@ def classes_to_numbers(self, classes=None):
         for item in self.examples:
             item[self.target] = classes.index(item[self.target])
 
-    def remove_examples(self, value=""):
+    def remove_examples(self, value=''):
        """Remove examples that contain given value."""
        self.examples = [x for x in self.examples if value not in x]
        self.update_values()
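
Note: only the quote style changed here; '' and "" are the same string in Python. For reference, the filtering semantics of the method body, shown standalone (the example rows below are invented for illustration):

# `value not in x` keeps exactly the rows that do not contain the sentinel value.
examples = [[5.1, 3.5, 'setosa'], ['', 3.0, 'versicolor'], [4.7, 3.2, 'setosa']]
value = ''
examples = [x for x in examples if value not in x]
print(examples)  # the row containing '' has been dropped
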
@@ -661,7 +661,7 @@ def predict(example):
 
 
 def NeuralNetLearner(dataset, hidden_layer_sizes=[3],
-                     learning_rate=0.01, epochs=100, activation = sigmoid):
+                     learning_rate=0.01, epochs=100, activation=sigmoid):
     """Layered feed-forward network.
     hidden_layer_sizes: List of number of hidden units per hidden layer
     learning_rate: Learning rate of gradient descent
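
Note: the fix drops the spaces around `=` in the keyword default, per PEP 8. A hedged usage sketch of the learner (assuming the DataSet class and the bundled iris data from elsewhere in this repo; the call pattern is a sketch, not taken from this commit):

from learning import DataSet, NeuralNetLearner

iris = DataSet(name='iris')           # loads the bundled iris examples
iris.classes_to_numbers()             # targets become numeric class indices
predict = NeuralNetLearner(iris, hidden_layer_sizes=[4],
                           learning_rate=0.05, epochs=200)
print(predict([5.1, 3.5, 1.4, 0.2]))  # predicted class index for one example
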
@@ -859,12 +859,9 @@ def network(input_units, hidden_layer_sizes, output_units, activation=sigmoid):
 
 
 def init_examples(examples, idx_i, idx_t, o_units):
-    inputs = {}
-    targets = {}
-
-    for i in range(len(examples)):
-        e = examples[i]
+    inputs, targets = {}, {}
 
+    for i, e in enumerate(examples):
         # Input values of e
         inputs[i] = [e[i] for i in idx_i]

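Note: this rewrite is behavior-preserving; enumerate yields the same (index, element) pairs that the manual range(len(...)) indexing produced, in one line instead of two. A minimal demonstration:

examples = ['a', 'b', 'c']

# Before: manual indexing.
pairs_old = []
for i in range(len(examples)):
    e = examples[i]
    pairs_old.append((i, e))

# After: enumerate produces identical (index, element) pairs.
pairs_new = list(enumerate(examples))

assert pairs_old == pairs_new
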
@@ -1049,7 +1046,7 @@ def grade_learner(predict, tests):
     return mean(int(predict(X) == y) for X, y in tests)
 
 
-def train_test_split(dataset, start = None, end = None, test_split = None):
+def train_test_split(dataset, start=None, end=None, test_split=None):
     """If you are giving 'start' and 'end' as parameters,
     then it will return the testing set from index 'start' to 'end'
     and the rest for training.
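
Note: spaces around `=` removed in the defaults, as above. A hedged sketch of the two calling conventions the docstring describes (the return order and the exact test_split fraction semantics are assumed from the docstring, not re-checked here):

from learning import DataSet, train_test_split

iris = DataSet(name='iris')

# Explicit indices: examples from 'start' to 'end' become the test set,
# the rest the training set.
train, test = train_test_split(iris, start=10, end=25)

# Fractional split: hold out (apparently) 20% of the examples for testing.
train, test = train_test_split(iris, test_split=0.2)
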
@@ -1263,9 +1260,7 @@ def ContinuousXor(n):
 # ______________________________________________________________________________
 
 
-def compare(algorithms=None,
-            datasets=None,
-            k=10, trials=1):
+def compare(algorithms=None, datasets=None, k=10, trials=1):
     """Compare various learners on various datasets using cross-validation.
     Print results as a table."""
     algorithms = algorithms or [PluralityLearner, NaiveBayesLearner, # default list
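
Note: collapsing the signature onto one line changes nothing at call sites. A hedged sketch of an invocation (the dataset constructors are assumed from the rest of the repo; the defaults visible in the hunk apply when arguments are omitted):

from learning import compare, DataSet, PluralityLearner, NaiveBayesLearner

# Cross-validates every learner on every dataset (k folds, `trials` repetitions)
# and prints the results as a table.
compare(algorithms=[PluralityLearner, NaiveBayesLearner],
        datasets=[DataSet(name='iris'), DataSet(name='zoo')],
        k=10, trials=1)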
