@@ -23,7 +23,7 @@ def euclidean_distance(X, Y):
     return math.sqrt(sum((x - y) ** 2 for x, y in zip(X, Y)))


-def cross_entropy_loss(X,Y):
+def cross_entropy_loss(X, Y):
     n = len(X)
     return (-1.0 / n) * sum(x * math.log(y) + (1 - x) * math.log(1 - y) for x, y in zip(X, Y))

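A quick sanity check of the two helpers above (a hypothetical snippet, not part of the commit; the function bodies are copied from the patched file):

import math

def euclidean_distance(X, Y):
    return math.sqrt(sum((x - y) ** 2 for x, y in zip(X, Y)))

def cross_entropy_loss(X, Y):
    n = len(X)
    return (-1.0 / n) * sum(x * math.log(y) + (1 - x) * math.log(1 - y)
                            for x, y in zip(X, Y))

print(euclidean_distance([3, 4], [0, 0]))      # 5.0, the classic 3-4-5 triangle
print(cross_entropy_loss([1, 0], [0.9, 0.1]))  # ~0.105; confident correct predictions give low loss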
@@ -180,7 +180,7 @@ def classes_to_numbers(self, classes=None):
         for item in self.examples:
             item[self.target] = classes.index(item[self.target])

-    def remove_examples(self, value=""):
+    def remove_examples(self, value=''):
        """Remove examples that contain given value."""
        self.examples = [x for x in self.examples if value not in x]
        self.update_values()
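The filtering rule in remove_examples relies on list membership, so a single bad field drops the whole row. A minimal standalone illustration (hypothetical data, not from the repo):

examples = [[5.1, 3.5, 'setosa'], [4.9, '', 'setosa'], [6.2, 2.8, 'virginica']]
examples = [x for x in examples if '' not in x]
print(examples)  # the row containing the empty-string placeholder is dropped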
@@ -661,7 +661,7 @@ def predict(example):


 def NeuralNetLearner(dataset, hidden_layer_sizes=[3],
-                    learning_rate=0.01, epochs=100, activation=sigmoid):
+                     learning_rate=0.01, epochs=100, activation=sigmoid):
     """Layered feed-forward network.
     hidden_layer_sizes: List of number of hidden units per hidden layer
     learning_rate: Learning rate of gradient descent
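Assuming aima-python's usual learner interface (the learner returns a predict function) and its bundled iris dataset, a call might look like this sketch:

iris = DataSet(name='iris')          # assumes aima-python's DataSet class
iris.classes_to_numbers()            # targets must be numeric for the network
nn = NeuralNetLearner(iris, hidden_layer_sizes=[5], learning_rate=0.15, epochs=75)
print(nn([5.1, 3.5, 1.0, 0.1]))      # predicted class index for one flower

Note that the [3] default is a mutable default argument; it stays harmless only as long as the function never mutates it.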
@@ -859,12 +859,9 @@ def network(input_units, hidden_layer_sizes, output_units, activation=sigmoid):


 def init_examples(examples, idx_i, idx_t, o_units):
-    inputs = {}
-    targets = {}
-
-    for i in range(len(examples)):
-        e = examples[i]
+    inputs, targets = {}, {}

+    for i, e in enumerate(examples):
         # Input values of e
         inputs[i] = [e[i] for i in idx_i]

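The replacement of the range(len(...)) loop with enumerate is behavior-preserving; a generic illustration (literal indices stand in for idx_i, hypothetical data):

examples = [[0.2, 0.7, 1], [0.9, 0.1, 0]]
inputs, targets = {}, {}
for i, e in enumerate(examples):   # was: for i in range(len(examples)): e = examples[i]
    inputs[i] = [e[j] for j in (0, 1)]
print(inputs)  # {0: [0.2, 0.7], 1: [0.9, 0.1]}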
@@ -1049,7 +1046,7 @@ def grade_learner(predict, tests):
     return mean(int(predict(X) == y) for X, y in tests)


-def train_test_split(dataset, start = None, end = None, test_split = None):
+def train_test_split(dataset, start=None, end=None, test_split=None):
     """If you are giving 'start' and 'end' as parameters,
     then it will return the testing set from index 'start' to 'end'
     and the rest for training.
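Both calling conventions from the docstring, sketched under the assumption that the function returns a (train, test) pair:

train, test = train_test_split(iris, start=100, end=150)  # rows 100-149 become the test set
train, test = train_test_split(iris, test_split=0.2)      # hold out a 20% fraction instead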
@@ -1263,9 +1260,7 @@ def ContinuousXor(n):
 # ______________________________________________________________________________


-def compare(algorithms=None,
-            datasets=None,
-            k=10, trials=1):
+def compare(algorithms=None, datasets=None, k=10, trials=1):
     """Compare various learners on various datasets using cross-validation.
     Print results as a table."""
     algorithms = algorithms or [PluralityLearner, NaiveBayesLearner,  # default list
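A hypothetical invocation, assuming the module's default learners and DataSet objects are in scope:

compare(algorithms=[PluralityLearner, NaiveBayesLearner],
        datasets=[iris, zoo],
        k=10, trials=1)  # prints one cross-validation score per (learner, dataset) cell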