@@ -304,16 +304,6 @@ def decision_tree_learning(self, examples, attrs, parent_examples=()):
                 tree.add(v, subtree)
             return tree
 
-    def choose_attribute(self, attrs, examples):
-        "Choose the attribute with the highest information gain."
-        return argmax(attrs, lambda a: self.information_gain(a, examples))
-
-    def all_same_class(self, examples):
-        "Are all these examples in the same target class?"
-        target = self.dataset.target
-        class0 = examples[0][target]
-        return all(e[target] == class0 for e in examples)
-
     def plurality_value(self, examples):
         """Return the most popular target value for this set of examples.
         (If target is binary, this is the majority; otherwise plurality.)"""
@@ -324,6 +314,16 @@ def plurality_value(self, examples):
     def count(self, attr, val, examples):
         return count_if(lambda e: e[attr] == val, examples)
 
+    def all_same_class(self, examples):
+        "Are all these examples in the same target class?"
+        target = self.dataset.target
+        class0 = examples[0][target]
+        return all(e[target] == class0 for e in examples)
+
+    def choose_attribute(self, attrs, examples):
+        "Choose the attribute with the highest information gain."
+        return argmax(attrs, lambda a: self.information_gain(a, examples))
+
     def information_gain(self, attr, examples):
         def I(examples):
             target = self.dataset.target
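
The relocated `choose_attribute` ranks attributes by `information_gain`, whose body is only partly visible in this hunk. As a point of reference, here is a minimal, self-contained sketch of the standard information-gain computation over examples stored as dicts; the function and variable names below are illustrative and are not part of learning.py's API.

```python
import math
from collections import Counter

def entropy(examples, target):
    # Shannon entropy of the target attribute's value distribution.
    counts = Counter(e[target] for e in examples)
    total = len(examples)
    return -sum((n / total) * math.log2(n / total) for n in counts.values())

def information_gain(attr, examples, target):
    # Entropy of the whole set minus the size-weighted entropy of each
    # subset produced by splitting on attr.
    total = len(examples)
    remainder = 0.0
    for val in set(e[attr] for e in examples):
        subset = [e for e in examples if e[attr] == val]
        remainder += len(subset) / total * entropy(subset, target)
    return entropy(examples, target) - remainder
```

With examples as a list of dicts, choosing the best attribute then amounts to `max(attrs, key=lambda a: information_gain(a, examples, target))`, which is what the `argmax` call in `choose_attribute` expresses.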