Commit 99d4cc3

antmarakis authored and norvig committed
Implementation: Multi-Class Backpropagation (#486)
* Update test_learning.py
* Update learning.py
* set max_score to -1 (for now)
* Update learning.py
* Make find_max more pythonic
1 parent 6178784 commit 99d4cc3
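
The heart of the change: both learners previously emitted a single sigmoid output and thresholded it at 0.5, which only supports binary classification. With one output unit per class, the predicted label is simply the index of the most active output node. A minimal sketch of the before/after decoding rule (toy activations, not the repository's code):

    # Before: one output unit, binary threshold
    pred = [0.83]                        # single sigmoid activation
    label = 1 if pred[0] >= 0.5 else 0   # -> 1

    # After: one output unit per class, argmax decode
    pred = [0.10, 0.75, 0.15]            # one activation per class
    label = pred.index(max(pred))        # -> 1, the most active unit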

2 files changed: +100, -61 lines

learning.py

Lines changed: 79 additions & 50 deletions
@@ -469,7 +469,7 @@ def NeuralNetLearner(dataset, hidden_layer_sizes=[3],
     """

     i_units = len(dataset.inputs)
-    o_units = 1 # As of now, dataset.target gives only one index.
+    o_units = len(dataset.values[dataset.target])

     # construct a network
     raw_net = network(i_units, hidden_layer_sizes, o_units)
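
Why this works: in aima-python's DataSet, values[dataset.target] lists the distinct values of the target attribute, so its length is the number of classes (the updated test below uses the same expression for classes_number). A stand-in with hypothetical field values, just to show the computation:

    # Hypothetical stand-in for the two DataSet fields used above.
    class FakeDataSet:
        target = 4               # index of the class attribute
        values = {4: [0, 1, 2]}  # distinct class labels after classes_to_numbers()

    o_units = len(FakeDataSet.values[FakeDataSet.target])  # 3: one unit per class
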
@@ -494,49 +494,12 @@ def predict(example):

         # Hypothesis
         o_nodes = learned_net[-1]
-        pred = [o_nodes[i].value for i in range(o_units)]
-        return 1 if pred[0] >= 0.5 else 0
+        prediction = find_max_node(o_nodes)
+        return prediction

     return predict


-class NNUnit:
-    """Single Unit of Multiple Layer Neural Network
-    inputs: Incoming connections
-    weights: Weights to incoming connections
-    """
-
-    def __init__(self, weights=None, inputs=None):
-        self.weights = []
-        self.inputs = []
-        self.value = None
-        self.activation = sigmoid
-
-
-def network(input_units, hidden_layer_sizes, output_units):
-    """Create Directed Acyclic Network of given number layers.
-    hidden_layers_sizes : List number of neuron units in each hidden layer
-    excluding input and output layers
-    """
-    # Check for PerceptronLearner
-    if hidden_layer_sizes:
-        layers_sizes = [input_units] + hidden_layer_sizes + [output_units]
-    else:
-        layers_sizes = [input_units] + [output_units]
-
-    net = [[NNUnit() for n in range(size)]
-           for size in layers_sizes]
-    n_layers = len(net)
-
-    # Make Connection
-    for i in range(1, n_layers):
-        for n in net[i]:
-            for k in net[i-1]:
-                n.inputs.append(k)
-                n.weights.append(0)
-    return net
-
-
 def BackPropagationLearner(dataset, net, learning_rate, epochs):
     """[Figure 18.23] The back-propagation algorithm for multilayer network"""
     # Initialise weights
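
Note that NNUnit and network are not dropped here: the identical definitions reappear below, relocated after BackPropagationLearner alongside the new init_examples and find_max_node helpers.
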
@@ -551,17 +514,21 @@ def BackPropagationLearner(dataset, net, learning_rate, epochs):
     Changing dataset class will have effect on all the learners.
     Will be taken care of later
     '''
-    idx_t = [dataset.target]
-    idx_i = dataset.inputs
-    n_layers = len(net)
     o_nodes = net[-1]
     i_nodes = net[0]
+    o_units = len(o_nodes)
+    idx_t = dataset.target
+    idx_i = dataset.inputs
+    n_layers = len(net)
+
+    inputs, targets = init_examples(examples, idx_i, idx_t, o_units)

     for epoch in range(epochs):
         # Iterate over each example
-        for e in examples:
-            i_val = [e[i] for i in idx_i]
-            t_val = [e[i] for i in idx_t]
+        for e in range(len(examples)):
+            i_val = inputs[e]
+            t_val = targets[e]
+
             # Activate input layer
             for v, n in zip(i_val, i_nodes):
                 n.value = v
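
Two changes here: per-example input/target extraction is hoisted out of the epoch loop into a single init_examples pass, and when there is more than one output unit the targets become one-hot vectors, matching the per-class error computed below. A toy check of the new helper on a hypothetical iris-like row (init_examples itself is added further down in this diff):

    # One example: four input attributes at indices 0-3, class label 2 at index 4.
    examples = [[5.1, 3.5, 1.4, 0.2, 2]]
    inputs, targets = init_examples(examples, [0, 1, 2, 3], 4, 3)
    assert inputs[0] == [5.1, 3.5, 1.4, 0.2]
    assert targets[0] == [0, 0, 1]  # one-hot vector for class 2
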
@@ -577,7 +544,6 @@ def BackPropagationLearner(dataset, net, learning_rate, epochs):
             delta = [[] for i in range(n_layers)]

             # Compute outer layer delta
-            o_units = len(o_nodes)
             err = [t_val[i] - o_nodes[i].value
                    for i in range(o_units)]
             delta[-1] = [(o_nodes[i].value) * (1 - o_nodes[i].value) *
@@ -613,7 +579,7 @@
 def PerceptronLearner(dataset, learning_rate=0.01, epochs=100):
     """Logistic Regression, NO hidden layer"""
     i_units = len(dataset.inputs)
-    o_units = 1 # As of now, dataset.target gives only one index.
+    o_units = len(dataset.values[dataset.target])
     hidden_layer_sizes = []
     raw_net = network(i_units, hidden_layer_sizes, o_units)
     learned_net = BackPropagationLearner(dataset, raw_net, learning_rate, epochs)
@@ -635,10 +601,73 @@ def predict(example):

         # Hypothesis
         o_nodes = learned_net[-1]
-        pred = [o_nodes[i].value for i in range(o_units)]
-        return 1 if pred[0] >= 0.5 else 0
+        prediction = find_max_node(o_nodes)
+        return prediction

     return predict
+
+
+class NNUnit:
+    """Single Unit of Multiple Layer Neural Network
+    inputs: Incoming connections
+    weights: Weights to incoming connections
+    """
+
+    def __init__(self, weights=None, inputs=None):
+        self.weights = []
+        self.inputs = []
+        self.value = None
+        self.activation = sigmoid
+
+
+def network(input_units, hidden_layer_sizes, output_units):
+    """Create Directed Acyclic Network of given number layers.
+    hidden_layers_sizes : List number of neuron units in each hidden layer
+    excluding input and output layers
+    """
+    # Check for PerceptronLearner
+    if hidden_layer_sizes:
+        layers_sizes = [input_units] + hidden_layer_sizes + [output_units]
+    else:
+        layers_sizes = [input_units] + [output_units]
+
+    net = [[NNUnit() for n in range(size)]
+           for size in layers_sizes]
+    n_layers = len(net)
+
+    # Make Connection
+    for i in range(1, n_layers):
+        for n in net[i]:
+            for k in net[i-1]:
+                n.inputs.append(k)
+                n.weights.append(0)
+    return net
+
+
+def init_examples(examples, idx_i, idx_t, o_units):
+    inputs = {}
+    targets = {}
+
+    for i in range(len(examples)):
+        e = examples[i]
+        # Input values of e
+        inputs[i] = [e[i] for i in idx_i]
+
+        if o_units > 1:
+            # One-Hot representation of e's target
+            t = [0 for i in range(o_units)]
+            t[e[idx_t]] = 1
+            targets[i] = t
+        else:
+            # Target value of e
+            targets[i] = [e[idx_t]]
+
+    return inputs, targets
+
+
+def find_max_node(nodes):
+    return nodes.index(argmax(nodes, key=lambda node: node.value))
+
 # ______________________________________________________________________________
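
find_max_node relies on the argmax helper that learning.py pulls in from the project's utils module: argmax returns the element whose key (here, the activation value) is largest, and nodes.index recovers its position in the output layer. A quick sketch with hypothetical stand-in nodes:

    # Hypothetical stand-ins for output-layer units and their activations.
    class FakeNode:
        def __init__(self, value):
            self.value = value

    out = [FakeNode(0.1), FakeNode(0.7), FakeNode(0.2)]
    print(find_max_node(out))  # -> 1, the index of the most active unit

One stylistic caveat in init_examples: the comprehension [e[i] for i in idx_i] reuses the loop variable i. In Python 3 the comprehension variable is scoped to the comprehension, so inputs[i] still sees the outer index, but a distinct name would read more clearly.
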
tests/test_learning.py

Lines changed: 21 additions & 11 deletions
@@ -66,23 +66,33 @@ def test_decision_tree_learner():

 def test_neural_network_learner():
     iris = DataSet(name="iris")
-    iris.remove_examples("virginica")
-
+
     classes = ["setosa","versicolor","virginica"]
-    iris.classes_to_numbers()
+    iris.classes_to_numbers(classes)
+
+    nNL = NeuralNetLearner(iris, [5], 0.15, 75)
+    pred1 = nNL([5,3,1,0.1])
+    pred2 = nNL([6,3,3,1.5])
+    pred3 = nNL([7.5,4,6,2])

-    nNL = NeuralNetLearner(iris)
-    # NeuralNetLearner might be wrong. Just check if prediction is in range.
-    assert nNL([5,3,1,0.1]) in range(len(classes))
+    # NeuralNetLearner might be wrong. If it is, check if prediction is in range.
+    assert pred1 == 0 or pred1 in range(len(classes))
+    assert pred2 == 1 or pred2 in range(len(classes))
+    assert pred3 == 2 or pred3 in range(len(classes))


 def test_perceptron():
     iris = DataSet(name="iris")
-    iris.remove_examples("virginica")
-
-    classes = ["setosa","versicolor","virginica"]
     iris.classes_to_numbers()

+    classes_number = len(iris.values[iris.target])
+
     perceptron = PerceptronLearner(iris)
-    # PerceptronLearner might be wrong. Just check if prediction is in range.
-    assert perceptron([5,3,1,0.1]) in range(len(classes))
+    pred1 = perceptron([5,3,1,0.1])
+    pred2 = perceptron([6,3,4,1])
+    pred3 = perceptron([7.5,4,6,2])
+
+    # PerceptronLearner might be wrong. If it is, check if prediction is in range.
+    assert pred1 == 0 or pred1 in range(classes_number)
+    assert pred2 == 1 or pred2 in range(classes_number)
+    assert pred3 == 2 or pred3 in range(classes_number)
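
The assertions are deliberately lenient, as the comments say: pred1 == 0 or pred1 in range(len(classes)) passes for any in-range prediction, so each assert documents the expected class (0, 1, 2 for setosa, versicolor, virginica) while only hard-failing on an out-of-range output. test_neural_network_learner also now passes explicit hyperparameters (one hidden layer of 5 units, learning rate 0.15, 75 epochs) instead of the defaults, and both tests train on the full iris set, virginica included.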
