Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit 4c64a2c

Browse files
committed
adds tests for element_wise_product and forward_backward
1 parent 4075725 commit 4c64a2c

File tree

3 files changed

+36
-12
lines changed

3 files changed

+36
-12
lines changed

probability.py

Lines changed: 15 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -524,15 +524,7 @@ def markov_blanket_sample(X, e, bn):
524524

525525
# _________________________________________________________________________
526526

527-
"""
528-
umbrella_evidence = [T, T, F, T, T]
529-
umbrella_prior = [0.5, 0.5]
530-
umbrella_transition = [[0.7, 0.3], [0.3, 0.7]]
531-
umbrella_sensor = [[0.9, 0.2], [0.1, 0.8]]
532-
umbrellaHMM = HiddenMarkovModel(umbrella_transition, umbrella_sensor)
533-
534-
print(forward_backward(umbrellaHMM, umbrella_evidence, umbrella_prior))
535-
"""
527+
# Umbrella Example [Fig. 15.2]
536528

537529
class HiddenMarkovModel:
538530

@@ -568,7 +560,19 @@ def backward(HMM, b, ev):
568560

569561

570562
def forward_backward(HMM, ev, prior):
571-
"""[Fig. 15.4]"""
563+
"""[Fig. 15.4]
564+
Forward-Backward algorithm for smoothing. Computes posterior probabilities
565+
of a sequence of states given a sequence of observations.
566+
567+
umbrella_evidence = [T, T, F, T, T]
568+
umbrella_prior = [0.5, 0.5]
569+
umbrella_transition = [[0.7, 0.3], [0.3, 0.7]]
570+
umbrella_sensor = [[0.9, 0.2], [0.1, 0.8]]
571+
umbrellaHMM = HiddenMarkovModel(umbrella_transition, umbrella_sensor)
572+
573+
>>> forward_backward(umbrellaHMM, umbrella_evidence, umbrella_prior)
574+
[[0.6469, 0.3531], [0.8673, 0.1327], [0.8204, 0.1796], [0.3075, 0.6925], [0.8204, 0.1796], [0.8673, 0.1327]]
575+
"""
572576
t = len(ev)
573577
ev.insert(0, None) # to make the code look similar to pseudo code
574578

@@ -587,6 +591,7 @@ def forward_backward(HMM, ev, prior):
587591
bv.append(b)
588592

589593
sv = sv[::-1]
594+
# to have only 4 digits after decimal point
590595
for i in range(len(sv)):
591596
for j in range(len(sv[i])):
592597
sv[i][j] = float("{0:.4f}".format(sv[i][j]))

tests/test_probability.py

Lines changed: 16 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -102,5 +102,21 @@ def test_likelihood_weighting():
102102
'Burglary', dict(JohnCalls=T, MaryCalls=T),
103103
burglary, 10000).show_approx() == 'False: 0.702, True: 0.298'
104104

105+
106+
def test_forward_backward():
107+
umbrella_prior = [0.5, 0.5]
108+
umbrella_transition = [[0.7, 0.3], [0.3, 0.7]]
109+
umbrella_sensor = [[0.9, 0.2], [0.1, 0.8]]
110+
umbrellaHMM = HiddenMarkovModel(umbrella_transition, umbrella_sensor)
111+
112+
umbrella_evidence = [T, T, F, T, T]
113+
assert forward_backward(umbrellaHMM, umbrella_evidence, umbrella_prior) == [[0.6469, 0.3531],
114+
[0.8673, 0.1327], [0.8204, 0.1796], [0.3075, 0.6925], [0.8204, 0.1796], [0.8673, 0.1327]]
115+
116+
umbrella_evidence = [T, F, T, F, T]
117+
assert forward_backward(umbrellaHMM, umbrella_evidence, umbrella_prior) == [[0.5871, 0.4129],
118+
[0.7177, 0.2823], [0.2324, 0.7676], [0.6072, 0.3928], [0.2324, 0.7676], [0.7177, 0.2823]]
119+
120+
105121
if __name__ == '__main__':
106122
pytest.main()

tests/test_utils.py

Lines changed: 5 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -15,8 +15,7 @@ def test_removeall_list():
1515

1616
def test_removeall_string():
1717
assert removeall('s', '') == ''
18-
assert removeall(
19-
's', 'This is a test. Was a test.') == 'Thi i a tet. Wa a tet.'
18+
assert removeall('s', 'This is a test. Was a test.') == 'Thi i a tet. Wa a tet.'
2019

2120

2221
def test_unique():
@@ -86,6 +85,10 @@ def test_histogram():
8685
def test_dotproduct():
8786
assert dotproduct([1, 2, 3], [1000, 100, 10]) == 1230
8887

88+
def test_element_wise_product():
89+
assert element_wise_product([1, 2, 5], [7, 10, 0]) == [7, 20, 0]
90+
assert element_wise_product([1, 6, 3, 0], [9, 12, 0, 0]) == [9, 72, 0, 0]
91+
8992

9093
def test_vector_add():
9194
assert vector_add((0, 1), (8, 9)) == (8, 10)

0 commit comments

Comments
 (0)