
Commit 9c2ffe3

mo-kliantmarakis authored and committed
fixed viterbi algorithm #1126 (#1129)
1 parent f4dee6f commit 9c2ffe3

File tree

probability.py
tests/test_probability.py

2 files changed: +25 -8 lines


probability.py

+21 -6

@@ -11,6 +11,7 @@
 import random
 from collections import defaultdict
 from functools import reduce
+import numpy as np
 
 
 # ______________________________________________________________________________
@@ -687,28 +688,42 @@ def forward_backward(HMM, ev):
 
 
 def viterbi(HMM, ev):
     """[Equation 15.11]
-    Viterbi algorithm to find the most likely sequence. Computes the best path,
+    Viterbi algorithm to find the most likely sequence. Computes the best path and the corresponding probabilities,
     given an HMM model and a sequence of observations."""
     t = len(ev)
+    ev = ev.copy()
     ev.insert(0, None)
 
     m = [[0.0, 0.0] for _ in range(len(ev) - 1)]
 
     # the recursion is initialized with m1 = forward(P(X0), e1)
     m[0] = forward(HMM, HMM.prior, ev[1])
+    # keep track of maximizing predecessors
+    backtracking_graph = []
 
     for i in range(1, t):
         m[i] = element_wise_product(HMM.sensor_dist(ev[i + 1]),
                                     [max(element_wise_product(HMM.transition_model[0], m[i - 1])),
                                      max(element_wise_product(HMM.transition_model[1], m[i - 1]))])
+        backtracking_graph.append([np.argmax(element_wise_product(HMM.transition_model[0], m[i - 1])),
+                                   np.argmax(element_wise_product(HMM.transition_model[1], m[i - 1]))])
+
+    # computed probabilities
+    ml_probabilities = [0.0] * (len(ev) - 1)
+    # most likely sequence
+    ml_path = [True] * (len(ev) - 1)
 
-    path = [0.0] * (len(ev) - 1)
     # the construction of the most likely sequence starts in the final state with the largest probability,
-    # and runs backwards; the algorithm needs to store for each xt its best predecessor xt-1
-    for i in range(t, -1, -1):
-        path[i - 1] = max(m[i - 1])
+    # and runs backwards; the algorithm needs to store for each xt its predecessor xt-1 maximizing its probability
+    i_max = np.argmax(m[-1])
+
+    for i in range(t - 1, -1, -1):
+        ml_probabilities[i] = m[i][i_max]
+        ml_path[i] = True if i_max == 0 else False
+        if i > 0:
+            i_max = backtracking_graph[i - 1][i_max]
 
-    return path
+    return ml_path, ml_probabilities
 
 
 # _________________________________________________________________________

tests/test_probability.py

+4 -2

@@ -288,10 +288,12 @@ def test_viterbi():
     umbrellaHMM = HiddenMarkovModel(umbrella_transition, umbrella_sensor)
 
     umbrella_evidence = [T, T, F, T, T]
-    assert rounder(viterbi(umbrellaHMM, umbrella_evidence)) == [0.8182, 0.5155, 0.1237, 0.0334, 0.0210]
+    assert viterbi(umbrellaHMM, umbrella_evidence)[0] == [T, T, F, T, T]
+    assert rounder(viterbi(umbrellaHMM, umbrella_evidence)[1]) == [0.8182, 0.5155, 0.1237, 0.0334, 0.0210]
 
     umbrella_evidence = [T, F, T, F, T]
-    assert rounder(viterbi(umbrellaHMM, umbrella_evidence)) == [0.8182, 0.1964, 0.053, 0.0154, 0.0042]
+    assert viterbi(umbrellaHMM, umbrella_evidence)[0] == [T, F, F, F, T]
+    assert rounder(viterbi(umbrellaHMM, umbrella_evidence)[1]) == [0.8182, 0.1964, 0.0275, 0.0154, 0.0042]
 
 
 def test_fixed_lag_smoothing():
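
Since viterbi now returns a (path, probabilities) pair instead of a single list of probabilities, callers have to unpack two values. A minimal usage sketch of the new signature, assuming it runs from the repository root and using the standard AIMA umbrella-world transition and sensor matrices (those matrices are defined earlier in test_viterbi and are not part of this diff):

from probability import HiddenMarkovModel, viterbi

T, F = True, False

# Assumed umbrella-world parameters (standard AIMA example; not shown in this diff)
umbrella_transition = [[0.7, 0.3], [0.3, 0.7]]  # state transition probabilities
umbrella_sensor = [[0.9, 0.2], [0.1, 0.8]]      # observation probabilities
umbrellaHMM = HiddenMarkovModel(umbrella_transition, umbrella_sensor)

umbrella_evidence = [T, T, F, T, T]

# viterbi now returns the most likely state sequence and its running probabilities
ml_path, ml_probabilities = viterbi(umbrellaHMM, umbrella_evidence)
print(ml_path)            # [True, True, False, True, True]
print(ml_probabilities)   # rounds to [0.8182, 0.5155, 0.1237, 0.0334, 0.0210]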
