example_propagation.py
import numpy as np

from belief_propagation import BeliefPropagation, TannerGraph, bsc_llr

# Consider a parity check matrix H with 5 check equations over 10 code bits
H = np.array([[1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 1, 1, 0],
[0, 0, 1, 0, 0, 1, 0, 1, 0, 1],
[0, 0, 0, 1, 0, 0, 1, 0, 1, 1]])
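# A quick structural sanity check (added for illustration, plain NumPy): in this
# H every code bit participates in exactly two checks and every check involves
# exactly four bits.
assert H.shape == (5, 10)
assert np.all(H.sum(axis=0) == 2) and np.all(H.sum(axis=1) == 4)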
# Use it to construct a Tanner graph. Assume a binary symmetric channel (BSC) with bit-flip probability p = 0.1
model = bsc_llr(0.1)
tg = TannerGraph.from_biadjacency_matrix(H, channel_model=model)
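# For reference, and independent of this library's API: on a BSC with flip
# probability p, the magnitude of each channel LLR is log((1 - p) / p), so every
# received bit here enters decoding with |LLR| = log(0.9 / 0.1) ≈ 2.197.
llr_magnitude = np.log((1 - 0.1) / 0.1)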
# Suppose the codeword [1,1,0,0,1,0,0,0,0,0] was sent, but a channel error flipped the last bit
c = np.array([1, 1, 0, 0, 1, 0, 0, 0, 0, 1])
# Consequently the syndrome H.dot(c) % 2 is nonzero:
# array([0, 0, 0, 1, 1])
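# Checking this explicitly (plain NumPy, not part of the library API): the last
# two parity checks are violated, flagging the error.
syndrome = H.dot(c) % 2
assert syndrome.tolist() == [0, 0, 0, 1, 1]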
# Now run belief propagation for up to 10 iterations to try to correct the error
bp = BeliefPropagation(tg, H, max_iter=10)
estimate, llr, decode_success = bp.decode(c)
# The estimate differs from the received word only at the flipped last bit,
# i.e. the decoder recovered the transmitted codeword
np.logical_xor(c, estimate)
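# A short verification sketch (plain NumPy, assuming decode() returns the
# hard-decision estimate first and a success flag last, as unpacked above): the
# corrected word should satisfy every parity check, and the received word
# should differ from it only at the last position.
assert decode_success
assert np.all(H.dot(estimate) % 2 == 0)
assert np.flatnonzero(np.logical_xor(c, estimate)).tolist() == [9]
print("corrected codeword:", estimate)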