-
Notifications
You must be signed in to change notification settings - Fork 6
/
Copy pathGibbs.py
90 lines (66 loc) · 2.83 KB
/
Gibbs.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
D = 2

# Parameters of the target 2-D Gaussian: marginal means and variances for the
# two variables a and b, plus their covariance.
a_mu = 0
b_mu = 0
a_sigma = 1
b_sigma = 1
a_b_cov = 0.5

joint_mu = np.array([[a_mu], [b_mu]])
joint_cov = np.array([[a_sigma, a_b_cov], [a_b_cov, b_sigma]])

# Draw reference samples directly from the joint for later comparison:
# if z ~ N(0, I) and L L^T = cov, then L z + mu ~ N(mu, cov).
N = 10000
L = np.linalg.cholesky(joint_cov)
samples_from_true_distribution = L @ np.random.randn(D, N) + joint_mu
def get_conditional_dist(joint_mu, joint_cov, var_index):
    '''Returns the conditional distribution given the joint distribution and which variable
    the conditional probability should use.
    Right now this only works for 2-variable joint distributions.

    joint_mu: joint distribution's mu
    joint_cov: joint distribution's covariance
    var_index: index of the variable in the joint distribution. Everything else will be
        conditioned on. For example, if the joint distribution p(a, b, c) has mu [mu_a, mu_b, mu_c],
        to get p(c | a, b), use var_index = 2.

    returns:
        a function that can sample from the univariate conditional distribution
    '''
    assert joint_mu.shape[0] == 2, 'Sorry, this function only works for 2-dimensional joint distributions right now'
    # ~var_index maps 0 -> -1 and 1 -> -2, which selects "the other" entry
    # when there are exactly two variables (guaranteed by the assert above).
    a = joint_mu[var_index]
    b = joint_mu[~var_index]
    A = joint_cov[var_index, var_index]
    B = joint_cov[~var_index, ~var_index]
    C = joint_cov[var_index, ~var_index]
    # we're dealing with one dimension so the matrix inverse is a reciprocal
    B_inv = 1 / B

    # Return a function that samples the conditional given a value g of the
    # conditioned-on variable.
    def dist(g):
        # Standard Gaussian conditioning: mu = a + C * B^{-1} * (g - b)
        mu = a + C * B_inv * (g - b)
        # cov = A - C * B^{-1} * C^T (scalars here, so no transpose needed)
        cov = A - B_inv * C * C
        return np.sqrt(cov) * np.random.randn(1) + mu

    return dist
# Now set up the conditionals for this particular problem.
# Build one conditional sampler per dimension: univariate_conditionals[0]
# samples p(a | b) and univariate_conditionals[1] samples p(b | a).
univariate_conditionals = []
for dim in range(D):
    univariate_conditionals.append(get_conditional_dist(joint_mu, joint_cov, dim))
def gibbs_sample(univariate_conditionals, sample_count):
    '''Runs Gibbs sampling using the distribution's univariate conditionals.

    univariate_conditionals: one sampler per dimension; each takes the value of
        the other dimension and returns a draw from the conditional.
    sample_count: number of samples (columns) to produce.

    Returns a D x sample_count matrix, one sample per column.
    '''
    D = len(univariate_conditionals)
    assert D == 2, "Sorry, this only supports 2 dimensions right now"

    # Column i of `chain` holds the i-th sample.
    chain = np.zeros((D, sample_count))
    # Arbitrary starting point for the Markov chain.
    chain[:, 0] = [3, -3]

    for step in range(1, sample_count):
        # Carry the previous sample forward, then resample a single coordinate.
        chain[:, step] = chain[:, step - 1]
        # Alternate which coordinate gets updated this step.
        active = step % D
        # ~active maps 0 -> -1 and 1 -> -2, i.e. "the other row" when D == 2;
        # condition on that coordinate's previous value.
        chain[active, step] = univariate_conditionals[active](chain[~active, step - 1])
    return chain
# visualizing
# Run the Gibbs sampler for 100 steps; `samples` is a D x 100 matrix of draws,
# presumably plotted against `samples_from_true_distribution` further below.
samples = gibbs_sample(univariate_conditionals, sample_count=100)