# Barrier Method (Gradient Descent with an Inequality Constraint)
# Yong-Jun Shin (2019)
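#
# The barrier method handles the inequality constraint x > 1 by augmenting
# the objective f0(x) = x**2 + 4*x + 5 with the logarithmic barrier term
# -b*log(x-1), which grows to +infinity as x approaches 1 from above. The
# unconstrained minimum of f0 is at x = -2 (where f0'(x) = 2x + 4 = 0),
# which is infeasible, so as b -> 0 the constrained optimum approaches the
# boundary x = 1.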
import numpy as np
import matplotlib.pyplot as plt
import math
def f(x):
    return x**2 + 4*x + 5 - b*math.log(x-1)  # -b*math.log(x-1) is the
    # barrier function for x > 1 (inequality constraint)
N = 10 # total number of iterations
iteration = np.arange(0, N, 1) # iteration vector [0,..., N-1]
x_opt = np.empty(N) # optimal x (x_opt) candidate (vector)
x_opt[0] = 5 # initial x_opt value = 5
dx = 0.01 # delta x
mu = 0.1 # step size mu
b = 0.00000001 # constant b
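# The loop below approximates the derivative with a backward difference,
# f'(x) ~ (f(x) - f(x-dx))/dx, then takes a gradient descent step
# x[n+1] = x[n] - mu*f'(x[n]); any iterate that lands at or below the
# boundary x = 1 is projected back just inside the feasible region.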
for n in range(0, N-1):  # n: iteration index
    slope = (f(x_opt[n]) - f(x_opt[n]-dx))/dx  # slope (first derivative) at x_opt[n]
    x_opt[n+1] = x_opt[n] - mu * slope  # gradient descent step
    if (x_opt[n+1] - dx <= 1):  # if the new x_opt minus dx falls at or below the boundary x = 1
        x_opt[n+1] = dx + 1.0000001  # project it back just inside the feasible region x > 1
plt.plot(iteration, x_opt)
plt.xlabel('iteration (n)')
plt.ylabel('optimal x (x_opt)')
plt.title('Gradient Descent')
plt.grid(True)
plt.show()
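
# Optional sanity check (not part of the original script): a minimal sketch
# verifying that gradient descent settles near the constraint boundary. With
# the unconstrained minimum at x = -2 outside the feasible region x > 1, the
# final iterate should sit just above 1 (within roughly dx of the boundary).
print("final x_opt:", x_opt[N-1])        # expected: slightly above 1
print("gap to boundary x = 1:", x_opt[N-1] - 1.0)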
# Barrier Method (Newton's Method with an Inequality Constraint)
# Yong-Jun Shin (2019)
import numpy as np
import matplotlib.pyplot as plt
import math
def f(x):
    return x**2 + 4*x + 5 - b*math.log(x-1)  # -b*math.log(x-1) is the
    # barrier function for x > 1 (inequality constraint)
N = 10 # total number of iterations
iteration = np.arange(0, N, 1) # iteration vector [0,..., N-1]
x_opt = np.empty(N) # optimal x (x_opt) candidate (vector)
x_opt[0] = 5 # initial x_opt value = 5
dx = 0.001 # delta x
c = 1 # constant c
b = 0.0000001 # constant b
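# Newton's method rescales the gradient step by the local curvature:
# x[n+1] = x[n] - c * f'(x[n]) / f''(x[n]). For this nearly quadratic
# objective (f'' ~ 2 away from the barrier) a single full step with c = 1
# jumps toward the unconstrained minimum x = -2, so the projection fires on
# the very first iteration and the iterate pins to the boundary immediately.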
for n in range(0, N-1):  # n: iteration index
    slope = (f(x_opt[n]) - f(x_opt[n]-dx))/dx  # first derivative at x_opt[n] (backward difference)
    hess = (f(x_opt[n]+dx) - 2*f(x_opt[n]) + f(x_opt[n]-dx))/dx**2  # second derivative at x_opt[n] (central difference)
    x_opt[n+1] = x_opt[n] - (c/hess) * slope  # Newton's method (step size = c/hess)
    if (x_opt[n+1] - dx <= 1):  # if the new x_opt minus dx falls at or below the boundary x = 1
        x_opt[n+1] = dx + 1.0000001  # project it back just inside the feasible region x > 1
plt.plot(iteration, x_opt)
plt.xlabel('iteration (n)')
plt.ylabel('optimal x (x_opt)')
plt.title('Newton\'s Method')
plt.grid(True)
plt.show()
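
# Optional sanity check (not part of the original script): a minimal sketch
# confirming that Newton's method also ends at the boundary. Its first step
# targets the unconstrained minimum x = -2, so the projection activates
# immediately and the final iterate should be about dx above x = 1.
print("final x_opt:", x_opt[N-1])        # expected: roughly 1 + dx
print("gap to boundary x = 1:", x_opt[N-1] - 1.0)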