
Commit fa67aec

Accelerated Gradient Descent
1 parent 6b61402 commit fa67aec

File tree

1 file changed: +58 −5 lines

SOTAAlgorithmsImplementation/AcceleratedGradientBoosting.py

Lines changed: 58 additions & 5 deletions
@@ -16,8 +16,13 @@
 and outputs predictors that are considerably more sparse in the number of trees,
 while retaining the exceptional performance of gradient boosting.
 """
+import sys
+import math
+import operator
 from operator import itemgetter
 
+#sys.setrecursionlimit(100000)
+
 
 class DecisionNode:
     """Decision node which tests at given threshold and returns subtree decision recursively."""
@@ -87,7 +92,13 @@ def predict(self, x):
         Returns:
             list of regression predictions for x.
         """
-        result = list(map(self.root.decide, x))
+        try:
+            if len(x[0]):
+                result = list(map(self.root.decide, x))
+            else:
+                result = self.root.decide(x)
+        except TypeError:
+            result = self.root.decide(x)
         return result
 
 
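Aside on the predict change above: the try/except duck-types the input. len(x[0]) succeeds when x[0] is itself a sequence, so x is treated as a batch of feature vectors; for a single flat vector, len() of a scalar raises TypeError and the tree decides on x directly. A minimal standalone sketch of the same dispatch (not part of the commit; the helper name predict_any is illustrative, with sum standing in for self.root.decide):

    def predict_any(decide, x):
        # Batch case: x[0] is itself a sequence of features.
        try:
            if len(x[0]):
                return [decide(row) for row in x]
            return decide(x)
        except TypeError:  # len() of a scalar: x is one flat vector
            return decide(x)

    print(predict_any(sum, [[0, 0, 0], [1, 1, 1]]))  # [0, 3]
    print(predict_any(sum, [1, 2, 3]))               # 6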

@@ -108,14 +119,56 @@ def decide(self, features):
         return self.decision
 
 
+def f_factory(epoch, shrinkage, learner, g):
+    """Generate f inplace function."""
+    return lambda vec: g[epoch](vec) + shrinkage * learner.predict(vec)
+
+
+def g_factory(epoch, gamma_param, f):
+    """Generate g inplace function."""
+    return lambda vec: (1 - gamma_param) * f[epoch + 1](vec) + gamma_param * f[epoch](vec)
+
+
 class AGBRegressor:
     """Accelerated Gradient Boosting regressor."""
-    ...
+    def __init__(self):
+        self.predict = lambda: 0
+
+    def fit(self, x, y, shrinkage=0.9, epochs=20):
+        """Fit additive model to given data.
+
+        Args:
+            x (list of lists): data.
+            y (list): target values.
+            shrinkage (double): learning bound.
+            epochs (int): learning iterations.
+        """
+        lambda_param = [0]
+        gamma_param = 0
+        start_function = sum(y) / len(y)
+        g = [lambda vec: start_function]
+        f = [lambda vec: start_function]
+        tested_feature = 0
+        for epoch in range(epochs):
+            print("Training on epoch {0}".format(epoch))
+            print(epoch, f, g)
+            z = list(map(lambda a: operator.sub(*a), zip(y, map(g[-1], x))))
+            learner = WeakRegressionTree()
+            learner.fit(x, z, tested_feature)
+            f.append(f_factory(epoch, shrinkage, learner, g))
+            print(f, epoch, epoch + 1)
+            g.append(g_factory(epoch, gamma_param, f))
+            lambda_param.append((1 + math.sqrt(1 + 4 * (lambda_param[-1] ** 2))) / 2)
+            gamma_param = (1 - lambda_param[-2]) / lambda_param[-1]
+            tested_feature += 1
+            if tested_feature >= len(x[0]):
+                tested_feature = 0
+        self.predict = f[-1]
 
 
 if __name__ == '__main__':
     x = [[0, 0, 0], [1, 1, 1], [2, 2, 2], [3, 3, 3]]
     y = [1, 2, 3, 4]
-    rt = WeakRegressionTree()
-    rt.fit(x, y, 0)
-    print(rt.predict(x))
+    agb = AGBRegressor()
+    agb.fit(x, y, epochs=30)
+    print(list(map(agb.predict, x)), y)
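Aside on f_factory and g_factory: Python closures bind loop variables late, so lambdas built inline inside the fit loop would all read the final epoch and learner once the loop ends; routing construction through a factory call freezes the current values as arguments. A minimal standalone sketch of that pitfall (not part of the commit):

    # Late binding: every closure reads i only when called, after the loop ended.
    adders = [lambda x: x + i for i in range(3)]
    print([f(10) for f in adders])  # [12, 12, 12]

    # A factory captures the value of i at call time.
    def make_adder(i):
        return lambda x: x + i

    adders = [make_adder(i) for i in range(3)]
    print([f(10) for f in adders])  # [10, 11, 12]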

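Aside on the lambda_param / gamma_param updates in fit: this is the momentum sequence of Nesterov-style acceleration as used by Accelerated Gradient Boosting, lambda_0 = 0, lambda_{t+1} = (1 + sqrt(1 + 4 * lambda_t**2)) / 2, gamma_t = (1 - lambda_t) / lambda_{t+1}, with g blending the two most recent f models; note that fit initialises gamma_param to 0 and applies each newly computed gamma one epoch later. A standalone check of the first few values (not part of the commit):

    import math

    lam = [0]
    gam = []
    for t in range(5):
        lam.append((1 + math.sqrt(1 + 4 * lam[-1] ** 2)) / 2)
        gam.append((1 - lam[-2]) / lam[-1])
    print([round(v, 3) for v in lam])  # [0, 1.0, 1.618, 2.194, 2.75, 3.295]
    print([round(v, 3) for v in gam])  # [1.0, 0.0, -0.282, -0.434, -0.531]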