16
16
and outputs predictors that are considerably more sparse in the number of trees,
17
17
while retaining the exceptional performance of gradient boosting.
18
18
"""
19
+ import sys
20
+ import math
21
+ import operator
19
22
from operator import itemgetter
20
23
24
+ #sys.setrecursionlimit(100000)
25
+
21
26
22
27
class DecisionNode :
23
28
"""Decision node which tests at given threshold and returns subtree decision recursively."""
@@ -87,7 +92,13 @@ def predict(self, x):
87
92
Returns:
88
93
list of regression predictions for x.
89
94
"""
90
- result = list (map (self .root .decide , x ))
95
+ try :
96
+ if len (x [0 ]):
97
+ result = list (map (self .root .decide , x ))
98
+ else :
99
+ result = self .root .decide (x )
100
+ except TypeError :
101
+ result = self .root .decide (x )
91
102
return result
92
103
93
104
@@ -108,14 +119,56 @@ def decide(self, features):
108
119
return self .decision
109
120
110
121
122
def f_factory(epoch, shrinkage, learner, g):
    """Build the next boosted predictor: f_{epoch+1}(v) = g_epoch(v) + shrinkage * learner(v).

    The closure looks up ``g[epoch]`` at call time, so it stays valid as the
    ``g`` list grows during training.
    """
    def boosted(vec):
        return g[epoch](vec) + shrinkage * learner.predict(vec)
    return boosted
125
+
126
+
127
def g_factory(epoch, gamma_param, f):
    """Build the accelerated evaluation point: g(v) = (1-γ) f_{epoch+1}(v) + γ f_epoch(v).

    ``gamma_param`` is captured by value (passed as an argument), while the
    two predictors are fetched from ``f`` lazily at call time.
    """
    def accelerated(vec):
        newer = f[epoch + 1](vec)
        older = f[epoch](vec)
        return (1 - gamma_param) * newer + gamma_param * older
    return accelerated
130
+
131
+
111
132
class AGBRegressor:
    """Accelerated Gradient Boosting regressor.

    Grows an additive model with Nesterov-style acceleration: two predictor
    sequences are maintained — ``f`` (the boosted model) and ``g`` (the
    accelerated evaluation point) — adding one weak learner per epoch.
    """

    def __init__(self):
        # Unfitted model predicts 0 for any sample.  (Fix: the previous
        # default was a zero-argument lambda, so calling predict(vec)
        # before fit() raised TypeError; now the vector is accepted and
        # ignored, matching the fitted predictor's interface.)
        self.predict = lambda vec: 0

    def fit(self, x, y, shrinkage=0.9, epochs=20):
        """Fit additive model to given data.

        Args:
            x (list of lists): data.
            y (list): target values.
            shrinkage (double): learning bound.
            epochs (int): learning iterations.
        """
        lambda_param = [0]
        gamma_param = 0
        # Both sequences start from the constant mean predictor.
        start_function = sum(y) / len(y)
        g = [lambda vec: start_function]
        f = [lambda vec: start_function]
        tested_feature = 0
        for epoch in range(epochs):
            print("Training on epoch {0}".format(epoch))
            # Pseudo-residuals of the current accelerated model g[-1].
            z = list(map(lambda a: operator.sub(*a), zip(y, map(g[-1], x))))
            learner = WeakRegressionTree()
            learner.fit(x, z, tested_feature)
            # f_{k+1} = g_k + shrinkage * learner;
            # g_{k+1} = (1-γ) f_{k+1} + γ f_k with the momentum weight below.
            f.append(f_factory(epoch, shrinkage, learner, g))
            g.append(g_factory(epoch, gamma_param, f))
            # Nesterov momentum schedule.  gamma_param is deliberately
            # updated AFTER g_factory captured the previous value — the
            # statement order matters.
            lambda_param.append((1 + math.sqrt(1 + 4 * (lambda_param[-1] ** 2))) / 2)
            gamma_param = (1 - lambda_param[-2]) / lambda_param[-1]
            # Round-robin over features for the single-feature weak learner.
            tested_feature += 1
            if tested_feature >= len(x[0]):
                tested_feature = 0
        # NOTE(review): f and g are mutually recursive closures, so one call
        # to f[-1] re-walks the whole chain — evaluation cost grows
        # exponentially with epochs.  Consider memoizing per-sample values
        # (the commented-out sys.setrecursionlimit hints at this problem).
        self.predict = f[-1]
114
167
115
168
116
169
if __name__ == '__main__':
    # Tiny smoke run: four 3-feature samples with linearly increasing targets.
    x = [[0, 0, 0], [1, 1, 1], [2, 2, 2], [3, 3, 3]]
    y = [1, 2, 3, 4]
    model = AGBRegressor()
    model.fit(x, y, epochs=30)
    predictions = [model.predict(sample) for sample in x]
    print(predictions, y)
0 commit comments