#!/usr/bin/python

import keras
-
- from keras.models import Model
- from keras.layers import Input, Dense
- from keras.layers import Conv2D
- from keras.layers import BatchNormalization
from keras.layers import Activation
+ from keras.layers import BatchNormalization
+ from keras.layers import Conv2D
+ from keras.layers import Input, Dense
+ from keras.models import Model

input_data = Input(shape=(19, 19, 17))

+
def conv_block(x):
    y = Conv2D(256, (3, 3), padding='same')(x)
    y = BatchNormalization()(y)
    y = Activation('relu')(y)
    return y

+
def residual_block(x):
    y = Conv2D(256, (3, 3), padding='same')(x)
    y = BatchNormalization()(y)
@@ -26,13 +27,15 @@ def residual_block(x):
    y = Activation('relu')(y)
    return y

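The unchanged middle of residual_block is not shown in this diff. For reference, here is a minimal sketch of a residual block as described in the AlphaGo Zero paper (a second conv + batch norm stage, then a skip connection added back before the final ReLU); the name residual_block_sketch and the keras.layers.add import are illustrative, not part of this commit:

from keras.layers import add  # merge layer for the skip connection; not imported in this file

def residual_block_sketch(x):
    # first 3x3 conv + batch norm + ReLU stage
    y = Conv2D(256, (3, 3), padding='same')(x)
    y = BatchNormalization()(y)
    y = Activation('relu')(y)
    # second 3x3 conv + batch norm stage
    y = Conv2D(256, (3, 3), padding='same')(y)
    y = BatchNormalization()(y)
    # skip connection: add the block input back in, then apply the final ReLU
    y = add([x, y])
    y = Activation('relu')(y)
    return y
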
+
def policy_head(x):
    y = Conv2D(2, (1, 1), padding='same')(x)
    y = BatchNormalization()(y)
    y = Activation('relu')(y)
-     y = Dense(19 ** 2 + 1, activation='sigmoid')(y)
+     y = Dense(19 ** 2 + 1, activation='sigmoid')(y)
    return y

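A note on policy_head as written: with recent Keras versions, Dense applied to the (19, 19, 2) conv output acts along the last axis and yields a (19, 19, 362) tensor rather than a single 362-way output. In the paper, the policy head flattens the two feature planes and produces one distribution over the 19 * 19 board moves plus pass. A possible variant along those lines (the Flatten layer and the softmax are my assumption, not part of this commit):

from keras.layers import Flatten  # not imported in this file

def policy_head_sketch(x):
    y = Conv2D(2, (1, 1), padding='same')(x)
    y = BatchNormalization()(y)
    y = Activation('relu')(y)
    # collapse the 19x19x2 feature planes into one vector before the fully connected layer
    y = Flatten()(y)
    # one output per board point plus the pass move
    y = Dense(19 ** 2 + 1, activation='softmax')(y)
    return y
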
+
def value_head(x):
    y = Conv2D(1, (1, 1), padding='same')(x)
    y = BatchNormalization()(y)
@@ -43,13 +46,14 @@ def value_head(x):
    y = Activation('tanh')(y)
    return y

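The middle of value_head is likewise not shown here. In the paper, the value head flattens the single feature plane, applies a 256-unit ReLU fully connected layer and a single-unit layer, and only then the tanh seen above. A sketch of that structure, reusing Flatten from the previous sketch (an illustration, not the hidden lines of this commit):

def value_head_sketch(x):
    y = Conv2D(1, (1, 1), padding='same')(x)
    y = BatchNormalization()(y)
    y = Activation('relu')(y)
    y = Flatten()(y)                      # 19x19x1 -> 361 features
    y = Dense(256, activation='relu')(y)  # hidden fully connected layer
    y = Dense(1)(y)                       # scalar position evaluation
    y = Activation('tanh')(y)             # value in [-1, 1]
    return y
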
+
# in the paper there were either 39 or 19 residual blocks
def alphago_zero_nn(residual_blocks=39):
    x = conv_block(input_data)

    for i in range(residual_blocks):
        x = residual_block(x)
-
+
    policy_out = policy_head(x)
    value_out = value_head(x)

@@ -60,6 +64,3 @@ def alphago_zero_nn(residual_blocks=39):

model_alphago_zero = alphago_zero_nn()
print(model_alphago_zero.summary())
-
-
-
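
The last hunk hides the end of alphago_zero_nn, presumably where the two heads are wrapped into a Model (the summary() call above implies one is returned). Below is a rough sketch of how such a two-headed network is typically assembled and compiled in Keras, with a loss in the spirit of the paper (cross-entropy on the policy head, mean squared error on the value head; the paper's L2 weight regularization is omitted here); the function name, optimizer settings, and loss choices are assumptions, not the elided lines of this commit:

def alphago_zero_nn_sketch(residual_blocks=19):
    x = conv_block(input_data)
    for i in range(residual_blocks):
        x = residual_block(x)
    policy_out = policy_head(x)
    value_out = value_head(x)
    # two-headed model: one board input, policy and value outputs
    return Model(inputs=input_data, outputs=[policy_out, value_out])

sketch_model = alphago_zero_nn_sketch()
sketch_model.compile(optimizer=keras.optimizers.SGD(lr=0.01, momentum=0.9),
                     loss=['categorical_crossentropy', 'mean_squared_error'])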