-
Notifications
You must be signed in to change notification settings - Fork 0
/
DropoutLayer.py
55 lines (48 loc) · 1.5 KB
/
DropoutLayer.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
# -*- coding: utf-8 -*-
"""
Implementation of Dropout layer
"""
import numpy as np
#from FCLayer import FC
class Dropout:
    """Inverted-dropout layer.

    Randomly zeroes each activation with probability ``1 - keep_prob``
    and scales the survivors by ``1 / keep_prob`` so the expected
    activation magnitude is unchanged ("inverted" dropout). Because of
    that rescaling, no compensation is needed at inference time — the
    layer is simply the identity when ``training=False``.
    """

    def __init__(self, keep_prob):
        """
        Parameters
        ----------
        keep_prob : float
            Probability in (0, 1] that a unit is kept.

        Raises
        ------
        ValueError
            If ``keep_prob`` is not in (0, 1]. A value of 0 would
            divide by zero in forward/backprop; values outside [0, 1]
            are not probabilities.
        """
        if not 0.0 < keep_prob <= 1.0:
            raise ValueError(
                "keep_prob must be in (0, 1], got %r" % (keep_prob,)
            )
        self.keep_prob = keep_prob
        # Mask from the most recent forward pass; reused by backprop so
        # gradients flow only through the units that were kept.
        self.dropMatrix = None

    def forward(self, x, training=True):
        """Apply dropout to ``x``.

        Parameters
        ----------
        x : np.ndarray
            Input activations (any shape).
        training : bool, optional
            When False the layer is the identity (inference mode).
            Defaults to True, preserving the original behavior.

        Returns
        -------
        np.ndarray
            Activations with dropped units zeroed and survivors scaled
            by ``1 / keep_prob``; same shape as ``x``.
        """
        if not training:
            # Identity at inference; still record a full mask so a
            # subsequent backprop call remains well-defined.
            self.dropMatrix = np.ones(np.shape(x), dtype=bool)
            return x
        # Boolean mask: each entry is True with probability keep_prob.
        self.dropMatrix = np.random.rand(*np.shape(x)) < self.keep_prob
        # Divide by keep_prob so E[output] == E[input].
        return np.multiply(x, self.dropMatrix) / self.keep_prob

    def backprop(self, dLdOut):
        """Propagate gradients through the mask used in ``forward``.

        Parameters
        ----------
        dLdOut : np.ndarray
            Gradient of the loss w.r.t. this layer's output.

        Returns
        -------
        np.ndarray
            Gradient w.r.t. the input: zero where units were dropped,
            scaled by ``1 / keep_prob`` elsewhere.

        Raises
        ------
        RuntimeError
            If called before any ``forward`` pass (no mask exists yet).
        """
        if self.dropMatrix is None:
            raise RuntimeError("backprop called before forward")
        return np.multiply(dLdOut, self.dropMatrix) / self.keep_prob
# =============================================================================
# def forward(self, x, keep_prob):
# dropMatrix = np.random.rand(*x.shape) < keep_prob
#
# return np.multiply(x, dropMatrix) / keep_prob
# =============================================================================
# =============================================================================
# fc = FC(25,3)
# drop = Dropout()
#
# nrIter = 100000
#
# meanDifference = np.zeros(nrIter)
#
# for i in range(nrIter):
#
# inp = np.random.randn(25)
# #print("Input:\n",inp)
# outDrop = drop.forward(inp, 0.2)
#
# #print("Output:\n", outDrop)
#
# out1 = fc.forward(inp)
# #print("Without dropout:\n", out1)
# mean1 = np.mean(out1)
# out2 = fc.forward(outDrop)
# #print("With dropout:\n", out2)
# mean2 = np.mean(out2)
#
# meanDifference[i] = mean1 - mean2
#
#
# print(np.mean(meanDifference))
# =============================================================================