# mirror of https://github.com/autistic-symposium/tensorflow-for-deep-learning-py.git
# synced 2025-05-10 10:45:04 -04:00
"""
|
|
Inverted Dropout: Recommended implementation example.
|
|
We drop and scale at train time and don't do anything at test time.
|
|
"""
|
|
|
|
p = 0.5 # probability of keeping a unit active. higher = less dropout
|
|
|
|
def train_step(X):
    """One forward pass of a 3-layer network with inverted dropout.

    NOTE(review): assumes `numpy as np`, the weights/biases W1..W3, b1..b3,
    and the keep-probability `p` are defined at module level elsewhere
    (not shown in this snippet) — confirm before running.
    """
    # Forward pass for an example 3-layer neural network (ReLU hidden layers).
    H1 = np.maximum(0, np.dot(W1, X) + b1)
    # First dropout mask. Notice /p! — scaling at train time ("inverted"
    # dropout) means test-time activations need no rescaling.
    U1 = (np.random.rand(*H1.shape) < p) / p
    H1 *= U1  # drop!
    H2 = np.maximum(0, np.dot(W2, H1) + b2)
    U2 = (np.random.rand(*H2.shape) < p) / p  # second dropout mask. Notice /p!
    H2 *= U2  # drop!
    out = np.dot(W3, H2) + b3

    # backward pass: compute gradients... (not shown)
    # perform parameter update... (not shown)
|
def predict(X):
    """Test-time (ensembled) forward pass — no dropout, no scaling.

    Because inverted dropout scales by 1/p at train time, the test-time
    pass uses the weights unchanged.

    NOTE(review): assumes `numpy as np` and W1..W3, b1..b3 are defined at
    module level elsewhere (not shown in this snippet) — confirm.
    """
    H1 = np.maximum(0, np.dot(W1, X) + b1)  # no scaling necessary
    H2 = np.maximum(0, np.dot(W2, H1) + b2)
    out = np.dot(W3, H2) + b3