if dropoutRate > 0.001 : # Below 0.001 I treat it as if there is no dropout at all (avoids floating-point problems with == 0.0, although my tests show the exact comparison also works fine).
    probabilityOfStayingActivated = (1 - dropoutRate)
    srng = T.shared_randomstreams.RandomStreams(rng.randint(999999))
    # Sample a binary mask over the training input: 1 = keep the unit, 0 = drop it.
    dropoutMask = srng.binomial(n=1, size=inputTrainShape, p=probabilityOfStayingActivated, dtype=theano.config.floatX)
    inputImgAfterDropout = inputTrain * dropoutMask
    # At validation / testing time, scale deterministically by the keep probability instead of sampling a mask.
    inputImgAfterDropoutInference = inputInference * probabilityOfStayingActivated
    inputImgAfterDropoutTesting = inputTesting * probabilityOfStayingActivated
else :
    inputImgAfterDropout = inputTrain
    inputImgAfterDropoutInference = inputInference
    inputImgAfterDropoutTesting = inputTesting
return (inputImgAfterDropout, inputImgAfterDropoutInference, inputImgAfterDropoutTesting)
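# Illustration (not part of the original code): the function above implements "standard" dropout:
# at training time each unit is kept with probability keep_prob = 1 - dropoutRate, and at
# validation / testing time the activations are multiplied by keep_prob so their expected
# value matches training. A minimal NumPy sketch of the same idea, with hypothetical names:
import numpy as np

demoRng = np.random.RandomState(0)
demoActs = demoRng.rand(4, 5).astype(np.float32)     # hypothetical activations
demoDropoutRate = 0.5
demoKeepProb = 1.0 - demoDropoutRate
demoMask = demoRng.binomial(n=1, p=demoKeepProb, size=demoActs.shape)
demoActsTrain = demoActs * demoMask                  # training: random binary mask
demoActsInference = demoActs * demoKeepProb          # inference: deterministic scaling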
def applyBn(rollingAverageForBatchNormalizationOverThatManyBatches, inputTrain, inputVal, inputTest, inputShapeTrain) :
    numberOfChannels = inputShapeTrain[1]
After Change
    # tf.nn.dropout(x, keep_prob) scales the kept values UP, so that at inference you don't need to scale them.
    inputImgAfterDropoutTrain = inputTrain * dropoutMask
    inputImgAfterDropoutVal = inputVal * keep_prob
    inputImgAfterDropoutTest = inputTest * keep_prob
else :
    inputImgAfterDropoutTrain = inputTrain
    inputImgAfterDropoutVal = inputVal
    inputImgAfterDropoutTest = inputTest
return (inputImgAfterDropoutTrain, inputImgAfterDropoutVal, inputImgAfterDropoutTest)
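# Illustration (not part of the original change): the comment above refers to "inverted" dropout,
# which is what tf.nn.dropout(x, keep_prob) does in TF 1.x: the kept values are scaled up by
# 1 / keep_prob during training, so validation and testing can use the input unchanged.
# A minimal NumPy sketch, with hypothetical names:
import numpy as np

invRng = np.random.RandomState(1)
invActs = invRng.rand(4, 5).astype(np.float32)
invKeepProb = 0.5
invMask = invRng.binomial(n=1, p=invKeepProb, size=invActs.shape) / invKeepProb
invActsTrain = invActs * invMask   # training: kept values scaled up by 1/keep_prob
invActsInference = invActs         # inference: no extra scaling needed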
def applyBn(rollingAverageForBatchNormalizationOverThatManyBatches, inputTrain, inputVal, inputTest, inputShapeTrain) :
    numOfChanns = inputShapeTrain[1]
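# Illustration (not part of the original change): applyBn is only shown truncated here. The
# parameter name suggests batch-normalization statistics averaged over a fixed number of
# recent batches; the sketch below uses the more common exponential moving average instead,
# purely to illustrate the idea. Channels are assumed on axis 1, as implied by
# inputShapeTrain[1] above. All names are hypothetical.
import numpy as np

def bnSketch(xTrain, rollingMean, rollingVar, momentum=0.9, eps=1e-5):
    # per-channel statistics over the batch and any spatial axes (0, 2, 3, ...)
    axes = (0,) + tuple(range(2, xTrain.ndim))
    batchMean = xTrain.mean(axis=axes, keepdims=True)
    batchVar = xTrain.var(axis=axes, keepdims=True)
    # rolling estimates, to be used for normalization at validation / test time
    newRollingMean = momentum * rollingMean + (1.0 - momentum) * batchMean
    newRollingVar = momentum * rollingVar + (1.0 - momentum) * batchVar
    xNormTrain = (xTrain - batchMean) / np.sqrt(batchVar + eps)
    return xNormTrain, newRollingMean, newRollingVar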