Mish gradient check (#57)
Signed-off-by: AlexDBlack <blacka101@gmail.com>
parent db7ca956c5
commit 8f96f71f2b
@@ -142,7 +142,7 @@ public class GradientCheckTests extends BaseDL4JTest {
         // (a) activation function
         // (b) Whether to test at random initialization, or after some learning (i.e., 'characteristic mode of operation')
         // (c) Loss function (with specified output activations)
-        Activation[] activFns = {Activation.SIGMOID, Activation.TANH};
+        Activation[] activFns = {Activation.SIGMOID, Activation.TANH, Activation.MISH};
         boolean[] characteristic = {false, true}; //If true: run some backprop steps first
 
         LossFunction[] lossFunctions = {LossFunction.MCXENT, LossFunction.MSE};
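For reference, below is a minimal sketch of how a gradient check over Activation.MISH can be wired up with GradientCheckUtil, loosely mirroring the pattern used in GradientCheckTests. The class name MishGradientCheckSketch, the layer sizes, seed, batch size, and tolerance values are illustrative assumptions, and the checkGradients overload shown is the long-standing static one; newer DL4J releases also provide a configuration-object variant.

import org.deeplearning4j.gradientcheck.GradientCheckUtil;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.learning.config.NoOp;
import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction;

public class MishGradientCheckSketch {
    public static void main(String[] args) {
        // Gradient checks compare analytic gradients against central finite differences,
        // so double precision is required to keep numerical noise small.
        Nd4j.setDefaultDataTypes(DataType.DOUBLE, DataType.DOUBLE);

        // Small network with a Mish hidden layer; a NoOp updater keeps parameters
        // fixed so the analytic and numerical gradients see the same weights.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(12345)
                .updater(new NoOp())
                .list()
                .layer(0, new DenseLayer.Builder().nIn(4).nOut(3)
                        .activation(Activation.MISH).build())
                .layer(1, new OutputLayer.Builder(LossFunction.MCXENT).nIn(3).nOut(3)
                        .activation(Activation.SOFTMAX).build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        // Tiny random batch with one-hot labels (shapes are arbitrary for this sketch).
        INDArray input = Nd4j.rand(10, 4);
        INDArray labels = Nd4j.zeros(10, 3);
        for (int i = 0; i < 10; i++) {
            labels.putScalar(new int[]{i, i % 3}, 1.0);
        }

        // Arguments: epsilon, max relative error, min absolute error,
        // print results, exit on first failure, input, labels.
        boolean gradOK = GradientCheckUtil.checkGradients(net, 1e-6, 1e-3, 1e-8,
                true, false, input, labels);
        System.out.println("Gradient check passed: " + gradOK);
    }
}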