Using @SuperBuilder for LayerConfigurations

Signed-off-by: brian <brian@brutex.de>
Branch: master
parent 4482113f23
commit f6100c362d
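The hunks below all apply one mechanical pattern: call sites stop instantiating the hand-written inner Builder classes (new OutputLayer.Builder(...), new DenseLayer.Builder(), ...) and instead use the static builder() factories generated by Lombok's @SuperBuilder on the layer-configuration hierarchy; where a loss function used to be a Builder constructor argument, it is now usually set through an explicit .lossFunction(...) call. The sketch below is illustrative only and is not part of the commit: LayerConfig and OutputLayerConfig are hypothetical stand-ins that show how an @SuperBuilder hierarchy produces the builder() call shape used throughout the diff.

    import lombok.Getter;
    import lombok.experimental.SuperBuilder;

    // Hypothetical stand-ins for the real layer-configuration classes;
    // only the builder mechanics are the point here.
    @Getter
    @SuperBuilder
    class LayerConfig {
        private int nIn;   // fields inherited by every subclass builder
        private int nOut;
    }

    @Getter
    @SuperBuilder
    class OutputLayerConfig extends LayerConfig {
        private String lossFunction;  // was a Builder constructor argument before
    }

    public class SuperBuilderMigrationSketch {
        public static void main(String[] args) {
            // Old call shape (hand-written inner builder):
            //   new OutputLayer.Builder(LossFunction.MCXENT).nIn(10).nOut(3).build()
            // New call shape (static builder() generated by @SuperBuilder),
            // as applied throughout the hunks below:
            OutputLayerConfig out = OutputLayerConfig.builder()
                    .lossFunction("MCXENT")   // explicit setter instead of constructor arg
                    .nIn(10)                  // parent fields are available on the child builder
                    .nOut(3)
                    .build();
            System.out.println(out.getNIn() + " -> " + out.getNOut() + " / " + out.getLossFunction());
        }
    }

The single-argument forms kept in the diff (e.g. OutputLayer.builder(LossFunction.MSE), ConvolutionLayer.builder(4, 4)) would correspond to additional hand-written static overloads alongside the generated builder(); that detail is inferred from the call sites, not shown here.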
@@ -207,7 +207,7 @@ public class TupleStreamDataSetIteratorTest extends SolrCloudTestCase {
     final MultiLayerNetwork model = new MultiLayerNetwork(
         NeuralNetConfiguration.builder()
             .list(
-                new OutputLayer.Builder(LossFunction.MSE)
+                OutputLayer.builder(LossFunction.MSE)
                     .nIn(3)
                     .nOut(1)
                     .weightInit(WeightInit.ONES)

@@ -155,7 +155,7 @@ public class ModelTupleStreamIntegrationTest extends SolrCloudTestCase {

     final NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
         .list(
-            new OutputLayer.Builder()
+            OutputLayer.builder()
                 .nIn(numInputs)
                 .nOut(numOutputs)
                 .activation(Activation.IDENTITY)

@@ -244,7 +244,7 @@ public class ModelTupleStreamTest {

     final NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
         .list(
-            new OutputLayer.Builder()
+            OutputLayer.builder()
                 .nIn(numInputs)
                 .nOut(numOutputs)
                 .activation(Activation.IDENTITY)

@@ -278,7 +278,7 @@ public class ModelTupleStreamTest {
         .graphBuilder()
         .addInputs("inputLayer")
         .addLayer("outputLayer",
-            new OutputLayer.Builder()
+            OutputLayer.builder()
                 .nIn(numInputs)
                 .nOut(numOutputs)
                 .activation(Activation.IDENTITY)

@@ -194,7 +194,7 @@ public class ScoringModelTest {

     final NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
         .list(
-            new OutputLayer.Builder().nIn(numFeatures).nOut(1).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).build()
+            OutputLayer.builder().nIn(numFeatures).nOut(1).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).build()
         )
         .build();

@@ -221,7 +221,7 @@ public class ScoringModelTest {
         .graphBuilder()
         .addInputs("inputLayer")
         .addLayer("outputLayer",
-            new OutputLayer.Builder().nIn(numFeatures).nOut(1).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).build(),
+            OutputLayer.builder().nIn(numFeatures).nOut(1).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).build(),
             "inputLayer")
         .setOutputs("outputLayer")
         .build();

@@ -75,8 +75,8 @@ public class JsonModelServerTest extends BaseDL4JTest {
             .updater(new Adam(0.119f))
             .weightInit(WeightInit.XAVIER)
             .list()
-            .layer(0, new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(10).build())
-            .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.SQUARED_LOSS).activation(Activation.SIGMOID).nIn(10).nOut(1).build())
+            .layer(0, DenseLayer.builder().activation(Activation.TANH).nIn(4).nOut(10).build())
+            .layer(1, OutputLayer.builder(LossFunctions.LossFunction.SQUARED_LOSS).activation(Activation.SIGMOID).nIn(10).nOut(1).build())
             .build();

     model = new MultiLayerNetwork(conf);

@@ -543,8 +543,8 @@ public class JsonModelServerTest extends BaseDL4JTest {

     NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
             .list()
-            .layer(new DenseLayer.Builder().nIn(784).nOut(10).build())
-            .layer(new LossLayer.Builder().activation(Activation.SOFTMAX).build())
+            .layer(DenseLayer.builder().nIn(784).nOut(10).build())
+            .layer(LossLayer.builder().lossFunction().activation(Activation.SOFTMAX).build())
             .build();

     MultiLayerNetwork net = new MultiLayerNetwork(conf);

@@ -600,10 +600,10 @@ public class JsonModelServerTest extends BaseDL4JTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
             .graphBuilder()
             .addInputs("input1", "input2")
-            .addLayer("L1", new DenseLayer.Builder().nIn(3).nOut(4).build(), "input1")
-            .addLayer("L2", new DenseLayer.Builder().nIn(3).nOut(4).build(), "input2")
+            .addLayer("L1", DenseLayer.builder().nIn(3).nOut(4).build(), "input1")
+            .addLayer("L2", DenseLayer.builder().nIn(3).nOut(4).build(), "input2")
             .addVertex("merge", new MergeVertex(), "L1", "L2")
-            .addLayer("out", new OutputLayer.Builder().nIn(4+4).nOut(3).build(), "merge")
+            .addLayer("out", OutputLayer.builder().nIn(4+4).nOut(3).build(), "merge")
             .setOutputs("out")
             .build();

@@ -656,11 +656,11 @@ public class JsonModelServerTest extends BaseDL4JTest {
             .updater(new Sgd(0.01))
             .graphBuilder()
             .addInputs("input")
-            .addLayer("L1", new DenseLayer.Builder().nIn(8).nOut(4).build(), "input")
-            .addLayer("out1", new OutputLayer.Builder()
+            .addLayer("L1", DenseLayer.builder().nIn(8).nOut(4).build(), "input")
+            .addLayer("out1", OutputLayer.builder()
                     .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                     .nIn(4).nOut(3).build(), "L1")
-            .addLayer("out2", new OutputLayer.Builder()
+            .addLayer("out2", OutputLayer.builder()
                     .lossFunction(LossFunctions.LossFunction.MSE)
                     .nIn(4).nOut(2).build(), "L1")
             .setOutputs("out1","out2")

@@ -129,9 +129,9 @@ public abstract class BaseSparkTest extends BaseDL4JTest implements Serializable
     protected NeuralNetConfiguration getBasicConf() {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
                 .updater(new Nesterovs(0.1, 0.9)).list()
-                .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(nIn).nOut(3)
+                .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(nIn).nOut(3)
                         .activation(Activation.TANH).build())
-                .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+                .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                         LossFunctions.LossFunction.MCXENT).nIn(3).nOut(nOut)
                         .activation(Activation.SOFTMAX).build())
                 .build();

@@ -137,7 +137,7 @@ public class GradientSharingTrainingTest extends BaseSparkTest {
             .updater(new AMSGrad(0.1))
             .graphBuilder()
             .addInputs("in")
-            .layer("out", new OutputLayer.Builder().nIn(784).nOut(10).activation(Activation.SOFTMAX)
+            .layer("out", OutputLayer.builder().nIn(784).nOut(10).activation(Activation.SOFTMAX)
                     .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
             .setOutputs("out")
             .build();

@@ -272,15 +272,15 @@ public class GradientSharingTrainingTest extends BaseSparkTest {
                 .weightInit(WeightInit.XAVIER)
                 .seed(12345)
                 .list()
-                .layer(new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
+                .layer(OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
                 .build();
     } else {
         conf = NeuralNetConfiguration.builder()
                 .weightInit(WeightInit.XAVIER)
                 .seed(12345)
                 .list()
-                .layer(new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build())
-                .layer(new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
+                .layer(DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build())
+                .layer(OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
                 .build();
     }
     MultiLayerNetwork net = new MultiLayerNetwork(conf);

@@ -358,7 +358,7 @@ public class GradientSharingTrainingTest extends BaseSparkTest {
             .updater(new AMSGrad(0.001))
             .graphBuilder()
             .addInputs("in")
-            .layer("out", new OutputLayer.Builder().nIn(784).nOut(10).activation(Activation.SOFTMAX)
+            .layer("out", OutputLayer.builder().nIn(784).nOut(10).activation(Activation.SOFTMAX)
                     .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
             .setOutputs("out")
             .build();

@@ -132,9 +132,9 @@ public abstract class BaseSparkTest extends BaseDL4JTest implements Serializable
     protected NeuralNetConfiguration getBasicConf() {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
                 .updater(new Nesterovs(0.1, 0.9)).list()
-                .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(nIn).nOut(3)
+                .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(nIn).nOut(3)
                         .activation(Activation.TANH).build())
-                .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+                .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                         LossFunctions.LossFunction.MCXENT).nIn(3).nOut(nOut)
                         .activation(Activation.SOFTMAX).build())
                 .build();

@@ -71,7 +71,7 @@ public class TestEarlyStoppingSpark extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .updater(new Sgd()).weightInit(WeightInit.XAVIER).list()
-            .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+            .layer(0, OutputLayer.builder().nIn(4).nOut(3)
                     .lossFunction(LossFunctions.LossFunction.MCXENT).build())
             .build();
     MultiLayerNetwork net = new MultiLayerNetwork(conf);

@@ -127,7 +127,7 @@ public class TestEarlyStoppingSpark extends BaseSparkTest {
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .updater(new Sgd(10.0)) //Intentionally huge LR
             .weightInit(WeightInit.XAVIER).list()
-            .layer(0, new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
+            .layer(0, OutputLayer.builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
                     .lossFunction(LossFunctions.LossFunction.MSE).build())
             .build();
     MultiLayerNetwork net = new MultiLayerNetwork(conf);

@@ -166,7 +166,7 @@ public class TestEarlyStoppingSpark extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .updater(new Sgd(1e-6)).weightInit(WeightInit.XAVIER).list()
-            .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+            .layer(0, OutputLayer.builder().nIn(4).nOut(3)
                     .lossFunction(LossFunctions.LossFunction.MCXENT).build())
             .build();
     MultiLayerNetwork net = new MultiLayerNetwork(conf);

@@ -212,7 +212,7 @@ public class TestEarlyStoppingSpark extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .updater(new Sgd(0.0)).weightInit(WeightInit.XAVIER).list()
-            .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+            .layer(0, OutputLayer.builder().nIn(4).nOut(3)
                     .lossFunction(LossFunctions.LossFunction.MCXENT).build())
             .build();
     MultiLayerNetwork net = new MultiLayerNetwork(conf);

@@ -249,7 +249,7 @@ public class TestEarlyStoppingSpark extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .updater(new Sgd()).weightInit(WeightInit.XAVIER).list()
-            .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+            .layer(0, OutputLayer.builder().nIn(4).nOut(3)
                     .lossFunction(LossFunctions.LossFunction.MCXENT).build())
             .build();
     MultiLayerNetwork net = new MultiLayerNetwork(conf);

@@ -74,7 +74,7 @@ public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .updater(new Sgd()).weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-            .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+            .addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
                     .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
             .setOutputs("0").build();
     ComputationGraph net = new ComputationGraph(conf);

@@ -128,7 +128,7 @@ public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest {
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .updater(new Sgd(2.0)) //Intentionally huge LR
             .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-            .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
+            .addLayer("0", OutputLayer.builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
                     .lossFunction(LossFunctions.LossFunction.MSE).build(), "in")
             .setOutputs("0").build();
     ComputationGraph net = new ComputationGraph(conf);

@@ -169,7 +169,7 @@ public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest {
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .updater(new Sgd(1e-6)).weightInit(WeightInit.XAVIER).graphBuilder()
             .addInputs("in")
-            .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+            .addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
                     .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
             .setOutputs("0").build();
     ComputationGraph net = new ComputationGraph(conf);

@@ -217,7 +217,7 @@ public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest {
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .updater(new Sgd(0.0)).weightInit(WeightInit.XAVIER).graphBuilder()
             .addInputs("in")
-            .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+            .addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
                     .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
             .setOutputs("0").build();
     ComputationGraph net = new ComputationGraph(conf);

@@ -256,7 +256,7 @@ public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .updater(new Sgd()).weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-            .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+            .addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
                     .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
             .setOutputs("0").build();
     ComputationGraph net = new ComputationGraph(conf);

@@ -69,7 +69,7 @@ public class TestKryo extends BaseSparkKryoTest {
     m.put(0, 0.5);
     m.put(10, 0.1);
     NeuralNetConfiguration mlc = NeuralNetConfiguration.builder()
-            .updater(new Nadam(new MapSchedule(ScheduleType.ITERATION,m))).list().layer(0, new OutputLayer.Builder().nIn(10).nOut(10).build())
+            .updater(new Nadam(new MapSchedule(ScheduleType.ITERATION,m))).list().layer(0, OutputLayer.builder().nIn(10).nOut(10).build())
             .build();

     testSerialization(mlc, si);

@@ -79,23 +79,23 @@ public class TestKryo extends BaseSparkKryoTest {
             .dist(new UniformDistribution(-1, 1))
             .updater(new Adam(new MapSchedule(ScheduleType.ITERATION,m)))
             .graphBuilder()
-            .addInputs("in").addLayer("out", new OutputLayer.Builder().nIn(10).nOut(10).build(), "in")
+            .addInputs("in").addLayer("out", OutputLayer.builder().nIn(10).nOut(10).build(), "in")
             .setOutputs("out").build();

     testSerialization(cgc, si);


     //Check main layers:
-    Layer[] layers = new Layer[] {new OutputLayer.Builder().nIn(10).nOut(10).build(),
-                    new RnnOutputLayer.Builder().nIn(10).nOut(10).build(), new LossLayer.Builder().build(),
-                    new CenterLossOutputLayer.Builder().nIn(10).nOut(10).build(),
-                    new DenseLayer.Builder().nIn(10).nOut(10).build(),
-                    new ConvolutionLayer.Builder().nIn(10).nOut(10).build(), new SubsamplingLayer.Builder().build(),
+    Layer[] layers = new Layer[] {OutputLayer.builder().nIn(10).nOut(10).build(),
+                    RnnOutputLayer.builder().nIn(10).nOut(10).build(), LossLayer.builder().lossFunction().build(),
+                    CenterLossOutputLayer.builder().nIn(10).nOut(10).build(),
+                    DenseLayer.builder().nIn(10).nOut(10).build(),
+                    ConvolutionLayer.builder().nIn(10).nOut(10).build(), SubsamplingLayer.builder().build(),
                     new Convolution1DLayer.Builder(2, 2).nIn(10).nOut(10).build(),
-                    new ActivationLayer.Builder().activation(Activation.TANH).build(),
-                    new GlobalPoolingLayer.Builder().build(), new GravesLSTM.Builder().nIn(10).nOut(10).build(),
-                    new LSTM.Builder().nIn(10).nOut(10).build(), new DropoutLayer.Builder(0.5).build(),
-                    new BatchNormalization.Builder().build(), new LocalResponseNormalization.Builder().build()};
+                    ActivationLayer.builder().activation(Activation.TANH).build(),
+                    GlobalPoolingLayer.builder().build(), GravesLSTM.builder().nIn(10).nOut(10).build(),
+                    LSTM.builder().nIn(10).nOut(10).build(), DropoutLayer.builder(0.5).build(),
+                    BatchNormalization.builder().build(), LocalResponseNormalization.builder().build()};

     for (Layer l : layers) {
         testSerialization(l, si);

@@ -86,9 +86,9 @@ public class TestPreProcessedData extends BaseSparkTest {

     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(Updater.RMSPROP)
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-            .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(3)
+            .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(4).nOut(3)
                     .activation(Activation.TANH).build())
-            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3).activation(Activation.SOFTMAX)
                     .build())
             .build();

@@ -137,9 +137,9 @@ public class TestPreProcessedData extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().updater(Updater.RMSPROP)
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .graphBuilder().addInputs("in")
-            .addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(3)
+            .addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(4).nOut(3)
                     .activation(Activation.TANH).build(), "in")
-            .addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3).activation(Activation.SOFTMAX)
                     .build(),
                     "0")

@@ -191,9 +191,9 @@ public class TestPreProcessedData extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().updater(Updater.RMSPROP)
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .graphBuilder().addInputs("in")
-            .addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(3)
+            .addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(4).nOut(3)
                     .activation(Activation.TANH).build(), "in")
-            .addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3).activation(Activation.SOFTMAX)
                     .build(),
                     "0")

@@ -41,7 +41,7 @@ public class TestKryoWarning {
     try {

         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
-                .layer(0, new OutputLayer.Builder().nIn(10).nOut(10).build())
+                .layer(0, OutputLayer.builder().nIn(10).nOut(10).build())
                 .build();

         TrainingMaster tm = new ParameterAveragingTrainingMaster.Builder(1).build();

@@ -58,7 +58,7 @@ public class TestKryoWarning {
     try {

         ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
-                .addLayer("0", new OutputLayer.Builder().nIn(10).nOut(10).build(), "in").setOutputs("0")
+                .addLayer("0", OutputLayer.builder().nIn(10).nOut(10).build(), "in").setOutputs("0")
                 .build();

         TrainingMaster tm = new ParameterAveragingTrainingMaster.Builder(1).build();

@@ -53,9 +53,9 @@ public class TestCustomLayer extends BaseSparkTest {
     //Custom layers are tested more extensively in dl4j core
     NeuralNetConfiguration conf =
             NeuralNetConfiguration.builder().updater(new Sgd(0.1)).list()
-                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
+                    .layer(0, DenseLayer.builder().nIn(10).nOut(10).build())
                     .layer(1, new CustomLayer(3.14159)).layer(2,
-                            new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
+                            OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                                     .nIn(10).nOut(10).build())
                     .build();

@@ -79,8 +79,8 @@ public class TestSparkComputationGraph extends BaseSparkTest {

     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER)
             .graphBuilder().addInputs("in")
-            .addLayer("l0", new DenseLayer.Builder().nIn(4).nOut(10).build(), "in")
-            .addLayer("l1", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
+            .addLayer("l0", DenseLayer.builder().nIn(4).nOut(10).build(), "in")
+            .addLayer("l1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                     .activation(Activation.SOFTMAX).nIn(10).nOut(2).build(), "l0")
             .setOutputs("l1").build();

@@ -107,8 +107,8 @@ public class TestSparkComputationGraph extends BaseSparkTest {
     ComputationGraphConfiguration config = NeuralNetConfiguration.builder()
             .updater(new Sgd(0.1))
             .graphBuilder().addInputs("in")
-            .addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(2).build(), "in").addLayer("out",
-                    new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3)
+            .addLayer("dense", DenseLayer.builder().nIn(4).nOut(2).build(), "in").addLayer("out",
+                    OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3)
                             .build(),
                     "dense")
             .setOutputs("out").build();

@@ -141,9 +141,9 @@ public class TestSparkComputationGraph extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().l1(0.1).l2(0.1)
             .seed(123).updater(new Nesterovs(0.1, 0.9)).graphBuilder()
             .addInputs("in")
-            .addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(nIn).nOut(3)
+            .addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(nIn).nOut(3)
                     .activation(Activation.TANH).build(), "in")
-            .addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).nIn(3).nOut(nOut)
                     .activation(Activation.SOFTMAX).build(),
                     "0")

@@ -220,9 +220,9 @@ public class TestSparkComputationGraph extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345).updater(Updater.RMSPROP)
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-            .addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(4)
+            .addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(4).nOut(4)
                     .activation(Activation.TANH).build(), "in")
-            .addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).nIn(4).nOut(3).activation(Activation.SOFTMAX)
                     .build(),
                     "0")

@@ -421,8 +421,8 @@ public class TestSparkComputationGraph extends BaseSparkTest {
             .graphBuilder()
             .addInputs("input1", "input2")
             .addVertex("avg",new ElementWiseVertex(ElementWiseVertex.Op.Average),"input1","input2")
-            .addLayer("dense",new DenseLayer.Builder().dropOut(0.9).nIn(featSize).nOut(featSize / 2).build(),"avg")
-            .addLayer("output",new OutputLayer.Builder().nIn(featSize / 2).nOut(2).lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).hasBias(false).build(),"dense")
+            .addLayer("dense",DenseLayer.builder().dropOut(0.9).nIn(featSize).nOut(featSize / 2).build(),"avg")
+            .addLayer("output",OutputLayer.builder().nIn(featSize / 2).nOut(2).lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).hasBias(false).build(),"dense")
             .setOutputs("output")
             .build();

@@ -62,10 +62,10 @@ public class TestFrozenLayers extends BaseSparkTest {
     int nOut = 3;

     MultiLayerNetwork origModel = new MultiLayerNetwork(overallConf.clone().list()
-            .layer(0, new DenseLayer.Builder().nIn(6).nOut(5).build())
-            .layer(1, new DenseLayer.Builder().nIn(5).nOut(4).build())
-            .layer(2, new DenseLayer.Builder().nIn(4).nOut(3).build())
-            .layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .layer(0, DenseLayer.builder().nIn(6).nOut(5).build())
+            .layer(1, DenseLayer.builder().nIn(5).nOut(4).build())
+            .layer(2, DenseLayer.builder().nIn(4).nOut(3).build())
+            .layer(3, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                     .build())
             .build());

@@ -138,10 +138,10 @@ public class TestFrozenLayers extends BaseSparkTest {

     ComputationGraph origModel = new ComputationGraph(NeuralNetConfiguration.builder().updater(new Sgd(0.1))
             .activation(Activation.TANH).graphBuilder().addInputs("in")
-            .addLayer("0", new DenseLayer.Builder().nIn(6).nOut(5).build(), "in")
-            .addLayer("1", new DenseLayer.Builder().nIn(5).nOut(4).build(), "0")
-            .addLayer("2", new DenseLayer.Builder().nIn(4).nOut(3).build(), "1")
-            .addLayer("3", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .addLayer("0", DenseLayer.builder().nIn(6).nOut(5).build(), "in")
+            .addLayer("1", DenseLayer.builder().nIn(5).nOut(4).build(), "0")
+            .addLayer("2", DenseLayer.builder().nIn(4).nOut(3).build(), "1")
+            .addLayer("3", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                     .build(),
                     "2")

@@ -58,8 +58,8 @@ public class TestMiscFunctions extends BaseSparkTest {
     public void testFeedForwardWithKey() {

         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).list()
-                .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
-                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
+                .layer(0, DenseLayer.builder().nIn(4).nOut(3).build())
+                .layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
                         .activation(Activation.SOFTMAX).build())
                 .build();

@@ -109,9 +109,9 @@ public class TestMiscFunctions extends BaseSparkTest {

     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER)
             .list()
-            .layer( new LSTM.Builder().nIn(4).nOut(3).build())
-            .layer(new GlobalPoolingLayer(PoolingType.AVG))
-            .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
+            .layer( LSTM.builder().nIn(4).nOut(3).build())
+            .layer(GlobalPoolingLayer.builder(PoolingType.AVG))
+            .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
                     .activation(Activation.SOFTMAX).build())
             .build();

@@ -164,9 +164,9 @@ public class TestMiscFunctions extends BaseSparkTest {

     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER)
             .graphBuilder().addInputs("in1", "in2")
-            .addLayer("0", new DenseLayer.Builder().nIn(4).nOut(3).build(), "in1")
-            .addLayer("1", new DenseLayer.Builder().nIn(4).nOut(3).build(), "in2").addLayer("2",
-                    new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(6).nOut(3)
+            .addLayer("0", DenseLayer.builder().nIn(4).nOut(3).build(), "in1")
+            .addLayer("1", DenseLayer.builder().nIn(4).nOut(3).build(), "in2").addLayer("2",
+                    OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(6).nOut(3)
                             .activation(Activation.SOFTMAX).build(),
                     "0", "1")
             .setOutputs("2").build();

@@ -111,9 +111,9 @@ public class TestSparkDl4jMultiLayer extends BaseSparkTest {
             .updater(new Adam(1e-3))
             .l2(1e-5)
             .list()
-            .layer(0, new DenseLayer.Builder().nIn(28 * 28).nOut(500).build())
-            .layer(1, new DenseLayer.Builder().nIn(500).nOut(100).build())
-            .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
+            .layer(0, DenseLayer.builder().nIn(28 * 28).nOut(500).build())
+            .layer(1, DenseLayer.builder().nIn(500).nOut(100).build())
+            .layer(2, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                     .activation(Activation.SOFTMAX).nIn(100).nOut(10).build())
             .build();

@@ -68,7 +68,7 @@ public class TestCompareParameterAveragingSparkVsSingleMachine {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .weightInit(WeightInit.XAVIER).updater(updater).seed(seed).list()
-            .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build()).layer(1, new OutputLayer.Builder()
+            .layer(0, DenseLayer.builder().nIn(10).nOut(10).build()).layer(1, OutputLayer.builder()
                     .lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(10).build())
             .build();
     return conf;

@@ -79,11 +79,11 @@ public class TestCompareParameterAveragingSparkVsSingleMachine {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .weightInit(WeightInit.XAVIER).updater(updater).seed(seed).list()
-            .layer(0, new ConvolutionLayer.Builder().nOut(3).kernelSize(2, 2).stride(1, 1).padding(0, 0)
+            .layer(0, ConvolutionLayer.builder().nOut(3).kernelSize(2, 2).stride(1, 1).padding(0, 0)
                     .activation(Activation.TANH).build())
-            .layer(1, new ConvolutionLayer.Builder().nOut(3).kernelSize(2, 2).stride(1, 1).padding(0, 0)
+            .layer(1, ConvolutionLayer.builder().nOut(3).kernelSize(2, 2).stride(1, 1).padding(0, 0)
                     .activation(Activation.TANH).build())
-            .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(10)
+            .layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(10)
                     .build())
             .inputType(InputType.convolutional(10, 10, 3)).build();
     return conf;

@@ -95,8 +95,8 @@ public class TestCompareParameterAveragingSparkVsSingleMachine {
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .weightInit(WeightInit.XAVIER).updater(updater).seed(seed).graphBuilder()
             .addInputs("in")
-            .addLayer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in").addLayer("1",
-                    new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10)
+            .addLayer("0", DenseLayer.builder().nIn(10).nOut(10).build(), "in").addLayer("1",
+                    OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10)
                             .nOut(10).build(),
                     "0")
             .setOutputs("1").build();

@@ -109,11 +109,11 @@ public class TestCompareParameterAveragingSparkVsSingleMachine {
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .weightInit(WeightInit.XAVIER).updater(updater).seed(seed).graphBuilder()
             .addInputs("in")
-            .addLayer("0", new ConvolutionLayer.Builder().nOut(3).kernelSize(2, 2).stride(1, 1)
+            .addLayer("0", ConvolutionLayer.builder().nOut(3).kernelSize(2, 2).stride(1, 1)
                     .padding(0, 0).activation(Activation.TANH).build(), "in")
-            .addLayer("1", new ConvolutionLayer.Builder().nOut(3).kernelSize(2, 2).stride(1, 1)
+            .addLayer("1", ConvolutionLayer.builder().nOut(3).kernelSize(2, 2).stride(1, 1)
                     .padding(0, 0).activation(Activation.TANH).build(), "0")
-            .addLayer("2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(10)
+            .addLayer("2", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(10)
                     .build(), "1")
             .setOutputs("2").setInputTypes(InputType.convolutional(10, 10, 3))
             .build();

@@ -129,9 +129,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     DataSet d = new IrisDataSetIterator(150, 150).next();
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-            .layer(0, new DenseLayer.Builder().nIn(4).nOut(100).weightInit(WeightInit.XAVIER)
+            .layer(0, DenseLayer.builder().nIn(4).nOut(100).weightInit(WeightInit.XAVIER)
                     .activation(Activation.RELU).build())
-            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).nIn(100).nOut(3)
                     .activation(Activation.SOFTMAX).weightInit(WeightInit.XAVIER)
                     .build())

@@ -167,9 +167,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
             .updater(new Adam(1e-6))
             .weightInit(WeightInit.XAVIER)
             .list()
-            .layer(new BatchNormalization.Builder().nIn(4).nOut(4).build())
-            .layer(new DenseLayer.Builder().nIn(4).nOut(32).activation(Activation.RELU).build())
-            .layer(new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(32).nOut(3)
+            .layer(BatchNormalization.builder().nIn(4).nOut(4).build())
+            .layer(DenseLayer.builder().nIn(4).nOut(32).activation(Activation.RELU).build())
+            .layer(org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(32).nOut(3)
                     .activation(Activation.SOFTMAX).build())
             .build();

@@ -277,9 +277,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     }
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-            .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(nIn).nOut(3)
+            .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(nIn).nOut(3)
                     .activation(Activation.TANH).build())
-            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder(
                     LossFunctions.LossFunction.MSE).nIn(3).nOut(nOut).activation(Activation.SOFTMAX)
                     .build())
             .build();

@@ -302,9 +302,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {

     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().l1(0.1).l2(0.1)
             .seed(123).updater(new Nesterovs(0.1, 0.9)).list()
-            .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(nIn).nOut(3)
+            .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(nIn).nOut(3)
                     .activation(Activation.TANH).build())
-            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).nIn(3).nOut(nOut)
                     .activation(Activation.SOFTMAX).build())
             .build();

@@ -391,9 +391,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {

     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-            .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+            .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
                     .activation(Activation.TANH).build())
-            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
                     .activation(Activation.SOFTMAX).build())
             .build();

@@ -455,9 +455,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {

     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-            .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+            .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
                     .activation(Activation.TANH).build())
-            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
                     .activation(Activation.SOFTMAX).build())
             .build();

@@ -525,9 +525,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {

     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-            .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+            .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
                     .activation(Activation.TANH).build())
-            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
                     .activation(Activation.SOFTMAX).build())
             .build();

@@ -614,9 +614,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .graphBuilder().addInputs("in")
-            .addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+            .addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
                     .activation(Activation.TANH).build(), "in")
-            .addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
                     .activation(Activation.SOFTMAX).build(),
                     "0")

@@ -687,9 +687,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345).updater(new RmsProp())
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .weightInit(WeightInit.XAVIER).list()
-            .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(4)
+            .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(4).nOut(4)
                     .activation(Activation.TANH).build())
-            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).nIn(4).nOut(3).activation(Activation.SOFTMAX)
                     .build())
             .build();

@@ -771,9 +771,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {

     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-            .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+            .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
                     .activation(Activation.TANH).build())
-            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
                     .activation(Activation.SOFTMAX).build())
             .build();

@@ -822,9 +822,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
             .graphBuilder().addInputs("in")
-            .addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+            .addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
                     .activation(Activation.TANH).build(), "in")
-            .addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
                     .activation(Activation.SOFTMAX).build(),
                     "0")

@@ -862,7 +862,7 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     Nd4j.getRandom().setSeed(12345);
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345).updater(new RmsProp())
             .weightInit(WeightInit.XAVIER).list()
-            .layer(0, new VariationalAutoencoder.Builder().nIn(8).nOut(10).encoderLayerSizes(12)
+            .layer(0, VariationalAutoencoder.builder().nIn(8).nOut(10).encoderLayerSizes(12)
                     .decoderLayerSizes(13).reconstructionDistribution(
                             new GaussianReconstructionDistribution(Activation.IDENTITY))
                     .build())

@@ -898,7 +898,7 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     Nd4j.getRandom().setSeed(12345);
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345).updater(new RmsProp())
             .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-            .addLayer("0", new VariationalAutoencoder.Builder().nIn(8).nOut(10).encoderLayerSizes(12)
+            .addLayer("0", VariationalAutoencoder.builder().nIn(8).nOut(10).encoderLayerSizes(12)
                     .decoderLayerSizes(13).reconstructionDistribution(
                             new GaussianReconstructionDistribution(Activation.IDENTITY))
                     .build(), "in")

@@ -938,8 +938,8 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {

     NeuralNetConfiguration conf =
             NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).list()
-                    .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(layerSize).build())
-                    .layer(1, new OutputLayer.Builder().nIn(layerSize).nOut(nOut)
+                    .layer(0, DenseLayer.builder().nIn(nIn).nOut(layerSize).build())
+                    .layer(1, OutputLayer.builder().nIn(layerSize).nOut(nOut)
                             .activation(Activation.SOFTMAX).lossFunction(
                                     LossFunctions.LossFunction.MCXENT)
                             .build())

@@ -993,8 +993,8 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {

     NeuralNetConfiguration conf =
             NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).list()
-                    .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(layerSize).build())
-                    .layer(1, new OutputLayer.Builder().nIn(layerSize).nOut(nOut)
+                    .layer(0, DenseLayer.builder().nIn(nIn).nOut(layerSize).build())
+                    .layer(1, OutputLayer.builder().nIn(layerSize).nOut(nOut)
                             .activation(Activation.SOFTMAX).lossFunction(
                                     LossFunctions.LossFunction.MCXENT)
                             .build())

@@ -1047,13 +1047,13 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     }
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
             .list()
-            .layer(new OutputLayer.Builder().nIn(4).nOut(3).build())
+            .layer(OutputLayer.builder().nIn(4).nOut(3).build())
             .build();

     ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder()
             .graphBuilder()
             .addInputs("in")
-            .addLayer("out", new OutputLayer.Builder().nIn(4).nOut(3).build(), "in")
+            .addLayer("out", OutputLayer.builder().nIn(4).nOut(3).build(), "in")
             .setOutputs("out")
             .build();

@@ -69,8 +69,8 @@ public class TestTrainingStatsCollection extends BaseSparkTest {

     NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-            .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
-            .layer(1, new OutputLayer.Builder().nIn(10).nOut(10).build())
+            .layer(0, DenseLayer.builder().nIn(10).nOut(10).build())
+            .layer(1, OutputLayer.builder().nIn(10).nOut(10).build())
             .build();

     int miniBatchSizePerWorker = 10;

@@ -62,9 +62,9 @@ public class TestListeners extends BaseSparkTest {

     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
             .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-            .layer(0, new DenseLayer.Builder().nIn(4).nOut(100).weightInit(WeightInit.XAVIER)
+            .layer(0, DenseLayer.builder().nIn(4).nOut(100).weightInit(WeightInit.XAVIER)
                     .activation(Activation.RELU).build())
-            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                     LossFunctions.LossFunction.MCXENT).nIn(100).nOut(3)
                     .activation(Activation.SOFTMAX).weightInit(WeightInit.XAVIER)
                     .build())

@@ -65,27 +65,27 @@ public class ActorCriticFactoryCompGraphStdConv implements ActorCriticFactoryCom
             .weightInit(WeightInit.XAVIER)
             .l2(conf.getL2()).graphBuilder()
             .addInputs("input").addLayer("0",
-                    new ConvolutionLayer.Builder(8, 8).nIn(shapeInputs[0]).nOut(16)
+                    ConvolutionLayer.builder(8, 8).nIn(shapeInputs[0]).nOut(16)
                             .stride(4, 4).activation(Activation.RELU).build(),
                     "input");

-    confB.addLayer("1", new ConvolutionLayer.Builder(4, 4).nIn(16).nOut(32).stride(2, 2).activation(Activation.RELU).build(), "0");
+    confB.addLayer("1", ConvolutionLayer.builder(4, 4).nIn(16).nOut(32).stride(2, 2).activation(Activation.RELU).build(), "0");

-    confB.addLayer("2", new DenseLayer.Builder().nIn(w * h * 32).nOut(256).activation(Activation.RELU).build(), "1");
+    confB.addLayer("2", DenseLayer.builder().nIn(w * h * 32).nOut(256).activation(Activation.RELU).build(), "1");

     if (conf.isUseLSTM()) {
-        confB.addLayer("3", new LSTM.Builder().nIn(256).nOut(256).activation(Activation.TANH).build(), "2");
+        confB.addLayer("3", LSTM.builder().nIn(256).nOut(256).activation(Activation.TANH).build(), "2");

-        confB.addLayer("value", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
+        confB.addLayer("value", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
                 .nIn(256).nOut(1).build(), "3");

-        confB.addLayer("softmax", new RnnOutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
+        confB.addLayer("softmax", RnnOutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
                 .nIn(256).nOut(numOutputs).build(), "3");
     } else {
-        confB.addLayer("value", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
+        confB.addLayer("value", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
                 .nIn(256).nOut(1).build(), "2");

-        confB.addLayer("softmax", new OutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
+        confB.addLayer("softmax", OutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
                 .nIn(256).nOut(numOutputs).build(), "2");
     }

@@ -56,31 +56,31 @@ public class ActorCriticFactoryCompGraphStdDense implements ActorCriticFactoryCo
             .l2(conf.getL2()).graphBuilder()
             .setInputTypes(conf.isUseLSTM() ? InputType.recurrent(nIn)
                     : InputType.feedForward(nIn)).addInputs("input")
-            .addLayer("0", new DenseLayer.Builder().nIn(nIn)
+            .addLayer("0", DenseLayer.builder().nIn(nIn)
                     .nOut(conf.getNumHiddenNodes()).activation(Activation.RELU).build(),
                     "input");


     for (int i = 1; i < conf.getNumLayers(); i++) {
-        confB.addLayer(i + "", new DenseLayer.Builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
+        confB.addLayer(i + "", DenseLayer.builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
                 .activation(Activation.RELU).build(), (i - 1) + "");
     }


     if (conf.isUseLSTM()) {
-        confB.addLayer(getConf().getNumLayers() + "", new LSTM.Builder().activation(Activation.TANH)
+        confB.addLayer(getConf().getNumLayers() + "", LSTM.builder().activation(Activation.TANH)
                 .nOut(conf.getNumHiddenNodes()).build(), (getConf().getNumLayers() - 1) + "");

-        confB.addLayer("value", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
+        confB.addLayer("value", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
                 .nOut(1).build(), getConf().getNumLayers() + "");

-        confB.addLayer("softmax", new RnnOutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
+        confB.addLayer("softmax", RnnOutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
                 .nOut(numOutputs).build(), getConf().getNumLayers() + "");
     } else {
-        confB.addLayer("value", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
+        confB.addLayer("value", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
                 .nOut(1).build(), (getConf().getNumLayers() - 1) + "");

-        confB.addLayer("softmax", new OutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
+        confB.addLayer("softmax", OutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
                 .nOut(numOutputs).build(), (getConf().getNumLayers() - 1) + "");
     }

@@ -61,22 +61,22 @@ public class ActorCriticFactorySeparateStdDense implements ActorCriticFactorySep
             .updater(conf.getUpdater() != null ? conf.getUpdater() : new Adam())
             .weightInit(WeightInit.XAVIER)
             .l2(conf.getL2())
-            .list().layer(0, new DenseLayer.Builder().nIn(nIn).nOut(conf.getNumHiddenNodes())
+            .list().layer(0, DenseLayer.builder().nIn(nIn).nOut(conf.getNumHiddenNodes())
                     .activation(Activation.RELU).build());


     for (int i = 1; i < conf.getNumLayers(); i++) {
-        confB.layer(i, new DenseLayer.Builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
+        confB.layer(i, DenseLayer.builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
                 .activation(Activation.RELU).build());
     }

     if (conf.isUseLSTM()) {
-        confB.layer(conf.getNumLayers(), new LSTM.Builder().nOut(conf.getNumHiddenNodes()).activation(Activation.TANH).build());
+        confB.layer(conf.getNumLayers(), LSTM.builder().nOut(conf.getNumHiddenNodes()).activation(Activation.TANH).build());

-        confB.layer(conf.getNumLayers() + 1, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
+        confB.layer(conf.getNumLayers() + 1, RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
                 .nIn(conf.getNumHiddenNodes()).nOut(1).build());
     } else {
-        confB.layer(conf.getNumLayers(), new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
+        confB.layer(conf.getNumLayers(), OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
                 .nIn(conf.getNumHiddenNodes()).nOut(1).build());
     }

@@ -96,22 +96,22 @@ public class ActorCriticFactorySeparateStdDense implements ActorCriticFactorySep
             .weightInit(WeightInit.XAVIER)
             //.regularization(true)
             //.l2(conf.getL2())
-            .list().layer(0, new DenseLayer.Builder().nIn(nIn).nOut(conf.getNumHiddenNodes())
+            .list().layer(0, DenseLayer.builder().nIn(nIn).nOut(conf.getNumHiddenNodes())
                     .activation(Activation.RELU).build());


     for (int i = 1; i < conf.getNumLayers(); i++) {
-        confB2.layer(i, new DenseLayer.Builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
+        confB2.layer(i, DenseLayer.builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
                 .activation(Activation.RELU).build());
     }

     if (conf.isUseLSTM()) {
-        confB2.layer(conf.getNumLayers(), new LSTM.Builder().nOut(conf.getNumHiddenNodes()).activation(Activation.TANH).build());
+        confB2.layer(conf.getNumLayers(), LSTM.builder().nOut(conf.getNumHiddenNodes()).activation(Activation.TANH).build());

-        confB2.layer(conf.getNumLayers() + 1, new RnnOutputLayer.Builder(new ActorCriticLoss())
+        confB2.layer(conf.getNumLayers() + 1, RnnOutputLayer.builder(new ActorCriticLoss())
                 .activation(Activation.SOFTMAX).nIn(conf.getNumHiddenNodes()).nOut(numOutputs).build());
     } else {
-        confB2.layer(conf.getNumLayers(), new OutputLayer.Builder(new ActorCriticLoss())
+        confB2.layer(conf.getNumLayers(), OutputLayer.builder(new ActorCriticLoss())
                 .activation(Activation.SOFTMAX).nIn(conf.getNumHiddenNodes()).nOut(numOutputs).build());
     }

@@ -60,15 +60,15 @@ public class DQNFactoryStdConv implements DQNFactory {
             .l2(conf.getL2())
             .updater(conf.getUpdater() != null ? conf.getUpdater() : new Adam())
             .weightInit(WeightInit.XAVIER).l2(conf.getL2()).list()
-            .layer(0, new ConvolutionLayer.Builder(8, 8).nIn(shapeInputs[0]).nOut(16).stride(4, 4)
+            .layer(0, ConvolutionLayer.builder(8, 8).nIn(shapeInputs[0]).nOut(16).stride(4, 4)
                     .activation(Activation.RELU).build());


-    confB.layer(1, new ConvolutionLayer.Builder(4, 4).nOut(32).stride(2, 2).activation(Activation.RELU).build());
+    confB.layer(1, ConvolutionLayer.builder(4, 4).nOut(32).stride(2, 2).activation(Activation.RELU).build());

-    confB.layer(2, new DenseLayer.Builder().nOut(256).activation(Activation.RELU).build());
+    confB.layer(2, DenseLayer.builder().nOut(256).activation(Activation.RELU).build());

-    confB.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(numOutputs)
+    confB.layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(numOutputs)
             .build());

     confB.inputType(InputType.convolutional(shapeInputs[1], shapeInputs[2], shapeInputs[0]));

@@ -61,7 +61,7 @@ public class DQNFactoryStdDense implements DQNFactory {
             .l2(conf.getL2())
             .list()
             .layer(0,
-                    new DenseLayer.Builder()
+                    DenseLayer.builder()
                             .nIn(nIn)
                             .nOut(conf.getNumHiddenNodes())
                             .activation(Activation.RELU).build()

@ -69,12 +69,12 @@ public class DQNFactoryStdDense implements DQNFactory {
|
|||
|
||||
|
||||
for (int i = 1; i < conf.getNumLayers(); i++) {
|
||||
confB.layer(i, new DenseLayer.Builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
|
||||
confB.layer(i, DenseLayer.builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
|
||||
.activation(Activation.RELU).build());
|
||||
}
|
||||
|
||||
confB.layer(conf.getNumLayers(),
|
||||
new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
|
||||
.activation(Activation.IDENTITY)
|
||||
.nIn(conf.getNumHiddenNodes())
|
||||
.nOut(numOutputs)
|
||||
|
|
|
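Note on the pattern above: both DQN factory hunks apply the same mechanical translation. Layer builders now come from the static builder() method instead of new X.Builder(...), and in several hunks the loss function moves from the Builder constructor argument into the lossFunction(...) setter. A minimal, self-contained sketch of the new style, using only calls that appear in the diff itself (the class and method names of the sketch are illustrative, not taken from the repository):

    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.layers.DenseLayer;
    import org.deeplearning4j.nn.conf.layers.OutputLayer;
    import org.deeplearning4j.nn.weights.WeightInit;
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    public class BuilderMigrationSketch {
        static NeuralNetConfiguration buildConf(int nIn, int nHidden, int nOut) {
            return NeuralNetConfiguration.builder()
                    .weightInit(WeightInit.XAVIER)
                    .list()
                    // before: new DenseLayer.Builder().nIn(nIn).nOut(nHidden)...
                    .layer(0, DenseLayer.builder().nIn(nIn).nOut(nHidden)
                            .activation(Activation.RELU).build())
                    // before: new OutputLayer.Builder(LossFunctions.LossFunction.MSE)...
                    .layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
                            .activation(Activation.IDENTITY).nIn(nHidden).nOut(nOut).build())
                    .build();
        }
    }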
@@ -141,16 +141,16 @@ public class NStepRnn {
.graphBuilder()
.addInputs("input")
.setInputTypes(InputType.recurrent(NUM_INPUTS))
.addLayer("lstm", new LSTM.Builder().nOut(lstmLayerSize).activation(Activation.TANH).build(), "input")
.addLayer("dl", new DenseLayer.Builder().nOut(dl1Size).activation(Activation.RELU).build(), "input", "lstm")
.addLayer("dl-1", new DenseLayer.Builder().nOut(dl2Size).activation(Activation.RELU).build(), "dl")
.addLayer("lstm", LSTM.builder().nOut(lstmLayerSize).activation(Activation.TANH).build(), "input")
.addLayer("dl", DenseLayer.builder().nOut(dl1Size).activation(Activation.RELU).build(), "input", "lstm")
.addLayer("dl-1", DenseLayer.builder().nOut(dl2Size).activation(Activation.RELU).build(), "dl")
.addVertex("dl-rnn", new PreprocessorVertex(new FeedForwardToRnnPreProcessor()), "dl-1");
}

private static ITrainableNeuralNet buildActorCriticNetwork() {
ComputationGraphConfiguration valueConfiguration = buildBaseNetworkConfiguration(COMBINED_LSTM_LAYER_SIZE, COMBINED_DL1_LAYER_SIZE, COMBINED_DL2_LAYER_SIZE)
.addLayer("value", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(1).build(), "dl-rnn", "lstm")
.addLayer("softmax", new RnnOutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX).nOut(NUM_ACTIONS).build(), "dl-rnn", "lstm")
.addLayer("value", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(1).build(), "dl-rnn", "lstm")
.addLayer("softmax", RnnOutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX).nOut(NUM_ACTIONS).build(), "dl-rnn", "lstm")
.setOutputs("value", "softmax")
.build();

@@ -164,12 +164,12 @@ public class NStepRnn {

private static ITrainableNeuralNet buildSeparateActorCriticNetwork() {
ComputationGraphConfiguration valueConfiguration = buildBaseNetworkConfiguration(SEPARATE_LSTM_LAYER_SIZE, SEPARATE_DL1_LAYER_SIZE, SEPARATE_DL2_LAYER_SIZE)
.addLayer("value", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(1).build(), "dl-rnn", "lstm")
.addLayer("value", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(1).build(), "dl-rnn", "lstm")
.setOutputs("value")
.build();

ComputationGraphConfiguration policyConfiguration = buildBaseNetworkConfiguration(SEPARATE_LSTM_LAYER_SIZE, SEPARATE_DL1_LAYER_SIZE, SEPARATE_DL2_LAYER_SIZE)
.addLayer("softmax", new RnnOutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX).nOut(NUM_ACTIONS).build(), "dl-rnn", "lstm")
.addLayer("softmax", RnnOutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX).nOut(NUM_ACTIONS).build(), "dl-rnn", "lstm")
.setOutputs("softmax")
.build();
@ -197,13 +197,13 @@ public class RobotLakeExample {
|
|||
InputType.feedForward(4)) // radar )
|
||||
.addInputs("tracker-in", "radar-in")
|
||||
|
||||
.layer("dl_1", new DenseLayer.Builder().activation(Activation.RELU).nOut(40).build(), "tracker-in", "radar-in")
|
||||
.layer("dl_out", new DenseLayer.Builder().activation(Activation.RELU).nOut(40).build(), "dl_1");
|
||||
.layer("dl_1", DenseLayer.builder().activation(Activation.RELU).nOut(40).build(), "tracker-in", "radar-in")
|
||||
.layer("dl_out", DenseLayer.builder().activation(Activation.RELU).nOut(40).build(), "dl_1");
|
||||
}
|
||||
|
||||
private static ITrainableNeuralNet buildQNetwork() {
|
||||
ComputationGraphConfiguration conf = buildBaseNetworkConfiguration()
|
||||
.addLayer("output", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
|
||||
.addLayer("output", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
|
||||
.nOut(RobotLake.NUM_ACTIONS).build(), "dl_out")
|
||||
|
||||
.setOutputs("output")
|
||||
|
@ -220,9 +220,9 @@ public class RobotLakeExample {
|
|||
|
||||
private static ITrainableNeuralNet buildActorCriticNetwork() {
|
||||
ComputationGraphConfiguration conf = buildBaseNetworkConfiguration()
|
||||
.addLayer("value", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
|
||||
.addLayer("value", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
|
||||
.nOut(1).build(), "dl_out")
|
||||
.addLayer("softmax", new OutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
|
||||
.addLayer("softmax", OutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
|
||||
.nOut(RobotLake.NUM_ACTIONS).build(), "dl_out")
|
||||
.setOutputs("value", "softmax")
|
||||
.build();
|
||||
|
|
|
@ -181,18 +181,18 @@ public class TMazeExample {
|
|||
.graphBuilder()
|
||||
.setInputTypes(InputType.recurrent(NUM_INPUTS))
|
||||
.addInputs("input")
|
||||
.addLayer("goal", new LSTM.Builder()
|
||||
.addLayer("goal", LSTM.builder()
|
||||
.nOut(40)
|
||||
.activation(Activation.TANH)
|
||||
.build(), "input")
|
||||
.addLayer("corridor", new DenseLayer.Builder().nOut(40).activation(Activation.RELU).build(), "input", "goal")
|
||||
.addLayer("corridor-1", new DenseLayer.Builder().nOut(20).activation(Activation.RELU).build(), "corridor")
|
||||
.addLayer("corridor", DenseLayer.builder().nOut(40).activation(Activation.RELU).build(), "input", "goal")
|
||||
.addLayer("corridor-1", DenseLayer.builder().nOut(20).activation(Activation.RELU).build(), "corridor")
|
||||
.addVertex("corridor-rnn", new PreprocessorVertex(new FeedForwardToRnnPreProcessor()), "corridor-1");
|
||||
}
|
||||
|
||||
private static ITrainableNeuralNet buildQNetwork() {
|
||||
ComputationGraphConfiguration conf = buildBaseNetworkConfiguration()
|
||||
.addLayer("output", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
|
||||
.addLayer("output", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
|
||||
.nOut(NUM_ACTIONS).build(), "goal", "corridor-rnn")
|
||||
|
||||
.setOutputs("output")
|
||||
|
@ -207,9 +207,9 @@ public class TMazeExample {
|
|||
|
||||
private static ITrainableNeuralNet buildActorCriticNetwork() {
|
||||
ComputationGraphConfiguration conf = buildBaseNetworkConfiguration()
|
||||
.addLayer("value", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
|
||||
.addLayer("value", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
|
||||
.nOut(1).build(), "goal", "corridor-rnn")
|
||||
.addLayer("softmax", new RnnOutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
|
||||
.addLayer("softmax", RnnOutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
|
||||
.nOut(NUM_ACTIONS).build(), "goal", "corridor-rnn")
|
||||
.setOutputs("value", "softmax")
|
||||
.build();
|
||||
|
|
|
@@ -167,9 +167,9 @@ public class PolicyTest {
@Test
public void testACPolicy() throws Exception {
ComputationGraph cg = new ComputationGraph(NeuralNetConfiguration.builder().seed(444).graphBuilder().addInputs("input")
.addLayer("output", new OutputLayer.Builder().nOut(1).lossFunction(LossFunctions.LossFunction.XENT).activation(Activation.SIGMOID).build(), "input").setOutputs("output").build());
.addLayer("output", OutputLayer.builder().nOut(1).lossFunction(LossFunctions.LossFunction.XENT).activation(Activation.SIGMOID).build(), "input").setOutputs("output").build());
MultiLayerNetwork mln = new MultiLayerNetwork(NeuralNetConfiguration.builder().seed(555).list()
.layer(0, new OutputLayer.Builder().nOut(1).lossFunction(LossFunctions.LossFunction.XENT).activation(Activation.SIGMOID).build()).build());
.layer(0, OutputLayer.builder().nOut(1).lossFunction(LossFunctions.LossFunction.XENT).activation(Activation.SIGMOID).build()).build());

ACPolicy policy = new ACPolicy(new DummyAC(mln), true, Nd4j.getRandom());
@ -95,13 +95,13 @@ public class App {
|
|||
|
||||
private static LayerConfiguration[] genLayers() {
|
||||
return new LayerConfiguration[] {
|
||||
new DenseLayer.Builder().nIn(INPUT).nOut(X_DIM*Y_DIM*CHANNELS).weightInit(WeightInit.NORMAL).build(),
|
||||
new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
|
||||
new DenseLayer.Builder().nIn(X_DIM*Y_DIM*CHANNELS).nOut(X_DIM*Y_DIM).build(),
|
||||
new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
|
||||
new DenseLayer.Builder().nIn(X_DIM*Y_DIM).nOut(X_DIM*Y_DIM).build(),
|
||||
new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
|
||||
new DenseLayer.Builder().nIn(X_DIM*Y_DIM).nOut(X_DIM*Y_DIM*CHANNELS).activation(Activation.TANH)
|
||||
DenseLayer.builder().nIn(INPUT).nOut(X_DIM*Y_DIM*CHANNELS).weightInit(WeightInit.NORMAL).build(),
|
||||
ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
|
||||
DenseLayer.builder().nIn(X_DIM*Y_DIM*CHANNELS).nOut(X_DIM*Y_DIM).build(),
|
||||
ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
|
||||
DenseLayer.builder().nIn(X_DIM*Y_DIM).nOut(X_DIM*Y_DIM).build(),
|
||||
ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
|
||||
DenseLayer.builder().nIn(X_DIM*Y_DIM).nOut(X_DIM*Y_DIM*CHANNELS).activation(Activation.TANH)
|
||||
.build()
|
||||
};
|
||||
}
|
||||
|
@ -131,19 +131,19 @@ public class App {
|
|||
|
||||
private static LayerConfiguration[] disLayers() {
|
||||
return new LayerConfiguration[]{
|
||||
new DenseLayer.Builder().nOut(X_DIM*Y_DIM*CHANNELS*2).build(), //input is set by setInputType on the network
|
||||
new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
|
||||
new DropoutLayer.Builder(1 - 0.5).build(),
|
||||
new DenseLayer.Builder().nIn(X_DIM * Y_DIM*CHANNELS*2).nOut(X_DIM*Y_DIM*CHANNELS*4).build(), //HxBxC
|
||||
new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
|
||||
new DropoutLayer.Builder(1 - 0.5).build(),
|
||||
new DenseLayer.Builder().nIn(X_DIM*Y_DIM*CHANNELS*4).nOut(X_DIM*Y_DIM*CHANNELS).build(),
|
||||
new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
|
||||
new DropoutLayer.Builder(1 - 0.5).build(),
|
||||
new DenseLayer.Builder().nIn(X_DIM*Y_DIM*CHANNELS).nOut(X_DIM*Y_DIM).build(),
|
||||
new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
|
||||
new DropoutLayer.Builder(1 - 0.5).build(),
|
||||
new OutputLayer.Builder(LossFunction.XENT).nIn(X_DIM*Y_DIM).nOut(1).activation(Activation.SIGMOID).build()
|
||||
DenseLayer.builder().nOut(X_DIM*Y_DIM*CHANNELS*2).build(), //input is set by setInputType on the network
|
||||
ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
|
||||
DropoutLayer.builder(1 - 0.5).build(),
|
||||
DenseLayer.builder().nIn(X_DIM * Y_DIM*CHANNELS*2).nOut(X_DIM*Y_DIM*CHANNELS*4).build(), //HxBxC
|
||||
ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
|
||||
DropoutLayer.builder(1 - 0.5).build(),
|
||||
DenseLayer.builder().nIn(X_DIM*Y_DIM*CHANNELS*4).nOut(X_DIM*Y_DIM*CHANNELS).build(),
|
||||
ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
|
||||
DropoutLayer.builder(1 - 0.5).build(),
|
||||
DenseLayer.builder().nIn(X_DIM*Y_DIM*CHANNELS).nOut(X_DIM*Y_DIM).build(),
|
||||
ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
|
||||
DropoutLayer.builder(1 - 0.5).build(),
|
||||
OutputLayer.builder().lossFunction(LossFunction.XENT).nIn(X_DIM*Y_DIM).nOut(1).activation(Activation.SIGMOID).build()
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -242,6 +242,7 @@ public class App {
|
|||
gan.addTrainingListeners(new ScoreToChartListener("gan"));
|
||||
//dis.setListeners(new ScoreToChartListener("dis"));
|
||||
|
||||
System.out.println(gan.toString());
|
||||
gan.fit(Nd4j.rand(batchSize, CHANNELS, X_DIM, Y_DIM), Nd4j.zeros(batchSize, 1));
|
||||
|
||||
//gan.fit(new DataSet(trainData.next().getFeatures(), Nd4j.zeros(batchSize, 1)));
|
||||
|
|
|
@ -101,21 +101,21 @@ public class MnistDCGANExample {
|
|||
public static void main(String[] args) throws Exception {
|
||||
Supplier<MultiLayerNetwork> genSupplier = () -> {
|
||||
return new MultiLayerNetwork(NeuralNetConfiguration.builder()
|
||||
.layer(0, new DenseLayer.Builder().nIn(latentDim).nOut(width / 2 * height / 2 * 128)
|
||||
.layer(0, DenseLayer.builder().nIn(latentDim).nOut(width / 2 * height / 2 * 128)
|
||||
.activation(Activation.LEAKYRELU).weightInit(WeightInit.NORMAL).build())
|
||||
.layer(1, new Convolution2D.Builder().nIn(128).nOut(128).kernelSize(5, 5)
|
||||
.layer(1, Convolution2D.builder().nIn(128).nOut(128).kernelSize(5, 5)
|
||||
.convolutionMode(ConvolutionMode.Same).activation(Activation.LEAKYRELU).build())
|
||||
// Up-sampling to 28x28x256
|
||||
.layer(2, new Deconvolution2D.Builder().nIn(128).nOut(128).stride(2, 2)
|
||||
.layer(2, Deconvolution2D.builder().nIn(128).nOut(128).stride(2, 2)
|
||||
.kernelSize(5, 5).convolutionMode(ConvolutionMode.Same)
|
||||
.activation(Activation.LEAKYRELU).build())
|
||||
.layer(3, new Convolution2D.Builder().nIn(128).nOut(128).kernelSize(5, 5)
|
||||
.layer(3, Convolution2D.builder().nIn(128).nOut(128).kernelSize(5, 5)
|
||||
.convolutionMode(ConvolutionMode.Same).activation(Activation.LEAKYRELU).build())
|
||||
.layer(4, new Convolution2D.Builder().nIn(128).nOut(128).kernelSize(5, 5)
|
||||
.layer(4, Convolution2D.builder().nIn(128).nOut(128).kernelSize(5, 5)
|
||||
.convolutionMode(ConvolutionMode.Same).activation(Activation.LEAKYRELU).build())
|
||||
.layer(5, new Convolution2D.Builder().nIn(128).nOut(channels).kernelSize(7, 7)
|
||||
.layer(5, Convolution2D.builder().nIn(128).nOut(channels).kernelSize(7, 7)
|
||||
.convolutionMode(ConvolutionMode.Same).activation(Activation.LEAKYRELU).build())
|
||||
.layer(6, new ActivationLayer.Builder().activation(Activation.TANH).build())
|
||||
.layer(6, ActivationLayer.builder().activation(Activation.TANH).build())
|
||||
.inputPreProcessor(1,
|
||||
new FeedForwardToCnnPreProcessor(height / 2, width / 2, 128))
|
||||
.inputPreProcessor(6, new CnnToFeedForwardPreProcessor(height, width, channels))
|
||||
|
@ -129,17 +129,17 @@ public class MnistDCGANExample {
|
|||
//.gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
|
||||
//.gradientNormalizationThreshold(100.0)
|
||||
|
||||
.layer(0, new Convolution2D.Builder().nIn(channels).nOut(64).kernelSize(3, 3)
|
||||
.layer(0, Convolution2D.builder().nIn(channels).nOut(64).kernelSize(3, 3)
|
||||
.activation(Activation.LEAKYRELU).build())
|
||||
.layer(1, new Convolution2D.Builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2)
|
||||
.layer(1, Convolution2D.builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2)
|
||||
.activation(Activation.LEAKYRELU).build())
|
||||
.layer(2, new Convolution2D.Builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2)
|
||||
.layer(2, Convolution2D.builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2)
|
||||
.activation(Activation.LEAKYRELU).build())
|
||||
.layer(3, new Convolution2D.Builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2)
|
||||
.layer(3, Convolution2D.builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2)
|
||||
.activation(Activation.LEAKYRELU).build())
|
||||
.layer(4, new DropoutLayer.Builder().dropOut(0.5).build())
|
||||
.layer(5, new DenseLayer.Builder().nIn(64 * 2 * 2).nOut(1).activation(Activation.SIGMOID).build())
|
||||
.layer(6, new LossLayer.Builder().lossFunction(LossFunctions.LossFunction.XENT).build())
|
||||
.layer(4, DropoutLayer.builder().dropOut(0.5).build())
|
||||
.layer(5, DenseLayer.builder().nIn(64 * 2 * 2).nOut(1).activation(Activation.SIGMOID).build())
|
||||
.layer(6, LossLayer.builder().lossFunction(LossFunctions.LossFunction.XENT.getILossFunction()).build())
|
||||
.inputPreProcessor(0, new FeedForwardToCnnPreProcessor(height, width, channels))
|
||||
.inputPreProcessor(4, new CnnToFeedForwardPreProcessor(2, 2, 64))
|
||||
.inputType(InputType.convolutionalFlat(height, width, channels))
|
||||
|
|
|
@ -62,13 +62,13 @@ public class MnistSimpleGAN {
|
|||
.gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
|
||||
.gradientNormalizationThreshold(100)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(100).nOut(256).weightInit(WeightInit.NORMAL).build())
|
||||
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(new DenseLayer.Builder().nIn(256).nOut(512).build())
|
||||
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(new DenseLayer.Builder().nIn(512).nOut(1024).build())
|
||||
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(new DenseLayer.Builder().nIn(1024).nOut(784).activation(Activation.TANH).build())
|
||||
.layer(DenseLayer.builder().nIn(100).nOut(256).weightInit(WeightInit.NORMAL).build())
|
||||
.layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(DenseLayer.builder().nIn(256).nOut(512).build())
|
||||
.layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(DenseLayer.builder().nIn(512).nOut(1024).build())
|
||||
.layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(DenseLayer.builder().nIn(1024).nOut(784).activation(Activation.TANH).build())
|
||||
.build();
|
||||
return new MultiLayerNetwork(genConf);
|
||||
}
|
||||
|
@ -83,16 +83,16 @@ public class MnistSimpleGAN {
|
|||
.gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
|
||||
.gradientNormalizationThreshold(100)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(784).nOut(1024).updater(updater).build())
|
||||
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(new DropoutLayer.Builder(1 - 0.5).build())
|
||||
.layer(new DenseLayer.Builder().nIn(1024).nOut(512).updater(updater).build())
|
||||
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(new DropoutLayer.Builder(1 - 0.5).build())
|
||||
.layer(new DenseLayer.Builder().nIn(512).nOut(256).updater(updater).build())
|
||||
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(new DropoutLayer.Builder(1 - 0.5).build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.XENT).nIn(256).nOut(1)
|
||||
.layer(DenseLayer.builder().nIn(784).nOut(1024).updater(updater).build())
|
||||
.layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(DropoutLayer.builder(1 - 0.5).build())
|
||||
.layer(DenseLayer.builder().nIn(1024).nOut(512).updater(updater).build())
|
||||
.layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(DropoutLayer.builder(1 - 0.5).build())
|
||||
.layer(DenseLayer.builder().nIn(512).nOut(256).updater(updater).build())
|
||||
.layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(DropoutLayer.builder(1 - 0.5).build())
|
||||
.layer(OutputLayer.builder(LossFunctions.LossFunction.XENT).nIn(256).nOut(1)
|
||||
.activation(Activation.SIGMOID).updater(updater).build())
|
||||
.build();
|
||||
|
||||
|
|
|
@ -288,12 +288,12 @@ public class BrianTest extends BaseSparkSessionTest {
|
|||
.seed(123)
|
||||
.updater(new Nesterovs(0.1, 0.9))
|
||||
|
||||
.layer(0, new DenseLayer.Builder().nIn(5).nOut(20).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, DenseLayer.builder().nIn(5).nOut(20).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU).l2(0.001).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(20).nOut(20).weightInit(WeightInit.XAVIER)
|
||||
.layer(1, DenseLayer.builder().nIn(20).nOut(20).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU).build())
|
||||
//.layer(2, new DenseLayerConfiguration.Builder().nIn(9).nOut(9).weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.XENT).nIn(20).nOut(4)
|
||||
.layer(2, OutputLayer.builder(LossFunctions.LossFunction.XENT).nIn(20).nOut(4)
|
||||
.weightInit(WeightInit.XAVIER).activation(Activation.SIGMOID).build())
|
||||
.build();
|
||||
|
||||
|
|
|
@ -298,10 +298,10 @@ public class BrianTest2 /*extends BaseDL4JTest*/ {
|
|||
.seed(123)
|
||||
.updater(new Nesterovs(0.1, 0.9))
|
||||
|
||||
.layer(0, new DenseLayer.Builder().nIn(5).nOut(20).weightInit(WeightInit.XAVIER).activation(Activation.RELU).l2(0.001).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(20).nOut(20).weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
|
||||
.layer(0, DenseLayer.builder().nIn(5).nOut(20).weightInit(WeightInit.XAVIER).activation(Activation.RELU).l2(0.001).build())
|
||||
.layer(1, DenseLayer.builder().nIn(20).nOut(20).weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
|
||||
//.layer(2, new DenseLayerConfiguration.Builder().nIn(9).nOut(9).weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.XENT).nIn(20).nOut(4).weightInit(WeightInit.XAVIER).activation(Activation.SIGMOID).build())
|
||||
.layer(2, OutputLayer.builder(LossFunctions.LossFunction.XENT).nIn(20).nOut(4).weightInit(WeightInit.XAVIER).activation(Activation.SIGMOID).build())
|
||||
.build();
|
||||
|
||||
//Define SparkNet
|
||||
|
|
|
@ -87,15 +87,15 @@ public class TestServer {
|
|||
.activation(Activation.RELU)
|
||||
.l2(0)
|
||||
|
||||
//.layer(0, new ConvolutionLayer.Builder().nIn(1).kernelSize(1, 5).stride(1,1).padding(0,2).nOut(1).name("1st Filter").updater(new Adam.Builder().learningRate(0.2).build()).build())
|
||||
//.layer(1, new ConvolutionLayer.Builder().nIn(1).kernelSize(1, 2).stride(1,2).padding(0,0).nOut(1).name("2nd Filter").updater(new Adam.Builder().learningRate(0.1).build()).build())
|
||||
//.layer(0, ConvolutionLayer.builder().nIn(1).kernelSize(1, 5).stride(1,1).padding(0,2).nOut(1).name("1st Filter").updater(new Adam.Builder().learningRate(0.2).build()).build())
|
||||
//.layer(1, ConvolutionLayer.builder().nIn(1).kernelSize(1, 2).stride(1,2).padding(0,0).nOut(1).name("2nd Filter").updater(new Adam.Builder().learningRate(0.1).build()).build())
|
||||
// .layer(1, new DenseLayerConfiguration.Builder().nIn(10).nOut(64).activation(Activation.RELU).build())
|
||||
.layer(0, new DenseLayer.Builder().nIn(10).nOut(100).activation(Activation.RELU).l2(0.003).build())
|
||||
.layer(1, new LSTM.Builder().nIn(100).nOut(100).activation(Activation.TANH).build())
|
||||
.layer(2, new LSTM.Builder().nIn(100).nOut(100).activation(Activation.TANH).build())
|
||||
.layer(3, new DenseLayer.Builder().nIn(100).nOut(16).activation(Activation.RELU).l2(0.001).build())
|
||||
.layer(0, DenseLayer.builder().nIn(10).nOut(100).activation(Activation.RELU).l2(0.003).build())
|
||||
.layer(1, LSTM.builder().nIn(100).nOut(100).activation(Activation.TANH).build())
|
||||
.layer(2, LSTM.builder().nIn(100).nOut(100).activation(Activation.TANH).build())
|
||||
.layer(3, DenseLayer.builder().nIn(100).nOut(16).activation(Activation.RELU).l2(0.001).build())
|
||||
|
||||
.layer(4, new OutputLayer.Builder().nIn(16).nOut(numClasses)
|
||||
.layer(4, OutputLayer.builder().nIn(16).nOut(numClasses)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(new LossMCXENT())
|
||||
.build()
|
||||
|
|
|
@ -127,15 +127,15 @@ public class TestServer2 {
|
|||
.activation(Activation.RELU)
|
||||
.l2(0)
|
||||
|
||||
//.layer(0, new ConvolutionLayer.Builder().nIn(1).kernelSize(1, 5).stride(1,1).padding(0,2).nOut(1).name("1st Filter").updater(new Adam.Builder().learningRate(0.2).build()).build())
|
||||
//.layer(1, new ConvolutionLayer.Builder().nIn(1).kernelSize(1, 2).stride(1,2).padding(0,0).nOut(1).name("2nd Filter").updater(new Adam.Builder().learningRate(0.1).build()).build())
|
||||
//.layer(0, ConvolutionLayer.builder().nIn(1).kernelSize(1, 5).stride(1,1).padding(0,2).nOut(1).name("1st Filter").updater(new Adam.Builder().learningRate(0.2).build()).build())
|
||||
//.layer(1, ConvolutionLayer.builder().nIn(1).kernelSize(1, 2).stride(1,2).padding(0,0).nOut(1).name("2nd Filter").updater(new Adam.Builder().learningRate(0.1).build()).build())
|
||||
// .layer(1, new DenseLayerConfiguration.Builder().nIn(10).nOut(64).activation(Activation.RELU).build())
|
||||
.layer(0, new DenseLayer.Builder().nIn(10).nOut(100).activation(Activation.RELU).l2(0.003).build())
|
||||
.layer(1, new LSTM.Builder().nIn(100).nOut(100).activation(Activation.TANH).build())
|
||||
.layer(2, new LSTM.Builder().nIn(100).nOut(100).activation(Activation.TANH).build())
|
||||
.layer(3, new DenseLayer.Builder().nIn(100).nOut(16).activation(Activation.RELU).l2(0.001).build())
|
||||
.layer(0, DenseLayer.builder().nIn(10).nOut(100).activation(Activation.RELU).l2(0.003).build())
|
||||
.layer(1, LSTM.builder().nIn(100).nOut(100).activation(Activation.TANH).build())
|
||||
.layer(2, LSTM.builder().nIn(100).nOut(100).activation(Activation.TANH).build())
|
||||
.layer(3, DenseLayer.builder().nIn(100).nOut(16).activation(Activation.RELU).l2(0.001).build())
|
||||
|
||||
.layer(4, new OutputLayer.Builder().nIn(16).nOut(numClasses)
|
||||
.layer(4, OutputLayer.builder().nIn(16).nOut(numClasses)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(new LossMCXENT())
|
||||
.build()
|
||||
|
|
|
@@ -832,7 +832,7 @@ public class IntegrationTestRunner {
if(m instanceof MultiLayerNetwork){
paramPrefix = l.getIndex() + "_";
} else {
paramPrefix = l.getLayerConfiguration().getLayerName() + "_";
paramPrefix = l.getLayerConfiguration().getName() + "_";
}
Map<String,INDArray> paramTable = l.getParamTable();
for(Map.Entry<String,INDArray> e : paramTable.entrySet()){
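Note: the hunk above also reflects the rename of LayerConfiguration.getLayerName() to getName(). A small sketch of the renamed accessor, assuming Layer still exposes getIndex() and getLayerConfiguration() as used above (the helper method itself is hypothetical):

    // Hypothetical helper mirroring the prefix logic in the hunk above.
    static String paramPrefix(org.deeplearning4j.nn.api.Layer l, boolean isMultiLayerNetwork) {
        return isMultiLayerNetwork
                ? l.getIndex() + "_"                          // MultiLayerNetwork: index-based prefix
                : l.getLayerConfiguration().getName() + "_";  // ComputationGraph: name-based prefix (was getLayerName())
    }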
@@ -88,11 +88,11 @@ public class CNN1DTestCases {
.convolutionMode(ConvolutionMode.Same))
.graphBuilder()
.addInputs("in")
.layer("0", new Convolution1DLayer.Builder().nOut(32).activation(Activation.TANH).kernelSize(3).stride(1).build(), "in")
.layer("1", new Subsampling1DLayer.Builder().kernelSize(2).stride(1).poolingType(SubsamplingLayer.PoolingType.MAX).build(), "0")
.layer("2", new Cropping1D(1), "1")
.layer("3", new ZeroPadding1DLayer(1), "2")
.layer("out", new RnnOutputLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).nOut(nOut).build(), "3")
.layer("0", Convolution1DLayer.builder().nOut(32).activation(Activation.TANH).kernelSize(3).stride(1).build(), "in")
.layer("1", Subsampling1DLayer.builder().kernelSize(2).stride(1).poolingType(SubsamplingLayer.PoolingType.MAX.toPoolingType()).build(), "0")
.layer("2", Cropping1D.builder(1).build(), "1")
.layer("3", ZeroPadding1DLayer.builder(1).build(), "2")
.layer("out", RnnOutputLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).nOut(nOut).build(), "3")
.setInputTypes(InputType.recurrent(nOut))
.setOutputs("out")
.build();
@ -105,30 +105,30 @@ public class CNN2DTestCases {
|
|||
.weightInit(WeightInit.XAVIER)
|
||||
.updater(new Nesterovs(0.01, 0.9))
|
||||
|
||||
.layer(0, new ConvolutionLayer.Builder(5, 5)
|
||||
.layer(0, ConvolutionLayer.builder(5, 5)
|
||||
//nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
|
||||
.nIn(nChannels)
|
||||
.stride(1, 1)
|
||||
.nOut(20)
|
||||
.activation(Activation.IDENTITY)
|
||||
.build())
|
||||
.layer(1, new SubsamplingLayer.Builder(PoolingType.MAX)
|
||||
.layer(1, SubsamplingLayer.builder(PoolingType.MAX)
|
||||
.kernelSize(2, 2)
|
||||
.stride(2, 2)
|
||||
.build())
|
||||
.layer(2, new ConvolutionLayer.Builder(5, 5)
|
||||
.layer(2, ConvolutionLayer.builder(5, 5)
|
||||
//Note that nIn need not be specified in later layers
|
||||
.stride(1, 1)
|
||||
.nOut(50)
|
||||
.activation(Activation.IDENTITY)
|
||||
.build())
|
||||
.layer(3, new SubsamplingLayer.Builder(PoolingType.MAX)
|
||||
.layer(3, SubsamplingLayer.builder(PoolingType.MAX)
|
||||
.kernelSize(2, 2)
|
||||
.stride(2, 2)
|
||||
.build())
|
||||
.layer(4, new DenseLayer.Builder().activation(Activation.RELU)
|
||||
.layer(4, DenseLayer.builder().activation(Activation.RELU)
|
||||
.nOut(500).build())
|
||||
.layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.layer(5, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.nOut(outputNum)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
@ -221,7 +221,7 @@ public class CNN2DTestCases {
|
|||
.seed(12345)
|
||||
.build())
|
||||
.removeVertexKeepConnections("predictions")
|
||||
.addLayer("predictions", new OutputLayer.Builder()
|
||||
.addLayer("predictions", OutputLayer.builder()
|
||||
.nIn(4096)
|
||||
.nOut(200) //Tiny imagenet
|
||||
.build(), "fc2")
|
||||
|
@ -321,7 +321,7 @@ public class CNN2DTestCases {
|
|||
.removeVertexKeepConnections("conv2d_9")
|
||||
.removeVertexAndConnections("outputs")
|
||||
.addLayer("convolution2d_9",
|
||||
new ConvolutionLayer.Builder(1,1)
|
||||
ConvolutionLayer.builder(1,1)
|
||||
.nIn(1024)
|
||||
.nOut(nBoxes * (5 + nClasses))
|
||||
.stride(1,1)
|
||||
|
@ -417,32 +417,32 @@ public class CNN2DTestCases {
|
|||
.weightInit(WeightInit.XAVIER)
|
||||
.updater(new Nesterovs(0.01, 0.9))
|
||||
|
||||
.layer(0, new ConvolutionLayer.Builder(5, 5)
|
||||
.layer(0, ConvolutionLayer.builder(5, 5)
|
||||
//nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
|
||||
.nIn(1)
|
||||
.stride(1, 1)
|
||||
.nOut(20)
|
||||
.activation(Activation.IDENTITY)
|
||||
.build())
|
||||
.layer(1, new SubsamplingLayer.Builder(PoolingType.MAX)
|
||||
.layer(1, SubsamplingLayer.builder(PoolingType.MAX)
|
||||
.kernelSize(2, 2)
|
||||
.stride(2, 2)
|
||||
.build())
|
||||
.layer(2, new ConvolutionLayer.Builder(5, 5)
|
||||
.layer(2, ConvolutionLayer.builder(5, 5)
|
||||
//Note that nIn need not be specified in later layers
|
||||
.stride(1, 1)
|
||||
.nOut(50)
|
||||
.activation(Activation.IDENTITY)
|
||||
.dropOut(0.5) //**** Dropout on conv layer
|
||||
.build())
|
||||
.layer(3, new SubsamplingLayer.Builder(PoolingType.MAX)
|
||||
.layer(3, SubsamplingLayer.builder(PoolingType.MAX)
|
||||
.kernelSize(2, 2)
|
||||
.stride(2, 2)
|
||||
.build())
|
||||
.layer(4, new DenseLayer.Builder().activation(Activation.RELU)
|
||||
.layer(4, DenseLayer.builder().activation(Activation.RELU)
|
||||
.dropOut(0.5) //**** Dropout on dense layer
|
||||
.nOut(500).build())
|
||||
.layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.layer(5, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.nOut(10)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
|
|
@ -82,18 +82,18 @@ public class CNN3DTestCases {
|
|||
.updater(new Nesterovs(0.01, 0.9))
|
||||
.convolutionMode(ConvolutionMode.Same)
|
||||
|
||||
.layer(new Convolution3D.Builder(3,3,3)
|
||||
.layer(Convolution3D.builder(3,3,3)
|
||||
.dataFormat(Convolution3D.DataFormat.NCDHW)
|
||||
.nIn(nChannels)
|
||||
.stride(2, 2, 2)
|
||||
.nOut(8)
|
||||
.activation(Activation.IDENTITY)
|
||||
.build())
|
||||
.layer(new Subsampling3DLayer.Builder(PoolingType.MAX)
|
||||
.layer(Subsampling3DLayer.builder(PoolingType.MAX)
|
||||
.kernelSize(2, 2, 2)
|
||||
.stride(2, 2, 2)
|
||||
.build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.layer(OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.nOut(outputNum)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
|
|
@ -104,8 +104,8 @@ public class MLPTestCases {
|
|||
.build()))
|
||||
.l1(1e-3).l2(1e-3)
|
||||
|
||||
.layer(new DenseLayer.Builder().activation(Activation.TANH).nOut(64).build())
|
||||
.layer(new OutputLayer.Builder().nOut(10)
|
||||
.layer(DenseLayer.builder().activation(Activation.TANH).nOut(64).build())
|
||||
.layer(OutputLayer.builder().nOut(10)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
@ -202,11 +202,11 @@ public class MLPTestCases {
|
|||
.seed(seed)
|
||||
.updater(new Nesterovs(learningRate, 0.9))
|
||||
|
||||
.layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
|
||||
.layer(0, DenseLayer.builder().nIn(numInputs).nOut(numHiddenNodes)
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.layer(1, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.nIn(numHiddenNodes).nOut(numOutputs).build())
|
||||
|
|
|
@ -119,11 +119,11 @@ public class RNNTestCases {
|
|||
.weightInit(WeightInit.XAVIER)
|
||||
.updater(new Adam(1e-3))
|
||||
|
||||
.layer(0, new LSTM.Builder().nIn(iter.inputColumns()).nOut(lstmLayerSize)
|
||||
.layer(0, LSTM.builder().nIn(iter.inputColumns()).nOut(lstmLayerSize)
|
||||
.activation(Activation.TANH).build())
|
||||
.layer(1, new LSTM.Builder().nIn(lstmLayerSize).nOut(lstmLayerSize)
|
||||
.layer(1, LSTM.builder().nIn(lstmLayerSize).nOut(lstmLayerSize)
|
||||
.activation(Activation.TANH).build())
|
||||
.layer(2, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX) //MCXENT + softmax for classification
|
||||
.layer(2, RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX) //MCXENT + softmax for classification
|
||||
.nIn(lstmLayerSize).nOut(nOut).build())
|
||||
.backpropType(BackpropType.TruncatedBPTT).tbpttFwdLength(tbpttLength).tbpttBackLength(tbpttLength)
|
||||
|
||||
|
@ -201,9 +201,9 @@ public class RNNTestCases {
|
|||
.updater(new Adam(5e-2))
|
||||
.l1(1e-3).l2(1e-3)
|
||||
|
||||
.layer(0, new LSTM.Builder().activation(Activation.TANH).nOut(10).build())
|
||||
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(new OutputLayer.Builder().nOut(6)
|
||||
.layer(0, LSTM.builder().activation(Activation.TANH).nOut(10).build())
|
||||
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(OutputLayer.builder().nOut(6)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
@ -322,9 +322,9 @@ public class RNNTestCases {
|
|||
.updater(new Adam(5e-2))
|
||||
.l1(1e-3).l2(1e-3)
|
||||
|
||||
.layer(0, new Bidirectional(new LSTM.Builder().activation(Activation.TANH).nOut(10).build()))
|
||||
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(new OutputLayer.Builder().nOut(6)
|
||||
.layer(0, Bidirectional.builder(LSTM.builder().activation(Activation.TANH).nOut(10).build()))
|
||||
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(OutputLayer.builder().nOut(6)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
|
|
@ -79,7 +79,7 @@ public class UnsupervisedTestCases {
|
|||
.weightInit(WeightInit.XAVIER)
|
||||
.l2(1e-4)
|
||||
|
||||
.layer(0, new VariationalAutoencoder.Builder()
|
||||
.layer(0, VariationalAutoencoder.builder()
|
||||
.activation(Activation.TANH)
|
||||
.encoderLayerSizes(256, 256) //2 encoder layers, each of size 256
|
||||
.decoderLayerSizes(256, 256) //2 decoder layers, each of size 256
|
||||
|
|
|
@ -42,9 +42,9 @@ public class RandomTests extends BaseDL4JTest {
|
|||
|
||||
final NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(10)
|
||||
.layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(10)
|
||||
.activation(Activation.TANH).build())
|
||||
.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
|
||||
.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder(
|
||||
LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10)
|
||||
.activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
|
|
|
@@ -73,7 +73,7 @@ public class TestUtils {
throw new RuntimeException(e);
}

//Also check the NeuralNetConfiguration is serializable (required by Spark etc)
//Also check the NeuralNetConfiguration is serializable (required by Spark etc.)
NeuralNetConfiguration conf = net.getNetConfiguration();
serializeDeserializeJava(conf);
@@ -317,14 +317,14 @@ public class TestUtils {
for(Layer l : layers){
//Don't use instanceof here - there are sub conv subclasses
if(l.getClass() == ConvolutionLayer.class || l instanceof SubsamplingLayer || l instanceof BatchNormalization || l instanceof LSTM){
Preconditions.checkNotNull(l.getHelper(), l.getLayerConfiguration().getLayerName());
Preconditions.checkNotNull(l.getHelper(), l.getLayerConfiguration().getName());
}
}
}

public static void assertHelpersAbsent(Layer[] layers) throws Exception {
for(Layer l : layers){
Preconditions.checkState(l.getHelper() == null, l.getLayerConfiguration().getLayerName());
Preconditions.checkState(l.getHelper() == null, l.getLayerConfiguration().getName());
}
}
}
@@ -473,9 +473,7 @@ public class RecordReaderDataSetiteratorTest extends BaseDL4JTest {

public Pair<double[][],File> makeRandomCSV(String tempFile, int nLines, int nFeatures) throws IOException {
File temp = temporaryFolder;
temp.mkdirs();
temp.deleteOnExit();
File temp = new File(temporaryFolder, "makeRandomCSV.csv");
Random rand = new Random(12345);

double[][] dArr = new double[nLines][nFeatures + 1];
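Note: the change above stops using the injected temporary folder itself as a file and instead creates a named CSV inside it. A short sketch of that pattern, assuming temporaryFolder is a JUnit 5 @TempDir directory (the class, field, and method names here are illustrative):

    import java.io.File;
    import org.junit.jupiter.api.io.TempDir;

    class TempFileSketch {
        @TempDir
        File temporaryFolder;   // injected as a directory, never as a writable file

        File newCsv() {
            // Create a named file under the managed directory; the real test then writes its rows here.
            return new File(temporaryFolder, "makeRandomCSV.csv");
        }
    }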
@@ -774,7 +774,7 @@ public class RecordReaderMultiDataSetIteratorTest extends BaseDL4JTest {

@Test
public void testExcludeStringColCSV() throws Exception {
File csvFile = temporaryFolder;
File csvFile = new File(temporaryFolder, "test.csv");

StringBuilder sb = new StringBuilder();
for(int i=1; i<=10; i++ ){
@@ -41,7 +41,9 @@ import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.listeners.CollectScoresIterationListener;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;

import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
@ -170,11 +172,11 @@ public class DataSetIteratorTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration.NeuralNetConfigurationBuilder builder = NeuralNetConfiguration.builder().seed(seed)
|
||||
.gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.layer(0, new ConvolutionLayer.Builder(5, 5).nIn(numChannels).nOut(6)
|
||||
.layer(0, ConvolutionLayer.builder(5, 5).nIn(numChannels).nOut(6)
|
||||
.weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
|
||||
.layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
|
||||
.layer(1, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
|
||||
.stride(1, 1).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.layer(2, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
.inputType(InputType.convolutionalFlat(numRows, numColumns, numChannels));
|
||||
|
@@ -207,7 +209,8 @@ public class DataSetIteratorTest extends BaseDL4JTest {
}

@Test //@Ignore //Ignored for now - CIFAR iterator needs work - https://github.com/eclipse/deeplearning4j/issues/4673
@Test @Timeout(1200) @Disabled("Runs quite some time.")
//Ignored for now - CIFAR iterator needs work - https://github.com/eclipse/deeplearning4j/issues/4673
public void testCifarModel() throws Exception {
// Streaming
runCifar(false);
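Note: the hunk above replaces the commented-out JUnit 4 //@Ignore with JUnit 5 annotations. A minimal sketch of the annotated test method, with the body elided (org.junit.jupiter.api.Timeout defaults to seconds):

    import org.junit.jupiter.api.Disabled;
    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.Timeout;

    class CifarModelSketch {
        @Test
        @Timeout(1200)                          // fail if the test runs longer than 1200 seconds
        @Disabled("Runs quite some time.")      // skipped by default, but still discoverable and documented
        void testCifarModel() {
            // the real test exercises the streaming and pre-processed CIFAR iterator variants
        }
    }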
@ -230,11 +233,11 @@ public class DataSetIteratorTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration.NeuralNetConfigurationBuilder builder = NeuralNetConfiguration.builder().seed(seed)
|
||||
.gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.layer(0, new ConvolutionLayer.Builder(5, 5).nIn(channels).nOut(6).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, ConvolutionLayer.builder(5, 5).nIn(channels).nOut(6).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU).build())
|
||||
.layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
|
||||
.layer(1, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
|
||||
.build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.layer(2, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
||||
|
|
|
@ -76,10 +76,12 @@ public class TestFileIterators extends BaseDL4JTest {
|
|||
assertEquals(exp, act);
|
||||
|
||||
//Test multiple directories
|
||||
|
||||
File f2a = new File(folder2, "f2a");
|
||||
f2a.mkdirs();
|
||||
File f2b = new File(folder2, "f2b");
|
||||
f2b.mkdirs();
|
||||
File f2c = new File(folder2, "f2c");
|
||||
f2c.mkdirs();
|
||||
d1.save(new File(f2a, "d1.bin"));
|
||||
d2.save(new File(f2a, "d2.bin"));
|
||||
d3.save(new File(f2b, "d3.bin"));
|
||||
|
@ -188,8 +190,11 @@ public class TestFileIterators extends BaseDL4JTest {
|
|||
|
||||
//Test multiple directories
|
||||
File f2a = new File(folder2, "2-f2a");
|
||||
f2a.mkdirs();
|
||||
File f2b = new File(folder2, "2-f2b");
|
||||
f2b.mkdirs();
|
||||
File f2c = new File(folder2, "2-f2C");
|
||||
f2c.mkdirs();
|
||||
d1.save(new File(f2a, "d1.bin"));
|
||||
d2.save(new File(f2a, "d2.bin"));
|
||||
d3.save(new File(f2b, "d3.bin"));
|
||||
|
|
|
@ -135,8 +135,8 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.seed(12345)
|
||||
.updater(new Sgd(0.5)).weightInit(WeightInit.XAVIER)
|
||||
.layer(new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build())
|
||||
.layer(new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.layer(DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build())
|
||||
.layer(OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
|
@ -221,7 +221,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(0.01)).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.layer(0, OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
|
@ -250,7 +250,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.layer(0, OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
|
@ -300,7 +300,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(5.0)) //Intentionally huge LR
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.layer(0, new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
|
||||
.layer(0, OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -338,7 +338,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(1e-6)).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.layer(0, OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
|
@ -381,7 +381,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(0.0)).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.layer(0, OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
|
@ -421,11 +421,11 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Nesterovs(0.0,0.9))
|
||||
.layer(0, new DenseLayer.Builder().nIn(1).nOut(20)
|
||||
.layer(0, DenseLayer.builder().nIn(1).nOut(20)
|
||||
.weightInit(WeightInit.XAVIER).activation(
|
||||
Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MSE).weightInit(WeightInit.XAVIER)
|
||||
.layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.IDENTITY).weightInit(WeightInit.XAVIER).nIn(20).nOut(1)
|
||||
.build())
|
||||
.build();
|
||||
|
@ -468,7 +468,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.layer(0, OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
|
@ -506,7 +506,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.layer(0, OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
|
@ -570,8 +570,8 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(784).nOut(32).build())
|
||||
.layer(new OutputLayer.Builder().nIn(32).nOut(784).activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.MSE).build())
|
||||
.layer(DenseLayer.builder().nIn(784).nOut(32).build())
|
||||
.layer(OutputLayer.builder().nIn(32).nOut(784).activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.MSE).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -613,7 +613,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
|
||||
.layer(new AutoEncoder.Builder().nIn(784).nOut(32).build())
|
||||
.layer(AutoEncoder.builder().nIn(784).nOut(32).build())
|
||||
|
||||
.build();
|
||||
|
||||
|
@ -656,7 +656,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
|
||||
.layer(new VariationalAutoencoder.Builder()
|
||||
.layer(VariationalAutoencoder.builder()
|
||||
.nIn(784).nOut(32)
|
||||
.encoderLayerSizes(64)
|
||||
.decoderLayerSizes(64)
|
||||
|
@ -701,7 +701,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
|
||||
.layer(new VariationalAutoencoder.Builder()
|
||||
.layer(VariationalAutoencoder.builder()
|
||||
.nIn(784).nOut(32)
|
||||
.encoderLayerSizes(64)
|
||||
.decoderLayerSizes(64)
|
||||
|
@ -748,8 +748,8 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(784).nOut(32).build())
|
||||
.layer(new OutputLayer.Builder().nIn(32).nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.layer(DenseLayer.builder().nIn(784).nOut(32).build())
|
||||
.layer(OutputLayer.builder().nIn(32).nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -785,7 +785,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
public void testEarlyStoppingListeners() {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.layer(0, OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
|
@ -868,14 +868,14 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
.ClipElementWiseAbsoluteValue)
|
||||
.gradientNormalizationThreshold(1.0)
|
||||
|
||||
.layer(0, new LSTM.Builder()
|
||||
.layer(0, LSTM.builder()
|
||||
.nIn(10)
|
||||
.nOut(10)
|
||||
.activation(Activation.TANH)
|
||||
.gateActivationFunction(Activation.SIGMOID)
|
||||
.dropOut(0.5)
|
||||
.build())
|
||||
.layer(1, new RnnOutputLayer.Builder()
|
||||
.layer(1, RnnOutputLayer.builder()
|
||||
.nIn(10)
|
||||
.nOut(outputs)
|
||||
.activation(Activation.SOFTMAX)
|
||||
|
|
|
@ -79,7 +79,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
|
||||
.addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
|
||||
.setOutputs("0").build();
|
||||
|
@ -124,7 +124,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(5.0)) //Intentionally huge LR
|
||||
.weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
|
||||
.addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
|
||||
.addLayer("0", OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
|
||||
.setOutputs("0").build();
|
||||
ComputationGraph net = new ComputationGraph(conf);
|
||||
|
@ -160,7 +160,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(1e-6)).weightInit(WeightInit.XAVIER).graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
|
||||
.setOutputs("0").build();
|
||||
|
@ -202,7 +202,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(0.0)).weightInit(WeightInit.XAVIER).graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
|
||||
.setOutputs("0").build();
|
||||
|
@ -236,7 +236,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
|
||||
.addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
|
||||
.setOutputs("0").build();
|
||||
|
@ -300,8 +300,8 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.layer("0", new DenseLayer.Builder().nIn(784).nOut(32).build(), "in")
|
||||
.layer("1", new OutputLayer.Builder().nIn(32).nOut(784).activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.MSE).build(), "0")
|
||||
.layer("0", DenseLayer.builder().nIn(784).nOut(32).build(), "in")
|
||||
.layer("1", OutputLayer.builder().nIn(32).nOut(784).activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.MSE).build(), "0")
|
||||
.setOutputs("1")
|
||||
.build();
|
||||
|
||||
|
@ -346,7 +346,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.layer("0", new AutoEncoder.Builder().nIn(784).nOut(32).build(), "in")
|
||||
.layer("0", AutoEncoder.builder().nIn(784).nOut(32).build(), "in")
|
||||
.setOutputs("0")
|
||||
|
||||
.build();
@@ -391,7 +391,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("in")
.layer("0", new VariationalAutoencoder.Builder()
.layer("0", VariationalAutoencoder.builder()
.nIn(784).nOut(32)
.encoderLayerSizes(64)
.decoderLayerSizes(64)

@@ -439,7 +439,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
.updater(new Adam(1e-5))
.graphBuilder()
.addInputs("in")
.layer("0", new VariationalAutoencoder.Builder()
.layer("0", VariationalAutoencoder.builder()
.nIn(784).nOut(32)
.encoderLayerSizes(64)
.decoderLayerSizes(64)
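The VariationalAutoencoder hunks are cut off mid-chain by the diff context. Purely for illustration, a hypothetical completion of that chain; the trailing calls after decoderLayerSizes are assumptions, not part of this commit:

    // Sketch only; everything after decoderLayerSizes(64) is assumed for illustration.
    ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
            .updater(new Adam(1e-5))
            .graphBuilder()
            .addInputs("in")
            .layer("0", VariationalAutoencoder.builder()
                    .nIn(784).nOut(32)
                    .encoderLayerSizes(64)
                    .decoderLayerSizes(64)
                    .build(), "in")
            .setOutputs("0")
            .build();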
@@ -489,8 +489,8 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("in")
.layer("0", new DenseLayer.Builder().nIn(784).nOut(32).build(), "in")
.layer("1", new OutputLayer.Builder().nIn(32).nOut(10).activation(Activation.SOFTMAX).build(), "0")
.layer("0", DenseLayer.builder().nIn(784).nOut(32).build(), "in")
.layer("1", OutputLayer.builder().nIn(32).nOut(10).activation(Activation.SOFTMAX).build(), "0")
.setOutputs("1")
.build();

@@ -530,7 +530,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
.updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
.graphBuilder()
.addInputs("in")
.layer("0", new OutputLayer.Builder().nIn(4).nOut(3)
.layer("0", OutputLayer.builder().nIn(4).nOut(3)
.activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
.setOutputs("0")
@@ -73,9 +73,9 @@ public class EvalTest extends BaseDL4JTest {

.optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).seed(42)
.updater(new Sgd(1e-6)).list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(2).activation(Activation.TANH)
.layer(0, DenseLayer.builder().nIn(4).nOut(2).activation(Activation.TANH)
.weightInit(WeightInit.XAVIER).build())
.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3).weightInit(WeightInit.XAVIER)
.activation(Activation.SOFTMAX).build())
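Two equivalent migration idioms appear in EvalTest: passing the loss function to the static factory, or setting it fluently afterwards. Both occur in this diff (the fluent form in the hunk above, the factory-argument form in the @-477 hunk further down); a condensed sketch of the two variants inside a list() configuration:

    // Variant 1: loss function passed to the static factory (as in the @-477 hunk below)
    .layer(1, OutputLayer.builder(LossFunctions.LossFunction.MCXENT)
            .nIn(2).nOut(3).activation(Activation.SOFTMAX).build())

    // Variant 2: loss function set fluently (as in the hunk above)
    .layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
            .nIn(2).nOut(3).activation(Activation.SOFTMAX).build())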
@@ -180,7 +180,7 @@ public class EvalTest extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(new Sgd(0.1))
.list()
.layer(0, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
.layer(0, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(4).nOut(3).build())
.build();
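The list-based MultiLayerNetwork configurations migrate identically. A minimal sketch of the new style, assuming the standard DL4J imports:

    // Sketch only: single softmax output layer built through the static factory.
    NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
            .updater(new Sgd(0.1))
            .list()
            .layer(0, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX).nIn(4).nOut(3).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();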
@@ -300,8 +300,8 @@ public class EvalTest extends BaseDL4JTest {
.trainingWorkspaceMode(ws)
.inferenceWorkspaceMode(ws)
.list()
.layer(new LSTM.Builder().nIn(nIn).nOut(layerSize).build())
.layer(new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut)
.layer(LSTM.builder().nIn(nIn).nOut(layerSize).build())
.layer(RnnOutputLayer.builder().nIn(layerSize).nOut(nOut)
.activation(Activation.SOFTMAX)
.build())
.build();

@@ -311,8 +311,8 @@ public class EvalTest extends BaseDL4JTest {
.trainingWorkspaceMode(ws)
.inferenceWorkspaceMode(ws)
.list()
.layer(new LSTM.Builder().nIn(nIn).nOut(layerSize).build())
.layer(new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut)
.layer(LSTM.builder().nIn(nIn).nOut(layerSize).build())
.layer(RnnOutputLayer.builder().nIn(layerSize).nOut(nOut)
.activation(Activation.SOFTMAX).build())
.tbpttFwdLength(10).tbpttBackLength(10)
.backpropType(BackpropType.TruncatedBPTT)
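The recurrent configurations keep their TBPTT settings; only the layer construction changes. A minimal sketch with concrete sizes substituted for the test's nIn, layerSize and nOut variables (the numbers are placeholders):

    // Sketch only: LSTM.builder() and RnnOutputLayer.builder() replace the old inner Builders.
    NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
            .list()
            .layer(LSTM.builder().nIn(3).nOut(10).build())
            .layer(RnnOutputLayer.builder().nIn(10).nOut(3)
                    .activation(Activation.SOFTMAX).build())
            .tbpttFwdLength(10).tbpttBackLength(10)
            .backpropType(BackpropType.TruncatedBPTT)
            .build();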
|
||||
|
@ -377,8 +377,8 @@ public class EvalTest extends BaseDL4JTest {
|
|||
.inferenceWorkspaceMode(ws)
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new LSTM.Builder().nIn(nIn).nOut(layerSize).build(), "in")
|
||||
.addLayer("1", new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut)
|
||||
.addLayer("0", LSTM.builder().nIn(nIn).nOut(layerSize).build(), "in")
|
||||
.addLayer("1", RnnOutputLayer.builder().nIn(layerSize).nOut(nOut)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build(), "0")
|
||||
.setOutputs("1")
|
||||
|
@ -390,8 +390,8 @@ public class EvalTest extends BaseDL4JTest {
|
|||
.inferenceWorkspaceMode(ws)
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new LSTM.Builder().nIn(nIn).nOut(layerSize).build(), "in")
|
||||
.addLayer("1", new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut)
|
||||
.addLayer("0", LSTM.builder().nIn(nIn).nOut(layerSize).build(), "in")
|
||||
.addLayer("1", RnnOutputLayer.builder().nIn(layerSize).nOut(nOut)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build(), "0")
|
||||
.setOutputs("1")
|
||||
|
@ -457,8 +457,8 @@ public class EvalTest extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
|
||||
.list()
|
||||
.layer(0, new LSTM.Builder().activation(Activation.TANH).nIn(3).nOut(3).build())
|
||||
.layer(1, new RnnOutputLayer.Builder().activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.XENT)
|
||||
.layer(0, LSTM.builder().activation(Activation.TANH).nIn(3).nOut(3).build())
|
||||
.layer(1, RnnOutputLayer.builder().activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.XENT)
|
||||
.nIn(3).nOut(1).build())
|
||||
.backpropType(BackpropType.TruncatedBPTT).tbpttFwdLength(10).tbpttBackLength(10)
|
||||
.build();
|
||||
|
@ -477,9 +477,9 @@ public class EvalTest extends BaseDL4JTest {
|
|||
|
||||
.optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).seed(42)
|
||||
.updater(new Sgd(1e-6)).list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(4).nOut(2).activation(Activation.TANH)
|
||||
.layer(0, DenseLayer.builder().nIn(4).nOut(2).activation(Activation.TANH)
|
||||
.weightInit(WeightInit.XAVIER).build())
|
||||
.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
|
||||
.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder(
|
||||
LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
|
@@ -507,8 +507,8 @@ public class EvalTest extends BaseDL4JTest {
.seed(12345)
.graphBuilder()
.addInputs("in")
.addLayer("out1", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
.addLayer("out2", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
.addLayer("out1", OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
.addLayer("out2", OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
.setOutputs("out1", "out2")
.build();
@@ -541,11 +541,11 @@ public class EvalTest extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("in")
.layer("0", new EmbeddingSequenceLayer.Builder().nIn(10).nOut(10).build(), "in")
.layer("1", new LSTM.Builder().nIn(10).nOut(10).build(), "0")
.layer("2", new LSTM.Builder().nIn(10).nOut(10).build(), "0")
.layer("out1", new RnnOutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "1")
.layer("out2", new RnnOutputLayer.Builder().nIn(10).nOut(20).activation(Activation.SOFTMAX).build(), "2")
.layer("0", EmbeddingSequenceLayer.builder().nIn(10).nOut(10).build(), "in")
.layer("1", LSTM.builder().nIn(10).nOut(10).build(), "0")
.layer("2", LSTM.builder().nIn(10).nOut(10).build(), "0")
.layer("out1", RnnOutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "1")
.layer("out2", RnnOutputLayer.builder().nIn(10).nOut(20).activation(Activation.SOFTMAX).build(), "2")
.setOutputs("out1", "out2")
.build();
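The multi-output graph above shows that every layer type in the chain, embedding, LSTM and RNN output alike, moves to the static factory in one pass while the vertex wiring stays untouched. A condensed sketch of the migrated form, trimmed to a single output for brevity:

    // Sketch only: all layers use the static builder(); graph wiring is unchanged.
    ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
            .graphBuilder()
            .addInputs("in")
            .layer("0", EmbeddingSequenceLayer.builder().nIn(10).nOut(10).build(), "in")
            .layer("1", LSTM.builder().nIn(10).nOut(10).build(), "0")
            .layer("out1", RnnOutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "1")
            .setOutputs("out1")
            .build();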
|
||||
|
||||
|
@ -569,8 +569,8 @@ public class EvalTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
|
||||
.list()
|
||||
.layer(new DenseLayer.Builder().nIn(4).nOut(10).build())
|
||||
.layer(new OutputLayer.Builder().nIn(10).nOut(3).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.RELU).build())
|
||||
.layer(DenseLayer.builder().nIn(4).nOut(10).build())
|
||||
.layer(OutputLayer.builder().nIn(10).nOut(3).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.RELU).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
|
|
@ -48,8 +48,8 @@ public class EvaluationToolsTests extends BaseDL4JTest {
|
|||
DataSetIterator iter = new IrisDataSetIterator(150, 150);
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
|
||||
new OutputLayer.Builder().nIn(4).nOut(2).activation(Activation.SOFTMAX)
|
||||
.layer(0, DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
|
||||
OutputLayer.builder().nIn(4).nOut(2).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -90,8 +90,8 @@ public class EvaluationToolsTests extends BaseDL4JTest {
|
|||
DataSetIterator iter = new IrisDataSetIterator(150, 150);
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
|
||||
new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
|
||||
.layer(0, DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
|
||||
OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
|
|
@ -84,8 +84,8 @@ public class ROCTest extends BaseDL4JTest {
|
|||
Nd4j.getRandom().setSeed(12345);
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).seed(12345)
|
||||
.list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
|
||||
new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
|
||||
.layer(0, DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
|
||||
OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
|
|
@ -49,7 +49,7 @@ public class RegressionEvalTest extends BaseDL4JTest {
|
|||
|
||||
//Basic sanity check
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.ZERO).list()
|
||||
.layer(0, new OutputLayer.Builder().activation(Activation.TANH)
|
||||
.layer(0, OutputLayer.builder().activation(Activation.TANH)
|
||||
.lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(5).build())
|
||||
.build();
|
||||
|
||||
|
@ -71,7 +71,7 @@ public class RegressionEvalTest extends BaseDL4JTest {
|
|||
|
||||
ComputationGraphConfiguration graphConf =
|
||||
NeuralNetConfiguration.builder().weightInit(WeightInit.ZERO).graphBuilder()
|
||||
.addInputs("in").addLayer("0", new OutputLayer.Builder()
|
||||
.addInputs("in").addLayer("0", OutputLayer.builder()
|
||||
.lossFunction(LossFunctions.LossFunction.MSE)
|
||||
.activation(Activation.TANH).nIn(10).nOut(5).build(), "in")
|
||||
.setOutputs("0").build();
|
||||
|
|
|
@ -41,8 +41,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
|
|||
|
||||
public static MultiLayerNetwork getDensePlusOutput(int nIn, int nOut) {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(nIn).nOut(10).build())
|
||||
.layer(1, new OutputLayer.Builder().nIn(10).nOut(nOut).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(nIn).nOut(10).build())
|
||||
.layer(1, OutputLayer.builder().nIn(10).nOut(nOut).build()).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -52,8 +52,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
|
|||
|
||||
public static MultiLayerNetwork getLSTMPlusRnnOutput(int nIn, int nOut) {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new GravesLSTM.Builder().nIn(nIn).nOut(10).build())
|
||||
.layer(1, new RnnOutputLayer.Builder().nIn(10).nOut(nOut).build()).build();
|
||||
.layer(0, GravesLSTM.builder().nIn(nIn).nOut(10).build())
|
||||
.layer(1, RnnOutputLayer.builder().nIn(10).nOut(nOut).build()).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -63,8 +63,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
|
|||
|
||||
public static MultiLayerNetwork getCnnPlusOutputLayer(int depthIn, int inH, int inW, int nOut) {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new ConvolutionLayer.Builder().nIn(depthIn).nOut(5).build())
|
||||
.layer(1, new OutputLayer.Builder().nOut(nOut).build())
|
||||
.layer(0, ConvolutionLayer.builder().nIn(depthIn).nOut(5).build())
|
||||
.layer(1, OutputLayer.builder().nOut(nOut).build())
|
||||
.inputType(InputType.convolutional(inH, inW, depthIn)).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -90,8 +90,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
|
|||
public void testDenseNout0() {
|
||||
try {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(10).nOut(0).build())
|
||||
.layer(1, new OutputLayer.Builder().nIn(10).nOut(10).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(10).nOut(0).build())
|
||||
.layer(1, OutputLayer.builder().nIn(10).nOut(10).build()).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -147,8 +147,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
|
|||
public void testLSTMNOut0() {
|
||||
try {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new GravesLSTM.Builder().nIn(10).nOut(0).build())
|
||||
.layer(1, new RnnOutputLayer.Builder().nIn(10).nOut(10).build()).build();
|
||||
.layer(0, GravesLSTM.builder().nIn(10).nOut(0).build())
|
||||
.layer(1, RnnOutputLayer.builder().nIn(10).nOut(10).build()).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -178,8 +178,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
|
|||
public void testConvolutionalNOut0() {
|
||||
try {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new ConvolutionLayer.Builder().nIn(5).nOut(0).build())
|
||||
.layer(1, new OutputLayer.Builder().nOut(10).build())
|
||||
.layer(0, ConvolutionLayer.builder().nIn(5).nOut(0).build())
|
||||
.layer(1, OutputLayer.builder().nOut(10).build())
|
||||
.inputType(InputType.convolutional(10, 10, 5)).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -208,9 +208,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
|
|||
try {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().convolutionMode(ConvolutionMode.Strict)
|
||||
.list()
|
||||
.layer(0, new ConvolutionLayer.Builder().kernelSize(3, 2).stride(2, 2).padding(0, 0).nOut(5)
|
||||
.layer(0, ConvolutionLayer.builder().kernelSize(3, 2).stride(2, 2).padding(0, 0).nOut(5)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder().nOut(10).build())
|
||||
.layer(1, OutputLayer.builder().nOut(10).build())
|
||||
.inputType(InputType.convolutional(hIn, wIn, depthIn)).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -234,9 +234,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
|
|||
int wIn = 10;
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new ConvolutionLayer.Builder().kernelSize(7, 7).stride(1, 1).padding(0, 0).nOut(5)
|
||||
.layer(0, ConvolutionLayer.builder().kernelSize(7, 7).stride(1, 1).padding(0, 0).nOut(5)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.layer(1, OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutional(hIn, wIn, depthIn)).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -266,9 +266,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf =
|
||||
NeuralNetConfiguration.builder().convolutionMode(ConvolutionMode.Strict).list()
|
||||
.layer(0, new ConvolutionLayer.Builder().kernelSize(3, 3).stride(2, 2)
|
||||
.layer(0, ConvolutionLayer.builder().kernelSize(3, 3).stride(2, 2)
|
||||
.padding(0, 0).nIn(depthIn).nOut(5).build())
|
||||
.layer(1, new OutputLayer.Builder().nIn(5 * 4 * 4).nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.layer(1, OutputLayer.builder().nIn(5 * 4 * 4).nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.inputPreProcessor(1, new CnnToFeedForwardPreProcessor()).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -299,9 +299,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
|
|||
|
||||
try {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 3).stride(2, 2).padding(0, 0).nOut(5)
|
||||
.layer(0, ConvolutionLayer.builder().kernelSize(2, 3).stride(2, 2).padding(0, 0).nOut(5)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.layer(1, OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutional(hIn, wIn, depthIn)).build();
|
||||
} catch (Exception e) {
|
||||
fail("Did not expect exception with default (truncate)");
|
||||
|
@ -310,9 +310,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
|
|||
try {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().convolutionMode(ConvolutionMode.Strict)
|
||||
.list()
|
||||
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 3).stride(2, 2).padding(0, 0).nOut(5)
|
||||
.layer(0, ConvolutionLayer.builder().kernelSize(2, 3).stride(2, 2).padding(0, 0).nOut(5)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder().nOut(10).build())
|
||||
.layer(1, OutputLayer.builder().nOut(10).build())
|
||||
.inputType(InputType.convolutional(hIn, wIn, depthIn)).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -339,9 +339,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
|
|||
try {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().convolutionMode(ConvolutionMode.Strict)
|
||||
.list()
|
||||
.layer(0, new SubsamplingLayer.Builder().kernelSize(2, 3).stride(2, 2).padding(0, 0)
|
||||
.layer(0, SubsamplingLayer.builder().kernelSize(2, 3).stride(2, 2).padding(0, 0)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder().nOut(10).build())
|
||||
.layer(1, OutputLayer.builder().nOut(10).build())
|
||||
.inputType(InputType.convolutional(hIn, wIn, depthIn)).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -358,84 +358,84 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testCnnInvalidKernel() {
|
||||
assertThrows(IllegalStateException.class, () -> {
|
||||
new ConvolutionLayer.Builder().kernelSize(3, 0).build();
|
||||
ConvolutionLayer.builder().kernelSize(3, 0).build();
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCnnInvalidKernel2() {
|
||||
assertThrows(IllegalStateException.class, () -> {
|
||||
new ConvolutionLayer.Builder().kernelSize(2, 2, 2).build();
|
||||
ConvolutionLayer.builder().kernelSize(2, 2, 2).build();
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCnnInvalidStride() {
|
||||
assertThrows(IllegalStateException.class, () -> {
|
||||
new ConvolutionLayer.Builder().kernelSize(3, 3).stride(0, 1).build();
|
||||
ConvolutionLayer.builder().kernelSize(3, 3).stride(0, 1).build();
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCnnInvalidStride2() {
|
||||
assertThrows(IllegalArgumentException.class, () -> {
|
||||
new ConvolutionLayer.Builder().kernelSize(3, 3).stride(1).build();
|
||||
ConvolutionLayer.builder().kernelSize(3, 3).stride(1).build();
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCnnInvalidPadding() {
|
||||
assertThrows(IllegalArgumentException.class, () -> {
|
||||
new ConvolutionLayer.Builder().kernelSize(3, 3).stride(1, 1).padding(-1, 0).build();
|
||||
ConvolutionLayer.builder().kernelSize(3, 3).stride(1, 1).padding(-1, 0).build();
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCnnInvalidPadding2() {
|
||||
assertThrows(IllegalArgumentException.class, () -> {
|
||||
new ConvolutionLayer.Builder().kernelSize(3, 3).stride(1, 1).padding(0, 0, 0).build();
|
||||
ConvolutionLayer.builder().kernelSize(3, 3).stride(1, 1).padding(0, 0, 0).build();
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSubsamplingInvalidKernel() {
|
||||
assertThrows(IllegalStateException.class, () -> {
|
||||
new SubsamplingLayer.Builder().kernelSize(3, 0).build();
|
||||
SubsamplingLayer.builder().kernelSize(3, 0).build();
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSubsamplingInvalidKernel2() {
|
||||
assertThrows(IllegalArgumentException.class, () -> {
|
||||
new SubsamplingLayer.Builder().kernelSize(2).build();
|
||||
SubsamplingLayer.builder().kernelSize(2).build();
|
||||
});
|
||||
}
|
||||
|
||||
@Test
public void testSubsamplingInvalidStride() {
assertThrows(IllegalStateException.class, () -> {
new SubsamplingLayer.Builder().kernelSize(3, 3).stride(0, 1).build();
SubsamplingLayer.builder().kernelSize(3, 3).stride(0, 1).build();
});
}

@Test
public void testSubsamplingInvalidStride2() {
assertThrows(RuntimeException.class, () -> {
new SubsamplingLayer.Builder().kernelSize(3, 3).stride(1, 1, 1).build();
SubsamplingLayer.builder().kernelSize(3, 3).stride(1, 1, 1).build();
});
}

@Test
public void testSubsamplingInvalidPadding() {
assertThrows(IllegalArgumentException.class, () -> {
new SubsamplingLayer.Builder().kernelSize(3, 3).stride(1, 1).padding(-1, 0).build();
SubsamplingLayer.builder().kernelSize(3, 3).stride(1, 1).padding(-1, 0).build();
});
}

@Test
public void testSubsamplingInvalidPadding2() {
assertThrows(RuntimeException.class, () -> {
new SubsamplingLayer.Builder().kernelSize(3, 3).stride(1, 1).padding(0).build();
SubsamplingLayer.builder().kernelSize(3, 3).stride(1, 1).padding(0).build();
});
}
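The validation tests above only swap the construction call; the exceptions are still expected to come from build(). A condensed sketch of the pattern, taken directly from the kernel and stride cases in this file (JUnit 5 assertThrows, as in the tests):

    // Sketch only: invalid geometry should still fail at build() with the static factory.
    assertThrows(IllegalStateException.class, () -> {
        ConvolutionLayer.builder().kernelSize(3, 0).build();
    });
    assertThrows(IllegalStateException.class, () -> {
        SubsamplingLayer.builder().kernelSize(3, 3).stride(0, 1).build();
    });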
|
||||
|
||||
|
|
|
@ -43,8 +43,8 @@ public class TestInvalidInput extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testInputNinMismatchDense() {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
|
||||
.layer(1, new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(10).nOut(10).build())
|
||||
.layer(1, OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -64,8 +64,8 @@ public class TestInvalidInput extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testLabelsNOutMismatchOutputLayer() {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
|
||||
.layer(1, new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(10).nOut(10).build())
|
||||
.layer(1, OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -85,8 +85,8 @@ public class TestInvalidInput extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testLabelsNOutMismatchRnnOutputLayer() {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new LSTM.Builder().nIn(5).nOut(5).build())
|
||||
.layer(1, new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
|
||||
.layer(0, LSTM.builder().nIn(5).nOut(5).build())
|
||||
.layer(1, RnnOutputLayer.builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -112,8 +112,8 @@ public class TestInvalidInput extends BaseDL4JTest {
|
|||
int d = 3;
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new ConvolutionLayer.Builder().nIn(d).nOut(5).build())
|
||||
.layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.layer(0, ConvolutionLayer.builder().nIn(d).nOut(5).build())
|
||||
.layer(1, OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutional(h, w, d)).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -139,8 +139,8 @@ public class TestInvalidInput extends BaseDL4JTest {
|
|||
int d = 3;
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new ConvolutionLayer.Builder().nIn(d).nOut(5).build())
|
||||
.layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.layer(0, ConvolutionLayer.builder().nIn(d).nOut(5).build())
|
||||
.layer(1, OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutional(h, w, d)).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -165,8 +165,8 @@ public class TestInvalidInput extends BaseDL4JTest {
|
|||
int d = 3;
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new SubsamplingLayer.Builder().kernelSize(2, 2).build())
|
||||
.layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.layer(0, SubsamplingLayer.builder().kernelSize(2, 2).build())
|
||||
.layer(1, OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutional(h, w, d)).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -188,8 +188,8 @@ public class TestInvalidInput extends BaseDL4JTest {
|
|||
public void testInputNinMismatchLSTM() {
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new GravesLSTM.Builder().nIn(5).nOut(5).build())
|
||||
.layer(1, new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
|
||||
.layer(0, GravesLSTM.builder().nIn(5).nOut(5).build())
|
||||
.layer(1, RnnOutputLayer.builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -209,8 +209,8 @@ public class TestInvalidInput extends BaseDL4JTest {
|
|||
public void testInputNinMismatchBidirectionalLSTM() {
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new GravesBidirectionalLSTM.Builder().nIn(5).nOut(5).build())
|
||||
.layer(1, new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
|
||||
.layer(0, GravesBidirectionalLSTM.builder().nIn(5).nOut(5).build())
|
||||
.layer(1, RnnOutputLayer.builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -231,8 +231,8 @@ public class TestInvalidInput extends BaseDL4JTest {
|
|||
public void testInputNinMismatchEmbeddingLayer() {
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new EmbeddingLayer.Builder().nIn(10).nOut(10).build())
|
||||
.layer(1, new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
|
||||
.layer(0, EmbeddingLayer.builder().nIn(10).nOut(10).build())
|
||||
.layer(1, OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -259,13 +259,13 @@ public class TestInvalidInput extends BaseDL4JTest {
|
|||
LayerConfiguration l;
|
||||
switch (layerType){
|
||||
case "simple":
|
||||
l = new SimpleRnn.Builder().nIn(5).nOut(5).build();
|
||||
l = SimpleRnn.builder().nIn(5).nOut(5).build();
|
||||
break;
|
||||
case "lstm":
|
||||
l = new LSTM.Builder().nIn(5).nOut(5).build();
|
||||
l = LSTM.builder().nIn(5).nOut(5).build();
|
||||
break;
|
||||
case "graves":
|
||||
l = new GravesLSTM.Builder().nIn(5).nOut(5).build();
|
||||
l = GravesLSTM.builder().nIn(5).nOut(5).build();
|
||||
break;
|
||||
default:
|
||||
throw new RuntimeException();
|
||||
|
@ -273,7 +273,7 @@ public class TestInvalidInput extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
|
||||
.layer(l)
|
||||
.layer(new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
|
||||
.layer(RnnOutputLayer.builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
|
|
@ -88,14 +88,13 @@ public class AttentionLayerTest extends BaseDL4JTest {
|
|||
.activation(Activation.TANH)
|
||||
.updater(new NoOp())
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.list()
|
||||
.layer(new LSTM.Builder().nOut(layerSize).build())
|
||||
.layer(LSTM.builder().nOut(layerSize).build())
|
||||
.layer( projectInput ?
|
||||
new SelfAttentionLayer.Builder().nOut(4).nHeads(2).projectInput(true).build()
|
||||
: new SelfAttentionLayer.Builder().nHeads(1).projectInput(false).build()
|
||||
)
|
||||
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build())
|
||||
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build())
|
||||
.layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.inputType(InputType.recurrent(nIn))
|
||||
.build();
|
||||
|
@ -150,13 +149,13 @@ public class AttentionLayerTest extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.list()
|
||||
.layer(new LSTM.Builder().nOut(layerSize).build())
|
||||
.layer(LSTM.builder().nOut(layerSize).build())
|
||||
.layer( projectInput ?
|
||||
new LearnedSelfAttentionLayer.Builder().nOut(4).nHeads(2).nQueries(numQueries).projectInput(true).build()
|
||||
: new LearnedSelfAttentionLayer.Builder().nHeads(1).nQueries(numQueries).projectInput(false).build()
|
||||
)
|
||||
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build())
|
||||
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build())
|
||||
.layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.inputType(InputType.recurrent(nIn))
|
||||
.build();
|
||||
|
@ -190,13 +189,13 @@ public class AttentionLayerTest extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.list()
|
||||
.layer(new LSTM.Builder().nOut(layerSize).build())
|
||||
.layer(LSTM.builder().nOut(layerSize).build())
|
||||
.layer( projectInput ?
|
||||
new LearnedSelfAttentionLayer.Builder().nOut(4).nHeads(2).nQueries(numQueries).projectInput(true).build()
|
||||
: new LearnedSelfAttentionLayer.Builder().nHeads(1).nQueries(numQueries).projectInput(false).build()
|
||||
)
|
||||
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build())
|
||||
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build())
|
||||
.layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.inputType(InputType.recurrent(nIn))
|
||||
.build();
|
||||
|
@ -245,10 +244,10 @@ public class AttentionLayerTest extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.list()
|
||||
.layer(new LSTM.Builder().nOut(layerSize).build())
|
||||
.layer(LSTM.builder().nOut(layerSize).build())
|
||||
.layer(new RecurrentAttentionLayer.Builder().nIn(layerSize).nOut(layerSize).nHeads(1).projectInput(false).hasBias(false).build())
|
||||
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.inputType(InputType.recurrent(nIn))
|
||||
.build();
|
||||
|
@ -308,10 +307,10 @@ public class AttentionLayerTest extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.list()
|
||||
.layer(new LSTM.Builder().nOut(layerSize).build())
|
||||
.layer(LSTM.builder().nOut(layerSize).build())
|
||||
.layer(new RecurrentAttentionLayer.Builder().nIn(layerSize).nOut(layerSize).nHeads(1).projectInput(false).hasBias(false).build())
|
||||
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.inputType(InputType.recurrent(nIn))
|
||||
.build();
|
||||
|
@ -367,15 +366,15 @@ public class AttentionLayerTest extends BaseDL4JTest {
|
|||
.weightInit(WeightInit.XAVIER)
|
||||
.graphBuilder()
|
||||
.addInputs("input")
|
||||
.addLayer("rnnKeys", new SimpleRnn.Builder().nOut(layerSize).build(), "input")
|
||||
.addLayer("rnnQueries", new SimpleRnn.Builder().nOut(layerSize).build(), "input")
|
||||
.addLayer("rnnValues", new SimpleRnn.Builder().nOut(layerSize).build(), "input")
|
||||
.addLayer("rnnKeys", SimpleRnn.builder().nOut(layerSize).build(), "input")
|
||||
.addLayer("rnnQueries", SimpleRnn.builder().nOut(layerSize).build(), "input")
|
||||
.addLayer("rnnValues", SimpleRnn.builder().nOut(layerSize).build(), "input")
|
||||
.addVertex("attention",
|
||||
projectInput ?
|
||||
new AttentionVertex.Builder().nOut(4).nHeads(2).projectInput(true).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build()
|
||||
: new AttentionVertex.Builder().nOut(3).nHeads(1).projectInput(false).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build(), "rnnQueries", "rnnKeys", "rnnValues")
|
||||
.addLayer("pooling", new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build(), "attention")
|
||||
.addLayer("output", new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
|
||||
.addLayer("pooling", GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build(), "attention")
|
||||
.addLayer("output", OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
|
||||
.setOutputs("output")
|
||||
.setInputTypes(InputType.recurrent(nIn))
|
||||
.build();
|
||||
|
@@ -431,13 +430,13 @@ public class AttentionLayerTest extends BaseDL4JTest {
.weightInit(WeightInit.XAVIER)
.graphBuilder()
.addInputs("input")
.addLayer("rnn", new SimpleRnn.Builder().activation(Activation.TANH).nOut(layerSize).build(), "input")
.addLayer("rnn", SimpleRnn.builder().activation(Activation.TANH).nOut(layerSize).build(), "input")
.addVertex("attention",
projectInput ?
new AttentionVertex.Builder().nOut(4).nHeads(2).projectInput(true).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build()
: new AttentionVertex.Builder().nOut(4).nHeads(1).projectInput(false).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build(), "rnn", "rnn", "rnn")
.addLayer("pooling", new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build(), "attention")
.addLayer("output", new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
.addLayer("pooling", GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build(), "attention")
.addLayer("output", OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
.setOutputs("output")
.setInputTypes(InputType.recurrent(nIn))
.build();
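Note that in these attention hunks only the layers move to the static factory; the graph vertex (AttentionVertex) and the SelfAttentionLayer context lines further up appear to be left on the old inner Builder in this commit. A condensed sketch of the migrated portion, with the test's layerSize, nOut and nIn variables replaced by placeholder values:

    // Sketch only: layers use builder(); the vertex still uses its inner Builder here.
    ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
            .weightInit(WeightInit.XAVIER)
            .graphBuilder()
            .addInputs("input")
            .addLayer("rnn", SimpleRnn.builder().activation(Activation.TANH).nOut(8).build(), "input")
            .addVertex("attention", new AttentionVertex.Builder()
                    .nOut(4).nHeads(2).projectInput(true)
                    .nInQueries(8).nInKeys(8).nInValues(8).build(), "rnn", "rnn", "rnn")
            .addLayer("pooling", GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build(), "attention")
            .addLayer("output", OutputLayer.builder().nOut(3).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
            .setOutputs("output")
            .setInputTypes(InputType.recurrent(4))
            .build();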
@@ -78,11 +78,11 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.dataType(DataType.DOUBLE)
.seed(12345L)
.dist(new NormalDistribution(0, 1)).list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(3)
.layer(0, DenseLayer.builder().nIn(4).nOut(3)
.activation(Activation.IDENTITY).build())
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).nOut(3).build())
.layer(2, new ActivationLayer.Builder().activation(Activation.TANH).build())
.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
.layer(1,BatchNormalization.builder().useLogStd(useLogStd).nOut(3).build())
.layer(2, ActivationLayer.builder().activation(Activation.TANH).build())
.layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(3).nOut(3).build());

MultiLayerNetwork mln = new MultiLayerNetwork(builder.build());
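The batch-norm gradient checks chain several migrated layer types in one list. A minimal sketch of the new style for that stack, with useLogStd hard-coded to true and the distribution and data-type settings dropped for brevity:

    // Sketch only: DenseLayer, BatchNormalization, ActivationLayer and OutputLayer
    // all constructed through their static builder() factories.
    NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
            .seed(12345L)
            .list()
            .layer(0, DenseLayer.builder().nIn(4).nOut(3).activation(Activation.IDENTITY).build())
            .layer(1, BatchNormalization.builder().useLogStd(true).nOut(3).build())
            .layer(2, ActivationLayer.builder().activation(Activation.TANH).build())
            .layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX).nIn(3).nOut(3).build())
            .build();
    MultiLayerNetwork mln = new MultiLayerNetwork(conf);
    mln.init();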
|
||||
|
@ -122,11 +122,11 @@ public class BNGradientCheckTest extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp()).seed(12345L)
|
||||
.dist(new NormalDistribution(0, 2)).list()
|
||||
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nIn(depth).nOut(2)
|
||||
.layer(0, ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nIn(depth).nOut(2)
|
||||
.activation(Activation.IDENTITY).build())
|
||||
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).build())
|
||||
.layer(2, new ActivationLayer.Builder().activation(Activation.TANH).build())
|
||||
.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1,BatchNormalization.builder().useLogStd(useLogStd).build())
|
||||
.layer(2, ActivationLayer.builder().activation(Activation.TANH).build())
|
||||
.layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(nOut).build())
|
||||
.inputType(InputType.convolutional(hw, hw, depth));
|
||||
|
||||
|
@ -193,14 +193,14 @@ public class BNGradientCheckTest extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT)
|
||||
.updater(new NoOp())
|
||||
.dist(new UniformDistribution(-2, 2)).seed(12345L).list()
|
||||
.layer(0, new ConvolutionLayer.Builder(2, 2).stride(1, 1).nOut(3)
|
||||
.layer(0, ConvolutionLayer.builder(2, 2).stride(1, 1).nOut(3)
|
||||
.activation(afn).build())
|
||||
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).build())
|
||||
.layer(2, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
|
||||
.layer(1,BatchNormalization.builder().useLogStd(useLogStd).build())
|
||||
.layer(2, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX)
|
||||
.kernelSize(2, 2).stride(1, 1).build())
|
||||
.layer(3, new BatchNormalization())
|
||||
.layer(4, new ActivationLayer.Builder().activation(afn).build())
|
||||
.layer(5, new OutputLayer.Builder(lf).activation(outputActivation).nOut(nOut)
|
||||
.layer(3, BatchNormalization.builder().build())
|
||||
.layer(4, ActivationLayer.builder().activation(afn).build())
|
||||
.layer(5, OutputLayer.builder(lf).activation(outputActivation).nOut(nOut)
|
||||
.build())
|
||||
.inputType(InputType.convolutional(hw, hw, depth));
|
||||
|
||||
|
@ -300,12 +300,12 @@ public class BNGradientCheckTest extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
|
||||
.updater(new NoOp())
|
||||
.dist(new UniformDistribution(-2, 2)).seed(12345L).list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(nIn).nOut(4)
|
||||
.layer(0, DenseLayer.builder().nIn(nIn).nOut(4)
|
||||
.activation(afn).build())
|
||||
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).build())
|
||||
.layer(2, new DenseLayer.Builder().nIn(4).nOut(4).build())
|
||||
.layer(3, new BatchNormalization.Builder().useLogStd(useLogStd).build())
|
||||
.layer(4, new OutputLayer.Builder(lf)
|
||||
.layer(1,BatchNormalization.builder().useLogStd(useLogStd).build())
|
||||
.layer(2, DenseLayer.builder().nIn(4).nOut(4).build())
|
||||
.layer(3,BatchNormalization.builder().useLogStd(useLogStd).build())
|
||||
.layer(4, OutputLayer.builder(lf)
|
||||
.activation(outputActivation).nOut(nOut)
|
||||
.build());
|
||||
|
||||
|
@ -373,11 +373,11 @@ public class BNGradientCheckTest extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.seed(12345L)
|
||||
.dist(new NormalDistribution(0, 1)).list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY).build())
|
||||
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).lockGammaBeta(true).gamma(2.0).beta(0.5).nOut(3)
|
||||
.layer(0, DenseLayer.builder().nIn(4).nOut(3).activation(Activation.IDENTITY).build())
|
||||
.layer(1,BatchNormalization.builder().useLogStd(useLogStd).lockGammaBeta(true).gamma(2.0).beta(0.5).nOut(3)
|
||||
.build())
|
||||
.layer(2, new ActivationLayer.Builder().activation(Activation.TANH).build())
|
||||
.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(2, ActivationLayer.builder().activation(Activation.TANH).build())
|
||||
.layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(3).nOut(3).build());
|
||||
|
||||
MultiLayerNetwork mln = new MultiLayerNetwork(builder.build());
|
||||
|
@ -417,11 +417,11 @@ public class BNGradientCheckTest extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.seed(12345L)
|
||||
.dist(new NormalDistribution(0, 2)).list()
|
||||
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nIn(depth).nOut(2)
|
||||
.layer(0, ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nIn(depth).nOut(2)
|
||||
.activation(Activation.IDENTITY).build())
|
||||
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).lockGammaBeta(true).gamma(2.0).beta(0.5).build())
|
||||
.layer(2, new ActivationLayer.Builder().activation(Activation.TANH).build())
|
||||
.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1,BatchNormalization.builder().useLogStd(useLogStd).lockGammaBeta(true).gamma(2.0).beta(0.5).build())
|
||||
.layer(2, ActivationLayer.builder().activation(Activation.TANH).build())
|
||||
.layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(nOut).build())
|
||||
.inputType(InputType.convolutional(hw, hw, depth));
|
||||
|
||||
|
@ -460,8 +460,8 @@ public class BNGradientCheckTest extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
|
||||
.setInputTypes(InputType.convolutional(height, width, channels))
|
||||
.addLayer("bn", new BatchNormalization.Builder().useLogStd(useLogStd).build(), "in")
|
||||
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.addLayer("bn",BatchNormalization.builder().useLogStd(useLogStd).build(), "in")
|
||||
.addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(numClasses).build(), "bn")
|
||||
.setOutputs("out").build();
|
||||
|
||||
|
@ -531,14 +531,14 @@ public class BNGradientCheckTest extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.dist(new UniformDistribution(-2, 2)).seed(12345L).graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new ConvolutionLayer.Builder(2, 2).stride(1, 1).nOut(3)
|
||||
.addLayer("0", ConvolutionLayer.builder(2, 2).stride(1, 1).nOut(3)
|
||||
.activation(afn).build(), "in")
|
||||
.addLayer("1", new BatchNormalization.Builder().useLogStd(useLogStd).build(), "0")
|
||||
.addLayer("2", new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
|
||||
.addLayer("1",BatchNormalization.builder().useLogStd(useLogStd).build(), "0")
|
||||
.addLayer("2", SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX)
|
||||
.kernelSize(2, 2).stride(1, 1).build(), "1")
|
||||
.addLayer("3", new BatchNormalization.Builder().useLogStd(useLogStd).build(), "2")
|
||||
.addLayer("4", new ActivationLayer.Builder().activation(afn).build(), "3")
|
||||
.addLayer("5", new OutputLayer.Builder(lf).activation(outputActivation)
|
||||
.addLayer("3",BatchNormalization.builder().useLogStd(useLogStd).build(), "2")
|
||||
.addLayer("4", ActivationLayer.builder().activation(afn).build(), "3")
|
||||
.addLayer("5", OutputLayer.builder(lf).activation(outputActivation)
|
||||
.nOut(nOut).build(), "4")
|
||||
.setOutputs("5").setInputTypes(InputType.convolutional(hw, hw, depth))
|
||||
.build();
|
||||
|
|
|
@ -20,6 +20,9 @@
|
|||
|
||||
package org.deeplearning4j.gradientcheck;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.deeplearning4j.BaseDL4JTest;
|
||||
import org.deeplearning4j.TestUtils;
|
||||
|
@ -42,9 +45,6 @@ import org.nd4j.linalg.indexing.NDArrayIndex;
|
|||
import org.nd4j.linalg.learning.config.NoOp;
|
||||
import org.nd4j.linalg.lossfunctions.LossFunctions;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
@Slf4j
|
||||
public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
||||
private static final boolean PRINT_RESULTS = true;
|
||||
|
@ -81,24 +81,45 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
INDArray labels = Nd4j.zeros(minibatchSize, finalNOut, length);
|
||||
for (int i = 0; i < minibatchSize; i++) {
|
||||
for (int j = 0; j < length; j++) {
|
||||
labels.putScalar(new int[]{i, i % finalNOut, j}, 1.0);
|
||||
labels.putScalar(new int[] {i, i % finalNOut, j}, 1.0);
|
||||
}
|
||||
}
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
NeuralNetConfiguration conf =
|
||||
NeuralNetConfiguration.builder()
|
||||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.dist(new NormalDistribution(0, 1)).convolutionMode(ConvolutionMode.Same).list()
|
||||
.layer(new Convolution1DLayer.Builder().activation(afn).kernelSize(kernel)
|
||||
.stride(stride).padding(padding).nIn(convNIn).nOut(convNOut1)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.convolutionMode(ConvolutionMode.Same)
|
||||
.list()
|
||||
.layer(
|
||||
Convolution1DLayer.builder()
|
||||
.activation(afn)
|
||||
.kernelSize(kernel)
|
||||
.stride(stride)
|
||||
.padding(padding)
|
||||
.nIn(convNIn)
|
||||
.nOut(convNOut1)
|
||||
.rnnDataFormat(RNNFormat.NCW)
|
||||
.build())
|
||||
.layer(new LocallyConnected1D.Builder().activation(afn).kernelSize(kernel)
|
||||
.stride(stride).padding(padding).nIn(convNOut1).nOut(convNOut2).hasBias(false)
|
||||
.layer(
|
||||
LocallyConnected1D.builder()
|
||||
.activation(afn)
|
||||
.kernelSize(kernel)
|
||||
.stride(stride)
|
||||
.padding(padding)
|
||||
.nIn(convNOut1)
|
||||
.nOut(convNOut2)
|
||||
.hasBias(false)
|
||||
.build())
|
||||
.layer(new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputType(InputType.recurrent(convNIn, length)).build();
|
||||
.layer(
|
||||
RnnOutputLayer.builder()
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.nOut(finalNOut)
|
||||
.build())
|
||||
.inputType(InputType.recurrent(convNIn, length))
|
||||
.build();
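The 1D-CNN hunk above is mostly a whitespace reflow, but it also shows the Convolution1DLayer, LocallyConnected1D and RnnOutputLayer factories with an explicit RNNFormat. Condensed, the migrated configuration reads as follows; the sizes stand in for the test's convNIn, convNOut1, convNOut2, finalNOut and length variables and are placeholders only:

    // Sketch only: condensed form of the reflowed configuration above.
    NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
            .dataType(DataType.DOUBLE)
            .updater(new NoOp())
            .convolutionMode(ConvolutionMode.Same)
            .list()
            .layer(Convolution1DLayer.builder().activation(Activation.SIGMOID)
                    .kernelSize(2).stride(1).padding(0)
                    .nIn(2).nOut(3).rnnDataFormat(RNNFormat.NCW).build())
            .layer(LocallyConnected1D.builder().activation(Activation.SIGMOID)
                    .kernelSize(2).stride(1).padding(0)
                    .nIn(3).nOut(4).hasBias(false).build())
            .layer(RnnOutputLayer.builder()
                    .lossFunction(LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX).nOut(4).build())
            .inputType(InputType.recurrent(2, 6))
            .build();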
|
||||
|
||||
String json = conf.toJson();
|
||||
NeuralNetConfiguration c2 = NeuralNetConfiguration.fromJson(json);
|
||||
|
@ -107,28 +128,35 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
String msg = "Minibatch=" + minibatchSize + ", activationFn="
|
||||
+ afn + ", kernel = " + kernel;
|
||||
String msg =
|
||||
"Minibatch=" + minibatchSize + ", activationFn=" + afn + ", kernel = " + kernel;
|
||||
|
||||
if (PRINT_RESULTS) {
|
||||
System.out.println(msg);
|
||||
// for (int j = 0; j < net.getnLayers(); j++)
|
||||
// System.out.println("ILayer " + j + " # params: " + net.getLayer(j).numParams());
|
||||
// for (int j = 0; j < net.getnLayers(); j++)
|
||||
// System.out.println("ILayer " + j + " # params: " +
|
||||
// net.getLayer(j).numParams());
|
||||
}
|
||||
|
||||
boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
|
||||
DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);
|
||||
boolean gradOK =
|
||||
GradientCheckUtil.checkGradients(
|
||||
net,
|
||||
DEFAULT_EPS,
|
||||
DEFAULT_MAX_REL_ERROR,
|
||||
DEFAULT_MIN_ABS_ERROR,
|
||||
PRINT_RESULTS,
|
||||
RETURN_ON_FIRST_FAILURE,
|
||||
input,
|
||||
labels);
|
||||
|
||||
assertTrue(gradOK, msg);
|
||||
|
||||
TestUtils.testModelSerialization(net);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testCnn1DWithCropping1D() {
|
||||
Nd4j.getRandom().setSeed(1337);
|
||||
|
@ -140,7 +168,6 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
int convNOut2 = 4;
|
||||
int finalNOut = 4;
|
||||
|
||||
|
||||
int[] kernels = {1, 2, 4};
|
||||
int stride = 1;
|
||||
|
||||
|
@ -150,8 +177,11 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
|
||||
Activation[] activations = {Activation.SIGMOID};
|
||||
SubsamplingLayer.PoolingType[] poolingTypes =
|
||||
new SubsamplingLayer.PoolingType[]{SubsamplingLayer.PoolingType.MAX,
|
||||
SubsamplingLayer.PoolingType.AVG, SubsamplingLayer.PoolingType.PNORM};
|
||||
new SubsamplingLayer.PoolingType[] {
|
||||
SubsamplingLayer.PoolingType.MAX,
|
||||
SubsamplingLayer.PoolingType.AVG,
|
||||
SubsamplingLayer.PoolingType.PNORM
|
||||
};
|
||||
|
||||
for (Activation afn : activations) {
|
||||
for (SubsamplingLayer.PoolingType poolingType : poolingTypes) {
|
||||
|
@ -161,24 +191,41 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
INDArray labels = Nd4j.zeros(minibatchSize, finalNOut, croppedLength);
|
||||
for (int i = 0; i < minibatchSize; i++) {
|
||||
for (int j = 0; j < croppedLength; j++) {
|
||||
labels.putScalar(new int[]{i, i % finalNOut, j}, 1.0);
|
||||
labels.putScalar(new int[] {i, i % finalNOut, j}, 1.0);
|
||||
}
|
||||
}
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
NeuralNetConfiguration conf =
|
||||
NeuralNetConfiguration.builder()
|
||||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.dist(new NormalDistribution(0, 1)).convolutionMode(ConvolutionMode.Same).list()
|
||||
.layer(new Convolution1DLayer.Builder().activation(afn).kernelSize(kernel)
|
||||
.stride(stride).padding(padding).nOut(convNOut1)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.convolutionMode(ConvolutionMode.Same)
|
||||
.layer(
|
||||
Convolution1DLayer.builder()
|
||||
.activation(afn)
|
||||
.kernelSize(kernel)
|
||||
.stride(stride)
|
||||
.padding(padding)
|
||||
.nOut(convNOut1)
|
||||
.build())
|
||||
.layer(new Cropping1D.Builder(cropping).build())
|
||||
.layer(new Convolution1DLayer.Builder().activation(afn).kernelSize(kernel)
|
||||
.stride(stride).padding(padding).nOut(convNOut2)
|
||||
.layer(Cropping1D.builder(cropping).build())
|
||||
.layer(
|
||||
Convolution1DLayer.builder()
|
||||
.activation(afn)
|
||||
.kernelSize(kernel)
|
||||
.stride(stride)
|
||||
.padding(padding)
|
||||
.nOut(convNOut2)
|
||||
.build())
|
||||
.layer(new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputType(InputType.recurrent(convNIn, length,RNNFormat.NCW)).build();
|
||||
.layer(
|
||||
RnnOutputLayer.builder()
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.nOut(finalNOut)
|
||||
.build())
|
||||
.inputType(InputType.recurrent(convNIn, length, RNNFormat.NCW))
|
||||
.build();
|
||||
|
||||
String json = conf.toJson();
|
||||
NeuralNetConfiguration c2 = NeuralNetConfiguration.fromJson(json);
|
||||
|
@ -187,17 +234,33 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
String msg = "PoolingType=" + poolingType + ", minibatch=" + minibatchSize + ", activationFn="
|
||||
+ afn + ", kernel = " + kernel;
|
||||
String msg =
|
||||
"PoolingType="
|
||||
+ poolingType
|
||||
+ ", minibatch="
|
||||
+ minibatchSize
|
||||
+ ", activationFn="
|
||||
+ afn
|
||||
+ ", kernel = "
|
||||
+ kernel;
|
||||
|
||||
if (PRINT_RESULTS) {
|
||||
System.out.println(msg);
|
||||
// for (int j = 0; j < net.getnLayers(); j++)
|
||||
// System.out.println("ILayer " + j + " # params: " + net.getLayer(j).numParams());
|
||||
// for (int j = 0; j < net.getnLayers(); j++)
|
||||
// System.out.println("ILayer " + j + " # params: " +
|
||||
// net.getLayer(j).numParams());
|
||||
}
|
||||
|
||||
boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
|
||||
DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);
|
||||
boolean gradOK =
|
||||
GradientCheckUtil.checkGradients(
|
||||
net,
|
||||
DEFAULT_EPS,
|
||||
DEFAULT_MAX_REL_ERROR,
|
||||
DEFAULT_MIN_ABS_ERROR,
|
||||
PRINT_RESULTS,
|
||||
RETURN_ON_FIRST_FAILURE,
|
||||
input,
|
||||
labels);
|
||||
|
||||
assertTrue(gradOK, msg);
|
||||
|
||||
|
@ -208,7 +271,6 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testCnn1DWithZeroPadding1D() {
|
||||
Nd4j.getRandom().setSeed(1337);
|
||||
|
@ -220,7 +282,6 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
int convNOut2 = 4;
|
||||
int finalNOut = 4;
|
||||
|
||||
|
||||
int[] kernels = {1, 2, 4};
|
||||
int stride = 1;
|
||||
int pnorm = 2;
|
||||
|
@ -231,8 +292,11 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
|
||||
Activation[] activations = {Activation.SIGMOID};
|
||||
SubsamplingLayer.PoolingType[] poolingTypes =
|
||||
new SubsamplingLayer.PoolingType[]{SubsamplingLayer.PoolingType.MAX,
|
||||
SubsamplingLayer.PoolingType.AVG, SubsamplingLayer.PoolingType.PNORM};
|
||||
new SubsamplingLayer.PoolingType[] {
|
||||
SubsamplingLayer.PoolingType.MAX,
|
||||
SubsamplingLayer.PoolingType.AVG,
|
||||
SubsamplingLayer.PoolingType.PNORM
|
||||
};
|
||||
|
||||
for (Activation afn : activations) {
|
||||
for (SubsamplingLayer.PoolingType poolingType : poolingTypes) {
|
||||
|
@ -242,27 +306,49 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
INDArray labels = Nd4j.zeros(minibatchSize, finalNOut, paddedLength);
|
||||
for (int i = 0; i < minibatchSize; i++) {
|
||||
for (int j = 0; j < paddedLength; j++) {
|
||||
labels.putScalar(new int[]{i, i % finalNOut, j}, 1.0);
|
||||
labels.putScalar(new int[] {i, i % finalNOut, j}, 1.0);
|
||||
}
|
||||
}
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
NeuralNetConfiguration conf =
|
||||
NeuralNetConfiguration.builder()
|
||||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.dist(new NormalDistribution(0, 1)).convolutionMode(ConvolutionMode.Same).list()
|
||||
.layer(new Convolution1DLayer.Builder().activation(afn).kernelSize(kernel)
|
||||
.stride(stride).padding(padding).nOut(convNOut1)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.convolutionMode(ConvolutionMode.Same)
|
||||
.layer(
|
||||
Convolution1DLayer.builder()
|
||||
.activation(afn)
|
||||
.kernelSize(kernel)
|
||||
.stride(stride)
|
||||
.padding(padding)
|
||||
.nOut(convNOut1)
|
||||
.build())
|
||||
.layer(new ZeroPadding1DLayer.Builder(zeroPadding).build())
|
||||
.layer(new Convolution1DLayer.Builder().activation(afn).kernelSize(kernel)
|
||||
.stride(stride).padding(padding).nOut(convNOut2)
|
||||
.layer(ZeroPadding1DLayer.builder(zeroPadding).build())
|
||||
.layer(
|
||||
Convolution1DLayer.builder()
|
||||
.activation(afn)
|
||||
.kernelSize(kernel)
|
||||
.stride(stride)
|
||||
.padding(padding)
|
||||
.nOut(convNOut2)
|
||||
.build())
|
||||
.layer(new ZeroPadding1DLayer.Builder(0).build())
|
||||
.layer(new Subsampling1DLayer.Builder(poolingType).kernelSize(kernel)
|
||||
.stride(stride).padding(padding).pnorm(pnorm).build())
|
||||
.layer(new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputType(InputType.recurrent(convNIn, length,RNNFormat.NCW)).build();
|
||||
.layer(ZeroPadding1DLayer.builder(0).build())
|
||||
.layer(
|
||||
Subsampling1DLayer.builder(poolingType)
|
||||
.kernelSize(kernel)
|
||||
.stride(stride)
|
||||
.padding(padding)
|
||||
.pnorm(pnorm)
|
||||
.build())
|
||||
.layer(
|
||||
RnnOutputLayer.builder()
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.nOut(finalNOut)
|
||||
.build())
|
||||
.inputType(InputType.recurrent(convNIn, length, RNNFormat.NCW))
|
||||
.build();
|
||||
|
||||
String json = conf.toJson();
|
||||
NeuralNetConfiguration c2 = NeuralNetConfiguration.fromJson(json);
|
||||
|
@ -271,17 +357,33 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
String msg = "PoolingType=" + poolingType + ", minibatch=" + minibatchSize + ", activationFn="
|
||||
+ afn + ", kernel = " + kernel;
|
||||
String msg =
|
||||
"PoolingType="
|
||||
+ poolingType
|
||||
+ ", minibatch="
|
||||
+ minibatchSize
|
||||
+ ", activationFn="
|
||||
+ afn
|
||||
+ ", kernel = "
|
||||
+ kernel;
|
||||
|
||||
if (PRINT_RESULTS) {
|
||||
System.out.println(msg);
|
||||
// for (int j = 0; j < net.getnLayers(); j++)
|
||||
// System.out.println("ILayer " + j + " # params: " + net.getLayer(j).numParams());
|
||||
// for (int j = 0; j < net.getnLayers(); j++)
|
||||
// System.out.println("ILayer " + j + " # params: " +
|
||||
// net.getLayer(j).numParams());
|
||||
}
|
||||
|
||||
boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
|
||||
DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);
|
||||
boolean gradOK =
|
||||
GradientCheckUtil.checkGradients(
|
||||
net,
|
||||
DEFAULT_EPS,
|
||||
DEFAULT_MAX_REL_ERROR,
|
||||
DEFAULT_MIN_ABS_ERROR,
|
||||
PRINT_RESULTS,
|
||||
RETURN_ON_FIRST_FAILURE,
|
||||
input,
|
||||
labels);
|
||||
|
||||
assertTrue(gradOK, msg);
|
||||
TestUtils.testModelSerialization(net);
|
||||
|
@ -291,7 +393,6 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testCnn1DWithSubsampling1D() {
|
||||
Nd4j.getRandom().setSeed(12345);
|
||||
|
@ -310,8 +411,11 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
|
||||
Activation[] activations = {Activation.SIGMOID, Activation.TANH};
|
||||
SubsamplingLayer.PoolingType[] poolingTypes =
|
||||
new SubsamplingLayer.PoolingType[]{SubsamplingLayer.PoolingType.MAX,
|
||||
SubsamplingLayer.PoolingType.AVG, SubsamplingLayer.PoolingType.PNORM};
|
||||
new SubsamplingLayer.PoolingType[] {
|
||||
SubsamplingLayer.PoolingType.MAX,
|
||||
SubsamplingLayer.PoolingType.AVG,
|
||||
SubsamplingLayer.PoolingType.PNORM
|
||||
};
|
||||
|
||||
for (Activation afn : activations) {
|
||||
for (SubsamplingLayer.PoolingType poolingType : poolingTypes) {
|
||||
|
@ -321,25 +425,52 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
INDArray labels = Nd4j.zeros(minibatchSize, finalNOut, length);
|
||||
for (int i = 0; i < minibatchSize; i++) {
|
||||
for (int j = 0; j < length; j++) {
|
||||
labels.putScalar(new int[]{i, i % finalNOut, j}, 1.0);
|
||||
labels.putScalar(new int[] {i, i % finalNOut, j}, 1.0);
|
||||
}
|
||||
}
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
NeuralNetConfiguration conf =
|
||||
NeuralNetConfiguration.builder()
|
||||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.dist(new NormalDistribution(0, 1)).convolutionMode(ConvolutionMode.Same).list()
|
||||
.layer(0, new Convolution1DLayer.Builder().activation(afn).kernelSize(kernel)
|
||||
.stride(stride).padding(padding).nOut(convNOut1)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.convolutionMode(ConvolutionMode.Same)
|
||||
.list()
|
||||
.layer(
|
||||
0,
|
||||
Convolution1DLayer.builder()
|
||||
.activation(afn)
|
||||
.kernelSize(kernel)
|
||||
.stride(stride)
|
||||
.padding(padding)
|
||||
.nOut(convNOut1)
|
||||
.build())
|
||||
.layer(1, new Convolution1DLayer.Builder().activation(afn).kernelSize(kernel)
|
||||
.stride(stride).padding(padding).nOut(convNOut2)
|
||||
.layer(
|
||||
1,
|
||||
Convolution1DLayer.builder()
|
||||
.activation(afn)
|
||||
.kernelSize(kernel)
|
||||
.stride(stride)
|
||||
.padding(padding)
|
||||
.nOut(convNOut2)
|
||||
.build())
|
||||
.layer(2, new Subsampling1DLayer.Builder(poolingType).kernelSize(kernel)
|
||||
.stride(stride).padding(padding).pnorm(pnorm).build())
|
||||
.layer(3, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputType(InputType.recurrent(convNIn, length,RNNFormat.NCW)).build();
|
||||
.layer(
|
||||
2,
|
||||
Subsampling1DLayer.builder(poolingType)
|
||||
.kernelSize(kernel)
|
||||
.stride(stride)
|
||||
.padding(padding)
|
||||
.pnorm(pnorm)
|
||||
.build())
|
||||
.layer(
|
||||
3,
|
||||
RnnOutputLayer.builder()
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.nOut(finalNOut)
|
||||
.build())
|
||||
.inputType(InputType.recurrent(convNIn, length, RNNFormat.NCW))
|
||||
.build();
|
||||
|
||||
String json = conf.toJson();
|
||||
NeuralNetConfiguration c2 = NeuralNetConfiguration.fromJson(json);
|
||||
|
@ -348,17 +479,33 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
String msg = "PoolingType=" + poolingType + ", minibatch=" + minibatchSize + ", activationFn="
|
||||
+ afn + ", kernel = " + kernel;
|
||||
String msg =
|
||||
"PoolingType="
|
||||
+ poolingType
|
||||
+ ", minibatch="
|
||||
+ minibatchSize
|
||||
+ ", activationFn="
|
||||
+ afn
|
||||
+ ", kernel = "
|
||||
+ kernel;
|
||||
|
||||
if (PRINT_RESULTS) {
|
||||
System.out.println(msg);
|
||||
// for (int j = 0; j < net.getnLayers(); j++)
|
||||
// System.out.println("ILayer " + j + " # params: " + net.getLayer(j).numParams());
|
||||
// for (int j = 0; j < net.getnLayers(); j++)
|
||||
// System.out.println("ILayer " + j + " # params: " +
|
||||
// net.getLayer(j).numParams());
|
||||
}
|
||||
|
||||
boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
|
||||
DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);
|
||||
boolean gradOK =
|
||||
GradientCheckUtil.checkGradients(
|
||||
net,
|
||||
DEFAULT_EPS,
|
||||
DEFAULT_MAX_REL_ERROR,
|
||||
DEFAULT_MIN_ABS_ERROR,
|
||||
PRINT_RESULTS,
|
||||
RETURN_ON_FIRST_FAILURE,
|
||||
input,
|
||||
labels);
|
||||
|
||||
assertTrue(gradOK, msg);
|
||||
TestUtils.testModelSerialization(net);
|
||||
|
@ -369,7 +516,7 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testCnn1dWithMasking(){
|
||||
public void testCnn1dWithMasking() {
|
||||
int length = 12;
|
||||
int convNIn = 2;
|
||||
int convNOut1 = 3;
|
||||
|
@ -379,36 +526,58 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
int pnorm = 2;
|
||||
|
||||
SubsamplingLayer.PoolingType[] poolingTypes =
|
||||
new SubsamplingLayer.PoolingType[] {SubsamplingLayer.PoolingType.MAX, SubsamplingLayer.PoolingType.AVG};
|
||||
new SubsamplingLayer.PoolingType[] {
|
||||
SubsamplingLayer.PoolingType.MAX, SubsamplingLayer.PoolingType.AVG
|
||||
};
|
||||
|
||||
for (SubsamplingLayer.PoolingType poolingType : poolingTypes) {
|
||||
for(ConvolutionMode cm : new ConvolutionMode[]{ConvolutionMode.Same, ConvolutionMode.Truncate}) {
|
||||
for( int stride : new int[]{1, 2}){
|
||||
for (ConvolutionMode cm :
|
||||
new ConvolutionMode[] {ConvolutionMode.Same, ConvolutionMode.Truncate}) {
|
||||
for (int stride : new int[] {1, 2}) {
|
||||
String s = cm + ", stride=" + stride + ", pooling=" + poolingType;
|
||||
log.info("Starting test: " + s);
|
||||
Nd4j.getRandom().setSeed(12345);
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
NeuralNetConfiguration conf =
|
||||
NeuralNetConfiguration.builder()
|
||||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 1)).convolutionMode(cm)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.convolutionMode(cm)
|
||||
.seed(12345)
|
||||
.list()
|
||||
.layer(new Convolution1DLayer.Builder().kernelSize(2)
|
||||
.layer(
|
||||
Convolution1DLayer.builder()
|
||||
.kernelSize(2)
|
||||
.rnnDataFormat(RNNFormat.NCW)
|
||||
.stride(stride).nIn(convNIn).nOut(convNOut1)
|
||||
.stride(stride)
|
||||
.nIn(convNIn)
|
||||
.nOut(convNOut1)
|
||||
.build())
|
||||
.layer(new Subsampling1DLayer.Builder(poolingType).kernelSize(2)
|
||||
.stride(stride).pnorm(pnorm).build())
|
||||
.layer(new Convolution1DLayer.Builder().kernelSize(2)
|
||||
.layer(
|
||||
Subsampling1DLayer.builder(poolingType)
|
||||
.kernelSize(2)
|
||||
.stride(stride)
|
||||
.pnorm(pnorm)
|
||||
.build())
|
||||
.layer(
|
||||
Convolution1DLayer.builder()
|
||||
.kernelSize(2)
|
||||
.rnnDataFormat(RNNFormat.NCW)
|
||||
.stride(stride).nIn(convNOut1).nOut(convNOut2)
|
||||
.stride(stride)
|
||||
.nIn(convNOut1)
|
||||
.nOut(convNOut2)
|
||||
.build())
|
||||
.layer(new GlobalPoolingLayer(PoolingType.AVG))
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputType(InputType.recurrent(convNIn, length)).build();
|
||||
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(
|
||||
OutputLayer.builder()
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.nOut(finalNOut)
|
||||
.build())
|
||||
.inputType(InputType.recurrent(convNIn, length))
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -416,19 +585,20 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
INDArray f = Nd4j.rand(2, convNIn, length);
|
||||
INDArray fm = Nd4j.create(2, length);
|
||||
fm.get(NDArrayIndex.point(0), NDArrayIndex.all()).assign(1);
|
||||
fm.get(NDArrayIndex.point(1), NDArrayIndex.interval(0,6)).assign(1);
|
||||
fm.get(NDArrayIndex.point(1), NDArrayIndex.interval(0, 6)).assign(1);
|
||||
|
||||
INDArray label = TestUtils.randomOneHot(2, finalNOut);
|
||||
|
||||
boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(f)
|
||||
.labels(label).inputMask(fm));
|
||||
boolean gradOK =
|
||||
GradientCheckUtil.checkGradients(
|
||||
new GradientCheckUtil.MLNConfig().net(net).input(f).labels(label).inputMask(fm));
|
||||
|
||||
assertTrue(gradOK, s);
|
||||
TestUtils.testModelSerialization(net);
|
||||
|
||||
//TODO also check that masked step values don't impact forward pass, score or gradients
|
||||
// TODO also check that masked step values don't impact forward pass, score or gradients
|
||||
|
||||
DataSet ds = new DataSet(f,label,fm,null);
|
||||
DataSet ds = new DataSet(f, label, fm, null);
|
||||
double scoreBefore = net.score(ds);
|
||||
net.setInput(f);
|
||||
net.setLabels(label);
|
||||
|
@ -471,32 +641,44 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
int st = strides[i];
|
||||
boolean mask = masks[i];
|
||||
boolean hasBias = hasB[i];
|
||||
//TODO has bias
|
||||
// TODO has bias
|
||||
String s = "k=" + k + ", s=" + st + " d=" + d + ", seqLen=" + length;
|
||||
log.info("Starting test: " + s);
|
||||
Nd4j.getRandom().setSeed(12345);
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
NeuralNetConfiguration conf =
|
||||
NeuralNetConfiguration.builder()
|
||||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.activation(Activation.TANH)
|
||||
.weightInit(new NormalDistribution(0, 1))
|
||||
.seed(12345)
|
||||
.list()
|
||||
.layer(new Convolution1DLayer.Builder().kernelSize(k)
|
||||
.layer(
|
||||
Convolution1DLayer.builder()
|
||||
.kernelSize(k)
|
||||
.dilation(d)
|
||||
.hasBias(hasBias)
|
||||
.convolutionMode(ConvolutionMode.Causal)
|
||||
.stride(st).nOut(convNOut1)
|
||||
.stride(st)
|
||||
.nOut(convNOut1)
|
||||
.build())
|
||||
.layer(new Convolution1DLayer.Builder().kernelSize(k)
|
||||
.layer(
|
||||
Convolution1DLayer.builder()
|
||||
.kernelSize(k)
|
||||
.dilation(d)
|
||||
.convolutionMode(ConvolutionMode.Causal)
|
||||
.stride(st).nOut(convNOut2)
|
||||
.stride(st)
|
||||
.nOut(convNOut2)
|
||||
.build())
|
||||
.layer(new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputType(InputType.recurrent(convNIn, length,RNNFormat.NCW)).build();
|
||||
.layer(
|
||||
RnnOutputLayer.builder()
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.nOut(finalNOut)
|
||||
.build())
|
||||
.inputType(InputType.recurrent(convNIn, length, RNNFormat.NCW))
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -510,12 +692,14 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
|
|||
}
|
||||
|
||||
long outSize1 = Convolution1DUtils.getOutputSize(length, k, st, 0, ConvolutionMode.Causal, d);
|
||||
long outSize2 = Convolution1DUtils.getOutputSize(outSize1, k, st, 0, ConvolutionMode.Causal, d);
|
||||
long outSize2 =
|
||||
Convolution1DUtils.getOutputSize(outSize1, k, st, 0, ConvolutionMode.Causal, d);
|
||||
|
||||
INDArray label = TestUtils.randomOneHotTimeSeries(2, finalNOut, (int)outSize2);
|
||||
INDArray label = TestUtils.randomOneHotTimeSeries(2, finalNOut, (int) outSize2);
|
||||
|
||||
boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(f)
|
||||
.labels(label).inputMask(fm));
|
||||
boolean gradOK =
|
||||
GradientCheckUtil.checkGradients(
|
||||
new GradientCheckUtil.MLNConfig().net(net).input(f).labels(label).inputMask(fm));
|
||||
|
||||
assertTrue(gradOK, s);
|
||||
TestUtils.testModelSerialization(net);
|
||||
|
|
|
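Note: the CNN1D hunks above all apply the same mechanical change: layer configurations are created through the static builder() factory generated by @SuperBuilder instead of the nested Builder classes, and constructor arguments such as the loss function become explicit setter calls. A minimal sketch of that pattern, reusing the names from the hunks above (finalNOut is the test variable; the value here is illustrative, and the builder() API is the one introduced by this commit, not the upstream DL4J release API):

import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class BuilderMigrationSketch {
    public static void main(String[] args) {
        int finalNOut = 4; // illustrative value only

        // Before: new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
        //                 .activation(Activation.SOFTMAX).nOut(finalNOut).build();

        // After: static builder() factory; the loss function is set explicitly
        RnnOutputLayer outputLayer = RnnOutputLayer.builder()
                .lossFunction(LossFunctions.LossFunction.MCXENT)
                .activation(Activation.SOFTMAX)
                .nOut(finalNOut)
                .build();

        System.out.println(outputLayer);
    }
}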
@ -115,16 +115,16 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
|
|||
.updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.list()
|
||||
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(kernel)
|
||||
.layer(0, Convolution3D.builder().activation(afn).kernelSize(kernel)
|
||||
.stride(stride).nIn(convNIn).nOut(convNOut1).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(df)
|
||||
.build())
|
||||
.layer(1, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.layer(1, Convolution3D.builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.nIn(convNOut1).nOut(convNOut2).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(df)
|
||||
.build())
|
||||
.layer(2, new DenseLayer.Builder().nOut(denseNOut).build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(2, DenseLayer.builder().nOut(denseNOut).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputPreProcessor(2,
|
||||
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
|
||||
|
@ -218,17 +218,17 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
|
|||
.updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.list()
|
||||
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(kernel)
|
||||
.layer(0, Convolution3D.builder().activation(afn).kernelSize(kernel)
|
||||
.nIn(convNIn).nOut(convNOut1).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
|
||||
.build())
|
||||
.layer(1, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.layer(1, Convolution3D.builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.nIn(convNOut1).nOut(convNOut2).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
|
||||
.build())
|
||||
.layer(2, new ZeroPadding3DLayer.Builder(zeroPadding).build())
|
||||
.layer(3, new DenseLayer.Builder().nOut(denseNOut).build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(2, ZeroPadding3DLayer.builder(zeroPadding).build())
|
||||
.layer(3, DenseLayer.builder().nOut(denseNOut).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputPreProcessor(3,
|
||||
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
|
||||
|
@ -314,14 +314,14 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
|
|||
.weightInit(WeightInit.XAVIER)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.list()
|
||||
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.layer(0, Convolution3D.builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.nIn(convNIn).nOut(convNOut).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(df)
|
||||
.build())
|
||||
.layer(1, new Subsampling3DLayer.Builder(kernel)
|
||||
.poolingType(pool).convolutionMode(mode).dataFormat(df).build())
|
||||
.layer(2, new DenseLayer.Builder().nOut(denseNOut).build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1, Subsampling3DLayer.builder(kernel)
|
||||
.poolingType(pool.toPoolingType()).convolutionMode(mode).dataFormat(df).build())
|
||||
.layer(2, DenseLayer.builder().nOut(denseNOut).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputPreProcessor(2,
|
||||
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,convNOut, df))
|
||||
|
@ -401,13 +401,13 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 1))
|
||||
.seed(12345)
|
||||
.list()
|
||||
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.layer(0, Convolution3D.builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.nIn(convNIn).nOut(convNOut).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(df)
|
||||
.build())
|
||||
.layer(1, new Upsampling3D.Builder(upsamplingSize[0]).dataFormat(df).build())
|
||||
.layer(2, new DenseLayer.Builder().nOut(denseNOut).build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1, Upsampling3D.builder(upsamplingSize[0]).dataFormat(df).build())
|
||||
.layer(2, DenseLayer.builder().nOut(denseNOut).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputPreProcessor(2,
|
||||
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
|
||||
|
@ -496,17 +496,17 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
|
|||
.updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.list()
|
||||
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(kernel)
|
||||
.layer(0, Convolution3D.builder().activation(afn).kernelSize(kernel)
|
||||
.nIn(convNIn).nOut(convNOut1).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
|
||||
.build())
|
||||
.layer(1, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.layer(1, Convolution3D.builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.nIn(convNOut1).nOut(convNOut2).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
|
||||
.build())
|
||||
.layer(2, new Cropping3D.Builder(cropping).build())
|
||||
.layer(3, new DenseLayer.Builder().nOut(denseNOut).build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(2, Cropping3D.builder(cropping).build())
|
||||
.layer(3, DenseLayer.builder().nOut(denseNOut).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputPreProcessor(3,
|
||||
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
|
||||
|
@ -595,15 +595,15 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.weightInit(new NormalDistribution(0, 0.1))
|
||||
.list()
|
||||
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(kernel)
|
||||
.layer(0, Convolution3D.builder().activation(afn).kernelSize(kernel)
|
||||
.stride(stride).nIn(convNIn).nOut(dOut).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(df)
|
||||
.build())
|
||||
.layer(1, new Deconvolution3D.Builder().activation(afn).kernelSize(kernel)
|
||||
.layer(1, Deconvolution3D.builder().activation(afn).kernelSize(kernel)
|
||||
.stride(stride).nOut(dOut).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(df)
|
||||
.build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputType(InputType.convolutional3D(df, depth, height, width, convNIn)).build();
|
||||
|
||||
|
|
File diff suppressed because it is too large
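Note: the suppressed file follows the same builder migration as the hunks shown here. For orientation, a minimal sketch of a complete configuration in the new style, modelled on the CNN3D test configurations above (layer sizes are illustrative and are not taken from the suppressed file):

import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class SuperBuilderConfigSketch {
    public static void main(String[] args) {
        // Full network config assembled from the generated layer builders
        NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                .list()
                .layer(DenseLayer.builder().nIn(4).nOut(8).activation(Activation.TANH).build())
                .layer(OutputLayer.builder()
                        .lossFunction(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX)
                        .nIn(8).nOut(3)
                        .build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        System.out.println(net.summary());
    }
}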
|
@ -86,10 +86,10 @@ public class CapsnetGradientCheckTest extends BaseDL4JTest {
|
|||
.kernelSize(3, 3)
|
||||
.stride(2, 2)
|
||||
.build())
|
||||
.layer(new CapsuleLayer.Builder(capsule, capsuleDim, routing).build())
|
||||
.layer(new CapsuleStrengthLayer.Builder().build())
|
||||
.layer(new ActivationLayer.Builder(new ActivationSoftmax()).build())
|
||||
.layer(new LossLayer.Builder(new LossNegativeLogLikelihood()).build())
|
||||
.layer(CapsuleLayer.builder(capsule, capsuleDim, routing).build())
|
||||
.layer(CapsuleStrengthLayer.builder().build())
|
||||
.layer(ActivationLayer.builder(new ActivationSoftmax()).build())
|
||||
.layer(LossLayer.builder().lossFunction(new LossNegativeLogLikelihood()).build())
|
||||
.inputType(InputType.convolutional(height, width, inputDepth))
|
||||
.build();
|
||||
|
||||
|
|
|
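Note on the CapsNet hunk above and the LossLayer usages further below: the generated LossLayer builder takes an ILossFunction instance rather than the LossFunction enum, so enum constants are converted via getILossFunction(). A short sketch of both forms (class and method names as used in this diff):

import org.deeplearning4j.nn.conf.layers.LossLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;
import org.nd4j.linalg.lossfunctions.impl.LossNegativeLogLikelihood;

public class LossLayerBuilderSketch {
    public static void main(String[] args) {
        // Enum constant converted to an ILossFunction before being passed in
        LossLayer fromEnum = LossLayer.builder()
                .lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction())
                .activation(Activation.SOFTMAX)
                .build();

        // Or pass an ILossFunction implementation directly, as the CapsNet test does
        LossLayer fromInstance = LossLayer.builder()
                .lossFunction(new LossNegativeLogLikelihood())
                .build();

        System.out.println(fromEnum + " / " + fromInstance);
    }
}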
@ -97,24 +97,23 @@ public class DropoutGradientCheck extends BaseDL4JTest {
|
|||
.convolutionMode(ConvolutionMode.Same)
|
||||
.dropOut(dropout)
|
||||
.activation(Activation.TANH)
|
||||
.updater(new NoOp())
|
||||
.list();
|
||||
.updater(new NoOp());
|
||||
|
||||
if(cnn){
|
||||
builder.layer(new ConvolutionLayer.Builder().kernelSize(3,3).stride(2,2).nOut(2).build());
|
||||
builder.layer(new ConvolutionLayer.Builder().kernelSize(3,3).stride(2,2).nOut(2).build());
|
||||
builder.layer(ConvolutionLayer.builder().kernelSize(3,3).stride(2,2).nOut(2).build());
|
||||
builder.layer(ConvolutionLayer.builder().kernelSize(3,3).stride(2,2).nOut(2).build());
|
||||
builder.inputType(InputType.convolutional(6,6,2));
|
||||
} else {
|
||||
builder.layer(new DenseLayer.Builder().nOut(3).build());
|
||||
builder.layer(new DenseLayer.Builder().nOut(3).build());
|
||||
builder.layer(DenseLayer.builder().nOut(3).build());
|
||||
builder.layer(DenseLayer.builder().nOut(3).build());
|
||||
builder.inputType(InputType.feedForward(6));
|
||||
}
|
||||
builder.layer(new OutputLayer.Builder().nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunction.MCXENT).build());
|
||||
builder.layer(OutputLayer.builder().nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunction.MCXENT).build());
|
||||
|
||||
NeuralNetConfiguration conf = builder.build();
|
||||
//Remove spatial dropout from output layer - can't be used for 2d input
|
||||
if(i == 4){
|
||||
conf.getFlattenedLayerConfigurations().get(2).setIDropout(null);
|
||||
conf.getFlattenedLayerConfigurations().get(2).setDropOut(null);
|
||||
}
|
||||
|
||||
MultiLayerNetwork mln = new MultiLayerNetwork(conf);
|
||||
|
@ -157,11 +156,11 @@ public class DropoutGradientCheck extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).build(), "in")
|
||||
.addLayer("1", new DenseLayer.Builder().nIn(5).nOut(5).build(), "0")
|
||||
.addLayer("2", new DenseLayer.Builder().nIn(5).nOut(5).build(), "0")
|
||||
.addLayer("3", new DenseLayer.Builder().nIn(5).nOut(5).build(), "0")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(15).nOut(5).activation(Activation.SOFTMAX)
|
||||
.addLayer("0", DenseLayer.builder().nIn(5).nOut(5).build(), "in")
|
||||
.addLayer("1", DenseLayer.builder().nIn(5).nOut(5).build(), "0")
|
||||
.addLayer("2", DenseLayer.builder().nIn(5).nOut(5).build(), "0")
|
||||
.addLayer("3", DenseLayer.builder().nIn(5).nOut(5).build(), "0")
|
||||
.addLayer("out", OutputLayer.builder().nIn(15).nOut(5).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunction.MCXENT).build(), "1", "2", "3")
|
||||
.setOutputs("out")
|
||||
.build();
|
||||
|
|
|
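Note: the GlobalPoolingLayer changes in the hunks below (and in the masking test above) collapse both the convenience constructor and the nested Builder into the single generated builder. A sketch of the resulting call, with PoolingType imported from the DL4J layers package:

import org.deeplearning4j.nn.conf.layers.GlobalPoolingLayer;
import org.deeplearning4j.nn.conf.layers.PoolingType;

public class GlobalPoolingBuilderSketch {
    public static void main(String[] args) {
        // Before: new GlobalPoolingLayer(PoolingType.AVG)
        //         new GlobalPoolingLayer.Builder().poolingType(pt).build()

        // After: one builder() entry point for both call sites
        GlobalPoolingLayer pooling = GlobalPoolingLayer.builder()
                .poolingType(PoolingType.AVG)
                .build();

        System.out.println(pooling);
    }
}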
@ -75,10 +75,10 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
|
||||
.layer(0, new SimpleRnn.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.layer(0, SimpleRnn.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1, GlobalPoolingLayer.builder().poolingType(pt).build())
|
||||
.layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build())
|
||||
.build();
|
||||
|
||||
|
@ -130,12 +130,12 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
|
||||
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1)
|
||||
.layer(0, ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1)
|
||||
.dataFormat(nchw ? CNN2DFormat.NCHW : CNN2DFormat.NHWC)
|
||||
.nOut(layerDepth)
|
||||
.build())
|
||||
.layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1, GlobalPoolingLayer.builder().poolingType(pt).build())
|
||||
.layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(nOut).build())
|
||||
.inputType(InputType.convolutional(inputH, inputW, inputDepth, nchw ? CNN2DFormat.NCHW : CNN2DFormat.NHWC)).build();
|
||||
|
||||
|
@ -188,10 +188,10 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
|
||||
.layer(0, new LSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.layer(0, LSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1, GlobalPoolingLayer.builder().poolingType(pt).build())
|
||||
.layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build())
|
||||
.build();
|
||||
|
||||
|
@ -263,10 +263,10 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.dist(new NormalDistribution(0, 1.0)).convolutionMode(ConvolutionMode.Same)
|
||||
.seed(12345L).list()
|
||||
.layer(0, new ConvolutionLayer.Builder().kernelSize(kernel).stride(stride)
|
||||
.layer(0, ConvolutionLayer.builder().kernelSize(kernel).stride(stride)
|
||||
.nOut(layerDepth).build())
|
||||
.layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1, GlobalPoolingLayer.builder().poolingType(pt).build())
|
||||
.layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(nOut).build())
|
||||
|
||||
.inputType(InputType.convolutional(inputH, inputW, inputDepth)).build();
|
||||
|
|
|
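Note: as the GradientCheckTests hunks below show, where the old code passed the loss function to the Builder constructor, the new code either keeps a builder(lossFunction) overload (OutputLayer.builder(lf)) or switches to an explicit lossFunction(...) call. Both forms appear in this commit; a sketch of the two, assuming lf is a LossFunctions.LossFunction value:

import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class OutputLayerBuilderOverloadSketch {
    public static void main(String[] args) {
        LossFunctions.LossFunction lf = LossFunctions.LossFunction.MCXENT;

        // Overload that still takes the loss function up front
        OutputLayer a = OutputLayer.builder(lf)
                .activation(Activation.SOFTMAX)
                .nIn(3).nOut(3)
                .build();

        // Equivalent form with the explicit setter
        OutputLayer b = OutputLayer.builder()
                .lossFunction(lf)
                .activation(Activation.SOFTMAX)
                .nIn(3).nOut(3)
                .build();

        System.out.println(a + " / " + b);
    }
}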
@ -78,11 +78,11 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(new NoOp())
|
||||
.list()
|
||||
.layer(0,
|
||||
new DenseLayer.Builder().nIn(4).nOut(3)
|
||||
DenseLayer.builder().nIn(4).nOut(3)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(3).nOut(3).build())
|
||||
.build();
|
||||
|
||||
|
@ -168,11 +168,11 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).updater(new NoOp())
|
||||
.seed(12345L)
|
||||
.list().layer(0,
|
||||
new DenseLayer.Builder().nIn(4).nOut(3)
|
||||
DenseLayer.builder().nIn(4).nOut(3)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(afn)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder(lf).activation(outputActivation).nIn(3).nOut(3)
|
||||
.layer(1, OutputLayer.builder(lf).activation(outputActivation).nIn(3).nOut(3)
|
||||
.dist(new NormalDistribution(0, 1)).build())
|
||||
.build();
|
||||
|
||||
|
@ -259,12 +259,12 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
|
||||
.seed(12345L)
|
||||
.list().layer(0,
|
||||
new DenseLayer.Builder().nIn(4).nOut(3)
|
||||
DenseLayer.builder().nIn(4).nOut(3)
|
||||
.dist(new NormalDistribution(0,
|
||||
1))
|
||||
.updater(new NoOp())
|
||||
.activation(afn).build())
|
||||
.layer(1, new OutputLayer.Builder(lf).nIn(3).nOut(3)
|
||||
.layer(1, OutputLayer.builder(lf).nIn(3).nOut(3)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp())
|
||||
.activation(outputActivation).build())
|
||||
|
@ -327,10 +327,10 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().l2(0.2).l1(0.1)
|
||||
.dataType(DataType.DOUBLE)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345L)
|
||||
.list().layer(new EmbeddingLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
|
||||
.list().layer(EmbeddingLayer.builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
|
||||
.updater(new NoOp()).build())
|
||||
.layer(new PReLULayer.Builder().inputShape(3).sharedAxes(1).updater(new NoOp()).build())
|
||||
.layer(new OutputLayer.Builder(LossFunction.MCXENT).nIn(3).nOut(3)
|
||||
.layer(PReLULayer.builder().inputShape(3).sharedAxes(1).updater(new NoOp()).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunction.MCXENT).nIn(3).nOut(3)
|
||||
.weightInit(WeightInit.XAVIER).dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
|
@ -365,12 +365,12 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().l2(0.2).l1(0.1)
|
||||
.dataType(DataType.DOUBLE)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345L)
|
||||
.list().layer(0,
|
||||
new EmbeddingLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
|
||||
.layer(0,
|
||||
EmbeddingLayer.builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
|
||||
.updater(new NoOp()).activation(
|
||||
Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder(LossFunction.MCXENT).nIn(3).nOut(3)
|
||||
.layer(1, OutputLayer.builder().lossFunction(LossFunction.MCXENT).nIn(3).nOut(3)
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.updater(new NoOp()).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
|
@ -437,9 +437,9 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
.seed(12345L)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.list().layer(0,
|
||||
new AutoEncoder.Builder().nIn(4).nOut(3)
|
||||
AutoEncoder.builder().nIn(4).nOut(3)
|
||||
.activation(afn).build())
|
||||
.layer(1, new OutputLayer.Builder(lf).nIn(3).nOut(3)
|
||||
.layer(1, OutputLayer.builder(lf).nIn(3).nOut(3)
|
||||
.activation(outputActivation).build())
|
||||
.build();
|
||||
|
||||
|
@ -497,13 +497,13 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
.weightInit(new UniformDistribution(0, 1))
|
||||
.graphBuilder()
|
||||
.addInputs("features")
|
||||
.addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(4)
|
||||
.addLayer("dense", DenseLayer.builder().nIn(4).nOut(4)
|
||||
.activation(Activation.TANH)
|
||||
.build(), "features")
|
||||
.addLayer("elementWiseMul", new ElementWiseMultiplicationLayer.Builder().nIn(4).nOut(4)
|
||||
.addLayer("elementWiseMul", ElementWiseMultiplicationLayer.builder().nIn(4).nOut(4)
|
||||
.activation(a)
|
||||
.build(), "dense")
|
||||
.addLayer("loss", new LossLayer.Builder(LossFunctions.LossFunction.COSINE_PROXIMITY)
|
||||
.addLayer("loss", LossLayer.builder().lossFunction(LossFunctions.LossFunction.COSINE_PROXIMITY.getILossFunction())
|
||||
.activation(Activation.IDENTITY).build(), "elementWiseMul")
|
||||
.setOutputs("loss")
|
||||
.build();
|
||||
|
@ -566,12 +566,12 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.weightInit(new NormalDistribution(0, 1))
|
||||
.list()
|
||||
.layer(new EmbeddingSequenceLayer.Builder()
|
||||
.layer(EmbeddingSequenceLayer.builder()
|
||||
.nIn(8)
|
||||
.nOut(4)
|
||||
.outputDataFormat(seqOutputFormat)
|
||||
.build())
|
||||
.layer(new RnnOutputLayer.Builder().nIn(4).nOut(3).activation(Activation.TANH)
|
||||
.layer(RnnOutputLayer.builder().nIn(4).nOut(3).activation(Activation.TANH)
|
||||
.dataFormat(seqOutputFormat)
|
||||
.lossFunction(LossFunction.MSE).build())
|
||||
.build();
|
||||
|
@ -679,12 +679,12 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
|
||||
.seed(12345L)
|
||||
.list().layer(0,
|
||||
new DenseLayer.Builder().nIn(4).nOut(3)
|
||||
DenseLayer.builder().nIn(4).nOut(3)
|
||||
.dist(new NormalDistribution(0,
|
||||
1))
|
||||
.updater(new NoOp())
|
||||
.activation(afn).build())
|
||||
.layer(1, new OutputLayer.Builder(lf).nIn(3).nOut(3)
|
||||
.layer(1, OutputLayer.builder(lf).nIn(3).nOut(3)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp())
|
||||
.activation(outputActivation).build())
|
||||
|
@ -740,12 +740,12 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).updater(new NoOp())
|
||||
.seed(12345L)
|
||||
.list().layer(0,
|
||||
new DenseLayer.Builder().nIn(4).nOut(3)
|
||||
DenseLayer.builder().nIn(4).nOut(3)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.hasLayerNorm(layerNorm)
|
||||
.activation(afn)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder(lf).activation(outputActivation).nIn(3).nOut(3)
|
||||
.layer(1, OutputLayer.builder(lf).activation(outputActivation).nIn(3).nOut(3)
|
||||
.dist(new NormalDistribution(0, 1)).build())
|
||||
.build();
|
||||
|
||||
|
|
|
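Note: the ComputationGraph hunks below apply the same substitution inside graphBuilder(): the vertex classes (MergeVertex, ElementWiseVertex, StackVertex, and so on) are unchanged, only the layer arguments passed to addLayer(...) move to the builder() form. A minimal sketch modelled on the first hunk below; layer sizes are illustrative, and var is used for the configuration so the sketch does not assume the exact configuration class name in this fork:

import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class GraphBuilderMigrationSketch {
    public static void main(String[] args) {
        var conf = NeuralNetConfiguration.builder()
                .graphBuilder()
                .addInputs("input")
                .addLayer("firstLayer",
                        DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
                        "input")
                .addLayer("outputLayer",
                        OutputLayer.builder()
                                .lossFunction(LossFunctions.LossFunction.MCXENT)
                                .activation(Activation.SOFTMAX)
                                .nIn(5).nOut(3)
                                .build(),
                        "firstLayer")
                .setOutputs("outputLayer")
                .build();

        ComputationGraph graph = new ComputationGraph(conf);
        graph.init();
    }
}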
@ -76,10 +76,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 1)).updater(new NoOp())
|
||||
.graphBuilder().addInputs("input")
|
||||
.addLayer("firstLayer",
|
||||
new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addLayer("outputLayer",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
|
||||
"firstLayer")
|
||||
.setOutputs("outputLayer").build();
|
||||
|
@ -121,13 +121,13 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1)).updater(new NoOp())
|
||||
.graphBuilder().addInputs("input")
|
||||
.addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
.addLayer("l1", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addLayer("l2", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
.addLayer("l2", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addVertex("merge", new MergeVertex(), "l1", "l2")
|
||||
.addLayer("outputLayer",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(5 + 5).nOut(3).build(),
|
||||
"merge")
|
||||
.setOutputs("outputLayer").build();
|
||||
|
@ -178,13 +178,13 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).graphBuilder().addInputs("input")
|
||||
.addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
.addLayer("l1", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addLayer("l2", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.SIGMOID)
|
||||
.addLayer("l2", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.SIGMOID)
|
||||
.build(), "input")
|
||||
.addVertex("elementwise", new ElementWiseVertex(op), "l1", "l2")
|
||||
.addLayer("outputLayer",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
|
||||
"elementwise")
|
||||
.setOutputs("outputLayer").build();
|
||||
|
@ -236,15 +236,15 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).graphBuilder().addInputs("input")
|
||||
.addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
.addLayer("l1", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addLayer("l2", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.SIGMOID)
|
||||
.addLayer("l2", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.SIGMOID)
|
||||
.build(), "input")
|
||||
.addLayer("l3", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.RELU).build(),
|
||||
.addLayer("l3", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.RELU).build(),
|
||||
"input")
|
||||
.addVertex("elementwise", new ElementWiseVertex(op), "l1", "l2", "l3")
|
||||
.addLayer("outputLayer",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
|
||||
"elementwise")
|
||||
.setOutputs("outputLayer").build();
|
||||
|
@ -299,10 +299,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.setOutputs("out")
|
||||
.layer("l1", new DenseLayer.Builder().nIn(3).nOut(firstSmaller ? 1 : 3).build(), "in") //[mb,3]
|
||||
.layer("l2", new DenseLayer.Builder().nIn(3).nOut(firstSmaller ? 3 : 1).build(), "in") //[mb,1]
|
||||
.layer("l1", DenseLayer.builder().nIn(3).nOut(firstSmaller ? 1 : 3).build(), "in") //[mb,3]
|
||||
.layer("l2", DenseLayer.builder().nIn(3).nOut(firstSmaller ? 3 : 1).build(), "in") //[mb,1]
|
||||
.addVertex("ew", new ElementWiseVertex(op), "l1", "l2")
|
||||
.layer("out", new OutputLayer.Builder().nIn(3).nOut(2).lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).build(), "ew")
|
||||
.layer("out", OutputLayer.builder().nIn(3).nOut(2).lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).build(), "ew")
|
||||
.build();
|
||||
|
||||
ComputationGraph graph = new ComputationGraph(conf);
|
||||
|
@ -344,15 +344,15 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 0.1))
|
||||
.updater(new NoOp()).graphBuilder().addInputs("input")
|
||||
.addLayer("l1", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.addLayer("l1", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.dataFormat(format)
|
||||
.nIn(2).nOut(2).activation(Activation.TANH).build(), "input")
|
||||
.addLayer("l2", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1)
|
||||
.addLayer("l2", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1)
|
||||
.padding(0, 0).dataFormat(format)
|
||||
.nIn(2).nOut(2).activation(Activation.TANH).build(), "input")
|
||||
.addVertex("merge", new MergeVertex(), "l1", "l2")
|
||||
.addLayer("outputLayer",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(5 * 5 * (2 + 2)).nOut(3)
|
||||
.build(),
|
||||
"merge")
|
||||
|
@ -401,23 +401,23 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.updater(new NoOp()).graphBuilder().addInputs("input")
|
||||
.setOutputs("out")
|
||||
.addLayer("rnn1",
|
||||
new SimpleRnn.Builder().nOut(3)
|
||||
SimpleRnn.builder().nOut(3)
|
||||
.activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addLayer("rnn2",
|
||||
new SimpleRnn.Builder().nOut(3)
|
||||
SimpleRnn.builder().nOut(3)
|
||||
.activation(Activation.TANH).build(),
|
||||
"rnn1")
|
||||
.addLayer("dense1",
|
||||
new DenseLayer.Builder().nOut(3)
|
||||
DenseLayer.builder().nOut(3)
|
||||
.activation(Activation.SIGMOID).build(),
|
||||
"rnn1")
|
||||
.addLayer("rnn3",
|
||||
new SimpleRnn.Builder().nOut(3)
|
||||
SimpleRnn.builder().nOut(3)
|
||||
.activation(Activation.TANH).build(),
|
||||
"dense1")
|
||||
.addVertex("merge", new MergeVertex(), "rnn2", "rnn3")
|
||||
.addLayer("out", new RnnOutputLayer.Builder().nOut(outSize)
|
||||
.addLayer("out", RnnOutputLayer.builder().nOut(outSize)
|
||||
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build(),
|
||||
|
@ -457,10 +457,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.weightInit(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).graphBuilder().addInputs("input").setOutputs("out")
|
||||
.addLayer("lstm1", new LSTM.Builder().nOut(6).activation(Activation.TANH).build(),
|
||||
.addLayer("lstm1", LSTM.builder().nOut(6).activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addVertex("subset", new SubsetVertex(0, 2), "lstm1")
|
||||
.addLayer("out", new RnnOutputLayer.Builder().nOut(2).activation(Activation.SOFTMAX)
|
||||
.addLayer("out", RnnOutputLayer.builder().nOut(2).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "subset")
|
||||
.setInputTypes(InputType.recurrent(inLength,timeSeriesLength,RNNFormat.NCW))
|
||||
.build();
|
||||
|
@ -494,10 +494,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).graphBuilder().addInputs("input").setOutputs("out")
|
||||
.addLayer("lstm1", new LSTM.Builder().nIn(3).nOut(4).activation(Activation.TANH).build(),
|
||||
.addLayer("lstm1", LSTM.builder().nIn(3).nOut(4).activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addVertex("lastTS", new LastTimeStepVertex("input"), "lstm1")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(4).nOut(2).activation(Activation.SOFTMAX)
|
||||
.addLayer("out", OutputLayer.builder().nIn(4).nOut(2).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "lastTS")
|
||||
.build();
|
||||
|
||||
|
@ -548,16 +548,16 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.updater(new NoOp()).graphBuilder()
|
||||
.addInputs("input1", "input2").setOutputs("out")
|
||||
.addLayer("lstm1",
|
||||
new LSTM.Builder().nIn(3).nOut(3)
|
||||
LSTM.builder().nIn(3).nOut(3)
|
||||
.activation(Activation.TANH).build(),
|
||||
"input1")
|
||||
.addLayer("lstm2",
|
||||
new LSTM.Builder().nIn(2).nOut(4)
|
||||
LSTM.builder().nIn(2).nOut(4)
|
||||
.activation(Activation.SOFTSIGN).build(),
|
||||
"input2")
|
||||
.addVertex("lastTS", new LastTimeStepVertex("input2"), "lstm2")
|
||||
.addVertex("duplicate", new DuplicateToTimeSeriesVertex("input2"), "lastTS")
|
||||
.addLayer("out", new RnnOutputLayer.Builder().nIn(3+4).nOut(2)
|
||||
.addLayer("out", RnnOutputLayer.builder().nIn(3+4).nOut(2)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build(),
|
||||
"lstm1", "duplicate")
|
||||
|
@ -598,16 +598,16 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.updater(new NoOp()).graphBuilder()
|
||||
.addInputs("input").setOutputs("out")
|
||||
.addLayer("lstm_a",
|
||||
new LSTM.Builder().nIn(2).nOut(3)
|
||||
LSTM.builder().nIn(2).nOut(3)
|
||||
.activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addVertex("input_rev", new ReverseTimeSeriesVertex("input"), "input")
|
||||
.addLayer("lstm_b",
|
||||
new LSTM.Builder().nIn(2).nOut(3)
|
||||
LSTM.builder().nIn(2).nOut(3)
|
||||
.activation(Activation.TANH).build(),
|
||||
"input_rev")
|
||||
.addVertex("lstm_b_rev", new ReverseTimeSeriesVertex("input"), "lstm_b")
|
||||
.addLayer("out", new RnnOutputLayer.Builder().nIn(3 + 3).nOut(2)
|
||||
.addLayer("out", RnnOutputLayer.builder().nIn(3 + 3).nOut(2)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build(),
|
||||
"lstm_a", "lstm_b_rev")
|
||||
|
@ -655,11 +655,11 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).activation(Activation.TANH).graphBuilder().addInputs("i0", "i1", "i2")
|
||||
.addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i0")
|
||||
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i1")
|
||||
.addLayer("d2", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i2")
|
||||
.addLayer("d3", new DenseLayer.Builder().nIn(6).nOut(2).build(), "d0", "d1", "d2")
|
||||
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(2)
|
||||
.addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "i0")
|
||||
.addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "i1")
|
||||
.addLayer("d2", DenseLayer.builder().nIn(2).nOut(2).build(), "i2")
|
||||
.addLayer("d3", DenseLayer.builder().nIn(6).nOut(2).build(), "d0", "d1", "d2")
|
||||
.addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(2)
|
||||
.nOut(2).build(), "d3")
|
||||
.setOutputs("out").build();
|
||||
|
||||
|
@ -698,11 +698,11 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).activation(Activation.TANH).graphBuilder().addInputs("i0")
|
||||
.addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i0")
|
||||
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0")
|
||||
.addLayer("d2", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0")
|
||||
.addLayer("d3", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0")
|
||||
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(6)
|
||||
.addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "i0")
|
||||
.addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "d0")
|
||||
.addLayer("d2", DenseLayer.builder().nIn(2).nOut(2).build(), "d0")
|
||||
.addLayer("d3", DenseLayer.builder().nIn(2).nOut(2).build(), "d0")
|
||||
.addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(6)
|
||||
.nOut(2).build(), "d1", "d2", "d3")
|
||||
.setOutputs("out").build();
|
||||
|
||||
|
@ -738,14 +738,14 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).activation(Activation.TANH).graphBuilder().addInputs("i0", "i1", "i2")
|
||||
.addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i0")
|
||||
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i1")
|
||||
.addLayer("d2", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i2")
|
||||
.addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "i0")
|
||||
.addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "i1")
|
||||
.addLayer("d2", DenseLayer.builder().nIn(2).nOut(2).build(), "i2")
|
||||
.addVertex("m", new MergeVertex(), "d0", "d1", "d2")
|
||||
.addLayer("D0", new DenseLayer.Builder().nIn(6).nOut(2).build(), "m")
|
||||
.addLayer("D1", new DenseLayer.Builder().nIn(6).nOut(2).build(), "m")
|
||||
.addLayer("D2", new DenseLayer.Builder().nIn(6).nOut(2).build(), "m")
|
||||
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(6)
|
||||
.addLayer("D0", DenseLayer.builder().nIn(6).nOut(2).build(), "m")
|
||||
.addLayer("D1", DenseLayer.builder().nIn(6).nOut(2).build(), "m")
|
||||
.addLayer("D2", DenseLayer.builder().nIn(6).nOut(2).build(), "m")
|
||||
.addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(6)
|
||||
.nOut(2).build(), "D0", "D1", "D2")
|
||||
.setOutputs("out").build();
|
||||
|
||||
|
@ -787,18 +787,18 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).activation(Activation.TANH).graphBuilder().addInputs("input")
|
||||
.addLayer("l0", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.addLayer("l0", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.nIn(2).nOut(2).activation(Activation.TANH).build(), "input")
|
||||
.addLayer("l1", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.addLayer("l1", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.nIn(2).nOut(2).activation(Activation.TANH).build(), "l0")
|
||||
.addLayer("l2", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.addLayer("l2", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.nIn(2).nOut(2).activation(Activation.TANH).build(), "l0")
|
||||
.addVertex("m", new MergeVertex(), "l1", "l2")
|
||||
.addLayer("l3", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.addLayer("l3", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.nIn(4).nOut(2).activation(Activation.TANH).build(), "m")
|
||||
.addLayer("l4", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.addLayer("l4", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.nIn(4).nOut(2).activation(Activation.TANH).build(), "m")
|
||||
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE)
|
||||
.addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
|
||||
.activation(Activation.IDENTITY).nOut(2)
|
||||
.build(), "l3", "l4")
|
||||
.setOutputs("out").setInputTypes(InputType.convolutional(inH, inW, 2))
|
||||
|
@ -839,7 +839,7 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.updater(new NoOp()).graphBuilder()
|
||||
.addInputs("input1", "input2", "input3")
|
||||
.addVertex("stack1", new StackVertex(), "input1", "input2", "input3")
|
||||
.addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5)
|
||||
.addLayer("l1", DenseLayer.builder().nIn(4).nOut(5)
|
||||
.activation(Activation.TANH).build(), "stack1")
|
||||
.addVertex("unstack0", new UnstackVertex(0, 3), "l1")
|
||||
.addVertex("unstack1", new UnstackVertex(1, 3), "l1")
|
||||
|
@ -847,8 +847,8 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.addVertex("l2-1", new L2Vertex(), "unstack1", "unstack0") // x - x-
|
||||
.addVertex("l2-2", new L2Vertex(), "unstack1", "unstack2") // x - x+
|
||||
.addLayer("lossLayer",
|
||||
new LossLayer.Builder()
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
LossLayer.builder()
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction())
|
||||
.activation(Activation.SOFTMAX).build(),
|
||||
"l2-1", "l2-2")
|
||||
.setOutputs("lossLayer").build();
|
||||
|
@ -911,9 +911,9 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new GaussianDistribution(0, 1))
|
||||
.updater(new NoOp()).graphBuilder().addInputs("input1")
|
||||
.addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH)
|
||||
.addLayer("l1", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH)
|
||||
.build(), "input1")
|
||||
.addLayer("cl", new CenterLossOutputLayer.Builder()
|
||||
.addLayer("cl",CenterLossOutputLayer.builder()
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).nIn(5).nOut(numLabels)
|
||||
.alpha(1.0).lambda(lambda).gradientCheck(true)
|
||||
.activation(Activation.SOFTMAX).build(), "l1")
|
||||
|
@ -975,9 +975,9 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
|
||||
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(3).build())
|
||||
.layer(1, new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(2, new CenterLossOutputLayer.Builder()
|
||||
.layer(0, ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(3).build())
|
||||
.layer(1, GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(2, CenterLossOutputLayer.builder()
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).nOut(numLabels)
|
||||
.alpha(1.0).lambda(lambda).gradientCheck(true)
|
||||
.activation(Activation.SOFTMAX).build())
|
||||
|
@ -1030,10 +1030,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH).updater(new NoOp()).graphBuilder()
|
||||
.addInputs("in1", "in2").addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1")
|
||||
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2")
|
||||
.addInputs("in1", "in2").addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "in1")
|
||||
.addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "in2")
|
||||
.addVertex("l2", new L2Vertex(), "d0", "d1")
|
||||
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(1)
|
||||
.addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(1)
|
||||
.nOut(1).activation(Activation.IDENTITY).build(), "l2")
|
||||
.setOutputs("out").build();
|
||||
|
||||
|
@ -1083,14 +1083,14 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH).updater(new NoOp()).graphBuilder()
|
||||
.addInputs("in1", "in2")
|
||||
.addLayer("d0", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in1")
|
||||
.addLayer("d1", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in2")
|
||||
.addLayer("d0", DenseLayer.builder().nIn(layerSizes).nOut(layerSizes).build(), "in1")
|
||||
.addLayer("d1", DenseLayer.builder().nIn(layerSizes).nOut(layerSizes).build(), "in2")
|
||||
.addVertex("stack", new StackVertex(), "d0", "d1")
|
||||
.addLayer("d2", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "stack")
|
||||
.addLayer("d2", DenseLayer.builder().nIn(layerSizes).nOut(layerSizes).build(), "stack")
|
||||
.addVertex("u1", new UnstackVertex(0, 2), "d2").addVertex("u2", new UnstackVertex(1, 2), "d2")
|
||||
.addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
|
||||
.addLayer("out1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2)
|
||||
.nIn(layerSizes).nOut(layerSizes).activation(Activation.IDENTITY).build(), "u1")
|
||||
.addLayer("out2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
|
||||
.addLayer("out2", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2)
|
||||
.nIn(layerSizes).nOut(2).activation(Activation.IDENTITY).build(), "u2")
|
||||
.setOutputs("out1", "out2").build();
|
||||
|
||||
|
@ -1137,17 +1137,17 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH).updater(new NoOp()).graphBuilder()
|
||||
.addInputs("in1", "in2").addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1")
|
||||
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2")
|
||||
.addInputs("in1", "in2").addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "in1")
|
||||
.addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "in2")
|
||||
.addVertex("stack", new StackVertex(), "d0", "d1")
|
||||
.addVertex("u0", new UnstackVertex(0, 2), "stack")
|
||||
.addVertex("u1", new UnstackVertex(1, 2), "stack")
|
||||
.addLayer("out1",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
|
||||
.nOut(2).activation(Activation.IDENTITY).build(),
|
||||
"u0")
|
||||
.addLayer("out2",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
|
||||
.nOut(2).activation(Activation.IDENTITY).build(),
|
||||
"u1")
|
||||
.setOutputs("out1", "out2").build();
|
||||
|
@ -1198,16 +1198,16 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH).updater(new NoOp()).graphBuilder()
|
||||
.addInputs("in1", "in2")
|
||||
.addLayer("d0", new SimpleRnn.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in1")
|
||||
.addLayer("d1", new SimpleRnn.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in2")
|
||||
.addLayer("d0", SimpleRnn.builder().nIn(layerSizes).nOut(layerSizes).build(), "in1")
|
||||
.addLayer("d1", SimpleRnn.builder().nIn(layerSizes).nOut(layerSizes).build(), "in2")
|
||||
.addVertex("stack", new StackVertex(), "d0", "d1")
|
||||
.addLayer("d2", new SimpleRnn.Builder().nIn(layerSizes).nOut(layerSizes).build(), "stack")
|
||||
.addLayer("d2", SimpleRnn.builder().nIn(layerSizes).nOut(layerSizes).build(), "stack")
|
||||
.addVertex("u1", new UnstackVertex(0, 2), "d2").addVertex("u2", new UnstackVertex(1, 2), "d2")
|
||||
.addLayer("p1", new GlobalPoolingLayer.Builder(PoolingType.AVG).build(), "u1")
|
||||
.addLayer("p2", new GlobalPoolingLayer.Builder(PoolingType.AVG).build(), "u2")
|
||||
.addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
|
||||
.addLayer("p1", GlobalPoolingLayer.builder(PoolingType.AVG).build(), "u1")
|
||||
.addLayer("p2", GlobalPoolingLayer.builder(PoolingType.AVG).build(), "u2")
|
||||
.addLayer("out1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2)
|
||||
.nIn(layerSizes).nOut(layerSizes).activation(Activation.IDENTITY).build(), "p1")
|
||||
.addLayer("out2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
|
||||
.addLayer("out2", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2)
|
||||
.nIn(layerSizes).nOut(2).activation(Activation.IDENTITY).build(), "p2")
|
||||
.setOutputs("out1", "out2").build();
|
||||
|
||||
|
@ -1260,14 +1260,14 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH).updater(new NoOp()).graphBuilder()
|
||||
.addInputs("in1", "in2").addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1")
|
||||
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2")
|
||||
.addInputs("in1", "in2").addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "in1")
|
||||
.addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "in2")
|
||||
.addLayer("out1",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
|
||||
.nOut(2).activation(Activation.IDENTITY).build(),
|
||||
"d0")
|
||||
.addLayer("out2",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
|
||||
.nOut(2).activation(Activation.IDENTITY).build(),
|
||||
"d1")
|
||||
.setOutputs("out1", "out2").build();
|
||||
|
@ -1320,10 +1320,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.activation(Activation.TANH).updater(new NoOp()).graphBuilder()
|
||||
.addInputs("in1").addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(3).build(), "in1")
|
||||
.addInputs("in1").addLayer("d1", DenseLayer.builder().nIn(2).nOut(3).build(), "in1")
|
||||
.addVertex("norm", new L2NormalizeVertex(definition,L2NormalizeVertex.DEFAULT_EPS), "d1")
|
||||
.addLayer("out1",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(3)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(3)
|
||||
.nOut(2).activation(Activation.IDENTITY).build(),
|
||||
"norm")
|
||||
.setOutputs("out1").build();
|
||||
|
@ -1370,11 +1370,11 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH).updater(new NoOp()).graphBuilder()
|
||||
.addInputs("in1")
|
||||
.addLayer("d1", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(2).build(),
|
||||
.addLayer("d1", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(2).build(),
|
||||
"in1")
|
||||
.addVertex("norm", new L2NormalizeVertex(), "d1")
|
||||
.addLayer("out1",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nOut(2)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nOut(2)
|
||||
.activation(Activation.IDENTITY).build(),
|
||||
"norm")
|
||||
.setOutputs("out1").setInputTypes(InputType.convolutional(h, w, dIn)).build();
|
||||
|
@ -1420,9 +1420,9 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345L)
|
||||
.updater(new NoOp()).graphBuilder().addInputs("in")
|
||||
.addLayer("0", new EmbeddingLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
|
||||
.addLayer("0", EmbeddingLayer.builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.TANH).build(), "in")
|
||||
.addLayer("1", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
|
||||
.addLayer("1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
|
||||
.activation(Activation.SOFTMAX).build(), "0")
|
||||
.setOutputs("1").build();
|
||||
|
||||
|
|
|
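Note: the ComputationGraph gradient-check hunks above all follow one pattern: layer configurations move from the nested new X.Builder() classes to the static X.builder() factories, while graph vertices (MergeVertex, StackVertex, UnstackVertex, L2Vertex, L2NormalizeVertex) keep their constructors. Below is a minimal sketch of the post-migration style; the import locations are assumed from the upstream DL4J package layout and are not part of this commit.

import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.graph.MergeVertex;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class GraphBuilderMigrationSketch {
    public static void main(String[] args) {
        // Layers use the generated static builder(); vertices are still constructed with new.
        ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
                .graphBuilder()
                .addInputs("in1", "in2")
                .addLayer("d1", DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build(), "in1")
                .addLayer("d2", DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build(), "in2")
                .addVertex("merge", new MergeVertex(), "d1", "d2")
                .addLayer("out", OutputLayer.builder()
                        .lossFunction(LossFunctions.LossFunction.MSE)
                        .activation(Activation.IDENTITY).nIn(8).nOut(2).build(), "merge")
                .setOutputs("out")
                .build();
        ComputationGraph graph = new ComputationGraph(conf);
        graph.init();
    }
}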
@ -119,10 +119,10 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345L)
|
||||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.list()
|
||||
.layer(0, new SimpleRnn.Builder().nIn(nIn).nOut(layerSize)
|
||||
|
||||
.layer(0, SimpleRnn.builder().nIn(nIn).nOut(layerSize)
|
||||
.weightInit(new NormalDistribution(0, 1)).build())
|
||||
.layer(1, new RnnOutputLayer.Builder(s.lf).activation(s.act).nIn(layerSize).nOut(s.nOut)
|
||||
.layer(1, RnnOutputLayer.builder().lossFunction(s.lf).activation(s.act).nIn(layerSize).nOut(s.nOut)
|
||||
.weightInit(new NormalDistribution(0, 1)).build())
|
||||
.build();
|
||||
MultiLayerNetwork mln = new MultiLayerNetwork(conf);
|
||||
|
@ -161,10 +161,10 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.dataType(DataType.DOUBLE)
|
||||
.dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
|
||||
.layer(0, new SimpleRnn.Builder().nIn(nIn).nOut(2).activation(Activation.TANH).build())
|
||||
.layer(1, new GravesBidirectionalLSTM.Builder().nIn(2).nOut(layerSize)
|
||||
.layer(0, SimpleRnn.builder().nIn(nIn).nOut(2).activation(Activation.TANH).build())
|
||||
.layer(1, GravesBidirectionalLSTM.builder().nIn(2).nOut(layerSize)
|
||||
.activation(Activation.TANH).build())
|
||||
.layer(2, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(2, RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build())
|
||||
.build();
|
||||
|
||||
|
@ -241,9 +241,9 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.dist(new NormalDistribution(0, 1)).seed(12345)
|
||||
.list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.layer(0, DenseLayer.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
|
||||
.layer(1, OutputLayer.builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
|
||||
.activation(a).build())
|
||||
.validateOutputLayerConfig(false)
|
||||
.build();
|
||||
|
@ -335,9 +335,9 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.dist(new NormalDistribution(0, 1)).seed(12345)
|
||||
.list()
|
||||
.layer(0, new SimpleRnn.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.layer(0, SimpleRnn.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
|
||||
.layer(1, RnnOutputLayer.builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
|
||||
.activation(a).build())
|
||||
.validateOutputLayerConfig(false)
|
||||
.inputType(InputType.recurrent(nIn,tsLength, RNNFormat.NCW))
|
||||
|
@ -368,9 +368,9 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.dist(new NormalDistribution(0, 2)).seed(12345)
|
||||
.graphBuilder().addInputs("in")
|
||||
.addLayer("0", new SimpleRnn.Builder().nOut(layerSize)
|
||||
.addLayer("0", SimpleRnn.builder().nOut(layerSize)
|
||||
.activation(Activation.TANH).build(), "in")
|
||||
.addLayer("1", new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
|
||||
.addLayer("1", RnnOutputLayer.builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
|
||||
.activation(a).build(), "0")
|
||||
.setOutputs("1").validateOutputLayerConfig(false)
|
||||
.setInputTypes(InputType.recurrent(nIn,tsLength,RNNFormat.NCW))
|
||||
|
@ -401,9 +401,9 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
|
|||
.weightInit(new NormalDistribution(0,2))
|
||||
.updater(new NoOp())
|
||||
.list()
|
||||
.layer(new LSTM.Builder().nIn(3).nOut(3).build())
|
||||
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(new OutputLayer.Builder().nIn(3).nOut(3).activation(Activation.SOFTMAX).build())
|
||||
.layer(LSTM.builder().nIn(3).nOut(3).build())
|
||||
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(OutputLayer.builder().nIn(3).nOut(3).activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.recurrent(3))
|
||||
.build();
|
||||
|
||||
|
@ -457,9 +457,9 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.layer("0", new LSTM.Builder().nIn(3).nOut(3).build(), "in")
|
||||
.layer("1", new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build(), "0")
|
||||
.layer("out", new OutputLayer.Builder().nIn(3).nOut(3).activation(Activation.SOFTMAX).build(), "1")
|
||||
.layer("0", LSTM.builder().nIn(3).nOut(3).build(), "in")
|
||||
.layer("1", GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build(), "0")
|
||||
.layer("out", OutputLayer.builder().nIn(3).nOut(3).activation(Activation.SOFTMAX).build(), "1")
|
||||
.setOutputs("out")
|
||||
.setInputTypes(InputType.recurrent(3))
|
||||
.build();
|
||||
|
|
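Note: in the masking hunks the recurring change is that the loss function is no longer a Builder constructor argument (new RnnOutputLayer.Builder(lf)) but a fluent setter (RnnOutputLayer.builder().lossFunction(lf)). A short sketch of a recurrent stack in the new style, with import paths assumed from upstream DL4J:

import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
import org.deeplearning4j.nn.conf.layers.recurrent.SimpleRnn;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class RnnBuilderSketch {
    public static void main(String[] args) {
        NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                .list()
                .layer(0, SimpleRnn.builder().nIn(3).nOut(5).activation(Activation.TANH).build())
                // the loss function is now set through the builder rather than the Builder constructor
                .layer(1, RnnOutputLayer.builder()
                        .lossFunction(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(5).nOut(3).build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
    }
}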
|
@ -72,10 +72,10 @@ public class LRNGradientCheckTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.seed(12345L)
|
||||
.dist(new NormalDistribution(0, 2)).list()
|
||||
.layer(0, new ConvolutionLayer.Builder().nOut(6).kernelSize(2, 2).stride(1, 1)
|
||||
.layer(0, ConvolutionLayer.builder().nOut(6).kernelSize(2, 2).stride(1, 1)
|
||||
.activation(Activation.TANH).build())
|
||||
.layer(1, new LocalResponseNormalization.Builder().build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1, LocalResponseNormalization.builder().build())
|
||||
.layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(nOut).build())
|
||||
.inputType(InputType.convolutional(hw, hw, depth));
|
||||
|
||||
|
|
|
@ -73,17 +73,17 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
|
|||
LayerConfiguration l0;
|
||||
LayerConfiguration l1;
|
||||
if (graves) {
|
||||
l0 = new GravesLSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.SIGMOID)
|
||||
l0 = GravesLSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.SIGMOID)
|
||||
.dist(new NormalDistribution(0, 1.0))
|
||||
.updater(new NoOp()).build();
|
||||
l1 = new GravesLSTM.Builder().nIn(layerSize).nOut(layerSize).activation(Activation.SIGMOID)
|
||||
l1 = GravesLSTM.builder().nIn(layerSize).nOut(layerSize).activation(Activation.SIGMOID)
|
||||
.dist(new NormalDistribution(0, 1.0))
|
||||
.updater(new NoOp()).build();
|
||||
} else {
|
||||
l0 = new LSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.SIGMOID)
|
||||
l0 = LSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.SIGMOID)
|
||||
.dist(new NormalDistribution(0, 1.0))
|
||||
.updater(new NoOp()).build();
|
||||
l1 = new LSTM.Builder().nIn(layerSize).nOut(layerSize).activation(Activation.SIGMOID)
|
||||
l1 = LSTM.builder().nIn(layerSize).nOut(layerSize).activation(Activation.SIGMOID)
|
||||
.dist(new NormalDistribution(0, 1.0))
|
||||
.updater(new NoOp()).build();
|
||||
}
|
||||
|
@ -94,7 +94,7 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
|
|||
.list()
|
||||
.layer(0, l0).layer(1,
|
||||
l1)
|
||||
.layer(2, new RnnOutputLayer.Builder(LossFunction.MCXENT)
|
||||
.layer(2, RnnOutputLayer.builder().lossFunction(LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut)
|
||||
|
||||
.dist(new NormalDistribution(0, 1.0)).updater(new NoOp())
|
||||
|
@ -196,14 +196,14 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
|
|||
|
||||
LayerConfiguration layer;
|
||||
if (graves) {
|
||||
layer = new GravesLSTM.Builder().nIn(nIn).nOut(layerSize).activation(afn).build();
|
||||
layer = GravesLSTM.builder().nIn(nIn).nOut(layerSize).activation(afn).build();
|
||||
} else {
|
||||
layer = new LSTM.Builder().nIn(nIn).nOut(layerSize).activation(afn).build();
|
||||
layer = LSTM.builder().nIn(nIn).nOut(layerSize).activation(afn).build();
|
||||
}
|
||||
|
||||
NeuralNetConfiguration.NeuralNetConfigurationBuilder conf2 = (NeuralNetConfigurationBuilder) conf
|
||||
.layer(0, layer)
|
||||
.layer(1, new RnnOutputLayer.Builder(lf).activation(outputActivation)
|
||||
.layer(1, RnnOutputLayer.builder(lf).activation(outputActivation)
|
||||
.nIn(layerSize).nOut(nOut).build());
|
||||
|
||||
MultiLayerNetwork mln = new MultiLayerNetwork(conf2.build());
|
||||
|
@ -251,16 +251,16 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
|
|||
|
||||
LayerConfiguration layer;
|
||||
if (graves) {
|
||||
layer = new GravesLSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH).build();
|
||||
layer = GravesLSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH).build();
|
||||
} else {
|
||||
layer = new LSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH).build();
|
||||
layer = LSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH).build();
|
||||
}
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345L)
|
||||
.dataType(DataType.DOUBLE)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).list().layer(0, layer)
|
||||
.layer(1, new RnnOutputLayer.Builder(LossFunction.MCXENT).activation(Activation.SOFTMAX)
|
||||
.layer(1, RnnOutputLayer.builder().lossFunction(LossFunction.MCXENT).activation(Activation.SOFTMAX)
|
||||
.nIn(layerSize).nOut(nOut).build())
|
||||
.build();
|
||||
MultiLayerNetwork mln = new MultiLayerNetwork(conf);
|
||||
|
@ -324,11 +324,11 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.layer(0,
|
||||
new GravesBidirectionalLSTM.Builder().nIn(nIn).nOut(layerSize)
|
||||
GravesBidirectionalLSTM.builder().nIn(nIn).nOut(layerSize)
|
||||
.weightInit(new NormalDistribution(0, 1))
|
||||
.activation(afn)
|
||||
.build())
|
||||
.layer(1, new RnnOutputLayer.Builder(lf).activation(outputActivation).nIn(layerSize)
|
||||
.layer(1, RnnOutputLayer.builder(lf).activation(outputActivation).nIn(layerSize)
|
||||
.nOut(nOut)
|
||||
.dist(new NormalDistribution(0, 1)).updater(new NoOp()).build())
|
||||
.build();
|
||||
|
@ -383,12 +383,12 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345L)
|
||||
.dataType(DataType.DOUBLE)
|
||||
.list()
|
||||
.layer(0, new GravesBidirectionalLSTM.Builder().nIn(nIn).nOut(layerSize)
|
||||
.layer(0, GravesBidirectionalLSTM.builder().nIn(nIn).nOut(layerSize)
|
||||
|
||||
.dist(new NormalDistribution(0, 1)).updater(
|
||||
Updater.NONE)
|
||||
.build())
|
||||
.layer(1, new RnnOutputLayer.Builder(LossFunction.MCXENT).activation(Activation.SOFTMAX)
|
||||
.layer(1, RnnOutputLayer.builder().lossFunction(LossFunction.MCXENT).activation(Activation.SOFTMAX)
|
||||
.nIn(layerSize).nOut(nOut)
|
||||
.dist(new NormalDistribution(0, 1)).updater(new NoOp()).build())
|
||||
.build();
|
||||
|
@ -432,13 +432,13 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new NoOp()).seed(12345)
|
||||
.dataType(DataType.DOUBLE)
|
||||
.dist(new UniformDistribution(-2, 2)).list()
|
||||
.layer(0, new ConvolutionLayer.Builder(3, 3).nIn(2).nOut(3).stride(1, 1)
|
||||
.layer(0, ConvolutionLayer.builder(3, 3).nIn(2).nOut(3).stride(1, 1)
|
||||
.activation(Activation.TANH).build()) //Out: (6-3)/1+1 = 4 -> 4x4x3
|
||||
.layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
|
||||
.layer(1, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
|
||||
.stride(1, 1).build()) //Out: (4-2)/1+1 = 3 -> 3x3x3
|
||||
.layer(2, new DenseLayer.Builder().nIn(27).nOut(4).activation(Activation.TANH).build())
|
||||
.layer(3, new GravesLSTM.Builder().nIn(4).nOut(3).activation(Activation.TANH).build())
|
||||
.layer(4, new RnnOutputLayer.Builder().lossFunction(LossFunction.MCXENT).nIn(3).nOut(nClasses)
|
||||
.layer(2, DenseLayer.builder().nIn(27).nOut(4).activation(Activation.TANH).build())
|
||||
.layer(3, GravesLSTM.builder().nIn(4).nOut(3).activation(Activation.TANH).build())
|
||||
.layer(4, RnnOutputLayer.builder().lossFunction(LossFunction.MCXENT).nIn(3).nOut(nClasses)
|
||||
.activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutional(6, 6, 2)).build();
|
||||
|
||||
|
|
|
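Note: the LSTM hunks keep the same if/else structure, only the layer construction changes: both branches now return a LayerConfiguration obtained from the class's static builder. A condensed sketch of that selection; the LayerConfiguration import path is assumed, and the method name is illustrative rather than taken from the commit.

import org.deeplearning4j.nn.conf.layers.GravesLSTM;
import org.deeplearning4j.nn.conf.layers.LSTM;
import org.deeplearning4j.nn.conf.layers.LayerConfiguration;
import org.nd4j.linalg.activations.Activation;

public class LstmLayerFactorySketch {
    // Mirrors the graves/non-graves branch in the gradient-check tests, in the new builder style.
    static LayerConfiguration recurrentLayer(boolean graves, int nIn, int nOut) {
        return graves
                ? GravesLSTM.builder().nIn(nIn).nOut(nOut).activation(Activation.SIGMOID).build()
                : LSTM.builder().nIn(nIn).nOut(nOut).activation(Activation.SIGMOID).build();
    }
}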
@ -187,8 +187,8 @@ public class LossFunctionGradientCheck extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345)
|
||||
.updater(new NoOp())
|
||||
.dist(new UniformDistribution(-2, 2)).list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build())
|
||||
.layer(1, new OutputLayer.Builder().lossFunction(lossFunctions[i])
|
||||
.layer(0, DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build())
|
||||
.layer(1, OutputLayer.builder().lossFunction(lossFunctions[i])
|
||||
.activation(outputActivationFn[i]).nIn(4).nOut(nOut[i]).build())
|
||||
.validateOutputLayerConfig(false)
|
||||
.build();
|
||||
|
@ -351,9 +351,9 @@ public class LossFunctionGradientCheck extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345)
|
||||
.updater(new NoOp())
|
||||
.dist(new UniformDistribution(-2, 2)).list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(4).nOut(nOut[i]).activation(Activation.TANH)
|
||||
.layer(0, DenseLayer.builder().nIn(4).nOut(nOut[i]).activation(Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new LossLayer.Builder().lossFunction(lossFunctions[i])
|
||||
.layer(1, LossLayer.builder().lossFunction(lossFunctions[i])
|
||||
.activation(outputActivationFn[i]).build())
|
||||
.validateOutputLayerConfig(false)
|
||||
.build();
|
||||
|
@ -361,7 +361,7 @@ public class LossFunctionGradientCheck extends BaseDL4JTest {
|
|||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
assertSame(((LossLayer) net.getLayer(1).getLayerConfiguration()).getLossFn().getClass(), lossFunctions[i]
|
||||
assertSame(((LossLayer) net.getLayer(1).getLayerConfiguration()).getLossFunction().getClass(), lossFunctions[i]
|
||||
.getClass());
|
||||
|
||||
INDArray[] inOut = getFeaturesAndLabels(lossFunctions[i], minibatchSizes[j], 4, nOut[i], 12345);
|
||||
|
@ -655,9 +655,9 @@ public class LossFunctionGradientCheck extends BaseDL4JTest {
|
|||
// .dist(new UniformDistribution(-3, 3))
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH)
|
||||
.layer(0, DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder().lossFunction(lossFunctions[i])
|
||||
.layer(1, OutputLayer.builder().lossFunction(lossFunctions[i])
|
||||
.activation(outputActivationFn[i]).nIn(4).nOut(3).build())
|
||||
.validateOutputLayerConfig(false)
|
||||
.build();
|
||||
|
|
|
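Note: besides the builder change, this file also picks up the accessor rename on the layer configuration: getLossFn() becomes getLossFunction(). A hedged sketch of the same check outside the test harness; the LossMSE import and the JUnit assertion are assumptions, while the builder and accessor calls mirror the hunks above.

import static org.junit.jupiter.api.Assertions.assertSame;

import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.LossLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.impl.LossMSE;

public class LossAccessorSketch {
    public static void main(String[] args) {
        LossMSE loss = new LossMSE();
        NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                .list()
                .layer(0, DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build())
                .layer(1, LossLayer.builder().lossFunction(loss).activation(Activation.IDENTITY).build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        // getLossFn() was renamed to getLossFunction() on the layer configuration
        LossLayer cfg = (LossLayer) net.getLayer(1).getLayerConfiguration();
        assertSame(loss.getClass(), cfg.getLossFunction().getClass());
    }
}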
@ -73,19 +73,19 @@ public class NoBiasGradientCheckTests extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.seed(12345L)
|
||||
.list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(nIn).nOut(layerSize)
|
||||
.layer(0, DenseLayer.builder().nIn(nIn).nOut(layerSize)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH)
|
||||
.hasBias(true) //Layer 0: Always have a bias
|
||||
.build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(layerSize).nOut(layerSize)
|
||||
.layer(1, DenseLayer.builder().nIn(layerSize).nOut(layerSize)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH)
|
||||
.hasBias(denseHasBias)
|
||||
.build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunction.MCXENT)
|
||||
.layer(2, OutputLayer.builder().lossFunction(LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
|
@ -144,12 +144,12 @@ public class NoBiasGradientCheckTests extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.seed(12345L)
|
||||
.list()
|
||||
.layer(0, new LSTM.Builder().nIn(nIn).nOut(layerSize)
|
||||
.layer(0, LSTM.builder().nIn(nIn).nOut(layerSize)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new RnnOutputLayer.Builder(LossFunction.MCXENT)
|
||||
.layer(1, RnnOutputLayer.builder().lossFunction(LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
|
@ -205,13 +205,13 @@ public class NoBiasGradientCheckTests extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.seed(12345L)
|
||||
.list()
|
||||
.layer(0, new EmbeddingLayer.Builder().nIn(nIn).nOut(layerSize)
|
||||
.layer(0, EmbeddingLayer.builder().nIn(nIn).nOut(layerSize)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH)
|
||||
.hasBias(embeddingHasBias)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder(LossFunction.MCXENT)
|
||||
.layer(1, OutputLayer.builder().lossFunction(LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
|
@ -271,17 +271,17 @@ public class NoBiasGradientCheckTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.list()
|
||||
.layer(new ConvolutionLayer.Builder(kernel,
|
||||
.layer(ConvolutionLayer.builder(kernel,
|
||||
stride, padding).nIn(inputDepth)
|
||||
.hasBias(false)
|
||||
.nOut(3).build())//output: (5-2+0)/1+1 = 4
|
||||
.layer(new SubsamplingLayer.Builder(PoolingType.MAX)
|
||||
.layer(SubsamplingLayer.builder(PoolingType.MAX)
|
||||
.kernelSize(kernel).stride(stride).padding(padding)
|
||||
.pnorm(pNorm).build()) //output: (4-2+0)/1+1 =3 -> 3x3x3
|
||||
.layer(new ConvolutionLayer.Builder(kernel, stride, padding)
|
||||
.layer(ConvolutionLayer.builder(kernel, stride, padding)
|
||||
.hasBias(cnnHasBias)
|
||||
.nOut(2).build()) //Output: (3-2+0)/1+1 = 2
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.nOut(4).build())
|
||||
.inputType(InputType.convolutionalFlat(height, width, inputDepth))
|
||||
|
|
|
@ -121,10 +121,10 @@ public class OutputLayerGradientChecks extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.list()
|
||||
.layer(new LSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.layer(LSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 1.0))
|
||||
.updater(new NoOp()).build())
|
||||
.layer(new RnnLossLayer.Builder(lf)
|
||||
.layer(RnnLossLayer.builder().lossFunction(lf)
|
||||
.activation(oa)
|
||||
.build())
|
||||
.validateOutputLayerConfig(false).build();
|
||||
|
@ -228,10 +228,10 @@ public class OutputLayerGradientChecks extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.convolutionMode(ConvolutionMode.Same)
|
||||
.list()
|
||||
.layer(new ConvolutionLayer.Builder().nIn(dIn).nOut(dOut).activation(Activation.TANH)
|
||||
.layer(ConvolutionLayer.builder().nIn(dIn).nOut(dOut).activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 1.0))
|
||||
.updater(new NoOp()).build())
|
||||
.layer(new CnnLossLayer.Builder(lf)
|
||||
.layer(CnnLossLayer.builder().lossFunction(lf)
|
||||
.activation(oa)
|
||||
.build())
|
||||
.validateOutputLayerConfig(false).build();
|
||||
|
@ -375,11 +375,11 @@ public class OutputLayerGradientChecks extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.convolutionMode(ConvolutionMode.Same)
|
||||
.list()
|
||||
.layer(new Convolution3D.Builder().nIn(chIn).nOut(chOut).activation(Activation.TANH)
|
||||
.layer(Convolution3D.builder().nIn(chIn).nOut(chOut).activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 1.0))
|
||||
.dataFormat(dataFormat)
|
||||
.updater(new NoOp()).build())
|
||||
.layer(new Cnn3DLossLayer.Builder(dataFormat)
|
||||
.layer(Cnn3DLossLayer.builder().dataFormat(dataFormat)
|
||||
.lossFunction(lf)
|
||||
.activation(oa)
|
||||
.build())
|
||||
|
|
|
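Note: the per-timestep and per-pixel loss layers in the hunks above follow the same move from constructor arguments to setters: RnnLossLayer.builder().lossFunction(lf), CnnLossLayer.builder().lossFunction(lf), and Cnn3DLossLayer.builder().dataFormat(df). A compact sketch using the enum-to-ILossFunction conversion that this commit uses elsewhere (getILossFunction()); imports are assumed from upstream DL4J.

import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.LSTM;
import org.deeplearning4j.nn.conf.layers.RnnLossLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class RnnLossLayerSketch {
    public static void main(String[] args) {
        NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                .list()
                .layer(LSTM.builder().nIn(4).nOut(6).activation(Activation.TANH).build())
                // loss function and activation are plain builder setters now
                .layer(RnnLossLayer.builder()
                        .lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction())
                        .activation(Activation.SOFTMAX).build())
                .build();
    }
}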
@ -112,12 +112,12 @@ public class RnnGradientChecks extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.list()
|
||||
.layer(new LSTM.Builder().nIn(nIn).nOut(3).build())
|
||||
.layer(new Bidirectional(m,
|
||||
.layer(LSTM.builder().nIn(nIn).nOut(3).build())
|
||||
.layer(Bidirectional.builder(m,
|
||||
(simple ?
|
||||
new SimpleRnn.Builder().nIn(3).nOut(3).hasLayerNorm(hasLayerNorm).build() :
|
||||
new LSTM.Builder().nIn(3).nOut(3).build())))
|
||||
.layer(new RnnOutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX).build())
|
||||
SimpleRnn.builder().nIn(3).nOut(3).hasLayerNorm(hasLayerNorm).build() :
|
||||
LSTM.builder().nIn(3).nOut(3).build())))
|
||||
.layer(RnnOutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
|
||||
|
||||
|
@ -194,9 +194,9 @@ public class RnnGradientChecks extends BaseDL4JTest {
|
|||
.l1(l1s[l])
|
||||
.l2(l2s[l])
|
||||
.list()
|
||||
.layer(new SimpleRnn.Builder().nIn(nIn).nOut(layerSize).hasLayerNorm(hasLayerNorm).build())
|
||||
.layer(new SimpleRnn.Builder().nIn(layerSize).nOut(layerSize).hasLayerNorm(hasLayerNorm).build())
|
||||
.layer(new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut)
|
||||
.layer(SimpleRnn.builder().nIn(nIn).nOut(layerSize).hasLayerNorm(hasLayerNorm).build())
|
||||
.layer(SimpleRnn.builder().nIn(layerSize).nOut(layerSize).hasLayerNorm(hasLayerNorm).build())
|
||||
.layer(RnnOutputLayer.builder().nIn(layerSize).nOut(nOut)
|
||||
.activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.build())
|
||||
.build();
|
||||
|
@ -268,11 +268,11 @@ public class RnnGradientChecks extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.list()
|
||||
.layer(simple ? new SimpleRnn.Builder().nOut(layerSize).hasLayerNorm(hasLayerNorm).build() :
|
||||
new LSTM.Builder().nOut(layerSize).build())
|
||||
.layer(new LastTimeStep(simple ? new SimpleRnn.Builder().nOut(layerSize).hasLayerNorm(hasLayerNorm).build() :
|
||||
new LSTM.Builder().nOut(layerSize).build()))
|
||||
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.layer(simple ? SimpleRnn.builder().nOut(layerSize).hasLayerNorm(hasLayerNorm).build() :
|
||||
LSTM.builder().nOut(layerSize).build())
|
||||
.layer(new LastTimeStep(simple ? SimpleRnn.builder().nOut(layerSize).hasLayerNorm(hasLayerNorm).build() :
|
||||
LSTM.builder().nOut(layerSize).build()))
|
||||
.layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.inputType(InputType.recurrent(nIn))
|
||||
.build();
|
||||
|
@ -334,9 +334,9 @@ public class RnnGradientChecks extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.list()
|
||||
.layer(new LSTM.Builder().nOut(layerSize).build())
|
||||
.layer(new TimeDistributed(new DenseLayer.Builder().nOut(layerSize).activation(Activation.SOFTMAX).build()))
|
||||
.layer(new RnnOutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.layer(LSTM.builder().nOut(layerSize).build())
|
||||
.layer(new TimeDistributed(DenseLayer.builder().nOut(layerSize).activation(Activation.SOFTMAX).build()))
|
||||
.layer(RnnOutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.inputType(InputType.recurrent(nIn))
|
||||
.build();
|
||||
|
|
|
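Note: the wrapper layers are split in these hunks: Bidirectional moves to Bidirectional.builder(...), while LastTimeStep and TimeDistributed keep their constructors and simply wrap inner layers that are now built with builder(). A sketch of the mixed style, with package locations assumed from upstream DL4J.

import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.LSTM;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.conf.layers.recurrent.Bidirectional;
import org.deeplearning4j.nn.conf.layers.recurrent.LastTimeStep;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class WrapperLayerSketch {
    public static void main(String[] args) {
        NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                .list()
                // Bidirectional has its own builder(); the wrapped layer is built the new way too
                .layer(Bidirectional.builder(LSTM.builder().nOut(10).activation(Activation.TANH).build()).build())
                // LastTimeStep (like TimeDistributed) still wraps via its constructor
                .layer(new LastTimeStep(LSTM.builder().nOut(10).build()))
                .layer(OutputLayer.builder().nOut(3).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                .inputType(InputType.recurrent(5))
                .build();
    }
}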
@ -132,27 +132,27 @@ public class UtilLayerGradientChecks extends BaseDL4JTest {
|
|||
InputType it;
|
||||
switch (inputRank){
|
||||
case 2:
|
||||
l1 = new DenseLayer.Builder().nOut(3).build();
|
||||
l2 = new DenseLayer.Builder().nOut(3).build();
|
||||
l3 = new OutputLayer.Builder().nOut(3).lossFunction(LossFunctions.LossFunction.MSE)
|
||||
l1 = DenseLayer.builder().nOut(3).build();
|
||||
l2 = DenseLayer.builder().nOut(3).build();
|
||||
l3 = OutputLayer.builder().nOut(3).lossFunction(LossFunctions.LossFunction.MSE)
|
||||
.activation(Activation.TANH).build();
|
||||
it = InputType.feedForward(3);
|
||||
break;
|
||||
case 3:
|
||||
l1 = new SimpleRnn.Builder().nIn(3).nOut(3).activation(Activation.TANH).build();
|
||||
l2 = new SimpleRnn.Builder().nIn(3).nOut(3).activation(Activation.TANH).build();
|
||||
l3 = new RnnOutputLayer.Builder().nIn(3).nOut(3).lossFunction(LossFunctions.LossFunction.SQUARED_LOSS)
|
||||
l1 = SimpleRnn.builder().nIn(3).nOut(3).activation(Activation.TANH).build();
|
||||
l2 = SimpleRnn.builder().nIn(3).nOut(3).activation(Activation.TANH).build();
|
||||
l3 = RnnOutputLayer.builder().nIn(3).nOut(3).lossFunction(LossFunctions.LossFunction.SQUARED_LOSS)
|
||||
.activation(Activation.IDENTITY).build();
|
||||
it = InputType.recurrent(3);
|
||||
break;
|
||||
case 4:
|
||||
l1 = new ConvolutionLayer.Builder().nOut(5).convolutionMode(ConvolutionMode.Truncate)
|
||||
l1 = ConvolutionLayer.builder().nOut(5).convolutionMode(ConvolutionMode.Truncate)
|
||||
.stride(1,1).kernelSize(2,2).padding(0,0)
|
||||
.build();
|
||||
l2 = new ConvolutionLayer.Builder().nOut(5).convolutionMode(ConvolutionMode.Truncate)
|
||||
l2 = ConvolutionLayer.builder().nOut(5).convolutionMode(ConvolutionMode.Truncate)
|
||||
.stride(1,1).kernelSize(2,2).padding(0,0)
|
||||
.build();
|
||||
l3 = new OutputLayer.Builder().nOut(5).lossFunction(LossFunctions.LossFunction.SQUARED_LOSS)
|
||||
l3 = OutputLayer.builder().nOut(5).lossFunction(LossFunctions.LossFunction.SQUARED_LOSS)
|
||||
.activation(Activation.IDENTITY)
|
||||
.build();
|
||||
it = InputType.convolutional(5,5,1);
|
||||
|
@ -201,14 +201,14 @@ public class UtilLayerGradientChecks extends BaseDL4JTest {
|
|||
.seed(12345)
|
||||
.updater(Updater.NONE.getIUpdaterWithDefaultConfig())
|
||||
.list()
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10)
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10)
|
||||
.activation(Activation.TANH).weightInit(WeightInit.XAVIER).build())
|
||||
.layer(new FrozenLayerWithBackprop(new DenseLayer.Builder().nIn(10).nOut(10)
|
||||
.layer(new FrozenLayerWithBackprop(DenseLayer.builder().nIn(10).nOut(10)
|
||||
.activation(Activation.TANH).weightInit(WeightInit.XAVIER).build()))
|
||||
.layer(new FrozenLayerWithBackprop(
|
||||
new DenseLayer.Builder().nIn(10).nOut(10).activation(Activation.TANH)
|
||||
DenseLayer.builder().nIn(10).nOut(10).activation(Activation.TANH)
|
||||
.weightInit(WeightInit.XAVIER).build()))
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf2);
|
||||
|
|
|
@ -99,14 +99,14 @@ public class VaeGradientCheckTests extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.l2Bias(biasL2[i]).l1Bias(biasL1[i])
|
||||
.updater(new NoOp()).seed(12345L).list()
|
||||
.layer(0, new VariationalAutoencoder.Builder().nIn(4)
|
||||
.layer(0, VariationalAutoencoder.builder().nIn(4)
|
||||
.nOut(3).encoderLayerSizes(encoderSizes)
|
||||
.decoderLayerSizes(decoderSizes)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(afn)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder(lf)
|
||||
.layer(1, OutputLayer.builder(lf)
|
||||
.activation(outputActivation).nIn(3).nOut(3)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
|
@ -173,7 +173,7 @@ public class VaeGradientCheckTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.l1(l1).l2Bias(biasL2[i]).l1Bias(biasL1[i]).updater(new NoOp())
|
||||
.seed(12345L).weightInit(WeightInit.XAVIER).list()
|
||||
.layer(0, new VariationalAutoencoder.Builder().nIn(4).nOut(3)
|
||||
.layer(0, VariationalAutoencoder.builder().nIn(4).nOut(3)
|
||||
.encoderLayerSizes(encoderSizes).decoderLayerSizes(decoderSizes)
|
||||
.pzxActivationFunction(pzxAfn)
|
||||
.reconstructionDistribution(
|
||||
|
@ -263,7 +263,7 @@ public class VaeGradientCheckTests extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.seed(12345L).dist(new NormalDistribution(0, 1))
|
||||
.list().layer(0,
|
||||
new VariationalAutoencoder.Builder().nIn(inOutSize).nOut(3)
|
||||
VariationalAutoencoder.builder().nIn(inOutSize).nOut(3)
|
||||
.encoderLayerSizes(4).decoderLayerSizes(3)
|
||||
.pzxActivationFunction(Activation.TANH)
|
||||
.reconstructionDistribution(
|
||||
|
@ -306,7 +306,7 @@ public class VaeGradientCheckTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.seed(12345L).weightInit(WeightInit.XAVIER).list()
|
||||
.layer(0, new VariationalAutoencoder.Builder().nIn(4).nOut(3).encoderLayerSizes(2, 3)
|
||||
.layer(0, VariationalAutoencoder.builder().nIn(4).nOut(3).encoderLayerSizes(2, 3)
|
||||
.decoderLayerSizes(4, 3).pzxActivationFunction(Activation.TANH)
|
||||
.reconstructionDistribution(
|
||||
new GaussianReconstructionDistribution(Activation.TANH))
|
||||
|
|
|
@ -116,7 +116,7 @@ public class YoloGradientCheckTests extends BaseDL4JTest {
|
|||
.l1(l1[i]).l2(l2[i])
|
||||
.convolutionMode(ConvolutionMode.Same)
|
||||
.list()
|
||||
.layer(new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1)
|
||||
.layer(ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1)
|
||||
.dataFormat(format)
|
||||
.nIn(depthIn).nOut(yoloDepth).build())//output: (5-2+0)/1+1 = 4
|
||||
.layer(new Yolo2OutputLayer.Builder()
|
||||
|
@ -234,9 +234,9 @@ public class YoloGradientCheckTests extends BaseDL4JTest {
|
|||
.dist(new GaussianDistribution(0,0.1))
|
||||
.seed(12345)
|
||||
.list()
|
||||
.layer(new ConvolutionLayer.Builder().kernelSize(3,3).stride(1,1).nOut(4).build())
|
||||
.layer(new SubsamplingLayer.Builder().kernelSize(2,2).stride(2,2).build())
|
||||
.layer(new ConvolutionLayer.Builder().activation(Activation.IDENTITY).kernelSize(3,3).stride(1,1).nOut(depthOut).build())
|
||||
.layer(ConvolutionLayer.builder().kernelSize(3,3).stride(1,1).nOut(4).build())
|
||||
.layer(SubsamplingLayer.builder().kernelSize(2,2).stride(2,2).build())
|
||||
.layer(ConvolutionLayer.builder().activation(Activation.IDENTITY).kernelSize(3,3).stride(1,1).nOut(depthOut).build())
|
||||
.layer(new Yolo2OutputLayer.Builder()
|
||||
.boundingBoxPriors(bbPriors)
|
||||
.build())
|
||||
|
|
|
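Note: not every layer is migrated in the YOLO hunks: the convolution and subsampling layers switch to builder(), but Yolo2OutputLayer is still configured through new Yolo2OutputLayer.Builder() on both sides of the diff. A sketch of the resulting mixed style; the bounding-box priors and import paths are illustrative assumptions.

import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
import org.deeplearning4j.nn.conf.layers.SubsamplingLayer;
import org.deeplearning4j.nn.conf.layers.objdetect.Yolo2OutputLayer;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class YoloBuilderSketch {
    public static void main(String[] args) {
        INDArray bbPriors = Nd4j.create(new double[][]{{1, 1}, {2, 2}}); // illustrative priors
        NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                .list()
                .layer(ConvolutionLayer.builder().kernelSize(3, 3).stride(1, 1).nOut(4).build())
                .layer(SubsamplingLayer.builder().kernelSize(2, 2).stride(2, 2).build())
                // still the old nested Builder for the YOLO output layer in this commit
                .layer(new Yolo2OutputLayer.Builder().boundingBoxPriors(bbPriors).build())
                .build();
    }
}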
@ -62,9 +62,9 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 1)).updater(new NoOp())
|
||||
.graphBuilder().addInputs("input")
|
||||
.appendLayer("firstLayer",
|
||||
new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build())
|
||||
DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build())
|
||||
.addLayer("outputLayer",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
|
||||
"firstLayer")
|
||||
.setOutputs("outputLayer").build();
|
||||
|
@ -83,20 +83,20 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.graphBuilder().addInputs("input")
|
||||
.addLayer("cnn1",
|
||||
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
.build(),
|
||||
"input")
|
||||
.addLayer("cnn2",
|
||||
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
.build(),
|
||||
"input")
|
||||
.addLayer("max1",
|
||||
new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
|
||||
SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX)
|
||||
.kernelSize(2, 2).build(),
|
||||
"cnn1", "cnn2")
|
||||
.addLayer("dnn1", new DenseLayer.Builder().nOut(7).build(), "max1")
|
||||
.addLayer("max2", new SubsamplingLayer.Builder().build(), "max1")
|
||||
.addLayer("output", new OutputLayer.Builder().nIn(7).nOut(10).activation(Activation.SOFTMAX).build(), "dnn1",
|
||||
.addLayer("dnn1", DenseLayer.builder().nOut(7).build(), "max1")
|
||||
.addLayer("max2", SubsamplingLayer.builder().build(), "max1")
|
||||
.addLayer("output", OutputLayer.builder().nIn(7).nOut(10).activation(Activation.SOFTMAX).build(), "dnn1",
|
||||
"max2")
|
||||
.setOutputs("output")
|
||||
.inputPreProcessor("cnn1", new FeedForwardToCnnPreProcessor(32, 32, 3))
|
||||
|
@ -119,20 +119,20 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.graphBuilder().addInputs("input1", "input2")
|
||||
.addLayer("cnn1",
|
||||
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
.build(),
|
||||
"input1")
|
||||
.addLayer("cnn2",
|
||||
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
.build(),
|
||||
"input2")
|
||||
.addVertex("merge1", new MergeVertex(), "cnn1", "cnn2")
|
||||
.addVertex("subset1", new SubsetVertex(0, 1), "merge1")
|
||||
.addLayer("dense1", new DenseLayer.Builder().nIn(20).nOut(5).build(), "subset1")
|
||||
.addLayer("dense2", new DenseLayer.Builder().nIn(20).nOut(5).build(), "subset1")
|
||||
.addLayer("dense1", DenseLayer.builder().nIn(20).nOut(5).build(), "subset1")
|
||||
.addLayer("dense2", DenseLayer.builder().nIn(20).nOut(5).build(), "subset1")
|
||||
.addVertex("add", new ElementWiseVertex(ElementWiseVertex.Op.Add), "dense1",
|
||||
"dense2")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(1).nOut(1).activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "add")
|
||||
.addLayer("out", OutputLayer.builder().nIn(1).nOut(1).activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "add")
|
||||
.setOutputs("out").build();
|
||||
|
||||
String json = conf.toJson();
|
||||
|
@ -150,8 +150,8 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
//Test no inputs for a layer:
|
||||
try {
|
||||
NeuralNetConfiguration.builder().graphBuilder().addInputs("input1")
|
||||
.addLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "input1")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).build()).setOutputs("out")
|
||||
.addLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build(), "input1")
|
||||
.addLayer("out", OutputLayer.builder().nIn(2).nOut(2).build()).setOutputs("out")
|
||||
.build();
|
||||
fail("No exception thrown for invalid configuration");
|
||||
} catch (IllegalStateException e) {
|
||||
|
@ -162,8 +162,8 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
// Use appendLayer on first layer
|
||||
try {
|
||||
NeuralNetConfiguration.builder().graphBuilder()
|
||||
.appendLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).build()).setOutputs("out")
|
||||
.appendLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.addLayer("out", OutputLayer.builder().nIn(2).nOut(2).build()).setOutputs("out")
|
||||
.build();
|
||||
fail("No exception thrown for invalid configuration");
|
||||
} catch (IllegalStateException e) {
|
||||
|
@ -174,8 +174,8 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
//Test no network inputs
|
||||
try {
|
||||
NeuralNetConfiguration.builder().graphBuilder()
|
||||
.addLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "input1")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).build(), "dense1")
|
||||
.addLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build(), "input1")
|
||||
.addLayer("out", OutputLayer.builder().nIn(2).nOut(2).build(), "dense1")
|
||||
.setOutputs("out").build();
|
||||
fail("No exception thrown for invalid configuration");
|
||||
} catch (IllegalStateException e) {
|
||||
|
@ -186,8 +186,8 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
//Test no network outputs
|
||||
try {
|
||||
NeuralNetConfiguration.builder().graphBuilder().addInputs("input1")
|
||||
.addLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "input1")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).build(), "dense1").build();
|
||||
.addLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build(), "input1")
|
||||
.addLayer("out", OutputLayer.builder().nIn(2).nOut(2).build(), "dense1").build();
|
||||
fail("No exception thrown for invalid configuration");
|
||||
} catch (IllegalStateException e) {
|
||||
//OK - exception is good
|
||||
|
@ -197,8 +197,8 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
//Test: invalid input
|
||||
try {
|
||||
NeuralNetConfiguration.builder().graphBuilder().addInputs("input1")
|
||||
.addLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "input1")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).build(), "thisDoesntExist")
|
||||
.addLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build(), "input1")
|
||||
.addLayer("out", OutputLayer.builder().nIn(2).nOut(2).build(), "thisDoesntExist")
|
||||
.setOutputs("out").build();
|
||||
fail("No exception thrown for invalid configuration");
|
||||
} catch (IllegalStateException e) {
|
||||
|
@ -209,10 +209,10 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
//Test: graph with cycles
|
||||
try {
|
||||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("input1")
|
||||
.addLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "input1", "dense3")
|
||||
.addLayer("dense2", new DenseLayer.Builder().nIn(2).nOut(2).build(), "dense1")
|
||||
.addLayer("dense3", new DenseLayer.Builder().nIn(2).nOut(2).build(), "dense2")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).lossFunction(LossFunctions.LossFunction.MSE).build(), "dense1")
|
||||
.addLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build(), "input1", "dense3")
|
||||
.addLayer("dense2", DenseLayer.builder().nIn(2).nOut(2).build(), "dense1")
|
||||
.addLayer("dense3", DenseLayer.builder().nIn(2).nOut(2).build(), "dense2")
|
||||
.addLayer("out", OutputLayer.builder().nIn(2).nOut(2).lossFunction(LossFunctions.LossFunction.MSE).build(), "dense1")
|
||||
.setOutputs("out").build();
|
||||
//Cycle detection happens in ComputationGraph.init()
|
||||
ComputationGraph graph = new ComputationGraph(conf);
|
||||
|
@ -229,20 +229,20 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration.builder().graphBuilder().addInputs("input1", "input2")
|
||||
.setInputTypes(new InputType.InputTypeRecurrent(10, 12))
|
||||
.addLayer("cnn1",
|
||||
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
.build(),
|
||||
"input1")
|
||||
.addLayer("cnn2",
|
||||
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
.build(),
|
||||
"input2")
|
||||
.addVertex("merge1", new MergeVertex(), "cnn1", "cnn2")
|
||||
.addVertex("subset1", new SubsetVertex(0, 1), "merge1")
|
||||
.addLayer("dense1", new DenseLayer.Builder().nIn(20).nOut(5).build(), "subset1")
|
||||
.addLayer("dense2", new DenseLayer.Builder().nIn(20).nOut(5).build(), "subset1")
|
||||
.addLayer("dense1", DenseLayer.builder().nIn(20).nOut(5).build(), "subset1")
|
||||
.addLayer("dense2", DenseLayer.builder().nIn(20).nOut(5).build(), "subset1")
|
||||
.addVertex("add", new ElementWiseVertex(ElementWiseVertex.Op.Add), "dense1",
|
||||
"dense2")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(1).nOut(1).activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "add")
|
||||
.addLayer("out", OutputLayer.builder().nIn(1).nOut(1).activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "add")
|
||||
.setOutputs("out").build();
|
||||
fail("No exception thrown for invalid configuration");
|
||||
} catch (IllegalArgumentException e) {
|
||||
|
@ -283,9 +283,9 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testOutputOrderDoesntChangeWhenCloning() {
|
||||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
|
||||
.addLayer("out1", new OutputLayer.Builder().nIn(1).nOut(1).build(), "in")
|
||||
.addLayer("out2", new OutputLayer.Builder().nIn(1).nOut(1).build(), "in")
|
||||
.addLayer("out3", new OutputLayer.Builder().nIn(1).nOut(1).build(), "in")
|
||||
.addLayer("out1", OutputLayer.builder().nIn(1).nOut(1).build(), "in")
|
||||
.addLayer("out2", OutputLayer.builder().nIn(1).nOut(1).build(), "in")
|
||||
.addLayer("out3", OutputLayer.builder().nIn(1).nOut(1).build(), "in")
|
||||
.validateOutputLayerConfig(false)
|
||||
.setOutputs("out1", "out2", "out3").build();
|
||||
|
||||
|
@ -301,14 +301,14 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
public void testAllowDisconnectedLayers() {
|
||||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
|
||||
.addLayer("bidirectional",
|
||||
new Bidirectional(new LSTM.Builder().activation(Activation.TANH).nOut(10).build()),
|
||||
Bidirectional.builder(LSTM.builder().activation(Activation.TANH).nOut(10).build()).build(),
|
||||
"in")
|
||||
.addLayer("out", new RnnOutputLayer.Builder().nOut(6)
|
||||
.addLayer("out", RnnOutputLayer.builder().nOut(6)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build(), "bidirectional")
|
||||
.addLayer("disconnected_layer",
|
||||
new Bidirectional(new LSTM.Builder().activation(Activation.TANH).nOut(10).build()),
|
||||
Bidirectional.builder(LSTM.builder().activation(Activation.TANH).nOut(10).build()).build(),
|
||||
"in")
|
||||
.setOutputs("out")
|
||||
.setInputTypes(new InputType.InputTypeRecurrent(10, 12))
|
||||
|
@ -323,9 +323,9 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
public void testBidirectionalGraphSummary() {
|
||||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
|
||||
.addLayer("bidirectional",
|
||||
new Bidirectional(new LSTM.Builder().activation(Activation.TANH).nOut(10).build()),
|
||||
Bidirectional.builder(LSTM.builder().activation(Activation.TANH).nOut(10).build()).build(),
|
||||
"in")
|
||||
.addLayer("out", new RnnOutputLayer.Builder().nOut(6)
|
||||
.addLayer("out", RnnOutputLayer.builder().nOut(6)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build(), "bidirectional")
|
||||
|
@ -411,10 +411,10 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration.builder()
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.layer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
|
||||
.layer("0", DenseLayer.builder().nIn(10).nOut(10).build(), "in")
|
||||
.layer("1",
|
||||
!lossLayer ? new OutputLayer.Builder().nIn(10).nOut(nOut[i]).activation(activations[i]).lossFunction(lf[i]).build()
|
||||
: new LossLayer.Builder().activation(activations[i]).lossFunction(lf[i]).build(), "0")
|
||||
!lossLayer ? OutputLayer.builder().nIn(10).nOut(nOut[i]).activation(activations[i]).lossFunction(lf[i]).build()
|
||||
: LossLayer.builder().activation(activations[i]).lossFunction(lf[i].getILossFunction()).build(), "0")
|
||||
.setOutputs("1")
|
||||
.validateOutputLayerConfig(validate)
|
||||
.build();
|
||||
|
|
|
@ -99,8 +99,8 @@ public class JsonTest extends BaseDL4JTest {
|
|||
for (int i = 0; i < lossFunctions.length; i++) {
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345).updater(Updater.ADAM.getIUpdaterWithDefaultConfig())
|
||||
.layer(0, new DenseLayer.Builder().nIn(4).nOut(nOut[i]).activation(Activation.TANH).build())
|
||||
.layer(1, new LossLayer.Builder().lossFunction(lossFunctions[i])
|
||||
.layer(0, DenseLayer.builder().nIn(4).nOut(nOut[i]).activation(Activation.TANH).build())
|
||||
.layer(1, LossLayer.builder().lossFunction(lossFunctions[i])
|
||||
.activation(outputActivationFn[i]).build())
|
||||
.validateOutputLayerConfig(false).build();
|
||||
|
||||
|
|
|
@ -69,9 +69,9 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
|
||||
private static NeuralNetConfiguration getConf() {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345L)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2)
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2)
|
||||
.dist(new NormalDistribution(0, 1)).build())
|
||||
.layer(1, new OutputLayer.Builder().nIn(2).nOut(1)
|
||||
.layer(1, OutputLayer.builder().nIn(2).nOut(1)
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 1)).lossFunction(LossFunctions.LossFunction.MSE)
|
||||
.build())
|
||||
|
@ -82,7 +82,7 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testJson() throws Exception {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.layer(0, new DenseLayer.Builder().dist(new NormalDistribution(1, 1e-1)).build())
|
||||
.layer(0, DenseLayer.builder().dist(new NormalDistribution(1, 1e-1)).build())
|
||||
.inputPreProcessor(0, new CnnToFeedForwardPreProcessor()).build();
|
||||
|
||||
String json = conf.toJson();
|
||||
|
@ -123,17 +123,17 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
.l1(1e-1).l2(2e-4).weightNoise(new DropConnect(0.5)).miniBatch(true)
|
||||
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
|
||||
.layer(0,
|
||||
new ConvolutionLayer.Builder(5, 5).nOut(5).dropOut(0.5).weightInit(WeightInit.XAVIER)
|
||||
ConvolutionLayer.builder(5, 5).nOut(5).dropOut(0.5).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU).build())
|
||||
.layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[]{2, 2})
|
||||
.layer(1, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX, new int[]{2, 2})
|
||||
.build())
|
||||
.layer(2,
|
||||
new ConvolutionLayer.Builder(3, 3).nOut(10).dropOut(0.5).weightInit(WeightInit.XAVIER)
|
||||
ConvolutionLayer.builder(3, 3).nOut(10).dropOut(0.5).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU).build())
|
||||
.layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[]{2, 2})
|
||||
.layer(3, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX, new int[]{2, 2})
|
||||
.build())
|
||||
.layer(4, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build())
|
||||
.layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.layer(4, DenseLayer.builder().nOut(100).activation(Activation.RELU).build())
|
||||
.layer(5, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
||||
|
@ -157,15 +157,15 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration.NeuralNetConfigurationBuilder builder = NeuralNetConfiguration.builder().seed(seed)
|
||||
.l1(1e-1).l2(2e-4).dropOut(0.5).miniBatch(true)
|
||||
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
|
||||
.layer(new ConvolutionLayer.Builder(5, 5).nOut(5).dropOut(0.5).weightInit(WeightInit.XAVIER)
|
||||
.layer(ConvolutionLayer.builder(5, 5).nOut(5).dropOut(0.5).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU).build())
|
||||
.layer(new Upsampling2D.Builder().size(2).build())
|
||||
.layer(Upsampling2D.builder().size(2).build())
|
||||
.layer(2,
|
||||
new ConvolutionLayer.Builder(3, 3).nOut(10).dropOut(0.5).weightInit(WeightInit.XAVIER)
|
||||
ConvolutionLayer.builder(3, 3).nOut(10).dropOut(0.5).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU).build())
|
||||
.layer(new Upsampling2D.Builder().size(2).build())
|
||||
.layer(4, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build())
|
||||
.layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.layer(Upsampling2D.builder().size(2).build())
|
||||
.layer(4, DenseLayer.builder().nOut(100).activation(Activation.RELU).build())
|
||||
.layer(5, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
||||
|
@ -181,9 +181,9 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
public void testGlobalPoolingJson() {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new NoOp())
|
||||
.dist(new NormalDistribution(0, 1.0)).seed(12345L)
|
||||
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(5).build())
|
||||
.layer(1, new GlobalPoolingLayer.Builder().poolingType(PoolingType.PNORM).pnorm(3).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(0, ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(5).build())
|
||||
.layer(1, GlobalPoolingLayer.builder().poolingType(PoolingType.PNORM).pnorm(3).build())
|
||||
.layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(3).build())
|
||||
.inputType(InputType.convolutional(32, 32, 1)).build();
|
||||
|
||||
|
@ -196,7 +196,7 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testYaml() throws Exception {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.layer(0, new DenseLayer.Builder().dist(new NormalDistribution(1, 1e-1)).build())
|
||||
.layer(0, DenseLayer.builder().dist(new NormalDistribution(1, 1e-1)).build())
|
||||
.inputPreProcessor(0, new CnnToFeedForwardPreProcessor()).build();
|
||||
String json = conf.toYaml();
|
||||
NeuralNetConfiguration from = NeuralNetConfiguration.fromYaml(json);
|
||||
|
@ -226,8 +226,8 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testClone() {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.layer(0, new DenseLayer.Builder().build())
|
||||
.layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).build())
|
||||
.layer(0, DenseLayer.builder().build())
|
||||
.layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).build())
|
||||
.inputPreProcessor(1, new CnnToFeedForwardPreProcessor()).build();
|
||||
|
||||
NeuralNetConfiguration conf2 = conf.clone();
|
||||
|
@ -301,8 +301,8 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
|
||||
try {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
|
||||
.layer(1, new DenseLayer.Builder().nIn(3).nOut(4).build())
|
||||
.layer(2, new OutputLayer.Builder().nIn(4).nOut(5).build())
|
||||
.layer(1, DenseLayer.builder().nIn(3).nOut(4).build())
|
||||
.layer(2, OutputLayer.builder().nIn(4).nOut(5).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -317,8 +317,8 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
|
||||
try {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
|
||||
.layer(0, new DenseLayer.Builder().nIn(3).nOut(4).build())
|
||||
.layer(2, new OutputLayer.Builder().nIn(4).nOut(5).build())
|
||||
.layer(0, DenseLayer.builder().nIn(3).nOut(4).build())
|
||||
.layer(2, OutputLayer.builder().nIn(4).nOut(5).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -336,8 +336,8 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
public void testListOverloads() {
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
|
||||
.layer(0, new DenseLayer.Builder().nIn(3).nOut(4).build())
|
||||
.layer(1, new OutputLayer.Builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.layer(0, DenseLayer.builder().nIn(3).nOut(4).build())
|
||||
.layer(1, OutputLayer.builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -350,16 +350,16 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
assertEquals(5, ol.getNOut());
|
||||
|
||||
NeuralNetConfiguration conf2 = NeuralNetConfiguration.builder().seed(12345)
|
||||
.layer(0, new DenseLayer.Builder().nIn(3).nOut(4).build())
|
||||
.layer(1, new OutputLayer.Builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.layer(0, DenseLayer.builder().nIn(3).nOut(4).build())
|
||||
.layer(1, OutputLayer.builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
|
||||
net2.init();
|
||||
|
||||
NeuralNetConfiguration conf3 = NeuralNetConfiguration.builder().seed(12345)
|
||||
.layer(new DenseLayer.Builder().nIn(3).nOut(4).build())
|
||||
.layer(DenseLayer.builder().nIn(3).nOut(4).build())
|
||||
.layer(
|
||||
new OutputLayer.Builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
OutputLayer.builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
MultiLayerNetwork net3 = new MultiLayerNetwork(conf3);
|
||||
net3.init();
|
||||
|
@ -375,14 +375,16 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
|
||||
.updater(new Adam(1e-2))
|
||||
.biasUpdater(new Adam(0.5))
|
||||
.layer(0, new ConvolutionLayer.Builder(5, 5).nOut(5).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, ConvolutionLayer.builder(5, 5).nOut(5).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU).build())
|
||||
.layer(1, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build())
|
||||
.layer(2, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build())
|
||||
.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(10)
|
||||
.layer(1, DenseLayer.builder().nOut(100).activation(Activation.RELU).build())
|
||||
.layer(2, DenseLayer.builder().nOut(100).activation(Activation.RELU).build())
|
||||
.layer(3, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(10)
|
||||
.weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutional(28, 28, 1)).build();
|
||||
|
||||
conf.init();
|
||||
|
||||
BaseLayerConfiguration l0 = (BaseLayerConfiguration) conf.getConf(0).getLayer();
|
||||
BaseLayerConfiguration l1 = (BaseLayerConfiguration) conf.getConf(1).getLayer();
|
||||
BaseLayerConfiguration l2 = (BaseLayerConfiguration) conf.getConf(2).getLayer();
|
||||
|
@ -432,10 +434,10 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
try {
|
||||
NeuralNetConfiguration.builder()
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10).build())
|
||||
.layer(!lossLayer ? new OutputLayer.Builder().nIn(10).nOut(nOut[i])
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10).build())
|
||||
.layer(!lossLayer ? OutputLayer.builder().nIn(10).nOut(nOut[i])
|
||||
.activation(activations[i]).lossFunction(lf[i]).build()
|
||||
: new LossLayer.Builder().activation(activations[i]).lossFunction(lf[i])
|
||||
: LossLayer.builder().lossFunction().activation(activations[i]).lossFunction(lf[i])
|
||||
.build())
|
||||
.validateOutputLayerConfig(validate)
|
||||
.build();
|
||||
|
|
|
@ -67,9 +67,9 @@ public class MultiNeuralNetConfLayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration multiConf1 =
|
||||
NeuralNetConfiguration.builder()
|
||||
.layer(0, new DenseLayer.Builder().nIn(newNumIn).nOut(newNumOut).activation(act)
|
||||
.layer(0, DenseLayer.builder().nIn(newNumIn).nOut(newNumOut).activation(act)
|
||||
.build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(newNumIn + 1).nOut(newNumOut + 1)
|
||||
.layer(1, DenseLayer.builder().nIn(newNumIn + 1).nOut(newNumOut + 1)
|
||||
.activation(act).build())
|
||||
.build();
|
||||
NeuralNetConfiguration firstLayer = multiConf1.getConf(0).getNetConfiguration();
|
||||
|
|
|
@ -113,7 +113,7 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testRNG() {
|
||||
DenseLayer layer = new DenseLayer.Builder().nIn(trainingSet.numInputs()).nOut(trainingSet.numOutcomes())
|
||||
DenseLayer layer = DenseLayer.builder().nIn(trainingSet.numInputs()).nOut(trainingSet.numOutcomes())
|
||||
.weightInit(WeightInit.UNIFORM).activation(Activation.TANH).build();
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
|
||||
|
@ -125,7 +125,7 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
INDArray modelWeights = model.getParam(DefaultParamInitializer.WEIGHT_KEY);
|
||||
|
||||
|
||||
DenseLayer layer2 = new DenseLayer.Builder().nIn(trainingSet.numInputs()).nOut(trainingSet.numOutcomes())
|
||||
DenseLayer layer2 = DenseLayer.builder().nIn(trainingSet.numInputs()).nOut(trainingSet.numOutcomes())
|
||||
.weightInit(WeightInit.UNIFORM).activation(Activation.TANH).build();
|
||||
NeuralNetConfiguration conf2 = NeuralNetConfiguration.builder().seed(123)
|
||||
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).layer(layer2).build();
|
||||
|
@ -197,7 +197,7 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
|
||||
|
||||
private static NeuralNetConfiguration getConfig(int nIn, int nOut, IWeightInit weightInit, boolean pretrain) {
|
||||
DenseLayer layer = new DenseLayer.Builder().nIn(nIn).nOut(nOut).weightInit(weightInit)
|
||||
DenseLayer layer = DenseLayer.builder().nIn(nIn).nOut(nOut).weightInit(weightInit)
|
||||
.activation(Activation.TANH).build();
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
|
@ -226,10 +226,10 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
INDArray gradientW = Nd4j.ones(nIns[0], nOuts[0]);
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.3))
|
||||
.layer(0, new DenseLayer.Builder().nIn(nIns[0]).nOut(nOuts[0])
|
||||
.layer(0, DenseLayer.builder().nIn(nIns[0]).nOut(nOuts[0])
|
||||
.updater(new Sgd(lr)).biasUpdater(new Sgd(biasLr)).build())
|
||||
.layer(1, new BatchNormalization.Builder().nIn(nIns[1]).nOut(nOuts[1]).updater(new Sgd(0.7)).build())
|
||||
.layer(2, new OutputLayer.Builder().nIn(nIns[2]).nOut(nOuts[2]).lossFunction(LossFunctions.LossFunction.MSE).build())
|
||||
.layer(1,BatchNormalization.builder().nIn(nIns[1]).nOut(nOuts[1]).updater(new Sgd(0.7)).build())
|
||||
.layer(2, OutputLayer.builder().nIn(nIns[2]).nOut(nOuts[2]).lossFunction(LossFunctions.LossFunction.MSE).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -287,9 +287,9 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().l1(l1)
|
||||
.l2(l2)
|
||||
.layer(0, new DenseLayer.Builder().nIn(nIns[0]).nOut(nOuts[0]).build())
|
||||
.layer(1, new BatchNormalization.Builder().nIn(nIns[1]).nOut(nOuts[1]).l2(0.5).build())
|
||||
.layer(2, new OutputLayer.Builder().nIn(nIns[2]).nOut(nOuts[2]).lossFunction(LossFunctions.LossFunction.MSE).build())
|
||||
.layer(0, DenseLayer.builder().nIn(nIns[0]).nOut(nOuts[0]).build())
|
||||
.layer(1,BatchNormalization.builder().nIn(nIns[1]).nOut(nOuts[1]).l2(0.5).build())
|
||||
.layer(2, OutputLayer.builder().nIn(nIns[2]).nOut(nOuts[2]).lossFunction(LossFunctions.LossFunction.MSE).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -318,7 +318,7 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
public void testLayerPretrainConfig() {
|
||||
boolean pretrain = true;
|
||||
|
||||
VariationalAutoencoder layer = new VariationalAutoencoder.Builder()
|
||||
VariationalAutoencoder layer = VariationalAutoencoder.builder()
|
||||
.nIn(10).nOut(5).updater(new Sgd(1e-1))
|
||||
.lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE).build();
|
||||
|
||||
|
|
|
@ -71,9 +71,9 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.updater(new Sgd(0.0))
|
||||
.dist(new NormalDistribution(0, 5))
|
||||
|
||||
.layer(new LSTM.Builder().nIn(12).nOut(10)
|
||||
.layer(LSTM.builder().nIn(12).nOut(10)
|
||||
.constrainRecurrent(lc).build())
|
||||
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -124,9 +124,9 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 5))
|
||||
.biasInit(10.0)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(12).nOut(10)
|
||||
.layer(DenseLayer.builder().nIn(12).nOut(10)
|
||||
.constrainBias(lc).build())
|
||||
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -176,9 +176,9 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.updater(new Sgd(0.0))
|
||||
.dist(new NormalDistribution(0, 5))
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(12).nOut(10)
|
||||
.layer(DenseLayer.builder().nIn(12).nOut(10)
|
||||
.constrainWeights(lc).build())
|
||||
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -229,9 +229,9 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 5))
|
||||
.biasInit(0.2)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(12).nOut(10)
|
||||
.layer(DenseLayer.builder().nIn(12).nOut(10)
|
||||
.constrainAllParameters(lc).build())
|
||||
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -290,9 +290,9 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 5))
|
||||
.biasInit(0.2)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(12).nOut(10)
|
||||
.layer(DenseLayer.builder().nIn(12).nOut(10)
|
||||
.constrainWeights(lc).constrainBias(lc).build())
|
||||
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -351,8 +351,8 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0,5))
|
||||
.biasInit(1)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(12).nOut(10).build())
|
||||
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.layer(DenseLayer.builder().nIn(12).nOut(10).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -406,7 +406,7 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.graphBuilder()
|
||||
.addInputs("input_lstm", "input_cpc")
|
||||
.addLayer("first_lstm_layer",
|
||||
new LSTM.Builder()
|
||||
LSTM.builder()
|
||||
.nIn(nIn)
|
||||
.nOut(lstmLayerSize)
|
||||
.activation(Activation.RELU)
|
||||
|
@ -417,7 +417,7 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.addVertex("merge", new MergeVertex(),
|
||||
"lastTimeStep", "input_cpc")
|
||||
.addLayer("dense",
|
||||
new DenseLayer.Builder()
|
||||
DenseLayer.builder()
|
||||
.constrainWeights(new NonNegativeConstraint())
|
||||
.nIn(lstmLayerSize + 1)
|
||||
.nOut(lstmLayerSize/2)
|
||||
|
@ -425,7 +425,7 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.build(),
|
||||
"merge")
|
||||
.addLayer("second_dense",
|
||||
new DenseLayer.Builder()
|
||||
DenseLayer.builder()
|
||||
.constrainWeights(new NonNegativeConstraint())
|
||||
.nIn(lstmLayerSize/2)
|
||||
.nOut(lstmLayerSize/8)
|
||||
|
@ -433,7 +433,7 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.build(),
|
||||
"dense")
|
||||
.addLayer("output_layer",
|
||||
new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
|
||||
.constrainWeights(new NonNegativeConstraint())
|
||||
.nIn(lstmLayerSize/8)
|
||||
.nOut(1)
|
||||
|
|
|
@ -62,29 +62,29 @@ public class TestDropout extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.dropOut(0.6)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10).build())
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10).dropOut(0.7).build())
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10).dropOut(new AlphaDropout(0.5)).build())
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10).build())
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10).dropOut(0.7).build())
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10).dropOut(new AlphaDropout(0.5)).build())
|
||||
.build();
|
||||
|
||||
assertEquals(new Dropout(0.6), conf.getFlattenedLayerConfigurations().get(0).getIDropout());
|
||||
assertEquals(new Dropout(0.7), conf.getFlattenedLayerConfigurations().get(1).getIDropout());
|
||||
assertEquals(new AlphaDropout(0.5), conf.getFlattenedLayerConfigurations().get(2).getIDropout());
|
||||
assertEquals(new Dropout(0.6), conf.getFlattenedLayerConfigurations().get(0).getDropOut());
|
||||
assertEquals(new Dropout(0.7), conf.getFlattenedLayerConfigurations().get(1).getDropOut());
|
||||
assertEquals(new AlphaDropout(0.5), conf.getFlattenedLayerConfigurations().get(2).getDropOut());
|
||||
|
||||
|
||||
ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder()
|
||||
.dropOut( new Dropout(0.6))
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
|
||||
.addLayer("1", new DenseLayer.Builder().nIn(10).nOut(10).dropOut(0.7).build(), "0")
|
||||
.addLayer("2", new DenseLayer.Builder().nIn(10).nOut(10).dropOut(new AlphaDropout(0.5)).build(), "1")
|
||||
.addLayer("0", DenseLayer.builder().nIn(10).nOut(10).build(), "in")
|
||||
.addLayer("1", DenseLayer.builder().nIn(10).nOut(10).dropOut(0.7).build(), "0")
|
||||
.addLayer("2", DenseLayer.builder().nIn(10).nOut(10).dropOut(new AlphaDropout(0.5)).build(), "1")
|
||||
.setOutputs("2")
|
||||
.build();
|
||||
|
||||
assertEquals(new Dropout(0.6), ((LayerVertex)conf2.getVertices().get("0")).getLayerConfiguration().getIDropout());
|
||||
assertEquals(new Dropout(0.7), ((LayerVertex)conf2.getVertices().get("1")).getLayerConfiguration().getIDropout());
|
||||
assertEquals(new AlphaDropout(0.5), ((LayerVertex)conf2.getVertices().get("2")).getLayerConfiguration().getIDropout());
|
||||
assertEquals(new Dropout(0.6), ((LayerVertex)conf2.getVertices().get("0")).getLayerConfiguration().getDropOut());
|
||||
assertEquals(new Dropout(0.7), ((LayerVertex)conf2.getVertices().get("1")).getLayerConfiguration().getDropOut());
|
||||
assertEquals(new AlphaDropout(0.5), ((LayerVertex)conf2.getVertices().get("2")).getLayerConfiguration().getDropOut());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -95,8 +95,8 @@ public class TestDropout extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(4).nOut(3).dropOut(d1).build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MSE).dropOut(d2).nIn(3).nOut(3).build())
|
||||
.layer(DenseLayer.builder().nIn(4).nOut(3).dropOut(d1).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).dropOut(d2).nIn(3).nOut(3).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -131,8 +131,8 @@ public class TestDropout extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder()
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new DenseLayer.Builder().nIn(4).nOut(3).dropOut(d1).build(), "in")
|
||||
.addLayer("1", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).dropOut(d2).nIn(3).nOut(3).build(), "0")
|
||||
.addLayer("0", DenseLayer.builder().nIn(4).nOut(3).dropOut(d1).build(), "in")
|
||||
.addLayer("1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).dropOut(d2).nIn(3).nOut(3).build(), "0")
|
||||
.setOutputs("1")
|
||||
.build();
|
||||
|
||||
|
@ -188,8 +188,8 @@ public class TestDropout extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.dropOut(id)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(4).nOut(3).build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(3).nOut(3).build())
|
||||
.layer(DenseLayer.builder().nIn(4).nOut(3).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(3).nOut(3).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -200,8 +200,8 @@ public class TestDropout extends BaseDL4JTest {
|
|||
.dropOut(id)
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new DenseLayer.Builder().nIn(4).nOut(3).build(), "in")
|
||||
.addLayer("1", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(3).nOut(3).build(), "0")
|
||||
.addLayer("0", DenseLayer.builder().nIn(4).nOut(3).build(), "in")
|
||||
.addLayer("1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(3).nOut(3).build(), "0")
|
||||
.setOutputs("1")
|
||||
.build();
|
||||
|
||||
|
@ -602,7 +602,7 @@ public class TestDropout extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
|
||||
.layer(new DropoutLayer.Builder(new SpatialDropout(0.5)).build())
|
||||
.layer(DropoutLayer.builder(new SpatialDropout(0.5)).build())
|
||||
.build();
|
||||
|
||||
String asJson = conf.toJson();
|
||||
|
|
|
@ -73,7 +73,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder().graphBuilder()
|
||||
.addInputs("input1", "input2", "input3")
|
||||
.addLayer("denselayer",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
|
||||
DenseLayer.builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
|
||||
.build(),
|
||||
"input1")
|
||||
/* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get
|
||||
|
@ -87,7 +87,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
*/
|
||||
.addVertex("elementwiseAdd", new ElementWiseVertex(ElementWiseVertex.Op.Add), "input1",
|
||||
"input2", "input3")
|
||||
.addLayer("Add", new ActivationLayer.Builder().activation(Activation.IDENTITY).build(),
|
||||
.addLayer("Add", ActivationLayer.builder().activation(Activation.IDENTITY).build(),
|
||||
"elementwiseAdd")
|
||||
.setOutputs("Add", "denselayer").build();
|
||||
|
||||
|
@ -114,7 +114,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder().graphBuilder()
|
||||
.addInputs("input1", "input2", "input3")
|
||||
.addLayer("denselayer",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
|
||||
DenseLayer.builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
|
||||
.build(),
|
||||
"input1")
|
||||
/* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get
|
||||
|
@ -128,7 +128,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
*/
|
||||
.addVertex("elementwiseProduct", new ElementWiseVertex(ElementWiseVertex.Op.Product), "input1",
|
||||
"input2", "input3")
|
||||
.addLayer("Product", new ActivationLayer.Builder().activation(Activation.IDENTITY).build(),
|
||||
.addLayer("Product", ActivationLayer.builder().activation(Activation.IDENTITY).build(),
|
||||
"elementwiseProduct")
|
||||
.setOutputs("Product", "denselayer").build();
|
||||
|
||||
|
@ -155,7 +155,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder().graphBuilder()
|
||||
.addInputs("input1", "input2")
|
||||
.addLayer("denselayer",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
|
||||
DenseLayer.builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
|
||||
.build(),
|
||||
"input1")
|
||||
/* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get
|
||||
|
@ -169,7 +169,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
*/
|
||||
.addVertex("elementwiseSubtract", new ElementWiseVertex(ElementWiseVertex.Op.Subtract),
|
||||
"input1", "input2")
|
||||
.addLayer("Subtract", new ActivationLayer.Builder().activation(Activation.IDENTITY).build(),
|
||||
.addLayer("Subtract", ActivationLayer.builder().activation(Activation.IDENTITY).build(),
|
||||
"elementwiseSubtract")
|
||||
.setOutputs("Subtract", "denselayer").build();
|
||||
|
||||
|
@ -200,21 +200,21 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
|
||||
.addInputs("input1", "input2", "input3")
|
||||
.addLayer("dense1",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(midsz)
|
||||
DenseLayer.builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(new ActivationTanH()).build(),
|
||||
"input1")
|
||||
.addLayer("dense2",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(midsz)
|
||||
DenseLayer.builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(new ActivationTanH()).build(),
|
||||
"input2")
|
||||
.addLayer("dense3",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(midsz)
|
||||
DenseLayer.builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(new ActivationTanH()).build(),
|
||||
"input3")
|
||||
.addVertex("elementwiseAdd", new ElementWiseVertex(ElementWiseVertex.Op.Add), "dense1",
|
||||
"dense2", "dense3")
|
||||
.addLayer("output",
|
||||
new OutputLayer.Builder().nIn(midsz).nOut(outputsz)
|
||||
OutputLayer.builder().nIn(midsz).nOut(outputsz)
|
||||
.activation(new ActivationSigmoid())
|
||||
.lossFunction(LossFunction.MSE).build(),
|
||||
"elementwiseAdd")
|
||||
|
@ -376,21 +376,21 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
|
||||
.addInputs("input1", "input2", "input3")
|
||||
.addLayer("dense1",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(midsz)
|
||||
DenseLayer.builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(new ActivationTanH()).build(),
|
||||
"input1")
|
||||
.addLayer("dense2",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(midsz)
|
||||
DenseLayer.builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(new ActivationTanH()).build(),
|
||||
"input2")
|
||||
.addLayer("dense3",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(midsz)
|
||||
DenseLayer.builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(new ActivationTanH()).build(),
|
||||
"input3")
|
||||
.addVertex("elementwiseProduct", new ElementWiseVertex(ElementWiseVertex.Op.Product), "dense1",
|
||||
"dense2", "dense3")
|
||||
.addLayer("output",
|
||||
new OutputLayer.Builder().nIn(midsz).nOut(outputsz)
|
||||
OutputLayer.builder().nIn(midsz).nOut(outputsz)
|
||||
.activation(new ActivationSigmoid())
|
||||
.lossFunction(LossFunction.MSE).build(),
|
||||
"elementwiseProduct")
|
||||
|
@ -551,17 +551,17 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
|
||||
.addInputs("input1", "input2")
|
||||
.addLayer("dense1",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(midsz)
|
||||
DenseLayer.builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(new ActivationTanH()).build(),
|
||||
"input1")
|
||||
.addLayer("dense2",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(midsz)
|
||||
DenseLayer.builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(new ActivationTanH()).build(),
|
||||
"input2")
|
||||
.addVertex("elementwiseSubtract", new ElementWiseVertex(ElementWiseVertex.Op.Subtract),
|
||||
"dense1", "dense2")
|
||||
.addLayer("output",
|
||||
new OutputLayer.Builder().nIn(midsz).nOut(outputsz)
|
||||
OutputLayer.builder().nIn(midsz).nOut(outputsz)
|
||||
.activation(new ActivationSigmoid())
|
||||
.lossFunction(LossFunction.MSE).build(),
|
||||
"elementwiseSubtract")
|
||||
|
|
|
@ -86,7 +86,7 @@ public class ShiftVertexTest extends BaseDL4JTest {
|
|||
double sf = 4.1;
|
||||
ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder().graphBuilder().addInputs("input")
|
||||
.addLayer("denselayer",
|
||||
new DenseLayer.Builder().nIn(input.columns()).nOut(1)
|
||||
DenseLayer.builder().nIn(input.columns()).nOut(1)
|
||||
.activation(Activation.IDENTITY).build(),
|
||||
"input")
|
||||
/* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get
|
||||
|
@ -99,10 +99,10 @@ public class ShiftVertexTest extends BaseDL4JTest {
|
|||
* at org.deeplearning4j.nn.graph.ComputationGraph.init(ComputationGraph.java:341)
|
||||
*/
|
||||
.addLayer("identityinputactivation",
|
||||
new ActivationLayer.Builder().activation(Activation.IDENTITY).build(), "input")
|
||||
ActivationLayer.builder().activation(Activation.IDENTITY).build(), "input")
|
||||
.addVertex("shiftvertex", new ShiftVertex(sf), "identityinputactivation")
|
||||
.addLayer("identityshiftvertex",
|
||||
new ActivationLayer.Builder().activation(Activation.IDENTITY).build(),
|
||||
ActivationLayer.builder().activation(Activation.IDENTITY).build(),
|
||||
"shiftvertex")
|
||||
.setOutputs("identityshiftvertex", "denselayer").build();
|
||||
|
||||
|
@ -144,12 +144,12 @@ public class ShiftVertexTest extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
|
||||
.addInputs("input")
|
||||
.addLayer("denselayer",
|
||||
new DenseLayer.Builder().nIn(input.columns()).nOut(input.columns())
|
||||
DenseLayer.builder().nIn(input.columns()).nOut(input.columns())
|
||||
.activation(a1).build(),
|
||||
"input")
|
||||
.addVertex("shiftvertex", new ShiftVertex(sf), "denselayer")
|
||||
.addLayer("output",
|
||||
new OutputLayer.Builder().nIn(input.columns()).nOut(target.columns())
|
||||
OutputLayer.builder().nIn(input.columns()).nOut(target.columns())
|
||||
.activation(a2).lossFunction(LossFunction.MSE).build(),
|
||||
"shiftvertex")
|
||||
.setOutputs("output").build();
|
||||
|
|
|
@ -67,7 +67,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testLayer() throws Exception {
|
||||
DenseLayer layer = new DenseLayer.Builder().activation(act).weightInit(weight).dropOut(dropOut)
|
||||
DenseLayer layer = DenseLayer.builder().activation(act).weightInit(weight).dropOut(dropOut)
|
||||
.updater(updater).gradientNormalization(gradNorm)
|
||||
.gradientNormalizationThreshold(gradNormThreshold).build();
|
||||
|
||||
|
@ -75,7 +75,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
assertEquals(act, layer.getActivationFn());
|
||||
assertEquals(weight.getWeightInitFunction(), layer.getWeightInit());
|
||||
assertEquals(new Dropout(dropOut), layer.getIDropout());
|
||||
assertEquals(new Dropout(dropOut), layer.getDropOut());
|
||||
assertEquals(updater, layer.getIUpdater());
|
||||
assertEquals(gradNorm, layer.getGradientNormalization());
|
||||
assertEquals(gradNormThreshold, layer.getGradientNormalizationThreshold(), 0.0);
|
||||
|
@ -83,7 +83,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testFeedForwardLayer() throws Exception {
|
||||
DenseLayer ff = new DenseLayer.Builder().nIn(numIn).nOut(numOut).build();
|
||||
DenseLayer ff = DenseLayer.builder().nIn(numIn).nOut(numOut).build();
|
||||
|
||||
checkSerialization(ff);
|
||||
|
||||
|
@ -93,7 +93,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testConvolutionLayer() throws Exception {
|
||||
ConvolutionLayer conv = new ConvolutionLayer.Builder(kernelSize, stride, padding).build();
|
||||
ConvolutionLayer conv = ConvolutionLayer.builder(kernelSize, stride, padding).build();
|
||||
|
||||
checkSerialization(conv);
|
||||
|
||||
|
@ -106,7 +106,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testSubsamplingLayer() throws Exception {
|
||||
SubsamplingLayer sample =
|
||||
new SubsamplingLayer.Builder(poolType, stride).kernelSize(kernelSize).padding(padding).build();
|
||||
SubsamplingLayer.builder(poolType, stride).kernelSize(kernelSize).padding(padding).build();
|
||||
|
||||
checkSerialization(sample);
|
||||
|
||||
|
@ -118,21 +118,21 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testOutputLayer() throws Exception {
|
||||
OutputLayer out = new OutputLayer.Builder(loss).build();
|
||||
OutputLayer out = OutputLayer.builder(loss).build();
|
||||
|
||||
checkSerialization(out);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRnnOutputLayer() throws Exception {
|
||||
RnnOutputLayer out = new RnnOutputLayer.Builder(loss).build();
|
||||
RnnOutputLayer out = RnnOutputLayer.builder(loss).build();
|
||||
|
||||
checkSerialization(out);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAutoEncoder() throws Exception {
|
||||
AutoEncoder enc = new AutoEncoder.Builder().corruptionLevel(corruptionLevel).sparsity(sparsity).build();
|
||||
AutoEncoder enc = AutoEncoder.builder().corruptionLevel(corruptionLevel).sparsity(sparsity).build();
|
||||
|
||||
checkSerialization(enc);
|
||||
|
||||
|
@ -142,7 +142,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testGravesLSTM() throws Exception {
|
||||
GravesLSTM glstm = new GravesLSTM.Builder().forgetGateBiasInit(1.5).activation(Activation.TANH).nIn(numIn)
|
||||
GravesLSTM glstm = GravesLSTM.builder().forgetGateBiasInit(1.5).activation(Activation.TANH).nIn(numIn)
|
||||
.nOut(numOut).build();
|
||||
|
||||
checkSerialization(glstm);
|
||||
|
@ -155,7 +155,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testGravesBidirectionalLSTM() throws Exception {
|
||||
final GravesBidirectionalLSTM glstm = new GravesBidirectionalLSTM.Builder().forgetGateBiasInit(1.5)
|
||||
final GravesBidirectionalLSTM glstm = GravesBidirectionalLSTM.builder().forgetGateBiasInit(1.5)
|
||||
.activation(Activation.TANH).nIn(numIn).nOut(numOut).build();
|
||||
|
||||
checkSerialization(glstm);
|
||||
|
@ -168,7 +168,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testEmbeddingLayer() throws Exception {
|
||||
EmbeddingLayer el = new EmbeddingLayer.Builder().nIn(10).nOut(5).build();
|
||||
EmbeddingLayer el = EmbeddingLayer.builder().nIn(10).nOut(5).build();
|
||||
checkSerialization(el);
|
||||
|
||||
assertEquals(10, el.getNIn());
|
||||
|
@ -177,7 +177,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testBatchNormLayer() throws Exception {
|
||||
BatchNormalization bN = new BatchNormalization.Builder().nIn(numIn).nOut(numOut).gamma(2).beta(1).decay(0.5)
|
||||
BatchNormalization bN =BatchNormalization.builder().nIn(numIn).nOut(numOut).gamma(2).beta(1).decay(0.5)
|
||||
.lockGammaBeta(true).build();
|
||||
|
||||
checkSerialization(bN);
|
||||
|
@ -192,11 +192,11 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testActivationLayer() throws Exception {
|
||||
ActivationLayer activationLayer = new ActivationLayer.Builder().activation(act).build();
|
||||
ActivationLayer activationLayer = ActivationLayer.builder().activation(act).build();
|
||||
|
||||
checkSerialization(activationLayer);
|
||||
|
||||
assertEquals(act, activationLayer.activationFn);
|
||||
assertEquals(act, activationLayer.getActivation());
|
||||
}
|
||||
|
||||
private void checkSerialization(LayerConfiguration layer) throws Exception {
|
||||
|
@ -225,7 +225,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
assertEquals(confExpected.getFlattenedLayerConfigurations().get(0), confActual.getFlattenedLayerConfigurations().get(0), "unequal YAML serialization");
|
||||
|
||||
// check the layer's use of callSuper on equals method
|
||||
confActual.getFlattenedLayerConfigurations().get(0).setIDropout(new Dropout(new java.util.Random().nextDouble()));
|
||||
confActual.getFlattenedLayerConfigurations().get(0).setDropOut(new Dropout(new java.util.Random().nextDouble()));
|
||||
assertNotEquals( confExpected, confActual, "broken equals method (missing callSuper?)");
|
||||
}
|
||||
|
||||
|
|
|
@ -53,13 +53,13 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
String name2 = "bill";
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).name(name1).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).name(name2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).name(name1).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).name(name2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
assertEquals(name1, conf.getConf(0).getLayer().getLayerName());
|
||||
assertEquals(name2, conf.getConf(1).getLayer().getLayerName());
|
||||
assertEquals(name1, conf.getConf(0).getLayer().getName());
|
||||
assertEquals(name2, conf.getConf(1).getLayer().getName());
|
||||
|
||||
}
|
||||
|
||||
|
@ -67,8 +67,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
public void testActivationLayerwiseOverride() {
|
||||
//Without layerwise override:
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().activation(Activation.RELU)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
|
@ -77,8 +77,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
|
||||
//With
|
||||
conf = NeuralNetConfiguration.builder().activation(Activation.RELU)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).activation(Activation.TANH).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).activation(Activation.TANH).build()).build();
|
||||
|
||||
net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -94,8 +94,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
final Distribution defaultDistribution = new NormalDistribution(0, 1.0);
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.dist(defaultDistribution).biasInit(1)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
|
@ -109,8 +109,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
final Distribution overriddenDistribution = new UniformDistribution(0, 1);
|
||||
conf = NeuralNetConfiguration.builder()
|
||||
.dist(defaultDistribution).biasInit(1)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()).layer(1,
|
||||
new DenseLayer.Builder().nIn(2).nOut(2)
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build()).layer(1,
|
||||
DenseLayer.builder().nIn(2).nOut(2)
|
||||
.dist(overriddenDistribution).biasInit(0).build())
|
||||
.build();
|
||||
|
||||
|
@ -181,23 +181,23 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testDropoutLayerwiseOverride() {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().dropOut(1.0)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
assertEquals(new Dropout(1.0), conf.getConf(0).getLayer().getIDropout());
|
||||
assertEquals(new Dropout(1.0), conf.getConf(1).getLayer().getIDropout());
|
||||
assertEquals(new Dropout(1.0), conf.getConf(0).getLayer().getDropOut());
|
||||
assertEquals(new Dropout(1.0), conf.getConf(1).getLayer().getDropOut());
|
||||
|
||||
conf = NeuralNetConfiguration.builder().dropOut(1.0)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).dropOut(2.0).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).dropOut(2.0).build()).build();
|
||||
|
||||
net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
assertEquals(new Dropout(1.0), conf.getConf(0).getLayer().getIDropout());
|
||||
assertEquals(new Dropout(2.0), conf.getConf(1).getLayer().getIDropout());
|
||||
assertEquals(new Dropout(1.0), conf.getConf(0).getLayer().getDropOut());
|
||||
assertEquals(new Dropout(2.0), conf.getConf(1).getLayer().getDropOut());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -208,8 +208,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.updater(new Nesterovs(1.0, new MapSchedule(ScheduleType.ITERATION, testMomentumAfter)))
|
||||
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
|
@ -221,7 +221,7 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
|
||||
conf = NeuralNetConfiguration.builder().updater(new Nesterovs(1.0, new MapSchedule(ScheduleType.ITERATION, testMomentumAfter) ))
|
||||
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()).layer(1, new DenseLayer.Builder()
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build()).layer(1, DenseLayer.builder()
|
||||
.nIn(2).nOut(2).updater(new Nesterovs(1.0, new MapSchedule(ScheduleType.ITERATION, testMomentumAfter2))).build())
|
||||
.build();
|
||||
|
||||
|
@ -234,8 +234,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testUpdaterRhoRmsDecayLayerwiseOverride() {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new AdaDelta(0.5, 0.9))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).updater(new AdaDelta(0.01,0.9)).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).updater(new AdaDelta(0.01,0.9)).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
|
@ -245,8 +245,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
assertEquals(0.01, ((AdaDelta)((BaseLayerConfiguration) conf.getConf(1).getLayer()).getIUpdater()).getRho(), 0.0);
|
||||
|
||||
conf = NeuralNetConfiguration.builder().updater(new RmsProp(1.0, 2.0, RmsProp.DEFAULT_RMSPROP_EPSILON))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).updater(new RmsProp(1.0, 1.0, RmsProp.DEFAULT_RMSPROP_EPSILON)).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).updater(new AdaDelta(0.5,AdaDelta.DEFAULT_ADADELTA_EPSILON)).build())
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).updater(new RmsProp(1.0, 1.0, RmsProp.DEFAULT_RMSPROP_EPSILON)).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).updater(new AdaDelta(0.5,AdaDelta.DEFAULT_ADADELTA_EPSILON)).build())
|
||||
.build();
|
||||
|
||||
net = new MultiLayerNetwork(conf);
|
||||
|
@ -264,8 +264,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.updater(new Adam(1.0, 0.5, 0.5, 1e-8))
|
||||
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).updater(new Adam(1.0, 0.6, 0.7, 1e-8)).build())
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).updater(new Adam(1.0, 0.6, 0.7, 1e-8)).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@ -283,8 +283,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
|
||||
.gradientNormalizationThreshold(10)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
BaseLayerConfiguration bconf = (BaseLayerConfiguration) conf.getConf(0).getLayer();
|
||||
|
@ -297,8 +297,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
conf = NeuralNetConfiguration.builder()
|
||||
.gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
|
||||
.gradientNormalizationThreshold(10)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2)
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2)
|
||||
.gradientNormalization(GradientNormalization.None)
|
||||
.gradientNormalizationThreshold(2.5).build())
|
||||
.build();
|
||||
|
|
|
@ -56,8 +56,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
public void testDropConnect() {
|
||||
// Warning thrown only since some layers may not have l1 or l2
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.1)).weightNoise(new DropConnect(0.5))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
}
|
||||
|
@ -67,8 +67,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
public void testL1L2NotSet() {
|
||||
// Warning thrown only since some layers may not have l1 or l2
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.3))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
}
|
||||
|
@ -78,8 +78,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
public void testRegNotSetL1Global() {
|
||||
assertThrows(IllegalStateException.class, () -> {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.3)).l1(0.5)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
});
|
||||
|
@ -90,8 +90,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
public void testRegNotSetL2Local() {
|
||||
assertThrows(IllegalStateException.class, () -> {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.3))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).l2(0.5).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).l2(0.5).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
});
|
||||
|
@ -102,8 +102,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
// Warning thrown only since global dist can be set with a different weight init locally
|
||||
NeuralNetConfiguration conf =
|
||||
NeuralNetConfiguration.builder().updater(new Sgd(0.3)).dist(new GaussianDistribution(1e-3, 2))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
}
|
||||
|
@ -116,8 +116,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf =
|
||||
NeuralNetConfiguration.builder().updater(new Nesterovs(1.0, new MapSchedule(ScheduleType.ITERATION, testMomentumAfter)))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
}
|
||||
|
@ -130,12 +130,12 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
/* Graph Builder */
|
||||
.updater(Updater.RMSPROP.getIUpdaterWithDefaultConfig()).graphBuilder().addInputs("in")
|
||||
.addLayer("L" + 1,
|
||||
new GravesLSTM.Builder().nIn(20).updater(Updater.RMSPROP).nOut(10)
|
||||
GravesLSTM.builder().nIn(20).updater(Updater.RMSPROP).nOut(10)
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.dropOut(0.4).l1(0.3).activation(Activation.SIGMOID).build(),
|
||||
"in")
|
||||
.addLayer("output",
|
||||
new RnnOutputLayer.Builder().nIn(20).nOut(10).activation(Activation.SOFTMAX)
|
||||
RnnOutputLayer.builder().nIn(20).nOut(10).activation(Activation.SOFTMAX)
|
||||
.weightInit(WeightInit.RELU_UNIFORM).build(),
|
||||
"L" + 1)
|
||||
.setOutputs("output");
|
||||
|
@ -157,8 +157,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
|
||||
// Nesterovs Updater
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Nesterovs(0.9))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).l2(0.5).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).updater(new Nesterovs(0.3, 0.4)).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).l2(0.5).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).updater(new Nesterovs(0.3, 0.4)).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
|
@ -173,8 +173,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
// Adam Updater
|
||||
conf = NeuralNetConfiguration.builder().updater(new Adam(0.3))
|
||||
.weightInit(expectedDist)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).l2(0.5).l1(0.3).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).l2(0.5).l1(0.3).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
|
@ -191,8 +191,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
|
||||
//RMSProp Updater
|
||||
conf = NeuralNetConfiguration.builder().updater(new RmsProp(0.3))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).updater(new RmsProp(0.3, 0.4, RmsProp.DEFAULT_RMSPROP_EPSILON)).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).updater(new RmsProp(0.3, 0.4, RmsProp.DEFAULT_RMSPROP_EPSILON)).build()).build();
|
||||
net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
|
|
|
@ -249,7 +249,7 @@ public class CNNProcessorTest extends BaseDL4JTest {
|
|||
.gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
// Building the DL4J network
|
||||
.layer(0, new ConvolutionLayer.Builder(kernelArray, strideArray, zeroPaddingArray)
|
||||
.layer(0, ConvolutionLayer.builder(kernelArray, strideArray, zeroPaddingArray)
|
||||
.name("cnn1")
|
||||
.convolutionMode(ConvolutionMode.Strict)
|
||||
.nIn(2) // 2 input channels
|
||||
|
@ -258,7 +258,7 @@ public class CNNProcessorTest extends BaseDL4JTest {
|
|||
.activation(Activation.RELU)
|
||||
.biasInit(1e-2).build())
|
||||
|
||||
.layer(1, new ConvolutionLayer.Builder(kernelArray, strideArray, zeroPaddingArray)
|
||||
.layer(1, ConvolutionLayer.builder(kernelArray, strideArray, zeroPaddingArray)
|
||||
.name("cnn2")
|
||||
.convolutionMode(ConvolutionMode.Strict)
|
||||
.nOut(processWidth)
|
||||
|
@ -267,21 +267,21 @@ public class CNNProcessorTest extends BaseDL4JTest {
|
|||
.biasInit(1e-2)
|
||||
.build())
|
||||
|
||||
.layer(2, new ConvolutionLayer.Builder(kernelArray, strideArray, zeroPaddingArray)
|
||||
.layer(2, ConvolutionLayer.builder(kernelArray, strideArray, zeroPaddingArray)
|
||||
.name("cnn3")
|
||||
.convolutionMode(ConvolutionMode.Strict)
|
||||
.nOut(processWidth)
|
||||
.weightInit(WeightInit.XAVIER_UNIFORM)
|
||||
.activation(Activation.RELU).build())
|
||||
|
||||
.layer(3, new ConvolutionLayer.Builder(kernelArray, strideArray, zeroPaddingArray)
|
||||
.layer(3, ConvolutionLayer.builder(kernelArray, strideArray, zeroPaddingArray)
|
||||
.name("cnn4")
|
||||
.convolutionMode(ConvolutionMode.Strict)
|
||||
.nOut(processWidth)
|
||||
.weightInit(WeightInit.XAVIER_UNIFORM)
|
||||
.activation(Activation.RELU).build())
|
||||
|
||||
.layer(4, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
|
||||
.layer(4, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
|
||||
.name("output")
|
||||
.nOut(1)
|
||||
.activation(Activation.TANH)
|
||||
|
|
|
@ -39,8 +39,8 @@ public class CustomPreprocessorTest extends BaseDL4JTest {
|
|||
//Second: let's create a MultiLayerCofiguration with one, and check JSON and YAML config actually works...
|
||||
NeuralNetConfiguration conf =
|
||||
NeuralNetConfiguration.builder()
|
||||
.layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
|
||||
.layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10)
|
||||
.layer(0, DenseLayer.builder().nIn(10).nOut(10).build())
|
||||
.layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(10)
|
||||
.activation(Activation.SOFTMAX).nOut(10).build())
|
||||
.inputPreProcessor(0, new MyCustomPreprocessor())
|
||||
.build();
|
||||
|
|
|
@ -58,7 +58,7 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
|
||||
RnnToFeedForwardPreProcessor proc = new RnnToFeedForwardPreProcessor();
|
||||
NeuralNetConfiguration nnc = NeuralNetConfiguration.builder()
|
||||
.layer(new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(layerSize)
|
||||
.layer(org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(layerSize)
|
||||
.nOut(layerSize).build())
|
||||
.build();
|
||||
|
||||
|
@ -143,7 +143,7 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
FeedForwardToRnnPreProcessor proc = new FeedForwardToRnnPreProcessor();
|
||||
|
||||
NeuralNetConfiguration nnc = NeuralNetConfiguration.builder()
|
||||
.layer(new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(layerSize)
|
||||
.layer(org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(layerSize)
|
||||
.nOut(layerSize).build())
|
||||
.build();
|
||||
|
||||
|
@ -227,7 +227,7 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration nnc =
|
||||
NeuralNetConfiguration.builder()
|
||||
.layer(new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
|
||||
.layer(org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder(
|
||||
inputWidth, inputHeight).nIn(cnnNChannelsIn)
|
||||
.nOut(nChannels).build())
|
||||
.build();
|
||||
|
@ -309,7 +309,7 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration nnc =
|
||||
NeuralNetConfiguration.builder()
|
||||
.layer(new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
|
||||
.layer(org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder(
|
||||
inputWidth, inputHeight).nIn(cnnNChannelsIn)
|
||||
.nOut(nChannels).build())
|
||||
.build();
|
||||
|
@ -397,12 +397,12 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
//FF->RNN and RNN->FF
|
||||
NeuralNetConfiguration conf1 =
|
||||
NeuralNetConfiguration.builder()
|
||||
.layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(5)
|
||||
.layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(5)
|
||||
.nOut(6).build())
|
||||
.layer(1, new GravesLSTM.Builder().nIn(6).nOut(7).build())
|
||||
.layer(2, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(7)
|
||||
.layer(1, GravesLSTM.builder().nIn(6).nOut(7).build())
|
||||
.layer(2, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(7)
|
||||
.nOut(8).build())
|
||||
.layer(3, new RnnOutputLayer.Builder().nIn(8).nOut(9).activation(Activation.SOFTMAX).build()).build();
|
||||
.layer(3, RnnOutputLayer.builder().nIn(8).nOut(9).activation(Activation.SOFTMAX).build()).build();
|
||||
//Expect preprocessors: layer1: FF->RNN; 2: RNN->FF; 3: FF->RNN
|
||||
assertEquals(3, conf1.getInputPreProcessors().size());
|
||||
assertTrue(conf1.getInputPreProcess(1) instanceof FeedForwardToRnnPreProcessor);
|
||||
|
@ -412,10 +412,10 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
|
||||
//FF-> CNN, CNN-> FF, FF->RNN
|
||||
NeuralNetConfiguration conf2 = NeuralNetConfiguration.builder()
|
||||
.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder().nOut(10)
|
||||
.layer(0, org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder().nOut(10)
|
||||
.kernelSize(5, 5).stride(1, 1).build())
|
||||
.layer(1, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nOut(6).build())
|
||||
.layer(2, new RnnOutputLayer.Builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.layer(1, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nOut(6).build())
|
||||
.layer(2, RnnOutputLayer.builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutionalFlat(28, 28, 1)).build();
|
||||
//Expect preprocessors: 0: FF->CNN; 1: CNN->FF; 2: FF->RNN
|
||||
assertEquals(3, conf2.getInputPreProcessors().size());
|
||||
|
@ -425,10 +425,10 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
|
||||
//CNN-> FF, FF->RNN - InputType.convolutional instead of convolutionalFlat
|
||||
NeuralNetConfiguration conf2a = NeuralNetConfiguration.builder()
|
||||
.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder().nOut(10)
|
||||
.layer(0, org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder().nOut(10)
|
||||
.kernelSize(5, 5).stride(1, 1).build())
|
||||
.layer(1, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nOut(6).build())
|
||||
.layer(2, new RnnOutputLayer.Builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.layer(1, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nOut(6).build())
|
||||
.layer(2, RnnOutputLayer.builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutional(28, 28, 1)).build();
|
||||
//Expect preprocessors: 1: CNN->FF; 2: FF->RNN
|
||||
assertEquals(2, conf2a.getInputPreProcessors().size());
|
||||
|
@ -438,10 +438,10 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
|
||||
//FF->CNN and CNN->RNN:
|
||||
NeuralNetConfiguration conf3 = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder().nOut(10)
|
||||
.layer(0, org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder().nOut(10)
|
||||
.kernelSize(5, 5).stride(1, 1).build())
|
||||
.layer(1, new GravesLSTM.Builder().nOut(6).build())
|
||||
.layer(2, new RnnOutputLayer.Builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.layer(1, GravesLSTM.builder().nOut(6).build())
|
||||
.layer(2, RnnOutputLayer.builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutionalFlat(28, 28, 1)).build();
|
||||
//Expect preprocessors: 0: FF->CNN, 1: CNN->RNN;
|
||||
assertEquals(2, conf3.getInputPreProcessors().size());
|
||||
|
@ -454,16 +454,16 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf =
|
||||
NeuralNetConfiguration.builder()
|
||||
.list().layer(0,
|
||||
new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
|
||||
org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder(
|
||||
4, 4) // 28*28*1 => 15*15*10
|
||||
.nIn(1).nOut(10).padding(2, 2)
|
||||
.stride(2, 2)
|
||||
.weightInit(WeightInit.RELU)
|
||||
.activation(Activation.RELU)
|
||||
.build())
|
||||
.layer(1, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder()
|
||||
.layer(1, org.deeplearning4j.nn.conf.layers.DenseLayer.builder()
|
||||
.activation(Activation.RELU).nOut(200).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(200)
|
||||
.layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(200)
|
||||
.nOut(5).weightInit(WeightInit.RELU)
|
||||
.activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutionalFlat(28, 28, 1))
|
||||
|
|
|
@ -67,9 +67,9 @@ public class TestWeightNoise extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.weightNoise(wn)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10).build())
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(new DropConnect(0.25)).build())
|
||||
.layer(new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10).build())
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10).weightNoise(new DropConnect(0.25)).build())
|
||||
.layer(OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -86,9 +86,9 @@ public class TestWeightNoise extends BaseDL4JTest {
|
|||
.weightNoise(wn)
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.layer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
|
||||
.layer("1", new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(new DropConnect(0.25)).build(), "0")
|
||||
.layer("2", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "1")
|
||||
.layer("0", DenseLayer.builder().nIn(10).nOut(10).build(), "in")
|
||||
.layer("1", DenseLayer.builder().nIn(10).nOut(10).weightNoise(new DropConnect(0.25)).build(), "0")
|
||||
.layer("2", OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "1")
|
||||
.setOutputs("2")
|
||||
.build();
|
||||
|
||||
|
@ -145,9 +145,9 @@ public class TestWeightNoise extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(wn1).build())
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(wn2).build())
|
||||
.layer(new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).weightNoise(wn3).build())
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10).weightNoise(wn1).build())
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10).weightNoise(wn2).build())
|
||||
.layer(OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).weightNoise(wn3).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -170,9 +170,9 @@ public class TestWeightNoise extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder()
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.layer("0", new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(wn1).build(), "in")
|
||||
.layer("1", new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(wn2).build(), "0")
|
||||
.layer("2", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).weightNoise(wn3).build(), "1")
|
||||
.layer("0", DenseLayer.builder().nIn(10).nOut(10).weightNoise(wn1).build(), "in")
|
||||
.layer("1", DenseLayer.builder().nIn(10).nOut(10).weightNoise(wn2).build(), "0")
|
||||
.layer("2", OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).weightNoise(wn3).build(), "1")
|
||||
.setOutputs("2")
|
||||
.build();
|
||||
|
||||
|
@ -249,7 +249,7 @@ public class TestWeightNoise extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.weightInit(WeightInit.ONES)
|
||||
|
||||
.layer(new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.layer(OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
|
|
@ -305,9 +305,9 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
.updater(new Adam(0.01))
|
||||
.dataType(DataType.DOUBLE)
|
||||
.list()
|
||||
.layer(new DenseLayer.Builder().activation(Activation.TANH).nIn(10).nOut(10).build())
|
||||
.layer(new DenseLayer.Builder().activation(Activation.TANH).nIn(10).nOut(10).build())
|
||||
.layer(new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.layer(DenseLayer.builder().activation(Activation.TANH).nIn(10).nOut(10).build())
|
||||
.layer(DenseLayer.builder().activation(Activation.TANH).nIn(10).nOut(10).build())
|
||||
.layer(OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -389,9 +389,9 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.layer("l0", new DenseLayer.Builder().activation(Activation.TANH).nIn(10).nOut(10).build(), "in")
|
||||
.layer("l1", new DenseLayer.Builder().activation(Activation.TANH).nIn(10).nOut(10).build(), "l0")
|
||||
.layer("out", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "l1")
|
||||
.layer("l0", DenseLayer.builder().activation(Activation.TANH).nIn(10).nOut(10).build(), "in")
|
||||
.layer("l1", DenseLayer.builder().activation(Activation.TANH).nIn(10).nOut(10).build(), "l0")
|
||||
.layer("out", OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "l1")
|
||||
.setOutputs("out")
|
||||
.build();
|
||||
|
||||
|
@ -477,24 +477,24 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
LayerConfiguration secondLast;
|
||||
switch (outputLayer) {
|
||||
case 0:
|
||||
ol = new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = new GlobalPoolingLayer(PoolingType.MAX);
|
||||
ol = OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = GlobalPoolingLayer.builder(PoolingType.MAX).build();
|
||||
break;
|
||||
case 1:
|
||||
ol = new LossLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = new FrozenLayerWithBackprop(new DenseLayer.Builder().nOut(10).activation(Activation.SIGMOID).build());
|
||||
ol = LossLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction()).build();
|
||||
secondLast = new FrozenLayerWithBackprop(DenseLayer.builder().nOut(10).activation(Activation.SIGMOID).build());
|
||||
break;
|
||||
case 2:
|
||||
ol = new CenterLossOutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = new VariationalAutoencoder.Builder().encoderLayerSizes(10).decoderLayerSizes(10).nOut(10).activation(Activation.SIGMOID).build();
|
||||
ol =CenterLossOutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = VariationalAutoencoder.builder().encoderLayerSizes(10).decoderLayerSizes(10).nOut(10).activation(Activation.SIGMOID).build();
|
||||
break;
|
||||
case 3:
|
||||
ol = new CnnLossLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(3).activation(Activation.TANH).build();
|
||||
ol = CnnLossLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction()).build();
|
||||
secondLast = ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(3).activation(Activation.TANH).build();
|
||||
break;
|
||||
case 4:
|
||||
ol = new Yolo2OutputLayer.Builder().boundingBoxPriors(Nd4j.create(new double[][]{{1.0, 1.0}, {2.0, 2.0}}).castTo(networkDtype)).build();
|
||||
secondLast = new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(14).activation(Activation.TANH).build();
|
||||
secondLast = ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(14).activation(Activation.TANH).build();
|
||||
break;
|
||||
default:
|
||||
throw new RuntimeException();
|
||||
|
@ -505,28 +505,28 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
.dataType(networkDtype)
|
||||
.convolutionMode(ConvolutionMode.Same)
|
||||
.updater(new Adam(1e-2))
|
||||
.list()
|
||||
.layer(new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(3).activation(Activation.TANH).build())
|
||||
|
||||
.layer(ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(3).activation(Activation.TANH).build())
|
||||
.layer(new LocalResponseNormalization())
|
||||
.layer(new DropoutLayer(0.5))
|
||||
.layer(new DropoutLayer(new AlphaDropout(0.5)))
|
||||
.layer(new DropoutLayer(new GaussianDropout(0.5)))
|
||||
.layer(new DropoutLayer(new GaussianNoise(0.1)))
|
||||
.layer(new DropoutLayer(new SpatialDropout(0.5)))
|
||||
.layer(new SubsamplingLayer.Builder().poolingType(SubsamplingLayer.PoolingType.AVG).kernelSize(3, 3).stride(2, 2).build())
|
||||
.layer(new Pooling2D.Builder().poolingType(SubsamplingLayer.PoolingType.AVG).kernelSize(2, 2).stride(1, 1).build())
|
||||
.layer(new Deconvolution2D.Builder().kernelSize(2, 2).stride(2, 2).nOut(3).activation(Activation.TANH).build())
|
||||
// .layer(new LocallyConnected2D.Builder().nOut(3).kernelSize(2,2).stride(1,1).activation(Activation.SIGMOID).build()) //EXCEPTION
|
||||
.layer(new ZeroPaddingLayer(1, 1))
|
||||
.layer(new Cropping2D(1, 1))
|
||||
.layer(DropoutLayer.builder(0.5).build())
|
||||
.layer(DropoutLayer.builder(new AlphaDropout(0.5)).build())
|
||||
.layer(DropoutLayer.builder(new GaussianDropout(0.5)).build())
|
||||
.layer(DropoutLayer.builder(new GaussianNoise(0.1)).build())
|
||||
.layer(DropoutLayer.builder(new SpatialDropout(0.5)).build())
|
||||
.layer(SubsamplingLayer.builder().poolingType(SubsamplingLayer.PoolingType.AVG.toPoolingType()).kernelSize(3, 3).stride(2, 2).build())
|
||||
.layer(Pooling2D.builder().poolingType(SubsamplingLayer.PoolingType.AVG.toPoolingType()).kernelSize(2, 2).stride(1, 1).build())
|
||||
.layer(Deconvolution2D.builder().kernelSize(2, 2).stride(2, 2).nOut(3).activation(Activation.TANH).build())
|
||||
// .layer(LocallyConnected2D.builder().nOut(3).kernelSize(2,2).stride(1,1).activation(Activation.SIGMOID).build()) //EXCEPTION
|
||||
.layer(ZeroPaddingLayer.builder(1, 1).build())
|
||||
.layer(Cropping2D.builder(1, 1).build())
|
||||
.layer(new IdentityLayer())
|
||||
.layer(new Upsampling2D.Builder().size(2).build())
|
||||
.layer(new SubsamplingLayer.Builder().kernelSize(2, 2).stride(2, 2).build())
|
||||
.layer(new DepthwiseConvolution2D.Builder().nOut(3).activation(Activation.RELU).build())
|
||||
.layer(new SeparableConvolution2D.Builder().nOut(3).activation(Activation.HARDTANH).build())
|
||||
.layer(Upsampling2D.builder().size(2).build())
|
||||
.layer(SubsamplingLayer.builder().kernelSize(2, 2).stride(2, 2).build())
|
||||
.layer(DepthwiseConvolution2D.builder().nOut(3).activation(Activation.RELU).build())
|
||||
.layer(SeparableConvolution2D.builder().nOut(3).activation(Activation.HARDTANH).build())
|
||||
.layer(new MaskLayer())
|
||||
.layer(new BatchNormalization.Builder().build())
|
||||
.layer(new ActivationLayer(Activation.LEAKYRELU))
|
||||
.layer(BatchNormalization.builder().build())
|
||||
.layer(ActivationLayer.builder().activation(Activation.LEAKYRELU).build())
|
||||
.layer(secondLast)
|
||||
.layer(ol)
|
||||
.inputType(InputType.convolutionalFlat(8, 8, 1))
|
||||
|
@ -603,16 +603,16 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
LayerConfiguration secondLast;
|
||||
switch (outputLayer) {
|
||||
case 0:
|
||||
ol = new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = new GlobalPoolingLayer(PoolingType.AVG);
|
||||
ol = OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = GlobalPoolingLayer.builder(PoolingType.AVG).build();
|
||||
break;
|
||||
case 1:
|
||||
ol = new Cnn3DLossLayer.Builder(Convolution3D.DataFormat.NCDHW).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = new Convolution3D.Builder().nOut(3).activation(Activation.ELU).build();
|
||||
ol = Cnn3DLossLayer.builder().dataFormat(Convolution3D.DataFormat.NCDHW).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction()).build();
|
||||
secondLast = Convolution3D.builder().nOut(3).activation(Activation.ELU).build();
|
||||
break;
|
||||
case 2:
|
||||
ol = new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = new Convolution3D.Builder().nOut(3).activation(Activation.ELU).build();
|
||||
ol = OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = Convolution3D.builder().nOut(3).activation(Activation.ELU).build();
|
||||
break;
|
||||
default:
|
||||
throw new RuntimeException();
|
||||
|
@ -623,15 +623,15 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
.dataType(networkDtype)
|
||||
.convolutionMode(ConvolutionMode.Same)
|
||||
.updater(new Nesterovs(1e-2, 0.9))
|
||||
.list()
|
||||
.layer(new Convolution3D.Builder().kernelSize(2, 2, 2).stride(1, 1, 1).nOut(3).activation(Activation.TANH).build())
|
||||
.layer(new Convolution3D.Builder().kernelSize(2, 2, 2).stride(1, 1, 1).nOut(3).activation(Activation.TANH).build())
|
||||
.layer(new Subsampling3DLayer.Builder().poolingType(PoolingType.AVG).kernelSize(2, 2, 2).stride(2, 2, 2).build())
|
||||
.layer(new Deconvolution3D.Builder().kernelSize(2,2,2).stride(1,1,1).nIn(3).nOut(3).activation(Activation.TANH).build())
|
||||
.layer(new Cropping3D.Builder(1, 1, 1, 1, 1, 1).build())
|
||||
.layer(new ZeroPadding3DLayer.Builder(1, 1, 1, 1, 1, 1).build())
|
||||
.layer(new ActivationLayer(Activation.LEAKYRELU))
|
||||
.layer(new Upsampling3D.Builder().size(2).build())
|
||||
|
||||
.layer(Convolution3D.builder().kernelSize(2, 2, 2).stride(1, 1, 1).nOut(3).activation(Activation.TANH).build())
|
||||
.layer(Convolution3D.builder().kernelSize(2, 2, 2).stride(1, 1, 1).nOut(3).activation(Activation.TANH).build())
|
||||
.layer(Subsampling3DLayer.builder().poolingType(PoolingType.AVG).kernelSize(2, 2, 2).stride(2, 2, 2).build())
|
||||
.layer(Deconvolution3D.builder().kernelSize(2,2,2).stride(1,1,1).nIn(3).nOut(3).activation(Activation.TANH).build())
|
||||
.layer(Cropping3D.builder(1, 1, 1, 1, 1, 1).build())
|
||||
.layer(ZeroPadding3DLayer.builder(1, 1, 1, 1, 1, 1).build())
|
||||
.layer(ActivationLayer.builder(Activation.LEAKYRELU).build())
|
||||
.layer(Upsampling3D.builder().size(2).build())
|
||||
.layer(secondLast)
|
||||
.layer(ol)
|
||||
.inputType(InputType.convolutional3D(Convolution3D.DataFormat.NCDHW, 8, 8, 8, 1))
|
||||
|
@ -714,16 +714,16 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
LayerConfiguration secondLast;
|
||||
switch (outputLayer) {
|
||||
case 0:
|
||||
ol = new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = new GlobalPoolingLayer(PoolingType.MAX);
|
||||
ol = OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = GlobalPoolingLayer.builder(PoolingType.MAX).build();
|
||||
break;
|
||||
case 1:
|
||||
ol = new RnnOutputLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).nOut(5).build();
|
||||
secondLast = new Convolution1D.Builder().kernelSize(2).nOut(5).build();
|
||||
ol = RnnOutputLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).nOut(5).build();
|
||||
secondLast = Convolution1D.builder().kernelSize(2).nOut(5).build();
|
||||
break;
|
||||
case 2:
|
||||
ol = new RnnLossLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = new Convolution1D.Builder().kernelSize(2).nOut(5).build();
|
||||
ol = RnnLossLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction()).build();
|
||||
secondLast = Convolution1D.builder().kernelSize(2).nOut(5).build();
|
||||
break;
|
||||
default:
|
||||
throw new RuntimeException();
|
||||
|
@ -737,14 +737,14 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
.convolutionMode(ConvolutionMode.Same)
|
||||
.updater(new Adam(1e-2))
|
||||
.list()
|
||||
.layer(new Convolution1D.Builder()
|
||||
.layer(Convolution1D.builder()
|
||||
.kernelSize(2)
|
||||
.stride(1).nOut(3).
|
||||
activation(Activation.TANH).build())
|
||||
.layer(new Subsampling1DLayer.Builder().poolingType(PoolingType.MAX).kernelSize(5).stride(1).build())
|
||||
.layer(new Cropping1D.Builder(1).build())
|
||||
.layer(new ZeroPadding1DLayer(1))
|
||||
.layer(new Upsampling1D.Builder(2).build())
|
||||
.layer(Subsampling1DLayer.builder().poolingType(PoolingType.MAX).kernelSize(5).stride(1).build())
|
||||
.layer(Cropping1D.builder(1).build())
|
||||
.layer(ZeroPadding1DLayer.builder(1).build())
|
||||
.layer(Upsampling1D.builder(2).build())
|
||||
.layer(secondLast)
|
||||
.layer(ol)
|
||||
.inputType(InputType.recurrent(5, 10,RNNFormat.NCW))
|
||||
|
@ -819,7 +819,7 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
.list()
|
||||
.layer(new SpaceToBatchLayer.Builder().blocks(1, 1).build())
|
||||
.layer(new SpaceToDepthLayer.Builder().blocks(2).build())
|
||||
.layer(new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.layer(OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.inputType(InputType.convolutional(28, 28, 5))
|
||||
.build();
|
||||
|
||||
|
@ -880,16 +880,16 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
LayerConfiguration secondLast;
|
||||
switch (outputLayer) {
|
||||
case 0:
|
||||
ol = new RnnOutputLayer.Builder().nOut(5).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = new SimpleRnn.Builder().nOut(5).activation(Activation.TANH).build();
|
||||
ol = RnnOutputLayer.builder().nOut(5).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = SimpleRnn.builder().nOut(5).activation(Activation.TANH).build();
|
||||
break;
|
||||
case 1:
|
||||
ol = new RnnLossLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = new SimpleRnn.Builder().nOut(5).activation(Activation.TANH).build();
|
||||
ol = RnnLossLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = SimpleRnn.builder().nOut(5).activation(Activation.TANH).build();
|
||||
break;
|
||||
case 2:
|
||||
ol = new OutputLayer.Builder().nOut(5).build();
|
||||
secondLast = new LastTimeStep(new SimpleRnn.Builder().nOut(5).activation(Activation.TANH).build());
|
||||
ol = OutputLayer.builder().nOut(5).build();
|
||||
secondLast = new LastTimeStep(SimpleRnn.builder().nOut(5).activation(Activation.TANH).build());
|
||||
break;
|
||||
default:
|
||||
throw new RuntimeException();
|
||||
|
@ -899,15 +899,15 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
.dataType(networkDtype)
|
||||
.convolutionMode(ConvolutionMode.Same)
|
||||
.updater(new Adam(1e-2))
|
||||
.list()
|
||||
.layer(new LSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build())
|
||||
.layer(new GravesLSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build())
|
||||
.layer(new DenseLayer.Builder().nOut(5).build())
|
||||
.layer(new GravesBidirectionalLSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build())
|
||||
.layer(new Bidirectional(new LSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build()))
|
||||
.layer(new TimeDistributed(new DenseLayer.Builder().nIn(10).nOut(5).activation(Activation.TANH).build()))
|
||||
.layer(new SimpleRnn.Builder().nIn(5).nOut(5).build())
|
||||
.layer(new MaskZeroLayer.Builder().underlying(new SimpleRnn.Builder().nIn(5).nOut(5).build()).maskValue(0.0).build())
|
||||
|
||||
.layer(LSTM.builder().nIn(5).nOut(5).activation(Activation.TANH).build())
|
||||
.layer(GravesLSTM.builder().nIn(5).nOut(5).activation(Activation.TANH).build())
|
||||
.layer(DenseLayer.builder().nOut(5).build())
|
||||
.layer(GravesBidirectionalLSTM.builder().nIn(5).nOut(5).activation(Activation.TANH).build())
|
||||
.layer(Bidirectional.builder(LSTM.builder().nIn(5).nOut(5).activation(Activation.TANH).build()).build())
|
||||
.layer(new TimeDistributed(DenseLayer.builder().nIn(10).nOut(5).activation(Activation.TANH).build()))
|
||||
.layer(SimpleRnn.builder().nIn(5).nOut(5).build())
|
||||
.layer(new MaskZeroLayer.Builder().underlying(SimpleRnn.builder().nIn(5).nOut(5).build()).maskValue(0.0).build())
|
||||
.layer(secondLast)
|
||||
.layer(ol)
|
||||
.build();
|
||||
|
@ -990,10 +990,10 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
.kernelSize(3, 3)
|
||||
.stride(2, 2)
|
||||
.build())
|
||||
.layer(new CapsuleLayer.Builder(capsule, capsuleDim, routing).build())
|
||||
.layer(new CapsuleStrengthLayer.Builder().build())
|
||||
.layer(new ActivationLayer.Builder(new ActivationSoftmax()).build())
|
||||
.layer(new LossLayer.Builder(new LossNegativeLogLikelihood()).build())
|
||||
.layer(CapsuleLayer.builder(capsule, capsuleDim, routing).build())
|
||||
.layer(CapsuleStrengthLayer.builder().build())
|
||||
.layer(ActivationLayer.builder(new ActivationSoftmax()).build())
|
||||
.layer(LossLayer.builder().lossFunction(new LossNegativeLogLikelihood()).build())
|
||||
.inputType(InputType.convolutional(height, width, inputDepth))
|
||||
.build();
|
||||
|
||||
|
@ -1062,33 +1062,33 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
INDArray input;
|
||||
if (test == 0) {
|
||||
if (frozen) {
|
||||
conf.layer("0", new FrozenLayer(new EmbeddingLayer.Builder().nIn(5).nOut(5).build()), "in");
|
||||
conf.layer("0", new FrozenLayer(EmbeddingLayer.builder().nIn(5).nOut(5).build()), "in");
|
||||
} else {
|
||||
conf.layer("0", new EmbeddingLayer.Builder().nIn(5).nOut(5).build(), "in");
|
||||
conf.layer("0", EmbeddingLayer.builder().nIn(5).nOut(5).build(), "in");
|
||||
}
|
||||
|
||||
input = Nd4j.zeros(networkDtype, 10, 1).muli(5).castTo(DataType.INT);
|
||||
conf.setInputTypes(InputType.feedForward(1));
|
||||
} else if (test == 1) {
|
||||
if (frozen) {
|
||||
conf.layer("0", new FrozenLayer(new EmbeddingSequenceLayer.Builder().nIn(5).nOut(5).build()), "in");
|
||||
conf.layer("0", new FrozenLayer(EmbeddingSequenceLayer.builder().nIn(5).nOut(5).build()), "in");
|
||||
} else {
|
||||
conf.layer("0", new EmbeddingSequenceLayer.Builder().nIn(5).nOut(5).build(), "in");
|
||||
conf.layer("0", EmbeddingSequenceLayer.builder().nIn(5).nOut(5).build(), "in");
|
||||
}
|
||||
conf.layer("gp", new GlobalPoolingLayer.Builder(PoolingType.PNORM).pnorm(2).poolingDimensions(2).build(), "0");
|
||||
conf.layer("gp", GlobalPoolingLayer.builder(PoolingType.PNORM).pnorm(2).poolingDimensions(2).build(), "0");
|
||||
input = Nd4j.zeros(networkDtype, 10, 1, 5).muli(5).castTo(DataType.INT);
|
||||
conf.setInputTypes(InputType.recurrent(1));
|
||||
} else {
|
||||
conf.layer("0", new RepeatVector.Builder().repetitionFactor(5).nOut(5).build(), "in");
|
||||
conf.layer("gp", new GlobalPoolingLayer.Builder(PoolingType.SUM).build(), "0");
|
||||
conf.layer("0", RepeatVector.builder().repetitionFactor(5).nOut(5).build(), "in");
|
||||
conf.layer("gp", GlobalPoolingLayer.builder(PoolingType.SUM).build(), "0");
|
||||
input = Nd4j.zeros(networkDtype, 10, 5);
|
||||
conf.setInputTypes(InputType.feedForward(5));
|
||||
}
|
||||
|
||||
conf.appendLayer("el", new ElementWiseMultiplicationLayer.Builder().nOut(5).build())
|
||||
.appendLayer("ae", new AutoEncoder.Builder().nOut(5).build())
|
||||
.appendLayer("prelu", new PReLULayer.Builder().nOut(5).inputShape(5).build())
|
||||
.appendLayer("out", new OutputLayer.Builder().nOut(10).build());
|
||||
conf.appendLayer("el", ElementWiseMultiplicationLayer.builder().nOut(5).build())
|
||||
.appendLayer("ae", AutoEncoder.builder().nOut(5).build())
|
||||
.appendLayer("prelu", PReLULayer.builder().nOut(5).inputShape(5).build())
|
||||
.appendLayer("out", OutputLayer.builder().nOut(10).build());
|
||||
|
||||
ComputationGraph net = new ComputationGraph(conf.build());
|
||||
net.init();
|
||||
|
@ -1153,34 +1153,34 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
switch (test) {
|
||||
case 0:
|
||||
b.addInputs("in")
|
||||
.addLayer("l", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(1).build(), "in")
|
||||
.addLayer("l", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(1).build(), "in")
|
||||
.addVertex("preproc", new PreprocessorVertex(new CnnToRnnPreProcessor(28, 28, 1)), "l")
|
||||
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "preproc")
|
||||
.addLayer("out", OutputLayer.builder().nOut(10).build(), "preproc")
|
||||
.setInputTypes(InputType.convolutional(28, 28, 1))
|
||||
.setOutputs("out");
|
||||
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 28, 28)};
|
||||
break;
|
||||
case 1:
|
||||
b.addInputs("in")
|
||||
.addLayer("l", new DenseLayer.Builder().nOut(16).build(), "in")
|
||||
.addLayer("l", DenseLayer.builder().nOut(16).build(), "in")
|
||||
.addVertex("preproc", new PreprocessorVertex(new FeedForwardToCnn3DPreProcessor(2, 2, 2, 2, true)), "l")
|
||||
.addVertex("preproc2", new PreprocessorVertex(new PermutePreprocessor(0, 2, 3, 4, 1)), "preproc")
|
||||
.addVertex("preproc3", new PreprocessorVertex(new ReshapePreprocessor(new long[]{2, 2, 2, 2}, new long[]{16}, false)), "preproc2")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(16).nOut(10).build(), "preproc3")
|
||||
.addLayer("out", OutputLayer.builder().nIn(16).nOut(10).build(), "preproc3")
|
||||
.setInputTypes(InputType.feedForward(5))
|
||||
.setOutputs("out");
|
||||
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5)};
|
||||
break;
|
||||
case 2:
|
||||
b.addInputs("in")
|
||||
.addLayer("1", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(1).build(), "in")
|
||||
.addLayer("1", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(1).build(), "in")
|
||||
.addVertex("1a", new PoolHelperVertex(), "1")
|
||||
.addVertex("2", new ShiftVertex(1), "1a")
|
||||
.addVertex("3", new ScaleVertex(2), "2")
|
||||
.addVertex("4", new ReshapeVertex(2, -1), "3")
|
||||
.addVertex("5", new SubsetVertex(0, 99), "4")
|
||||
.addVertex("6", new L2NormalizeVertex(), "5")
|
||||
.addLayer("out", new OCNNOutputLayer.Builder().hiddenLayerSize(10).nIn(100).build(), "6")
|
||||
.addLayer("out",OCNNOutputLayer.builder().hiddenLayerSize(10).nIn(100).build(), "6")
|
||||
.setInputTypes(InputType.convolutional(28, 28, 1))
|
||||
.setOutputs("out");
|
||||
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 28, 28)};
|
||||
|
@ -1193,23 +1193,23 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
.addVertex("3", new StackVertex(), "2a", "2b")
|
||||
.addVertex("4", new DuplicateToTimeSeriesVertex("in3"), "3")
|
||||
.addVertex("5", new ReverseTimeSeriesVertex(), "4")
|
||||
.addLayer("6", new GlobalPoolingLayer(PoolingType.AVG), "5")
|
||||
.addLayer("6", GlobalPoolingLayer.builder(PoolingType.AVG).build(), "5")
|
||||
.addVertex("7", new LastTimeStepVertex("in3"), "in3")
|
||||
.addVertex("8", new MergeVertex(), "6", "7")
|
||||
.addVertex("9", new PreprocessorVertex(new ComposableInputPreProcessor()), "8")
|
||||
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "9")
|
||||
.addLayer("out", OutputLayer.builder().nOut(10).build(), "9")
|
||||
.setInputTypes(InputType.feedForward(8), InputType.feedForward(8), InputType.recurrent(8))
|
||||
.setOutputs("out");
|
||||
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 8), Nd4j.rand(networkDtype, 2, 8), Nd4j.rand(networkDtype, 2, 8, 5)};
|
||||
break;
|
||||
case 4:
|
||||
b.addInputs("in1", "in2")
|
||||
.addLayer("1", new LSTM.Builder().nOut(8).build(), "in1")
|
||||
.addLayer("1", LSTM.builder().nOut(8).build(), "in1")
|
||||
.addVertex("preproc1", new PreprocessorVertex(new RnnToCnnPreProcessor(2, 2, 2)), "1")
|
||||
.addVertex("preproc2", new PreprocessorVertex(new CnnToRnnPreProcessor(2, 2, 2)), "preproc1")
|
||||
.addLayer("pool", new GlobalPoolingLayer(), "preproc2")
|
||||
.addLayer("pool2", new GlobalPoolingLayer(), "in2")
|
||||
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "pool", "pool2")
|
||||
.addLayer("pool", GlobalPoolingLayer.builder().build(), "preproc2")
|
||||
.addLayer("pool2", GlobalPoolingLayer.builder().build(), "in2")
|
||||
.addLayer("out", OutputLayer.builder().nOut(10).build(), "pool", "pool2")
|
||||
.setInputTypes(InputType.recurrent(8), InputType.convolutional(28, 28, 1))
|
||||
.setOutputs("out");
|
||||
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 8, 5), Nd4j.rand(networkDtype, 2, 1, 28, 28)};
|
||||
|
@ -1217,28 +1217,28 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
case 5:
|
||||
b.addInputs("in1", "in2")
|
||||
.addVertex("fv", new FrozenVertex(new ScaleVertex(2.0)), "in1")
|
||||
.addLayer("1", new DenseLayer.Builder().nOut(5).build(), "fv")
|
||||
.addLayer("2", new DenseLayer.Builder().nOut(5).build(), "in2")
|
||||
.addLayer("1", DenseLayer.builder().nOut(5).build(), "fv")
|
||||
.addLayer("2", DenseLayer.builder().nOut(5).build(), "in2")
|
||||
.addVertex("v", new L2Vertex(), "1", "2")
|
||||
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "v")
|
||||
.addLayer("out", OutputLayer.builder().nOut(10).build(), "v")
|
||||
.setInputTypes(InputType.feedForward(5), InputType.feedForward(5))
|
||||
.setOutputs("out");
|
||||
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5), Nd4j.rand(networkDtype, 2, 5)};
|
||||
break;
|
||||
case 6:
|
||||
b.addInputs("in")
|
||||
.addLayer("1", new LSTM.Builder().nOut(5).build(), "in")
|
||||
.addLayer("1", LSTM.builder().nOut(5).build(), "in")
|
||||
.addVertex("2", new PreprocessorVertex(new KerasFlattenRnnPreprocessor(5, 4)), "1")
|
||||
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "2")
|
||||
.addLayer("out", OutputLayer.builder().nOut(10).build(), "2")
|
||||
.setOutputs("out")
|
||||
.setInputTypes(InputType.recurrent(5, 4));
|
||||
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5, 4)};
|
||||
break;
|
||||
case 7:
|
||||
b.addInputs("in")
|
||||
.addLayer("1", new ConvolutionLayer.Builder().kernelSize(2, 2).nOut(5).convolutionMode(ConvolutionMode.Same).build(), "in")
|
||||
.addLayer("1", ConvolutionLayer.builder().kernelSize(2, 2).nOut(5).convolutionMode(ConvolutionMode.Same).build(), "in")
|
||||
.addVertex("2", new PreprocessorVertex(new CnnToFeedForwardPreProcessor(28, 28, 5)), "1")
|
||||
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "2")
|
||||
.addLayer("out", OutputLayer.builder().nOut(10).build(), "2")
|
||||
.setOutputs("out")
|
||||
.setInputTypes(InputType.convolutional(28, 28, 1));
|
||||
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 28, 28)};
|
||||
|
@ -1311,9 +1311,9 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
switch (test) {
|
||||
case 0:
|
||||
b.addInputs("in")
|
||||
.addLayer("1", new LSTM.Builder().nOut(5).build(), "in")
|
||||
.addLayer("2", new LocallyConnected1D.Builder().kernelSize(2).nOut(4).build(), "1")
|
||||
.addLayer("out", new RnnOutputLayer.Builder().nOut(10).build(), "2")
|
||||
.addLayer("1", LSTM.builder().nOut(5).build(), "in")
|
||||
.addLayer("2", LocallyConnected1D.builder().kernelSize(2).nOut(4).build(), "1")
|
||||
.addLayer("out", RnnOutputLayer.builder().nOut(10).build(), "2")
|
||||
.setOutputs("out")
|
||||
.setInputTypes(InputType.recurrent(5, 2));
|
||||
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5, 2)};
|
||||
|
@ -1321,9 +1321,9 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
break;
|
||||
case 1:
|
||||
b.addInputs("in")
|
||||
.addLayer("1", new ConvolutionLayer.Builder().kernelSize(2, 2).nOut(5).convolutionMode(ConvolutionMode.Same).build(), "in")
|
||||
.addLayer("2", new LocallyConnected2D.Builder().kernelSize(2, 2).nOut(5).build(), "1")
|
||||
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "2")
|
||||
.addLayer("1", ConvolutionLayer.builder().kernelSize(2, 2).nOut(5).convolutionMode(ConvolutionMode.Same).build(), "in")
|
||||
.addLayer("2", LocallyConnected2D.builder().kernelSize(2, 2).nOut(5).build(), "1")
|
||||
.addLayer("out", OutputLayer.builder().nOut(10).build(), "2")
|
||||
.setOutputs("out")
|
||||
.setInputTypes(InputType.convolutional(8, 8, 1));
|
||||
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 8, 8)};
|
||||
|
@ -1399,12 +1399,12 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.list()
|
||||
.layer(new LSTM.Builder().nOut(layerSize).build())
|
||||
.layer(LSTM.builder().nOut(layerSize).build())
|
||||
.layer(new SelfAttentionLayer.Builder().nOut(8).nHeads(2).projectInput(true).build())
|
||||
.layer(new LearnedSelfAttentionLayer.Builder().nOut(8).nHeads(2).nQueries(numQueries).projectInput(true).build())
|
||||
.layer(new RecurrentAttentionLayer.Builder().nIn(layerSize).nOut(layerSize).nHeads(1).projectInput(false).hasBias(false).build())
|
||||
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build())
|
||||
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build())
|
||||
.layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.inputType(InputType.recurrent(nIn))
|
||||
.build();
|
||||
|
@ -1487,12 +1487,12 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
.weightInit(WeightInit.XAVIER)
|
||||
.graphBuilder()
|
||||
.addInputs("input")
|
||||
.addLayer("lstmKeys", new LSTM.Builder().nOut(layerSize).build(), "input")
|
||||
.addLayer("lstmQueries", new LSTM.Builder().nOut(layerSize).build(), "input")
|
||||
.addLayer("lstmValues", new LSTM.Builder().nOut(layerSize).build(), "input")
|
||||
.addLayer("lstmKeys", LSTM.builder().nOut(layerSize).build(), "input")
|
||||
.addLayer("lstmQueries", LSTM.builder().nOut(layerSize).build(), "input")
|
||||
.addLayer("lstmValues", LSTM.builder().nOut(layerSize).build(), "input")
|
||||
.addVertex("attention", new AttentionVertex.Builder().nOut(8).nHeads(2).projectInput(true).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build(), "lstmQueries", "lstmKeys", "lstmValues")
|
||||
.addLayer("pooling", new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build(), "attention")
|
||||
.addLayer("output", new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
|
||||
.addLayer("pooling", GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build(), "attention")
|
||||
.addLayer("output", OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
|
||||
.setOutputs("output")
|
||||
.setInputTypes(InputType.recurrent(nIn))
|
||||
.build();
|
||||
|
|
|
@ -68,18 +68,18 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
|
|||
//4 layer network: 2 GravesLSTM + DenseLayerConfiguration + RnnOutputLayer. Hence also tests preprocessors.
|
||||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345).graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(5).nOut(7)
|
||||
.addLayer("0", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(5).nOut(7)
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 0.5)).build(), "in")
|
||||
.addLayer("1", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(7).nOut(8)
|
||||
.addLayer("1", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(7).nOut(8)
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 0.5)).build(), "0")
|
||||
.addLayer("2", new DenseLayer.Builder().nIn(8).nOut(9).activation(Activation.TANH)
|
||||
.addLayer("2", DenseLayer.builder().nIn(8).nOut(9).activation(Activation.TANH)
|
||||
|
||||
.dist(new NormalDistribution(0,
|
||||
0.5))
|
||||
.build(), "1")
|
||||
.addLayer("3", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.addLayer("3", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.nIn(9).nOut(4)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.dist(new NormalDistribution(0, 0.5)).build(), "2")
|
||||
|
@ -157,15 +157,15 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
|
|||
int timeSeriesLength = 6;
|
||||
|
||||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
|
||||
.addLayer("0", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(5).nOut(7)
|
||||
.addLayer("0", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(5).nOut(7)
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 0.5)).build(), "in")
|
||||
.addLayer("1", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(7).nOut(8)
|
||||
.addLayer("1", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(7).nOut(8)
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0,
|
||||
0.5))
|
||||
.build(), "0")
|
||||
.addLayer("2", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.addLayer("2", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.nIn(8).nOut(4)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.dist(new NormalDistribution(0, 0.5)).build(), "1")
|
||||
|
@ -214,27 +214,27 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345).graphBuilder()
|
||||
.addInputs("in0", "in1")
|
||||
.addLayer("lstm0",
|
||||
new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(5).nOut(6)
|
||||
org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(5).nOut(6)
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 0.5)).build(),
|
||||
"in0")
|
||||
.addLayer("lstm1",
|
||||
new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(4).nOut(5)
|
||||
org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(4).nOut(5)
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 0.5)).build(),
|
||||
"in1")
|
||||
.addLayer("dense", new DenseLayer.Builder().nIn(6 + 5).nOut(9).activation(Activation.TANH)
|
||||
.addLayer("dense", DenseLayer.builder().nIn(6 + 5).nOut(9).activation(Activation.TANH)
|
||||
|
||||
.dist(new NormalDistribution(0,
|
||||
0.5))
|
||||
.build(), "lstm0", "lstm1")
|
||||
.addLayer("out0", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.addLayer("out0", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.nIn(9).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.dist(new NormalDistribution(0,
|
||||
0.5))
|
||||
.build(), "dense")
|
||||
.addLayer("out1", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.addLayer("out1", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.nIn(9).nOut(4)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.dist(new NormalDistribution(0, 0.5)).build(), "dense")
|
||||
|
@ -344,15 +344,15 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
|
|||
.trainingWorkspaceMode(WorkspaceMode.NONE).inferenceWorkspaceMode(WorkspaceMode.NONE)
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(nIn).nOut(7)
|
||||
.addLayer("0", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(nIn).nOut(7)
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 0.5)).build(), "in")
|
||||
.addLayer("1", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(7).nOut(8)
|
||||
.addLayer("1", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(7).nOut(8)
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0,
|
||||
0.5))
|
||||
.build(), "0")
|
||||
.addLayer("out", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.addLayer("out", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.nIn(8).nOut(nOut)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.dist(new NormalDistribution(0, 0.5)).build(), "1")
|
||||
|
@ -364,15 +364,15 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
|
|||
.trainingWorkspaceMode(WorkspaceMode.NONE).inferenceWorkspaceMode(WorkspaceMode.NONE)
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(nIn).nOut(7)
|
||||
.addLayer("0", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(nIn).nOut(7)
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 0.5)).build(), "in")
|
||||
.addLayer("1", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(7).nOut(8)
|
||||
.addLayer("1", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(7).nOut(8)
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0,
|
||||
0.5))
|
||||
.build(), "0")
|
||||
.addLayer("out", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.addLayer("out", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.nIn(8).nOut(nOut)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.dist(new NormalDistribution(0, 0.5)).build(), "1")
|
||||
|
@ -459,15 +459,15 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(nIn).nOut(7)
|
||||
.addLayer("0", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(nIn).nOut(7)
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 0.5)).build(), "in")
|
||||
.addLayer("1", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(7).nOut(8)
|
||||
.addLayer("1", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(7).nOut(8)
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0,
|
||||
0.5))
|
||||
.build(), "0")
|
||||
.addLayer("out", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.addLayer("out", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.nIn(8).nOut(nOut)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.dist(new NormalDistribution(0, 0.5)).build(), "1")
|
||||
|
@ -496,15 +496,15 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(nIn).nOut(7)
|
||||
.addLayer("0", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(nIn).nOut(7)
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 0.5)).build(), "in")
|
||||
.addLayer("1", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(7).nOut(8)
|
||||
.addLayer("1", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(7).nOut(8)
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0,
|
||||
0.5))
|
||||
.build(), "0")
|
||||
.addLayer("out", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.addLayer("out", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.nIn(8).nOut(nOut)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.dist(new NormalDistribution(0, 0.5)).build(), "1")
|
||||
|
@ -532,7 +532,7 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
|
|||
//Simple "does it throw an exception" type test...
|
||||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
|
||||
.graphBuilder().addInputs("in")
|
||||
.addLayer("out", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE)
|
||||
.addLayer("out", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
|
||||
.activation(Activation.IDENTITY).nIn(1).nOut(1).build(), "in")
|
||||
.setOutputs("out").backpropType(BackpropType.TruncatedBPTT).tbpttFwdLength(8)
|
||||
.setInputTypes(InputType.recurrent(1,1,RNNFormat.NCW))
|
||||
|
@ -555,7 +555,7 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
|
|||
//Simple "does it throw an exception" type test...
|
||||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
|
||||
.graphBuilder().addInputs("in")
|
||||
.addLayer("out", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE)
|
||||
.addLayer("out", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
|
||||
.activation(Activation.IDENTITY).nIn(1).nOut(1).build(), "in")
|
||||
.setOutputs("out").backpropType(tbptt ? BackpropType.TruncatedBPTT : BackpropType.Standard)
|
||||
.tbpttFwdLength(8).tbpttBackLength(8).build();
|
||||
|
@ -619,9 +619,9 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
|
|||
NeuralNetConfiguration.builder()
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.layer("0", new org.deeplearning4j.nn.conf.layers.LSTM.Builder().nIn(nIn).nOut(nHiddenUnits).build(), "in")
|
||||
.layer("1", new GlobalPoolingLayer(), "0")
|
||||
.layer("2", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(nHiddenUnits)
|
||||
.layer("0", org.deeplearning4j.nn.conf.layers.LSTM.builder().nIn(nIn).nOut(nHiddenUnits).build(), "in")
|
||||
.layer("1", GlobalPoolingLayer.builder().build(), "0")
|
||||
.layer("2", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(nHiddenUnits)
|
||||
.nOut(nOut)
|
||||
.activation(Activation.TANH).build(), "1")
|
||||
.setOutputs("2")
|
||||
|
|
|
@@ -62,19 +62,19 @@ public class TestCompGraphCNN extends BaseDL4JTest {
.graphBuilder().addInputs("input")
.setInputTypes(InputType.convolutional(32, 32, 3))
.addLayer("cnn1",
new ConvolutionLayer.Builder(4, 4).stride(2, 2).nIn(3).nOut(3)
ConvolutionLayer.builder(4, 4).stride(2, 2).nIn(3).nOut(3)
.build(),
"input")
.addLayer("cnn2",
new ConvolutionLayer.Builder(4, 4).stride(2, 2).nIn(3).nOut(3)
ConvolutionLayer.builder(4, 4).stride(2, 2).nIn(3).nOut(3)
.build(),
"input")
.addLayer("max1",
new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX)
.stride(1, 1).kernelSize(2, 2).build(),
"cnn1", "cnn2")
.addLayer("dnn1", new DenseLayer.Builder().nOut(7).build(), "max1")
.addLayer("output", new OutputLayer.Builder().nIn(7).nOut(10).activation(Activation.SOFTMAX).build(), "dnn1")
.addLayer("dnn1", DenseLayer.builder().nOut(7).build(), "max1")
.addLayer("output", OutputLayer.builder().nIn(7).nOut(10).activation(Activation.SOFTMAX).build(), "dnn1")
.setOutputs("output").build();

return conf;

@@ -159,19 +159,19 @@ public class TestCompGraphCNN extends BaseDL4JTest {
.seed(123).graphBuilder().addInputs("input")
.setInputTypes(InputType.convolutional(nChannels, imageWidth,
imageHeight))
.addLayer("conv1", new ConvolutionLayer.Builder()
.addLayer("conv1", ConvolutionLayer.builder()
.kernelSize(kernelHeight, kernelWidth).stride(1, 1)
.dataFormat(CNN2DFormat.NCHW)
.nIn(nChannels).nOut(2).weightInit(WeightInit.XAVIER)
.activation(Activation.RELU).build(), "input")
.addLayer("pool1",
new SubsamplingLayer.Builder()
SubsamplingLayer.builder()
.dataFormat(CNN2DFormat.NCHW)
.poolingType(SubsamplingLayer.PoolingType.MAX)
.poolingType(SubsamplingLayer.PoolingType.MAX.toPoolingType())
.kernelSize(imageHeight - kernelHeight + 1, 1)
.stride(1, 1).build(),
"conv1")
.addLayer("output", new OutputLayer.Builder().nOut(classes).activation(Activation.SOFTMAX).build(), "pool1")
.addLayer("output", OutputLayer.builder().nOut(classes).activation(Activation.SOFTMAX).build(), "pool1")
.setOutputs("output").build();

@@ -67,7 +67,7 @@ public class TestCompGraphUnsupervised extends BaseDL4JTest {
.trainingWorkspaceMode(wsm)
.graphBuilder()
.addInputs("in")
.addLayer("vae1", new VariationalAutoencoder.Builder()
.addLayer("vae1", VariationalAutoencoder.builder()
.nIn(784)
.nOut(32)
.encoderLayerSizes(16)

@@ -76,7 +76,7 @@ public class TestCompGraphUnsupervised extends BaseDL4JTest {
.pzxActivationFunction(Activation.SIGMOID)
.reconstructionDistribution(new BernoulliReconstructionDistribution(Activation.SIGMOID))
.build(), "in")
.addLayer("vae2", new VariationalAutoencoder.Builder()
.addLayer("vae2", VariationalAutoencoder.builder()
.nIn(32)
.nOut(8)
.encoderLayerSizes(16)

@@ -142,7 +142,7 @@ public class TestCompGraphUnsupervised extends BaseDL4JTest {
.inferenceWorkspaceMode(wsm)
.trainingWorkspaceMode(wsm)
.layer(new VariationalAutoencoder.Builder()
.layer(VariationalAutoencoder.builder()
.nIn(784)
.nOut(32)
.encoderLayerSizes(16)

@@ -151,7 +151,7 @@ public class TestCompGraphUnsupervised extends BaseDL4JTest {
.pzxActivationFunction(Activation.SIGMOID)
.reconstructionDistribution(new BernoulliReconstructionDistribution(Activation.SIGMOID))
.build())
.layer(new VariationalAutoencoder.Builder()
.layer(VariationalAutoencoder.builder()
.nIn(32)
.nOut(8)
.encoderLayerSizes(16)

@@ -101,16 +101,16 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
return NeuralNetConfiguration.builder().seed(12345)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
.addInputs("input")
.addLayer("firstLayer", new DenseLayer.Builder().nIn(4).nOut(5).build(), "input")
.addLayer("outputLayer", new OutputLayer.Builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build(), "firstLayer")
.addLayer("firstLayer", DenseLayer.builder().nIn(4).nOut(5).build(), "input")
.addLayer("outputLayer", OutputLayer.builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build(), "firstLayer")
.setOutputs("outputLayer").build();
}

private static NeuralNetConfiguration getIrisMLNConfiguration() {
return NeuralNetConfiguration.builder().seed(12345)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.layer(0, new DenseLayer.Builder().nIn(4).nOut(5).build())
.layer(1, new OutputLayer.Builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build()).build();
.layer(0, DenseLayer.builder().nIn(4).nOut(5).build())
.layer(1, OutputLayer.builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build()).build();
}

private static int getNumParams() {

@@ -335,8 +335,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration config = NeuralNetConfiguration.builder()
.updater(new Sgd(0.1))
.graphBuilder().addInputs("in")
.addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(2).build(), "in").addLayer("out",
new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(2).nOut(3)
.addLayer("dense", DenseLayer.builder().nIn(4).nOut(2).build(), "in").addLayer("out",
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(2).nOut(3)
.build(),
"dense")
.setOutputs("out").build();

@@ -403,8 +403,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
//First: check FF -> RNN
ComputationGraphConfiguration conf1 = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
.setInputTypes(InputType.feedForward(5))
.addLayer("rnn", new GravesLSTM.Builder().nOut(5).build(), "in")
.addLayer("out", new RnnOutputLayer.Builder().nOut(5).activation(Activation.SOFTMAX).build(), "rnn").setOutputs("out").build();
.addLayer("rnn", GravesLSTM.builder().nOut(5).build(), "in")
.addLayer("out", RnnOutputLayer.builder().nOut(5).activation(Activation.SOFTMAX).build(), "rnn").setOutputs("out").build();

assertEquals(5, ((FeedForwardLayer) ((LayerVertex) conf1.getVertices().get("rnn")).getNetConfiguration().getFlattenedLayerConfigurations().get(0))
.getNIn());

@@ -419,8 +419,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
//Check RNN -> FF -> RNN
ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
.setInputTypes(InputType.recurrent(5))
.addLayer("ff", new DenseLayer.Builder().nOut(5).build(), "in")
.addLayer("out", new RnnOutputLayer.Builder().nOut(5).activation(Activation.SOFTMAX).build(), "ff")
.addLayer("ff", DenseLayer.builder().nOut(5).build(), "in")
.addLayer("out", RnnOutputLayer.builder().nOut(5).activation(Activation.SOFTMAX).build(), "ff")
.setOutputs("out").build();

assertEquals(5, ((FeedForwardLayer) ((LayerVertex) conf2.getVertices().get("ff")).getNetConfiguration().getFlattenedLayerConfigurations().get(0))

@@ -436,14 +436,14 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
//CNN -> Dense
ComputationGraphConfiguration conf3 = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
.setInputTypes(InputType.convolutional(28, 28, 1))
.addLayer("cnn", new ConvolutionLayer.Builder().kernelSize(2, 2).padding(0, 0).stride(2, 2)
.addLayer("cnn", ConvolutionLayer.builder().kernelSize(2, 2).padding(0, 0).stride(2, 2)
.nOut(3).build(), "in") //(28-2+0)/2+1 = 14
.addLayer("pool",
new SubsamplingLayer.Builder().kernelSize(2, 2).padding(0, 0).stride(2, 2)
SubsamplingLayer.builder().kernelSize(2, 2).padding(0, 0).stride(2, 2)
.build(),
"cnn") //(14-2+0)/2+1=7
.addLayer("dense", new DenseLayer.Builder().nOut(10).build(), "pool")
.addLayer("out", new OutputLayer.Builder().nIn(10).nOut(5).activation(Activation.SOFTMAX).build(), "dense").setOutputs("out")
.addLayer("dense", DenseLayer.builder().nOut(10).build(), "pool")
.addLayer("out", OutputLayer.builder().nIn(10).nOut(5).activation(Activation.SOFTMAX).build(), "dense").setOutputs("out")
.build();
//Check preprocessors:
lv1 = (LayerVertex) conf3.getVertices().get("cnn");

@@ -466,16 +466,16 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration conf4 =
NeuralNetConfiguration.builder().graphBuilder().addInputs("inCNN", "inRNN")
.setInputTypes(InputType.convolutional(28, 28, 1), InputType.recurrent(5))
.addLayer("cnn", new ConvolutionLayer.Builder().kernelSize(2, 2).padding(0, 0)
.addLayer("cnn", ConvolutionLayer.builder().kernelSize(2, 2).padding(0, 0)
.stride(2, 2).nOut(3).build(), "inCNN") //(28-2+0)/2+1 = 14
.addLayer("pool",
new SubsamplingLayer.Builder().kernelSize(2, 2).padding(0, 0)
SubsamplingLayer.builder().kernelSize(2, 2).padding(0, 0)
.stride(2, 2).build(),
"cnn") //(14-2+0)/2+1=7
.addLayer("dense", new DenseLayer.Builder().nOut(10).build(), "pool")
.addLayer("dense2", new DenseLayer.Builder().nOut(10).build(), "inRNN")
.addLayer("dense", DenseLayer.builder().nOut(10).build(), "pool")
.addLayer("dense2", DenseLayer.builder().nOut(10).build(), "inRNN")
.addVertex("merge", new MergeVertex(), "dense", "dense2")
.addLayer("out", new RnnOutputLayer.Builder().nOut(5).activation(Activation.SOFTMAX).build(), "merge")
.addLayer("out", RnnOutputLayer.builder().nOut(5).activation(Activation.SOFTMAX).build(), "merge")
.setOutputs("out").build();

//Check preprocessors:

@@ -507,18 +507,18 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.graphBuilder().addInputs("input")
.setInputTypes(InputType.convolutional(28, 28, 1))
.addLayer("cnn_1",
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(3)
ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(3)
.build(),
"input")
.addLayer("cnn_2",
new ConvolutionLayer.Builder(4, 4).stride(2, 2).padding(1, 1)
ConvolutionLayer.builder(4, 4).stride(2, 2).padding(1, 1)
.nIn(1).nOut(3).build(),
"input")
.addLayer("max_1",
new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX)
.kernelSize(2, 2).build(),
"cnn_1", "cnn_2")
.addLayer("output", new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build(), "max_1") //.nIn(7 * 7 * 6)
.addLayer("output", OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build(), "max_1") //.nIn(7 * 7 * 6)
.setOutputs("output").build();
lv1 = (LayerVertex) conf5.getVertices().get("cnn_1");
assertNull(lv1.getPreProcessor()); //Expect no preprocessor: cnn data -> cnn layer

@@ -578,8 +578,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
.addInputs("input")
.addLayer("first_layer", new DenseLayer.Builder().nIn(4).nOut(5).build(), "input")
.addLayer("output_layer", new OutputLayer.Builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build(), "first_layer")
.addLayer("first_layer", DenseLayer.builder().nIn(4).nOut(5).build(), "input")
.addLayer("output_layer", OutputLayer.builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build(), "first_layer")
.setOutputs("output_layer").build();

ComputationGraph net = new ComputationGraph(conf);

@@ -599,7 +599,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.updater(new Sgd(1e-6))
.l2(2e-4).graphBuilder().addInputs("in")
.addLayer("layer0",
new VariationalAutoencoder.Builder().nIn(4).nOut(3)
VariationalAutoencoder.builder().nIn(4).nOut(3)
.dist(new UniformDistribution(0,
1))

@@ -608,7 +608,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.build(),
"in")
.addLayer("layer1",
new VariationalAutoencoder.Builder().nIn(4).nOut(3)
VariationalAutoencoder.builder().nIn(4).nOut(3)
.dist(new UniformDistribution(0,
1))

@@ -617,7 +617,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.build(),
"in")
.addLayer("layer2",
new VariationalAutoencoder.Builder().nIn(3).nOut(3)
VariationalAutoencoder.builder().nIn(3).nOut(3)
.dist(new UniformDistribution(0,
1))

@@ -625,7 +625,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE)
.build(),
"layer1")
.addLayer("out", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
.addLayer("out", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
LossFunctions.LossFunction.MCXENT).nIn(3 + 3).nOut(3)
.dist(new UniformDistribution(0, 1))

@@ -652,9 +652,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.updater(new Sgd(0.1))
.activation(Activation.TANH).weightInit(WeightInit.XAVIER)
.graphBuilder().addInputs("in")
.addLayer("0", new DenseLayer.Builder().nIn(nIn).nOut(20).build(), "in")
.addLayer("1", new DenseLayer.Builder().nIn(20).nOut(30).build(), "0")
.addLayer("2", new OutputLayer.Builder()
.addLayer("0", DenseLayer.builder().nIn(nIn).nOut(20).build(), "in")
.addLayer("1", DenseLayer.builder().nIn(20).nOut(30).build(), "0")
.addLayer("2", OutputLayer.builder()
.lossFunction(LossFunctions.LossFunction.MSE).nIn(30).nOut(nOut)
.build(), "1")
.setOutputs("2").build();

@@ -662,9 +662,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration confNoReg =
NeuralNetConfiguration.builder().seed(12345).updater(new Sgd(0.1)).activation(Activation.TANH)
.weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
.addLayer("0", new DenseLayer.Builder().nIn(nIn).nOut(20).build(), "in")
.addLayer("1", new DenseLayer.Builder().nIn(20).nOut(30).build(), "0")
.addLayer("2", new OutputLayer.Builder()
.addLayer("0", DenseLayer.builder().nIn(nIn).nOut(20).build(), "in")
.addLayer("1", DenseLayer.builder().nIn(20).nOut(30).build(), "0")
.addLayer("2", OutputLayer.builder()
.lossFunction(LossFunctions.LossFunction.MSE).nIn(30).nOut(nOut)
.build(), "1")
.setOutputs("2").build();

@@ -720,8 +720,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration standard = NeuralNetConfiguration.builder().updater(new Sgd(0.1))
.trainingWorkspaceMode(ws).inferenceWorkspaceMode(ws)
.seed(12345).graphBuilder().addInputs("in")
.addLayer("l0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10)
.addLayer("l0", DenseLayer.builder().nIn(10).nOut(10).build(), "in")
.addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10)
.nOut(10).build(), "l0")
.setOutputs("out").build();
ComputationGraph s = new ComputationGraph(standard);

@@ -732,7 +732,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration external = NeuralNetConfiguration.builder().updater(new Sgd(0.1))
.trainingWorkspaceMode(ws).inferenceWorkspaceMode(ws)
.seed(12345).graphBuilder().addInputs("in")
.addLayer("l0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in").setOutputs("l0")
.addLayer("l0", DenseLayer.builder().nIn(10).nOut(10).build(), "in").setOutputs("l0")
.build();

ComputationGraph e = new ComputationGraph(external);

@@ -778,9 +778,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.graphBuilder()
.addInputs("features")
.addVertex("rnn2ffn", new PreprocessorVertex(new RnnToFeedForwardPreProcessor()), "features")
.addLayer("predict", new DenseLayer.Builder().nIn(nIn).nOut(nOut).activation(Activation.RELU).build(), "rnn2ffn")
.addLayer("predict", DenseLayer.builder().nIn(nIn).nOut(nOut).activation(Activation.RELU).build(), "rnn2ffn")
.addVertex("ffn2rnn", new PreprocessorVertex(new FeedForwardToRnnPreProcessor()), "predict")
.addLayer("output", new ActivationLayer.Builder().activation(Activation.IDENTITY).build(), "ffn2rnn")
.addLayer("output", ActivationLayer.builder().activation(Activation.IDENTITY).build(), "ffn2rnn")
.setOutputs("output")
.build();

@@ -822,9 +822,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("in")
.addLayer("0", new DenseLayer.Builder().nIn(nIn).nOut(4).activation(Activation.RELU).build(), "in")
.addLayer("1", new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.RELU).build(), "0")
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(nOut).build(), "1")
.addLayer("0", DenseLayer.builder().nIn(nIn).nOut(4).activation(Activation.RELU).build(), "in")
.addLayer("1", DenseLayer.builder().nIn(4).nOut(4).activation(Activation.RELU).build(), "0")
.addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(nOut).build(), "1")
.setOutputs("out")
.setInputTypes(InputType.feedForward(nIn))
.build();

@@ -859,8 +859,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
.addInputs("input").addLayer("first", new DenseLayer.Builder().nIn(4).nOut(5).build(), "input")
.addLayer("output", new OutputLayer.Builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build(), "first")
.addInputs("input").addLayer("first", DenseLayer.builder().nIn(4).nOut(5).build(), "input")
.addLayer("output", OutputLayer.builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build(), "first")
.setOutputs("output").build();

ComputationGraph net = new ComputationGraph(conf);

@@ -896,10 +896,10 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
.setInputTypes(InputType.convolutional(10, 8, 3))
.addLayer("layer",
new ConvolutionLayer.Builder().kernelSize(2, 2).padding(0, 0).stride(1, 1)
ConvolutionLayer.builder().kernelSize(2, 2).padding(0, 0).stride(1, 1)
.build(),
"in")
.addLayer("out", new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build(), "layer").setOutputs("out")
.addLayer("out", OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build(), "layer").setOutputs("out")
.build();

LayerVertex lv = (LayerVertex) conf.getVertices().get("layer");

@@ -913,10 +913,10 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
.setInputTypes(InputType.convolutionalFlat(10, 8, 3))
.addLayer("layer",
new ConvolutionLayer.Builder().kernelSize(2, 2).padding(0, 0).stride(1, 1)
ConvolutionLayer.builder().kernelSize(2, 2).padding(0, 0).stride(1, 1)
.build(),
"in")
.addLayer("out", new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build(), "layer").setOutputs("out")
.addLayer("out", OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build(), "layer").setOutputs("out")
.build();

lv = (LayerVertex) conf.getVertices().get("layer");

@@ -934,13 +934,13 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
//Finally, check configuration with a subsampling layer
conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
.setInputTypes(InputType.convolutionalFlat(10, 8, 3))
.addLayer("l0", new SubsamplingLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
.addLayer("l0", SubsamplingLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
.build(), "in")
.addLayer("layer",
new ConvolutionLayer.Builder().kernelSize(2, 2).padding(0, 0).stride(1, 1)
ConvolutionLayer.builder().kernelSize(2, 2).padding(0, 0).stride(1, 1)
.build(),
"l0")
.addLayer("out", new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build(), "layer").setOutputs("out")
.addLayer("out", OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build(), "layer").setOutputs("out")
.build();

//Check subsampling layer:

@@ -1001,8 +1001,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration conf =
NeuralNetConfiguration.builder().optimizationAlgo(oa).graphBuilder()
.addInputs("input")
.addLayer("first", new DenseLayer.Builder().nIn(4).nOut(5).build(), "input")
.addLayer("output", new OutputLayer.Builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build(),
.addLayer("first", DenseLayer.builder().nIn(4).nOut(5).build(), "input")
.addLayer("output", OutputLayer.builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build(),
"first")
.setOutputs("output").build();

@@ -1019,9 +1019,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(123)
.graphBuilder().addInputs("in")
.addLayer("0", new DenseLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
.addLayer("0", DenseLayer.builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
.activation(Activation.TANH).build(), "in")
.addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
.addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder(
LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
.build(),
"0")

@@ -1058,24 +1058,24 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.activation(Activation.IDENTITY);

ComputationGraphConfiguration conf = overallConf.graphBuilder().addInputs("inCentre", "inRight")
.addLayer("denseCentre0", new DenseLayer.Builder().nIn(10).nOut(9).build(), "inCentre")
.addLayer("denseCentre1", new DenseLayer.Builder().nIn(9).nOut(8).build(), "denseCentre0")
.addLayer("denseCentre2", new DenseLayer.Builder().nIn(8).nOut(7).build(), "denseCentre1")
.addLayer("denseCentre3", new DenseLayer.Builder().nIn(7).nOut(7).build(), "denseCentre2")
.addLayer("denseCentre0", DenseLayer.builder().nIn(10).nOut(9).build(), "inCentre")
.addLayer("denseCentre1", DenseLayer.builder().nIn(9).nOut(8).build(), "denseCentre0")
.addLayer("denseCentre2", DenseLayer.builder().nIn(8).nOut(7).build(), "denseCentre1")
.addLayer("denseCentre3", DenseLayer.builder().nIn(7).nOut(7).build(), "denseCentre2")
.addLayer("outCentre",
new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(7).nOut(4).build(),
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(7).nOut(4).build(),
"denseCentre3")
.addVertex("subsetLeft", new SubsetVertex(0, 3), "denseCentre1")
.addLayer("denseLeft0", new DenseLayer.Builder().nIn(4).nOut(5).build(), "subsetLeft")
.addLayer("denseLeft0", DenseLayer.builder().nIn(4).nOut(5).build(), "subsetLeft")
.addLayer("outLeft",
new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(5).nOut(6).build(),
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(5).nOut(6).build(),
"denseLeft0")
.addLayer("denseRight", new DenseLayer.Builder().nIn(7).nOut(7).build(), "denseCentre2")
.addLayer("denseRight0", new DenseLayer.Builder().nIn(2).nOut(3).build(), "inRight")
.addLayer("denseRight", DenseLayer.builder().nIn(7).nOut(7).build(), "denseCentre2")
.addLayer("denseRight0", DenseLayer.builder().nIn(2).nOut(3).build(), "inRight")
.addVertex("mergeRight", new MergeVertex(), "denseRight", "denseRight0")
.addLayer("denseRight1", new DenseLayer.Builder().nIn(10).nOut(5).build(), "mergeRight")
.addLayer("denseRight1", DenseLayer.builder().nIn(10).nOut(5).build(), "mergeRight")
.addLayer("outRight",
new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(5).nOut(5).build(),
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(5).nOut(5).build(),
"denseRight1")
.setOutputs("outLeft", "outCentre", "outRight").build();

@@ -1096,10 +1096,10 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
public void testFeedForwardIncludeNonLayerVertices() {

ComputationGraphConfiguration c = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
.addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).build(), "in")
.addLayer("1", new DenseLayer.Builder().nIn(5).nOut(5).build(), "in")
.addLayer("0", DenseLayer.builder().nIn(5).nOut(5).build(), "in")
.addLayer("1", DenseLayer.builder().nIn(5).nOut(5).build(), "in")
.addVertex("merge", new MergeVertex(), "0", "1")
.addLayer("out", new OutputLayer.Builder().nIn(10).nOut(5).activation(Activation.SOFTMAX).build(), "merge").setOutputs("out")
.addLayer("out", OutputLayer.builder().nIn(10).nOut(5).activation(Activation.SOFTMAX).build(), "merge").setOutputs("out")
.build();

ComputationGraph cg = new ComputationGraph(c);

@@ -1124,7 +1124,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
//Users generally shouldn't do this, but multiple setOutputs calls should *replace* not *add* outputs

ComputationGraphConfiguration c = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
.addLayer("out", new OutputLayer.Builder().nIn(10).nOut(5).activation(Activation.SOFTMAX).build(), "in").setOutputs("out")
.addLayer("out", OutputLayer.builder().nIn(10).nOut(5).activation(Activation.SOFTMAX).build(), "in").setOutputs("out")
.setOutputs("out").build();

List<String> l = c.getNetworkOutputs();

@@ -1138,7 +1138,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
NeuralNetConfiguration.builder().weightNoise(new DropConnect(0.5))
.graphBuilder().setInputTypes(InputType.feedForward(1)).addInputs("input1")
.addLayer("output",
new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(1).nOut(1)
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(1).nOut(1)
.activation(Activation.SIGMOID).build(),
"input1")
.setOutputs("output").backpropType(BackpropType.Standard)

@@ -1153,17 +1153,17 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration c =
NeuralNetConfiguration.builder().l1(0.5).l2(0.6).graphBuilder()
.addInputs("in")
.addLayer("sub1", new SubsamplingLayer.Builder(2, 2).build(), "in")
.addLayer("sub2", new Subsampling1DLayer.Builder(2).build(), "sub1")
.addLayer("act", new ActivationLayer.Builder().activation(Activation.TANH)
.addLayer("sub1", SubsamplingLayer.builder(2, 2).build(), "in")
.addLayer("sub2", Subsampling1DLayer.builder(2).build(), "sub1")
.addLayer("act", ActivationLayer.builder().activation(Activation.TANH)
.build(), "sub2")
.addLayer("pad", new ZeroPaddingLayer.Builder(2, 3).build(), "act")
.addLayer("lrn", new LocalResponseNormalization.Builder().build(), "pad")
.addLayer("pool", new GlobalPoolingLayer.Builder(PoolingType.AVG).build(),
.addLayer("pad", ZeroPaddingLayer.builder(2, 3).build(), "act")
.addLayer("lrn", LocalResponseNormalization.builder().build(), "pad")
.addLayer("pool", GlobalPoolingLayer.builder(PoolingType.AVG).build(),
"act")
.addLayer("drop", new DropoutLayer.Builder(0.5).build(), "pool")
.addLayer("dense", new DenseLayer.Builder().nIn(1).nOut(1).build(), "drop")
.addLayer("loss", new LossLayer.Builder(LossFunctions.LossFunction.MCXENT)
.addLayer("drop", DropoutLayer.builder(0.5).build(), "pool")
.addLayer("dense", DenseLayer.builder().nIn(1).nOut(1).build(), "drop")
.addLayer("loss", LossLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.build(), "dense")
.allowDisconnected(true)
.setOutputs("loss").build();

@@ -1179,7 +1179,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
public void testErrorNoOutputLayer() {

ComputationGraphConfiguration c = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
.addLayer("dense", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in").setOutputs("dense")
.addLayer("dense", DenseLayer.builder().nIn(10).nOut(10).build(), "in").setOutputs("dense")
.build();

ComputationGraph cg = new ComputationGraph(c);

@@ -1203,7 +1203,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
//vertex

NeuralNetConfiguration nnc = NeuralNetConfiguration.builder().build();
nnc.setLayer(new DenseLayer.Builder().build());
nnc.setLayer(DenseLayer.builder().build());
GraphVertex[] singleInputVertices = new GraphVertex[]{new L2NormalizeVertex(), new LayerVertex(nnc, null),
new PoolHelperVertex(), new PreprocessorVertex(), new ReshapeVertex(1, 1),
new ScaleVertex(1.0), new ShiftVertex(1.0), new SubsetVertex(1, 1), new UnstackVertex(0, 2),

@@ -1241,7 +1241,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("input")
.addLayer("L1", new ConvolutionLayer.Builder(new int[]{1, 1}, new int[]{1, 1}, new int[]{0, 0}).nIn(depth).nOut(depth)
.addLayer("L1", ConvolutionLayer.builder(new int[]{1, 1}, new int[]{1, 1}, new int[]{0, 0}).nIn(depth).nOut(depth)
.build(), "input")
.addVertex("L2", new ReshapeVertex(minibatch, 1, 36, 48), "L1")
.setOutputs("L2")

@@ -1265,7 +1265,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("in")
.addLayer("out", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
.addLayer("out", OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
.setOutputs("out")
.build();

@@ -1305,23 +1305,23 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
NeuralNetConfiguration.builder().seed(12345).l2(0.001) //l2 regularization on all layers
.updater(new AdaGrad(0.4)).graphBuilder()
.addInputs("in")
.addLayer("layer0", new ConvolutionLayer.Builder(10, 10).nIn(3) //3 channels: RGB
.addLayer("layer0", ConvolutionLayer.builder(10, 10).nIn(3) //3 channels: RGB
.nOut(30).stride(4, 4).activation(Activation.RELU).weightInit(
WeightInit.RELU).build(),"in") //Output: (130-10+0)/4+1 = 31 -> 31*31*30
.addLayer("layer1", new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
.addLayer("layer1", SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX)
.kernelSize(3, 3).stride(2, 2).build(),"layer0") //(31-3+0)/2+1 = 15
.addLayer("layer2", new ConvolutionLayer.Builder(3, 3).nIn(30).nOut(10).stride(2, 2)
.addLayer("layer2", ConvolutionLayer.builder(3, 3).nIn(30).nOut(10).stride(2, 2)
.activation(Activation.RELU).weightInit(WeightInit.RELU)
.updater(Updater.ADAGRAD).build(), "layer1") //Output: (15-3+0)/2+1 = 7 -> 7*7*10 = 490
.addLayer("layer3", new DenseLayer.Builder().activation(Activation.RELU).nIn(490).nOut(50)
.addLayer("layer3", DenseLayer.builder().activation(Activation.RELU).nIn(490).nOut(50)
.weightInit(WeightInit.RELU).gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
.gradientNormalizationThreshold(10).build(), "layer2")
.addLayer("layer4", new GravesLSTM.Builder().activation(Activation.SOFTSIGN).nIn(50)
.addLayer("layer4", GravesLSTM.builder().activation(Activation.SOFTSIGN).nIn(50)
.nOut(50).weightInit(WeightInit.XAVIER).updater(Updater.ADAGRAD)
.gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
.gradientNormalizationThreshold(10)
.build(), "layer3")
.addLayer("layer5", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
.addLayer("layer5", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(50).nOut(4) //4 possible shapes: circle, square, arc, line
.weightInit(WeightInit.XAVIER)
.gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)

@@ -1351,10 +1351,10 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.convolutionMode(ConvolutionMode.Same)
.graphBuilder()
.addInputs("in")
.addLayer("0", new ConvolutionLayer.Builder().kernelSize(2,2).stride(1,1).nIn(1).nOut(1).build(), "in")
.addLayer("1", new SubsamplingLayer.Builder().kernelSize(2,2).stride(1,1).build(), "0")
.addLayer("2", new DenseLayer.Builder().nOut(10).build(), "1")
.addLayer("3", new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build(), "2")
.addLayer("0", ConvolutionLayer.builder().kernelSize(2,2).stride(1,1).nIn(1).nOut(1).build(), "in")
.addLayer("1", SubsamplingLayer.builder().kernelSize(2,2).stride(1,1).build(), "0")
.addLayer("2", DenseLayer.builder().nOut(10).build(), "1")
.addLayer("3", OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build(), "2")
.setOutputs("3")
.setInputTypes(InputType.convolutional(28,28,1))
.build();

@@ -1386,9 +1386,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration.GraphBuilder b = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("in")
.addLayer("0", new DenseLayer.Builder().activation(Activation.SIGMOID).nOut(8).build(), "in")
.addLayer("1", new DenseLayer.Builder().activation(Activation.SIGMOID).nOut(8).build(), "in") //Disconnected
.addLayer("O", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nOut(10).build(), "0")
.addLayer("0", DenseLayer.builder().activation(Activation.SIGMOID).nOut(8).build(), "in")
.addLayer("1", DenseLayer.builder().activation(Activation.SIGMOID).nOut(8).build(), "in") //Disconnected
.addLayer("O", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nOut(10).build(), "0")
.setOutputs("O")
.setInputTypes(InputType.feedForward(8));

@@ -1418,10 +1418,10 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.graphBuilder()
.addInputs("in")
.layer("0", new ConvolutionLayer.Builder().kernelSize(2,2).nOut(6).build(), "in")
.layer("1", new SubsamplingLayer.Builder().kernelSize(2,2).build(), "0")
.layer("2", new DenseLayer.Builder().nOut(30).build(), "1")
.layer("3", new OutputLayer.Builder().nOut(13).activation(Activation.SOFTMAX).build(), "2")
.layer("0", ConvolutionLayer.builder().kernelSize(2,2).nOut(6).build(), "in")
.layer("1", SubsamplingLayer.builder().kernelSize(2,2).build(), "0")
.layer("2", DenseLayer.builder().nOut(30).build(), "1")
.layer("3", OutputLayer.builder().nOut(13).activation(Activation.SOFTMAX).build(), "2")
.setOutputs("3")
.setInputTypes(InputType.convolutional(28,28,3))
.build();

@@ -1456,8 +1456,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("in")
.layer("0", new SubsamplingLayer.Builder().kernelSize(2,2).stride(2,2).build(), "in")
.layer("1", new LossLayer.Builder().activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.MSE).build(), "0")
.layer("0", SubsamplingLayer.builder().kernelSize(2,2).stride(2,2).build(), "in")
.layer("1", LossLayer.builder().lossFunction().activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.MSE).build(), "0")
.setOutputs("1")
.setInputTypes(InputType.convolutionalFlat(28,28,1))
.build();

@@ -1501,7 +1501,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.setOutputs(outputName)
.setInputTypes(InputType.inferInputType(input))
.addVertex(scaleName, new ScaleVertex(scaleFactor), inputName)
.addLayer(outputName, new OutputLayer.Builder()
.addLayer(outputName, OutputLayer.builder()
.activation(new ActivationIdentity())
.lossFunction(LossFunctions.LossFunction.MSE)
.nOut(input.length())

@@ -1539,7 +1539,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.seed(12345)
.graphBuilder()
.addInputs("in")
.layer("layer", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
.layer("layer", OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
.setOutputs("layer")
.build();
ComputationGraph cg = new ComputationGraph(conf);

@@ -1561,11 +1561,11 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration.GraphBuilder builder = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("in1", "in2")
.layer("0", new DenseLayer.Builder().nOut(10).build(), "in1")
.layer("1", new DenseLayer.Builder().nOut(9).build(), "in1", "in2")
.layer("2", new DenseLayer.Builder().nOut(8).build(), "in2")
.layer("3", new DenseLayer.Builder().nOut(7).build(), "0")
.layer("4", new DenseLayer.Builder().nOut(6).build(), "1", "2")
.layer("0", DenseLayer.builder().nOut(10).build(), "in1")
.layer("1", DenseLayer.builder().nOut(9).build(), "in1", "in2")
.layer("2", DenseLayer.builder().nOut(8).build(), "in2")
.layer("3", DenseLayer.builder().nOut(7).build(), "0")
.layer("4", DenseLayer.builder().nOut(6).build(), "1", "2")
.setInputTypes(InputType.feedForward(5), InputType.feedForward(6))
.allowNoOutput(true);

@@ -1598,14 +1598,14 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("in1", "in2")
.addLayer("l0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in1")
.addLayer("l1", new DenseLayer.Builder().nIn(20).nOut(10).build(), "in1", "in2")
.addLayer("l2", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in2")
.addLayer("l3", new DenseLayer.Builder().nIn(10).nOut(10).build(), "l0")
.addLayer("l4", new DenseLayer.Builder().nIn(10).nOut(10).build(), "l1")
.addLayer("l5", new DenseLayer.Builder().nIn(10).nOut(10).build(), "l2")
.addLayer("l6", new OutputLayer.Builder().nIn(20).nOut(10).activation(Activation.SOFTMAX).build(), "l3", "l5")
.addLayer("l7", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "l4")
.addLayer("l0", DenseLayer.builder().nIn(10).nOut(10).build(), "in1")
.addLayer("l1", DenseLayer.builder().nIn(20).nOut(10).build(), "in1", "in2")
.addLayer("l2", DenseLayer.builder().nIn(10).nOut(10).build(), "in2")
.addLayer("l3", DenseLayer.builder().nIn(10).nOut(10).build(), "l0")
.addLayer("l4", DenseLayer.builder().nIn(10).nOut(10).build(), "l1")
.addLayer("l5", DenseLayer.builder().nIn(10).nOut(10).build(), "l2")
.addLayer("l6", OutputLayer.builder().nIn(20).nOut(10).activation(Activation.SOFTMAX).build(), "l3", "l5")
.addLayer("l7", OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "l4")
.setOutputs("l6", "l7")
.build();

@@ -1698,9 +1698,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.graphBuilder()
.addInputs("in")
.layer("0", new VariationalAutoencoder.Builder()
.layer("0", VariationalAutoencoder.builder()
.nIn(10).nOut(10).encoderLayerSizes(10).decoderLayerSizes(10).build(), "in")
.layer("1", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "0")
.layer("1", OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "0")
.setOutputs("1")
.build();

@@ -1746,13 +1746,13 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.graphBuilder()
.addInputs("in1", "in2")
.layer("0", new DenseLayer.Builder().nOut(10).build(), "in1") //Modification should not be allowed on input
.layer("1", new DenseLayer.Builder().nOut(10).build(), "in2") //Modification should not be allowed on input
.layer("2", new DenseLayer.Builder().nOut(10).build(), "0") //Modification SHOULD be allowed
.layer("3", new DenseLayer.Builder().nOut(10).build(), "1") //First in topo sort for using this input - not allowed
.layer("4", new DenseLayer.Builder().nOut(10).build(), "1") //Second in topo sort - not allowed
.layer("5", new DenseLayer.Builder().nOut(10).build(), "1") //Last in topo sort - allowed
.layer("6", new DenseLayer.Builder().nOut(10).build(), "2", "3", "4", "5") //Input from merge vertex - allowed
.layer("0", DenseLayer.builder().nOut(10).build(), "in1") //Modification should not be allowed on input
.layer("1", DenseLayer.builder().nOut(10).build(), "in2") //Modification should not be allowed on input
.layer("2", DenseLayer.builder().nOut(10).build(), "0") //Modification SHOULD be allowed
.layer("3", DenseLayer.builder().nOut(10).build(), "1") //First in topo sort for using this input - not allowed
.layer("4", DenseLayer.builder().nOut(10).build(), "1") //Second in topo sort - not allowed
.layer("5", DenseLayer.builder().nOut(10).build(), "1") //Last in topo sort - allowed
.layer("6", DenseLayer.builder().nOut(10).build(), "2", "3", "4", "5") //Input from merge vertex - allowed
.setOutputs("6")
.setInputTypes(InputType.feedForward(10), InputType.feedForward(10))
.build();

@@ -1787,19 +1787,19 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.addInputs("in1", "in2")
.addVertex("merge", new MergeVertex(), "in1", "in2")
.addLayer("lstm",
new Bidirectional(Bidirectional.Mode.CONCAT, new LSTM.Builder()
Bidirectional.builder(Bidirectional.Mode.CONCAT, LSTM.builder()
.nIn(10).nOut(5)
.activation(Activation.TANH)
.dropOut(new GaussianNoise(0.05))
.build())
,"merge")
.addLayer("out1",
new RnnOutputLayer.Builder().activation(Activation.SOFTMAX)
RnnOutputLayer.builder().activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).nIn(10)
.nOut(6).build(),
"lstm")
.addLayer("out2",
new RnnOutputLayer.Builder().activation(Activation.SOFTMAX)
RnnOutputLayer.builder().activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).nIn(10)
.nOut(4).build(),
"lstm")

@@ -1825,18 +1825,18 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.addInputs("in1", "in2")
.addVertex("merge", new MergeVertex(), "in1", "in2")
.addLayer("dense",
new DenseLayer.Builder()
DenseLayer.builder()
.nIn(10).nOut(5)
.activation(Activation.TANH)
.dropOut(new GaussianNoise(0.05))
.build(),"merge")
.addLayer("out1",
new OutputLayer.Builder().activation(Activation.SOFTMAX)
OutputLayer.builder().activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).nIn(5)
.nOut(6).build(),
"dense")
.addLayer("out2",
new OutputLayer.Builder().activation(Activation.SOFTMAX)
OutputLayer.builder().activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).nIn(5)
.nOut(4).build(),
"dense")

@@ -1867,8 +1867,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("in")
.layer("layer_zero", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
.layer("layer_one", new OutputLayer.Builder().nIn(10).nOut(10).build(), "layer_zero")
.layer("layer_zero", DenseLayer.builder().nIn(10).nOut(10).build(), "in")
.layer("layer_one", OutputLayer.builder().nIn(10).nOut(10).build(), "layer_zero")
.setOutputs("layer_one")
.build();

@@ -1894,10 +1894,10 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.seed(12345)
.graphBuilder()
.addInputs("in")
.layer("0", new DenseLayer.Builder().nIn(10).nOut(9).build(), "in")
.layer("1", new DenseLayer.Builder().nIn(9).nOut(8).build(), "0")
.layer("2", new DenseLayer.Builder().nIn(8).nOut(7).build(), "1")
.layer("3", new OutputLayer.Builder().nIn(7).nOut(6).build(), "2")
.layer("0", DenseLayer.builder().nIn(10).nOut(9).build(), "in")
.layer("1", DenseLayer.builder().nIn(9).nOut(8).build(), "0")
.layer("2", DenseLayer.builder().nIn(8).nOut(7).build(), "1")
.layer("3", OutputLayer.builder().nIn(7).nOut(6).build(), "2")
.setOutputs("3")
.build();

@@ -1923,7 +1923,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.setInputTypes(inputType)
.addInputs("input")
.setOutputs("output")
.addLayer("0", new ConvolutionLayer.Builder().nOut(5).convolutionMode(ConvolutionMode.Same).build(),"input" )
.addLayer("0", ConvolutionLayer.builder().nOut(5).convolutionMode(ConvolutionMode.Same).build(),"input" )
.addVertex("dummyAdd", new ElementWiseVertex(ElementWiseVertex.Op.Add), "0")
.addLayer("output", new CnnLossLayer(), "dummyAdd")
.build());

@@ -1943,7 +1943,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.addInputs("input")
.addLayer(
"dense",
new DenseLayer.Builder()
DenseLayer.builder()
.nIn(10)
.nOut(10)
.activation(Activation.RELU)

@@ -1952,7 +1952,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.build(),
"input")
.addLayer("output",
new OutputLayer.Builder()
OutputLayer.builder()
.nIn(10)
.nOut(1)
.lossFunction(LossFunctions.LossFunction.XENT)

@@ -1968,8 +1968,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraph cg2 = model.clone();

IDropout d1 = model.getLayer(0).getLayerConfiguration().getIDropout();
IDropout d2 = cg2.getLayer(0).getLayerConfiguration().getIDropout();
IDropout d1 = model.getLayer(0).getLayerConfiguration().getDropOut();
IDropout d2 = cg2.getLayer(0).getLayerConfiguration().getDropOut();

assertNotSame(d1, d2); //Should not be same object!
assertEquals(d1, d2); //But should be equal

@@ -1986,15 +1986,15 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.updater(new Adam())
.graphBuilder()
.addInputs("x_emb")
.addLayer("agg_lstm", new Bidirectional(CONCAT, new LSTM.Builder().nOut(hiddenSize/2).build()), "x_emb")
.addLayer("agg_att", new DenseLayer.Builder().nIn(100).nOut(1).activation(Activation.SOFTMAX).build(), "agg_lstm")
.addLayer("agg_lstm", Bidirectional.builder(CONCAT, LSTM.builder().nOut(hiddenSize/2).build()), "x_emb")
.addLayer("agg_att", DenseLayer.builder().nIn(100).nOut(1).activation(Activation.SOFTMAX).build(), "agg_lstm")
.addVertex("att", new PreprocessorVertex(new ComposableInputPreProcessor(new FeedForwardToRnnPreProcessor(), new PermutePreprocessor(0,2,1), new RnnToFeedForwardPreProcessor())), "agg_att")
.addLayer("att_repeat", new RepeatVector.Builder(hiddenSize).build(),"att")
.addVertex("att_trans", new PreprocessorVertex(new PermutePreprocessor(0, 2, 1)), "att_repeat")
.addVertex("mult", new ElementWiseVertex(ElementWiseVertex.Op.Product), "agg_lstm", "att_trans")
.addLayer("sum", new GlobalPoolingLayer.Builder().build(), "mult")
.addLayer("agg_out", new DenseLayer.Builder().nIn(100).nOut(6).activation(Activation.TANH).build(), "sum")
.addLayer("output", new OutputLayer.Builder().nIn(6).nOut(6).lossFunction(LossFunctions.LossFunction.RECONSTRUCTION_CROSSENTROPY).build(), "agg_out")
.addLayer("sum", GlobalPoolingLayer.builder().build(), "mult")
.addLayer("agg_out", DenseLayer.builder().nIn(100).nOut(6).activation(Activation.TANH).build(), "sum")
.addLayer("output", OutputLayer.builder().nIn(6).nOut(6).lossFunction(LossFunctions.LossFunction.RECONSTRUCTION_CROSSENTROPY).build(), "agg_out")
.setOutputs("output")
.setInputTypes(InputType.recurrent(inputSize,seqLen,RNNFormat.NCW))
.build();

@@ -2029,9 +2029,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.backpropType(BackpropType.Standard)
.addInputs("in")
.setOutputs("out")
.addLayer("0",new DenseLayer.Builder().nIn(5).nOut(3).build(),"in")
.addLayer("1",new DenseLayer.Builder().nIn(3).nOut(2).build(),"0")
.addLayer("out",new OutputLayer.Builder(LossFunctions.LossFunction.XENT).nIn(2).nOut(1)
.addLayer("0",DenseLayer.builder().nIn(5).nOut(3).build(),"in")
.addLayer("1",DenseLayer.builder().nIn(3).nOut(2).build(),"0")
.addLayer("out",OutputLayer.builder(LossFunctions.LossFunction.XENT).nIn(2).nOut(1)
.activation(Activation.SIGMOID).build(),"1")
.build();

@@ -2129,9 +2129,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.graphBuilder()
.addInputs("in")
.setOutputs("out")
.addLayer("0",new DenseLayer.Builder().nIn(inputSize).nOut(layerSize).build(),"in")
.addLayer("0",DenseLayer.builder().nIn(inputSize).nOut(layerSize).build(),"in")
.addVertex("combine", new MergeVertex(), "0", "0", "0")
.addLayer("out",new OutputLayer.Builder(LossFunctions.LossFunction.XENT).nIn(3*layerSize).nOut(outputSize)
.addLayer("out",OutputLayer.builder(LossFunctions.LossFunction.XENT).nIn(3*layerSize).nOut(outputSize)
.activation(Activation.SIGMOID).build(),"combine")
.build();

@@ -2155,8 +2155,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.graphBuilder()
.addLayer("l0", new Convolution3D.Builder().kernelSize(2,2,2).stride(1,1,1).nIn(3).nOut(3).dataFormat(Convolution3D.DataFormat.NCDHW).build(), "in")
.addLayer("l1", new Convolution3D.Builder().kernelSize(2,2,2).stride(1,1,1).nIn(3).nOut(3).dataFormat(Convolution3D.DataFormat.NCDHW).build(), "in")
.addLayer("l0", Convolution3D.builder().kernelSize(2,2,2).stride(1,1,1).nIn(3).nOut(3).dataFormat(Convolution3D.DataFormat.NCDHW).build(), "in")
.addLayer("l1", Convolution3D.builder().kernelSize(2,2,2).stride(1,1,1).nIn(3).nOut(3).dataFormat(Convolution3D.DataFormat.NCDHW).build(), "in")
.addVertex("out", new MergeVertex(), "l0", "l1")
.setInputTypes(InputType.convolutional3D(Convolution3D.DataFormat.NCDHW, 16, 16, 16, 3))
.addInputs("in")

@@ -2175,9 +2175,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("in")
.addLayer("e1", new EmbeddingLayer.Builder().nIn(10).nOut(5).build(), "in")
.addLayer("e2", new EmbeddingLayer.Builder().nIn(10).nOut(5).build(), "in")
.addLayer("out", new OutputLayer.Builder().nIn(10).nOut(2).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "e1", "e2")
.addLayer("e1", EmbeddingLayer.builder().nIn(10).nOut(5).build(), "in")
.addLayer("e2", EmbeddingLayer.builder().nIn(10).nOut(5).build(), "in")
.addLayer("out", OutputLayer.builder().nIn(10).nOut(2).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "e1", "e2")
.setOutputs("out")
.build();

@@ -2195,18 +2195,18 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.convolutionMode(ConvolutionMode.Same)
.graphBuilder()
.addInputs("in")
.layer("l0", new ConvolutionLayer.Builder()
.layer("l0", ConvolutionLayer.builder()
.nOut(16)
.dataFormat(CNN2DFormat.NHWC)
.kernelSize(2,2).stride(1,1)
.build(), "in")
.layer("l1", new ConvolutionLayer.Builder()
.layer("l1", ConvolutionLayer.builder()
.nOut(8)
.dataFormat(CNN2DFormat.NHWC)
.kernelSize(2,2).stride(1,1)
.build(), "in")
.addVertex("merge", new MergeVertex(), "l0", "l1")
.layer("out", new CnnLossLayer.Builder().activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "merge")
.layer("out", CnnLossLayer.builder().activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "merge")
.setOutputs("out")
.setInputTypes(InputType.convolutional(32, 32, 3, CNN2DFormat.NHWC))
.build();
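
For reference, the pattern these hunks converge on is a mechanical substitution: every `new SomeLayer.Builder(...)` becomes the static `SomeLayer.builder(...)` factory generated by @SuperBuilder, with loss functions passed through `.lossFunction(...)` where the old constructor argument is dropped. The following minimal sketch assembles a tiny ComputationGraph in the new style using the same calls that appear in the hunks above; the class name BuilderMigrationSketch, the import locations, and the net.init() call are assumptions based on the standard DL4J API layout and are not part of this diff.

    import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.layers.DenseLayer;
    import org.deeplearning4j.nn.conf.layers.OutputLayer;
    import org.deeplearning4j.nn.graph.ComputationGraph;
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    public class BuilderMigrationSketch {
        public static void main(String[] args) {
            // Old style (pre-@SuperBuilder): new DenseLayer.Builder().nIn(4).nOut(5).build()
            // New style (this commit):       DenseLayer.builder().nIn(4).nOut(5).build()
            ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
                    .graphBuilder()
                    .addInputs("input")
                    .addLayer("firstLayer", DenseLayer.builder().nIn(4).nOut(5).build(), "input")
                    .addLayer("outputLayer", OutputLayer.builder()
                            .lossFunction(LossFunctions.LossFunction.MCXENT)
                            .activation(Activation.SOFTMAX)
                            .nIn(5).nOut(3).build(), "firstLayer")
                    .setOutputs("outputLayer")
                    .build();

            ComputationGraph net = new ComputationGraph(conf);
            net.init(); // assumed standard DL4J call; not shown in the hunks above
        }
    }

The same substitution applies to every layer class touched in this commit (ConvolutionLayer, SubsamplingLayer, LSTM, GravesLSTM, VariationalAutoencoder, and so on).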
Some files were not shown because too many files have changed in this diff.