Using @SuperBuilder for LayerConfigurations

Signed-off-by: brian <brian@brutex.de>
Branch: master
Author: Brian Rosenberger, 2023-04-24 18:09:11 +02:00
Parent: 4482113f23
Commit: f6100c362d
507 changed files with 14684 additions and 16570 deletions
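
The changeset is one mechanical migration: layer configurations previously built through hand-written inner classes (new OutputLayer.Builder()...build()) are now built through the static builder() factories that Lombok's @SuperBuilder generates. A minimal sketch of the pattern, assuming the layer-configuration hierarchy is annotated with @SuperBuilder as the commit title says; the class and field names below are illustrative, not taken from the diff:

    import lombok.experimental.SuperBuilder;

    @SuperBuilder
    abstract class LayerConfiguration {
        // @SuperBuilder generates builders that can also set fields
        // inherited from superclasses, which plain @Builder cannot do
        private String name;
    }

    @SuperBuilder
    class ExampleLayer extends LayerConfiguration {
        private int nIn;
        private int nOut;
    }

    class Demo {
        static ExampleLayer demo() {
            // old style: new ExampleLayer.Builder().nIn(4).nOut(3).build();
            // new style: Lombok-generated static factory, same fluent shape
            return ExampleLayer.builder().name("example").nIn(4).nOut(3).build();
        }
    }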

@@ -207,7 +207,7 @@ public class TupleStreamDataSetIteratorTest extends SolrCloudTestCase {
     final MultiLayerNetwork model = new MultiLayerNetwork(
         NeuralNetConfiguration.builder()
             .list(
-                new OutputLayer.Builder(LossFunction.MSE)
+                OutputLayer.builder(LossFunction.MSE)
                     .nIn(3)
                     .nOut(1)
                     .weightInit(WeightInit.ONES)

@@ -155,7 +155,7 @@ public class ModelTupleStreamIntegrationTest extends SolrCloudTestCase {
     final NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
         .list(
-            new OutputLayer.Builder()
+            OutputLayer.builder()
                 .nIn(numInputs)
                 .nOut(numOutputs)
                 .activation(Activation.IDENTITY)

@@ -244,7 +244,7 @@ public class ModelTupleStreamTest {
     final NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
         .list(
-            new OutputLayer.Builder()
+            OutputLayer.builder()
                 .nIn(numInputs)
                 .nOut(numOutputs)
                 .activation(Activation.IDENTITY)
@@ -278,7 +278,7 @@ public class ModelTupleStreamTest {
         .graphBuilder()
         .addInputs("inputLayer")
         .addLayer("outputLayer",
-            new OutputLayer.Builder()
+            OutputLayer.builder()
                 .nIn(numInputs)
                 .nOut(numOutputs)
                 .activation(Activation.IDENTITY)

@@ -194,7 +194,7 @@ public class ScoringModelTest {
     final NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
         .list(
-            new OutputLayer.Builder().nIn(numFeatures).nOut(1).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).build()
+            OutputLayer.builder().nIn(numFeatures).nOut(1).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).build()
         )
         .build();
@@ -221,7 +221,7 @@ public class ScoringModelTest {
         .graphBuilder()
         .addInputs("inputLayer")
         .addLayer("outputLayer",
-            new OutputLayer.Builder().nIn(numFeatures).nOut(1).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).build(),
+            OutputLayer.builder().nIn(numFeatures).nOut(1).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).build(),
             "inputLayer")
         .setOutputs("outputLayer")
         .build();

@@ -75,8 +75,8 @@ public class JsonModelServerTest extends BaseDL4JTest {
         .updater(new Adam(0.119f))
         .weightInit(WeightInit.XAVIER)
         .list()
-        .layer(0, new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(10).build())
-        .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.SQUARED_LOSS).activation(Activation.SIGMOID).nIn(10).nOut(1).build())
+        .layer(0, DenseLayer.builder().activation(Activation.TANH).nIn(4).nOut(10).build())
+        .layer(1, OutputLayer.builder(LossFunctions.LossFunction.SQUARED_LOSS).activation(Activation.SIGMOID).nIn(10).nOut(1).build())
         .build();
     model = new MultiLayerNetwork(conf);
@@ -543,8 +543,8 @@ public class JsonModelServerTest extends BaseDL4JTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
         .list()
-        .layer(new DenseLayer.Builder().nIn(784).nOut(10).build())
-        .layer(new LossLayer.Builder().activation(Activation.SOFTMAX).build())
+        .layer(DenseLayer.builder().nIn(784).nOut(10).build())
+        .layer(LossLayer.builder().lossFunction().activation(Activation.SOFTMAX).build())
         .build();
     MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -600,10 +600,10 @@ public class JsonModelServerTest extends BaseDL4JTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
         .graphBuilder()
         .addInputs("input1", "input2")
-        .addLayer("L1", new DenseLayer.Builder().nIn(3).nOut(4).build(), "input1")
-        .addLayer("L2", new DenseLayer.Builder().nIn(3).nOut(4).build(), "input2")
+        .addLayer("L1", DenseLayer.builder().nIn(3).nOut(4).build(), "input1")
+        .addLayer("L2", DenseLayer.builder().nIn(3).nOut(4).build(), "input2")
         .addVertex("merge", new MergeVertex(), "L1", "L2")
-        .addLayer("out", new OutputLayer.Builder().nIn(4+4).nOut(3).build(), "merge")
+        .addLayer("out", OutputLayer.builder().nIn(4+4).nOut(3).build(), "merge")
         .setOutputs("out")
         .build();
@@ -656,11 +656,11 @@ public class JsonModelServerTest extends BaseDL4JTest {
         .updater(new Sgd(0.01))
         .graphBuilder()
         .addInputs("input")
-        .addLayer("L1", new DenseLayer.Builder().nIn(8).nOut(4).build(), "input")
-        .addLayer("out1", new OutputLayer.Builder()
+        .addLayer("L1", DenseLayer.builder().nIn(8).nOut(4).build(), "input")
+        .addLayer("out1", OutputLayer.builder()
             .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
             .nIn(4).nOut(3).build(), "L1")
-        .addLayer("out2", new OutputLayer.Builder()
+        .addLayer("out2", OutputLayer.builder()
             .lossFunction(LossFunctions.LossFunction.MSE)
             .nIn(4).nOut(2).build(), "L1")
         .setOutputs("out1","out2")

@@ -129,9 +129,9 @@ public abstract class BaseSparkTest extends BaseDL4JTest implements Serializable
     protected NeuralNetConfiguration getBasicConf() {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
             .updater(new Nesterovs(0.1, 0.9)).list()
-            .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(nIn).nOut(3)
+            .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(nIn).nOut(3)
                 .activation(Activation.TANH).build())
-            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                 LossFunctions.LossFunction.MCXENT).nIn(3).nOut(nOut)
                 .activation(Activation.SOFTMAX).build())
             .build();
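
Note that the old single-argument constructor, new OutputLayer.Builder(lossFunction), is migrated in two different ways across the changeset: some call sites keep an overloaded factory, OutputLayer.builder(lossFunction) (the SQUARED_LOSS hunk in JsonModelServerTest above), while sites like this one switch to the no-argument factory plus the fluent .lossFunction(...) setter. Assuming both overloads exist on the generated builder, the two forms below should be interchangeable; this is a sketch, not code from the diff:

    // form 1: overloaded static factory taking the loss function
    OutputLayer a = OutputLayer.builder(LossFunctions.LossFunction.MSE)
            .nIn(10).nOut(1).build();

    // form 2: no-argument factory plus fluent setter
    OutputLayer b = OutputLayer.builder()
            .lossFunction(LossFunctions.LossFunction.MSE)
            .nIn(10).nOut(1).build();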

@@ -137,7 +137,7 @@ public class GradientSharingTrainingTest extends BaseSparkTest {
         .updater(new AMSGrad(0.1))
         .graphBuilder()
         .addInputs("in")
-        .layer("out", new OutputLayer.Builder().nIn(784).nOut(10).activation(Activation.SOFTMAX)
+        .layer("out", OutputLayer.builder().nIn(784).nOut(10).activation(Activation.SOFTMAX)
             .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
         .setOutputs("out")
         .build();
@@ -272,15 +272,15 @@ public class GradientSharingTrainingTest extends BaseSparkTest {
             .weightInit(WeightInit.XAVIER)
             .seed(12345)
             .list()
-            .layer(new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
+            .layer(OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
             .build();
     } else {
         conf = NeuralNetConfiguration.builder()
             .weightInit(WeightInit.XAVIER)
             .seed(12345)
             .list()
-            .layer(new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build())
-            .layer(new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
+            .layer(DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build())
+            .layer(OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
             .build();
     }
     MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -358,7 +358,7 @@ public class GradientSharingTrainingTest extends BaseSparkTest {
         .updater(new AMSGrad(0.001))
         .graphBuilder()
         .addInputs("in")
-        .layer("out", new OutputLayer.Builder().nIn(784).nOut(10).activation(Activation.SOFTMAX)
+        .layer("out", OutputLayer.builder().nIn(784).nOut(10).activation(Activation.SOFTMAX)
             .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
         .setOutputs("out")
         .build();

@@ -132,9 +132,9 @@ public abstract class BaseSparkTest extends BaseDL4JTest implements Serializable
     protected NeuralNetConfiguration getBasicConf() {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
             .updater(new Nesterovs(0.1, 0.9)).list()
-            .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(nIn).nOut(3)
+            .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(nIn).nOut(3)
                 .activation(Activation.TANH).build())
-            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+            .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                 LossFunctions.LossFunction.MCXENT).nIn(3).nOut(nOut)
                 .activation(Activation.SOFTMAX).build())
             .build();

@@ -71,7 +71,7 @@ public class TestEarlyStoppingSpark extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .updater(new Sgd()).weightInit(WeightInit.XAVIER).list()
-        .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+        .layer(0, OutputLayer.builder().nIn(4).nOut(3)
             .lossFunction(LossFunctions.LossFunction.MCXENT).build())
         .build();
     MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -127,7 +127,7 @@ public class TestEarlyStoppingSpark extends BaseSparkTest {
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .updater(new Sgd(10.0)) //Intentionally huge LR
         .weightInit(WeightInit.XAVIER).list()
-        .layer(0, new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
+        .layer(0, OutputLayer.builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
            .lossFunction(LossFunctions.LossFunction.MSE).build())
         .build();
     MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -166,7 +166,7 @@ public class TestEarlyStoppingSpark extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .updater(new Sgd(1e-6)).weightInit(WeightInit.XAVIER).list()
-        .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+        .layer(0, OutputLayer.builder().nIn(4).nOut(3)
             .lossFunction(LossFunctions.LossFunction.MCXENT).build())
         .build();
     MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -212,7 +212,7 @@ public class TestEarlyStoppingSpark extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .updater(new Sgd(0.0)).weightInit(WeightInit.XAVIER).list()
-        .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+        .layer(0, OutputLayer.builder().nIn(4).nOut(3)
             .lossFunction(LossFunctions.LossFunction.MCXENT).build())
         .build();
     MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -249,7 +249,7 @@ public class TestEarlyStoppingSpark extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .updater(new Sgd()).weightInit(WeightInit.XAVIER).list()
-        .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+        .layer(0, OutputLayer.builder().nIn(4).nOut(3)
             .lossFunction(LossFunctions.LossFunction.MCXENT).build())
         .build();
     MultiLayerNetwork net = new MultiLayerNetwork(conf);

@@ -74,7 +74,7 @@ public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .updater(new Sgd()).weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-        .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+        .addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
             .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
         .setOutputs("0").build();
     ComputationGraph net = new ComputationGraph(conf);
@@ -128,7 +128,7 @@ public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest {
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .updater(new Sgd(2.0)) //Intentionally huge LR
         .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-        .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
+        .addLayer("0", OutputLayer.builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
             .lossFunction(LossFunctions.LossFunction.MSE).build(), "in")
         .setOutputs("0").build();
     ComputationGraph net = new ComputationGraph(conf);
@@ -169,7 +169,7 @@ public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest {
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .updater(new Sgd(1e-6)).weightInit(WeightInit.XAVIER).graphBuilder()
         .addInputs("in")
-        .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+        .addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
             .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
         .setOutputs("0").build();
     ComputationGraph net = new ComputationGraph(conf);
@@ -217,7 +217,7 @@ public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest {
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .updater(new Sgd(0.0)).weightInit(WeightInit.XAVIER).graphBuilder()
         .addInputs("in")
-        .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+        .addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
             .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
         .setOutputs("0").build();
     ComputationGraph net = new ComputationGraph(conf);
@@ -256,7 +256,7 @@ public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .updater(new Sgd()).weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-        .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+        .addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
             .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
         .setOutputs("0").build();
     ComputationGraph net = new ComputationGraph(conf);

@@ -69,7 +69,7 @@ public class TestKryo extends BaseSparkKryoTest {
     m.put(0, 0.5);
     m.put(10, 0.1);
     NeuralNetConfiguration mlc = NeuralNetConfiguration.builder()
-        .updater(new Nadam(new MapSchedule(ScheduleType.ITERATION,m))).list().layer(0, new OutputLayer.Builder().nIn(10).nOut(10).build())
+        .updater(new Nadam(new MapSchedule(ScheduleType.ITERATION,m))).list().layer(0, OutputLayer.builder().nIn(10).nOut(10).build())
         .build();
     testSerialization(mlc, si);
@@ -79,23 +79,23 @@ public class TestKryo extends BaseSparkKryoTest {
         .dist(new UniformDistribution(-1, 1))
         .updater(new Adam(new MapSchedule(ScheduleType.ITERATION,m)))
         .graphBuilder()
-        .addInputs("in").addLayer("out", new OutputLayer.Builder().nIn(10).nOut(10).build(), "in")
+        .addInputs("in").addLayer("out", OutputLayer.builder().nIn(10).nOut(10).build(), "in")
         .setOutputs("out").build();
     testSerialization(cgc, si);
     //Check main layers:
-    Layer[] layers = new Layer[] {new OutputLayer.Builder().nIn(10).nOut(10).build(),
-        new RnnOutputLayer.Builder().nIn(10).nOut(10).build(), new LossLayer.Builder().build(),
-        new CenterLossOutputLayer.Builder().nIn(10).nOut(10).build(),
-        new DenseLayer.Builder().nIn(10).nOut(10).build(),
-        new ConvolutionLayer.Builder().nIn(10).nOut(10).build(), new SubsamplingLayer.Builder().build(),
+    Layer[] layers = new Layer[] {OutputLayer.builder().nIn(10).nOut(10).build(),
+        RnnOutputLayer.builder().nIn(10).nOut(10).build(), LossLayer.builder().lossFunction().build(),
+        CenterLossOutputLayer.builder().nIn(10).nOut(10).build(),
+        DenseLayer.builder().nIn(10).nOut(10).build(),
+        ConvolutionLayer.builder().nIn(10).nOut(10).build(), SubsamplingLayer.builder().build(),
         new Convolution1DLayer.Builder(2, 2).nIn(10).nOut(10).build(),
-        new ActivationLayer.Builder().activation(Activation.TANH).build(),
-        new GlobalPoolingLayer.Builder().build(), new GravesLSTM.Builder().nIn(10).nOut(10).build(),
-        new LSTM.Builder().nIn(10).nOut(10).build(), new DropoutLayer.Builder(0.5).build(),
-        new BatchNormalization.Builder().build(), new LocalResponseNormalization.Builder().build()};
+        ActivationLayer.builder().activation(Activation.TANH).build(),
+        GlobalPoolingLayer.builder().build(), GravesLSTM.builder().nIn(10).nOut(10).build(),
+        LSTM.builder().nIn(10).nOut(10).build(), DropoutLayer.builder(0.5).build(),
+        BatchNormalization.builder().build(), LocalResponseNormalization.builder().build()};
     for (Layer l : layers) {
         testSerialization(l, si);
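
The layer array in the second hunk is the broadest single sweep in the changeset, touching output, recurrent, convolutional, pooling, dropout, and normalization configurations in one place. Two details stand out: Convolution1DLayer keeps its inner Builder(2, 2) (the line is unchanged context), suggesting the migration was applied incrementally rather than to every layer class at once, and LossLayer again gains the extra no-argument .lossFunction() call already seen in JsonModelServerTest.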

@@ -86,9 +86,9 @@ public class TestPreProcessedData extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(Updater.RMSPROP)
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-        .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(3)
+        .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(4).nOut(3)
            .activation(Activation.TANH).build())
-        .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
            LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3).activation(Activation.SOFTMAX)
            .build())
         .build();
@@ -137,9 +137,9 @@ public class TestPreProcessedData extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().updater(Updater.RMSPROP)
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .graphBuilder().addInputs("in")
-        .addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(3)
+        .addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(4).nOut(3)
            .activation(Activation.TANH).build(), "in")
-        .addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
            LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3).activation(Activation.SOFTMAX)
            .build(),
            "0")
@@ -191,9 +191,9 @@ public class TestPreProcessedData extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().updater(Updater.RMSPROP)
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .graphBuilder().addInputs("in")
-        .addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(3)
+        .addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(4).nOut(3)
            .activation(Activation.TANH).build(), "in")
-        .addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
            LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3).activation(Activation.SOFTMAX)
            .build(),
            "0")

@@ -41,7 +41,7 @@ public class TestKryoWarning {
     try {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
-            .layer(0, new OutputLayer.Builder().nIn(10).nOut(10).build())
+            .layer(0, OutputLayer.builder().nIn(10).nOut(10).build())
             .build();
         TrainingMaster tm = new ParameterAveragingTrainingMaster.Builder(1).build();
@@ -58,7 +58,7 @@ public class TestKryoWarning {
     try {
         ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
-            .addLayer("0", new OutputLayer.Builder().nIn(10).nOut(10).build(), "in").setOutputs("0")
+            .addLayer("0", OutputLayer.builder().nIn(10).nOut(10).build(), "in").setOutputs("0")
             .build();
         TrainingMaster tm = new ParameterAveragingTrainingMaster.Builder(1).build();

@@ -53,9 +53,9 @@ public class TestCustomLayer extends BaseSparkTest {
     //Custom layers are tested more extensively in dl4j core
     NeuralNetConfiguration conf =
         NeuralNetConfiguration.builder().updater(new Sgd(0.1)).list()
-            .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
+            .layer(0, DenseLayer.builder().nIn(10).nOut(10).build())
             .layer(1, new CustomLayer(3.14159)).layer(2,
-                new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
+                OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                     .nIn(10).nOut(10).build())
             .build();
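
This hunk also shows the migration's boundary: the user-defined new CustomLayer(3.14159) keeps its plain constructor, and in TestKryoWarning above the unrelated ParameterAveragingTrainingMaster.Builder likewise keeps its inner-class builder. Only the library's own layer configurations move to the generated builder() factories.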

@@ -79,8 +79,8 @@ public class TestSparkComputationGraph extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER)
         .graphBuilder().addInputs("in")
-        .addLayer("l0", new DenseLayer.Builder().nIn(4).nOut(10).build(), "in")
-        .addLayer("l1", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
+        .addLayer("l0", DenseLayer.builder().nIn(4).nOut(10).build(), "in")
+        .addLayer("l1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
             .activation(Activation.SOFTMAX).nIn(10).nOut(2).build(), "l0")
         .setOutputs("l1").build();
@@ -107,8 +107,8 @@ public class TestSparkComputationGraph extends BaseSparkTest {
     ComputationGraphConfiguration config = NeuralNetConfiguration.builder()
         .updater(new Sgd(0.1))
         .graphBuilder().addInputs("in")
-        .addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(2).build(), "in").addLayer("out",
-            new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3)
+        .addLayer("dense", DenseLayer.builder().nIn(4).nOut(2).build(), "in").addLayer("out",
+            OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3)
                 .build(),
             "dense")
         .setOutputs("out").build();
@@ -141,9 +141,9 @@ public class TestSparkComputationGraph extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().l1(0.1).l2(0.1)
         .seed(123).updater(new Nesterovs(0.1, 0.9)).graphBuilder()
         .addInputs("in")
-        .addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(nIn).nOut(3)
+        .addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(nIn).nOut(3)
             .activation(Activation.TANH).build(), "in")
-        .addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
             LossFunctions.LossFunction.MCXENT).nIn(3).nOut(nOut)
             .activation(Activation.SOFTMAX).build(),
             "0")
@@ -220,9 +220,9 @@ public class TestSparkComputationGraph extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345).updater(Updater.RMSPROP)
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-        .addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(4)
+        .addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(4).nOut(4)
             .activation(Activation.TANH).build(), "in")
-        .addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
             LossFunctions.LossFunction.MCXENT).nIn(4).nOut(3).activation(Activation.SOFTMAX)
             .build(),
             "0")
@@ -421,8 +421,8 @@ public class TestSparkComputationGraph extends BaseSparkTest {
         .graphBuilder()
         .addInputs("input1", "input2")
         .addVertex("avg",new ElementWiseVertex(ElementWiseVertex.Op.Average),"input1","input2")
-        .addLayer("dense",new DenseLayer.Builder().dropOut(0.9).nIn(featSize).nOut(featSize / 2).build(),"avg")
-        .addLayer("output",new OutputLayer.Builder().nIn(featSize / 2).nOut(2).lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).hasBias(false).build(),"dense")
+        .addLayer("dense",DenseLayer.builder().dropOut(0.9).nIn(featSize).nOut(featSize / 2).build(),"avg")
+        .addLayer("output",OutputLayer.builder().nIn(featSize / 2).nOut(2).lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).hasBias(false).build(),"dense")
         .setOutputs("output")
         .build();

@@ -62,10 +62,10 @@ public class TestFrozenLayers extends BaseSparkTest {
     int nOut = 3;
     MultiLayerNetwork origModel = new MultiLayerNetwork(overallConf.clone().list()
-        .layer(0, new DenseLayer.Builder().nIn(6).nOut(5).build())
-        .layer(1, new DenseLayer.Builder().nIn(5).nOut(4).build())
-        .layer(2, new DenseLayer.Builder().nIn(4).nOut(3).build())
-        .layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .layer(0, DenseLayer.builder().nIn(6).nOut(5).build())
+        .layer(1, DenseLayer.builder().nIn(5).nOut(4).build())
+        .layer(2, DenseLayer.builder().nIn(4).nOut(3).build())
+        .layer(3, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
            LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
            .build())
         .build());
@@ -138,10 +138,10 @@ public class TestFrozenLayers extends BaseSparkTest {
     ComputationGraph origModel = new ComputationGraph(NeuralNetConfiguration.builder().updater(new Sgd(0.1))
         .activation(Activation.TANH).graphBuilder().addInputs("in")
-        .addLayer("0", new DenseLayer.Builder().nIn(6).nOut(5).build(), "in")
-        .addLayer("1", new DenseLayer.Builder().nIn(5).nOut(4).build(), "0")
-        .addLayer("2", new DenseLayer.Builder().nIn(4).nOut(3).build(), "1")
-        .addLayer("3", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .addLayer("0", DenseLayer.builder().nIn(6).nOut(5).build(), "in")
+        .addLayer("1", DenseLayer.builder().nIn(5).nOut(4).build(), "0")
+        .addLayer("2", DenseLayer.builder().nIn(4).nOut(3).build(), "1")
+        .addLayer("3", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
            LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
            .build(),
            "2")

@@ -58,8 +58,8 @@ public class TestMiscFunctions extends BaseSparkTest {
     public void testFeedForwardWithKey() {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).list()
-            .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
-            .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
+            .layer(0, DenseLayer.builder().nIn(4).nOut(3).build())
+            .layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
                 .activation(Activation.SOFTMAX).build())
             .build();
@@ -109,9 +109,9 @@ public class TestMiscFunctions extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER)
         .list()
-        .layer( new LSTM.Builder().nIn(4).nOut(3).build())
-        .layer(new GlobalPoolingLayer(PoolingType.AVG))
-        .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
+        .layer( LSTM.builder().nIn(4).nOut(3).build())
+        .layer(GlobalPoolingLayer.builder(PoolingType.AVG))
+        .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
            .activation(Activation.SOFTMAX).build())
         .build();
@@ -164,9 +164,9 @@ public class TestMiscFunctions extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER)
         .graphBuilder().addInputs("in1", "in2")
-        .addLayer("0", new DenseLayer.Builder().nIn(4).nOut(3).build(), "in1")
-        .addLayer("1", new DenseLayer.Builder().nIn(4).nOut(3).build(), "in2").addLayer("2",
-            new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(6).nOut(3)
+        .addLayer("0", DenseLayer.builder().nIn(4).nOut(3).build(), "in1")
+        .addLayer("1", DenseLayer.builder().nIn(4).nOut(3).build(), "in2").addLayer("2",
+            OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(6).nOut(3)
                 .activation(Activation.SOFTMAX).build(),
            "0", "1")
         .setOutputs("2").build();

@@ -111,9 +111,9 @@ public class TestSparkDl4jMultiLayer extends BaseSparkTest {
         .updater(new Adam(1e-3))
         .l2(1e-5)
         .list()
-        .layer(0, new DenseLayer.Builder().nIn(28 * 28).nOut(500).build())
-        .layer(1, new DenseLayer.Builder().nIn(500).nOut(100).build())
-        .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
+        .layer(0, DenseLayer.builder().nIn(28 * 28).nOut(500).build())
+        .layer(1, DenseLayer.builder().nIn(500).nOut(100).build())
+        .layer(2, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
            .activation(Activation.SOFTMAX).nIn(100).nOut(10).build())
         .build();

@@ -68,7 +68,7 @@ public class TestCompareParameterAveragingSparkVsSingleMachine {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .weightInit(WeightInit.XAVIER).updater(updater).seed(seed).list()
-        .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build()).layer(1, new OutputLayer.Builder()
+        .layer(0, DenseLayer.builder().nIn(10).nOut(10).build()).layer(1, OutputLayer.builder()
            .lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(10).build())
         .build();
     return conf;
@@ -79,11 +79,11 @@ public class TestCompareParameterAveragingSparkVsSingleMachine {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .weightInit(WeightInit.XAVIER).updater(updater).seed(seed).list()
-        .layer(0, new ConvolutionLayer.Builder().nOut(3).kernelSize(2, 2).stride(1, 1).padding(0, 0)
+        .layer(0, ConvolutionLayer.builder().nOut(3).kernelSize(2, 2).stride(1, 1).padding(0, 0)
            .activation(Activation.TANH).build())
-        .layer(1, new ConvolutionLayer.Builder().nOut(3).kernelSize(2, 2).stride(1, 1).padding(0, 0)
+        .layer(1, ConvolutionLayer.builder().nOut(3).kernelSize(2, 2).stride(1, 1).padding(0, 0)
            .activation(Activation.TANH).build())
-        .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(10)
+        .layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(10)
            .build())
         .inputType(InputType.convolutional(10, 10, 3)).build();
     return conf;
@@ -95,8 +95,8 @@ public class TestCompareParameterAveragingSparkVsSingleMachine {
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .weightInit(WeightInit.XAVIER).updater(updater).seed(seed).graphBuilder()
         .addInputs("in")
-        .addLayer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in").addLayer("1",
-            new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10)
+        .addLayer("0", DenseLayer.builder().nIn(10).nOut(10).build(), "in").addLayer("1",
+            OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10)
                .nOut(10).build(),
            "0")
         .setOutputs("1").build();
@@ -109,11 +109,11 @@ public class TestCompareParameterAveragingSparkVsSingleMachine {
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .weightInit(WeightInit.XAVIER).updater(updater).seed(seed).graphBuilder()
         .addInputs("in")
-        .addLayer("0", new ConvolutionLayer.Builder().nOut(3).kernelSize(2, 2).stride(1, 1)
+        .addLayer("0", ConvolutionLayer.builder().nOut(3).kernelSize(2, 2).stride(1, 1)
            .padding(0, 0).activation(Activation.TANH).build(), "in")
-        .addLayer("1", new ConvolutionLayer.Builder().nOut(3).kernelSize(2, 2).stride(1, 1)
+        .addLayer("1", ConvolutionLayer.builder().nOut(3).kernelSize(2, 2).stride(1, 1)
            .padding(0, 0).activation(Activation.TANH).build(), "0")
-        .addLayer("2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(10)
+        .addLayer("2", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(10)
            .build(), "1")
         .setOutputs("2").setInputTypes(InputType.convolutional(10, 10, 3))
         .build();
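
Note in passing: the CNN variant in the first hunk registers two layers under index 1 (.layer(1, ...) for the second ConvolutionLayer and again for the OutputLayer) on both sides of the diff. The commit preserves that pre-existing quirk rather than fixing it, consistent with its purely mechanical scope.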

@@ -129,9 +129,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     DataSet d = new IrisDataSetIterator(150, 150).next();
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-        .layer(0, new DenseLayer.Builder().nIn(4).nOut(100).weightInit(WeightInit.XAVIER)
+        .layer(0, DenseLayer.builder().nIn(4).nOut(100).weightInit(WeightInit.XAVIER)
            .activation(Activation.RELU).build())
-        .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
            LossFunctions.LossFunction.MCXENT).nIn(100).nOut(3)
            .activation(Activation.SOFTMAX).weightInit(WeightInit.XAVIER)
            .build())
@@ -167,9 +167,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
         .updater(new Adam(1e-6))
         .weightInit(WeightInit.XAVIER)
         .list()
-        .layer(new BatchNormalization.Builder().nIn(4).nOut(4).build())
-        .layer(new DenseLayer.Builder().nIn(4).nOut(32).activation(Activation.RELU).build())
-        .layer(new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(32).nOut(3)
+        .layer(BatchNormalization.builder().nIn(4).nOut(4).build())
+        .layer(DenseLayer.builder().nIn(4).nOut(32).activation(Activation.RELU).build())
+        .layer(org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(32).nOut(3)
            .activation(Activation.SOFTMAX).build())
         .build();
@@ -277,9 +277,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     }
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-        .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(nIn).nOut(3)
+        .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(nIn).nOut(3)
            .activation(Activation.TANH).build())
-        .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder(
            LossFunctions.LossFunction.MSE).nIn(3).nOut(nOut).activation(Activation.SOFTMAX)
            .build())
         .build();
@@ -302,9 +302,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().l1(0.1).l2(0.1)
         .seed(123).updater(new Nesterovs(0.1, 0.9)).list()
-        .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(nIn).nOut(3)
+        .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(nIn).nOut(3)
            .activation(Activation.TANH).build())
-        .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
            LossFunctions.LossFunction.MCXENT).nIn(3).nOut(nOut)
            .activation(Activation.SOFTMAX).build())
         .build();
@@ -391,9 +391,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-        .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+        .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
            .activation(Activation.TANH).build())
-        .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
            LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
            .activation(Activation.SOFTMAX).build())
         .build();
@@ -455,9 +455,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-        .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+        .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
            .activation(Activation.TANH).build())
-        .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
            LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
            .activation(Activation.SOFTMAX).build())
         .build();
@@ -525,9 +525,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-        .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+        .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
            .activation(Activation.TANH).build())
-        .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
            LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
            .activation(Activation.SOFTMAX).build())
         .build();
@@ -614,9 +614,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .graphBuilder().addInputs("in")
-        .addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+        .addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
            .activation(Activation.TANH).build(), "in")
-        .addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
            LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
            .activation(Activation.SOFTMAX).build(),
            "0")
@@ -687,9 +687,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345).updater(new RmsProp())
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .weightInit(WeightInit.XAVIER).list()
-        .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(4)
+        .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(4).nOut(4)
            .activation(Activation.TANH).build())
-        .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
            LossFunctions.LossFunction.MCXENT).nIn(4).nOut(3).activation(Activation.SOFTMAX)
            .build())
         .build();
@@ -771,9 +771,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-        .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+        .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
            .activation(Activation.TANH).build())
-        .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
            LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
            .activation(Activation.SOFTMAX).build())
         .build();
@@ -822,9 +822,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
         .graphBuilder().addInputs("in")
-        .addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+        .addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
            .activation(Activation.TANH).build(), "in")
-        .addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+        .addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
            LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
            .activation(Activation.SOFTMAX).build(),
            "0")
@@ -862,7 +862,7 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     Nd4j.getRandom().setSeed(12345);
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345).updater(new RmsProp())
         .weightInit(WeightInit.XAVIER).list()
-        .layer(0, new VariationalAutoencoder.Builder().nIn(8).nOut(10).encoderLayerSizes(12)
+        .layer(0, VariationalAutoencoder.builder().nIn(8).nOut(10).encoderLayerSizes(12)
            .decoderLayerSizes(13).reconstructionDistribution(
                new GaussianReconstructionDistribution(Activation.IDENTITY))
            .build())
@@ -898,7 +898,7 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     Nd4j.getRandom().setSeed(12345);
     ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345).updater(new RmsProp())
         .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-        .addLayer("0", new VariationalAutoencoder.Builder().nIn(8).nOut(10).encoderLayerSizes(12)
+        .addLayer("0", VariationalAutoencoder.builder().nIn(8).nOut(10).encoderLayerSizes(12)
            .decoderLayerSizes(13).reconstructionDistribution(
                new GaussianReconstructionDistribution(Activation.IDENTITY))
            .build(), "in")
@@ -938,8 +938,8 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     NeuralNetConfiguration conf =
         NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).list()
-            .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(layerSize).build())
-            .layer(1, new OutputLayer.Builder().nIn(layerSize).nOut(nOut)
+            .layer(0, DenseLayer.builder().nIn(nIn).nOut(layerSize).build())
+            .layer(1, OutputLayer.builder().nIn(layerSize).nOut(nOut)
                .activation(Activation.SOFTMAX).lossFunction(
                    LossFunctions.LossFunction.MCXENT)
                .build())
@@ -993,8 +993,8 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     NeuralNetConfiguration conf =
         NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).list()
-            .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(layerSize).build())
-            .layer(1, new OutputLayer.Builder().nIn(layerSize).nOut(nOut)
+            .layer(0, DenseLayer.builder().nIn(nIn).nOut(layerSize).build())
+            .layer(1, OutputLayer.builder().nIn(layerSize).nOut(nOut)
                .activation(Activation.SOFTMAX).lossFunction(
                    LossFunctions.LossFunction.MCXENT)
                .build())
@@ -1047,13 +1047,13 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
     }
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
         .list()
-        .layer(new OutputLayer.Builder().nIn(4).nOut(3).build())
+        .layer(OutputLayer.builder().nIn(4).nOut(3).build())
         .build();
     ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder()
         .graphBuilder()
         .addInputs("in")
-        .addLayer("out", new OutputLayer.Builder().nIn(4).nOut(3).build(), "in")
+        .addLayer("out", OutputLayer.builder().nIn(4).nOut(3).build(), "in")
         .setOutputs("out")
         .build();
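
The VariationalAutoencoder hunks show the generated builders also cover pretraining layers with richer configuration (encoderLayerSizes, decoderLayerSizes, reconstructionDistribution), not just the dense and output layers that dominate the rest of the changeset.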

@@ -69,8 +69,8 @@ public class TestTrainingStatsCollection extends BaseSparkTest {
     NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
         .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-        .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
-        .layer(1, new OutputLayer.Builder().nIn(10).nOut(10).build())
+        .layer(0, DenseLayer.builder().nIn(10).nOut(10).build())
+        .layer(1, OutputLayer.builder().nIn(10).nOut(10).build())
         .build();
     int miniBatchSizePerWorker = 10;


@ -62,9 +62,9 @@ public class TestListeners extends BaseSparkTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(100).weightInit(WeightInit.XAVIER) .layer(0, DenseLayer.builder().nIn(4).nOut(100).weightInit(WeightInit.XAVIER)
.activation(Activation.RELU).build()) .activation(Activation.RELU).build())
.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder( .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
LossFunctions.LossFunction.MCXENT).nIn(100).nOut(3) LossFunctions.LossFunction.MCXENT).nIn(100).nOut(3)
.activation(Activation.SOFTMAX).weightInit(WeightInit.XAVIER) .activation(Activation.SOFTMAX).weightInit(WeightInit.XAVIER)
.build()) .build())
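A second change rides along in this hunk: where the old inner Builder took the loss function as a constructor argument, the loss now moves into the fluent lossFunction(...) setter. A sketch of just that call site (layer-level weightInit/activation setters as used in the test; sizes are the test's):

    import org.deeplearning4j.nn.conf.layers.OutputLayer;
    import org.deeplearning4j.nn.weights.WeightInit;
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    public class LossSetterSketch {
        public static void main(String[] args) {
            // Old: new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)...
            // New: the loss is passed through the fluent chain instead.
            OutputLayer out = OutputLayer.builder()
                    .lossFunction(LossFunctions.LossFunction.MCXENT)
                    .nIn(100).nOut(3)
                    .activation(Activation.SOFTMAX)
                    .weightInit(WeightInit.XAVIER)
                    .build();
        }
    }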


@ -65,27 +65,27 @@ public class ActorCriticFactoryCompGraphStdConv implements ActorCriticFactoryCom
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.l2(conf.getL2()).graphBuilder() .l2(conf.getL2()).graphBuilder()
.addInputs("input").addLayer("0", .addInputs("input").addLayer("0",
new ConvolutionLayer.Builder(8, 8).nIn(shapeInputs[0]).nOut(16) ConvolutionLayer.builder(8, 8).nIn(shapeInputs[0]).nOut(16)
.stride(4, 4).activation(Activation.RELU).build(), .stride(4, 4).activation(Activation.RELU).build(),
"input"); "input");
confB.addLayer("1", new ConvolutionLayer.Builder(4, 4).nIn(16).nOut(32).stride(2, 2).activation(Activation.RELU).build(), "0"); confB.addLayer("1", ConvolutionLayer.builder(4, 4).nIn(16).nOut(32).stride(2, 2).activation(Activation.RELU).build(), "0");
confB.addLayer("2", new DenseLayer.Builder().nIn(w * h * 32).nOut(256).activation(Activation.RELU).build(), "1"); confB.addLayer("2", DenseLayer.builder().nIn(w * h * 32).nOut(256).activation(Activation.RELU).build(), "1");
if (conf.isUseLSTM()) { if (conf.isUseLSTM()) {
confB.addLayer("3", new LSTM.Builder().nIn(256).nOut(256).activation(Activation.TANH).build(), "2"); confB.addLayer("3", LSTM.builder().nIn(256).nOut(256).activation(Activation.TANH).build(), "2");
confB.addLayer("value", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY) confB.addLayer("value", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
.nIn(256).nOut(1).build(), "3"); .nIn(256).nOut(1).build(), "3");
confB.addLayer("softmax", new RnnOutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX) confB.addLayer("softmax", RnnOutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
.nIn(256).nOut(numOutputs).build(), "3"); .nIn(256).nOut(numOutputs).build(), "3");
} else { } else {
confB.addLayer("value", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY) confB.addLayer("value", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
.nIn(256).nOut(1).build(), "2"); .nIn(256).nOut(1).build(), "2");
confB.addLayer("softmax", new OutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX) confB.addLayer("softmax", OutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
.nIn(256).nOut(numOutputs).build(), "2"); .nIn(256).nOut(numOutputs).build(), "2");
} }
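Two loss-passing conventions coexist after the migration, and this hunk shows both: built-in LossFunction enums go through the fluent setter, while custom ILossFunction instances such as rl4j's ActorCriticLoss keep the builder(...) argument form. A sketch of the two heads (the 256/1 sizes are the factory's; the ActorCriticLoss import path is assumed from rl4j's usual packages):

    import org.deeplearning4j.nn.conf.layers.OutputLayer;
    import org.deeplearning4j.rl4j.network.ac.ActorCriticLoss; // package assumed
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    public class DualHeadSketch {
        public static void main(String[] args) {
            int numOutputs = 6; // illustrative
            // Built-in enum: fluent setter.
            OutputLayer value = OutputLayer.builder()
                    .lossFunction(LossFunctions.LossFunction.MSE)
                    .activation(Activation.IDENTITY)
                    .nIn(256).nOut(1)
                    .build();
            // Custom ILossFunction: still a builder(...) argument.
            OutputLayer softmax = OutputLayer.builder(new ActorCriticLoss())
                    .activation(Activation.SOFTMAX)
                    .nIn(256).nOut(numOutputs)
                    .build();
        }
    }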


@ -56,31 +56,31 @@ public class ActorCriticFactoryCompGraphStdDense implements ActorCriticFactoryCo
.l2(conf.getL2()).graphBuilder() .l2(conf.getL2()).graphBuilder()
.setInputTypes(conf.isUseLSTM() ? InputType.recurrent(nIn) .setInputTypes(conf.isUseLSTM() ? InputType.recurrent(nIn)
: InputType.feedForward(nIn)).addInputs("input") : InputType.feedForward(nIn)).addInputs("input")
.addLayer("0", new DenseLayer.Builder().nIn(nIn) .addLayer("0", DenseLayer.builder().nIn(nIn)
.nOut(conf.getNumHiddenNodes()).activation(Activation.RELU).build(), .nOut(conf.getNumHiddenNodes()).activation(Activation.RELU).build(),
"input"); "input");
for (int i = 1; i < conf.getNumLayers(); i++) { for (int i = 1; i < conf.getNumLayers(); i++) {
confB.addLayer(i + "", new DenseLayer.Builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes()) confB.addLayer(i + "", DenseLayer.builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
.activation(Activation.RELU).build(), (i - 1) + ""); .activation(Activation.RELU).build(), (i - 1) + "");
} }
if (conf.isUseLSTM()) { if (conf.isUseLSTM()) {
confB.addLayer(getConf().getNumLayers() + "", new LSTM.Builder().activation(Activation.TANH) confB.addLayer(getConf().getNumLayers() + "", LSTM.builder().activation(Activation.TANH)
.nOut(conf.getNumHiddenNodes()).build(), (getConf().getNumLayers() - 1) + ""); .nOut(conf.getNumHiddenNodes()).build(), (getConf().getNumLayers() - 1) + "");
confB.addLayer("value", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY) confB.addLayer("value", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
.nOut(1).build(), getConf().getNumLayers() + ""); .nOut(1).build(), getConf().getNumLayers() + "");
confB.addLayer("softmax", new RnnOutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX) confB.addLayer("softmax", RnnOutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
.nOut(numOutputs).build(), getConf().getNumLayers() + ""); .nOut(numOutputs).build(), getConf().getNumLayers() + "");
} else { } else {
confB.addLayer("value", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY) confB.addLayer("value", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
.nOut(1).build(), (getConf().getNumLayers() - 1) + ""); .nOut(1).build(), (getConf().getNumLayers() - 1) + "");
confB.addLayer("softmax", new OutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX) confB.addLayer("softmax", OutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
.nOut(numOutputs).build(), (getConf().getNumLayers() - 1) + ""); .nOut(numOutputs).build(), (getConf().getNumLayers() - 1) + "");
} }


@ -61,22 +61,22 @@ public class ActorCriticFactorySeparateStdDense implements ActorCriticFactorySep
.updater(conf.getUpdater() != null ? conf.getUpdater() : new Adam()) .updater(conf.getUpdater() != null ? conf.getUpdater() : new Adam())
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.l2(conf.getL2()) .l2(conf.getL2())
.list().layer(0, new DenseLayer.Builder().nIn(nIn).nOut(conf.getNumHiddenNodes()) .list().layer(0, DenseLayer.builder().nIn(nIn).nOut(conf.getNumHiddenNodes())
.activation(Activation.RELU).build()); .activation(Activation.RELU).build());
for (int i = 1; i < conf.getNumLayers(); i++) { for (int i = 1; i < conf.getNumLayers(); i++) {
confB.layer(i, new DenseLayer.Builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes()) confB.layer(i, DenseLayer.builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
.activation(Activation.RELU).build()); .activation(Activation.RELU).build());
} }
if (conf.isUseLSTM()) { if (conf.isUseLSTM()) {
confB.layer(conf.getNumLayers(), new LSTM.Builder().nOut(conf.getNumHiddenNodes()).activation(Activation.TANH).build()); confB.layer(conf.getNumLayers(), LSTM.builder().nOut(conf.getNumHiddenNodes()).activation(Activation.TANH).build());
confB.layer(conf.getNumLayers() + 1, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY) confB.layer(conf.getNumLayers() + 1, RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
.nIn(conf.getNumHiddenNodes()).nOut(1).build()); .nIn(conf.getNumHiddenNodes()).nOut(1).build());
} else { } else {
confB.layer(conf.getNumLayers(), new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY) confB.layer(conf.getNumLayers(), OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
.nIn(conf.getNumHiddenNodes()).nOut(1).build()); .nIn(conf.getNumHiddenNodes()).nOut(1).build());
} }
@ -96,22 +96,22 @@ public class ActorCriticFactorySeparateStdDense implements ActorCriticFactorySep
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
//.regularization(true) //.regularization(true)
//.l2(conf.getL2()) //.l2(conf.getL2())
.list().layer(0, new DenseLayer.Builder().nIn(nIn).nOut(conf.getNumHiddenNodes()) .list().layer(0, DenseLayer.builder().nIn(nIn).nOut(conf.getNumHiddenNodes())
.activation(Activation.RELU).build()); .activation(Activation.RELU).build());
for (int i = 1; i < conf.getNumLayers(); i++) { for (int i = 1; i < conf.getNumLayers(); i++) {
confB2.layer(i, new DenseLayer.Builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes()) confB2.layer(i, DenseLayer.builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
.activation(Activation.RELU).build()); .activation(Activation.RELU).build());
} }
if (conf.isUseLSTM()) { if (conf.isUseLSTM()) {
confB2.layer(conf.getNumLayers(), new LSTM.Builder().nOut(conf.getNumHiddenNodes()).activation(Activation.TANH).build()); confB2.layer(conf.getNumLayers(), LSTM.builder().nOut(conf.getNumHiddenNodes()).activation(Activation.TANH).build());
confB2.layer(conf.getNumLayers() + 1, new RnnOutputLayer.Builder(new ActorCriticLoss()) confB2.layer(conf.getNumLayers() + 1, RnnOutputLayer.builder(new ActorCriticLoss())
.activation(Activation.SOFTMAX).nIn(conf.getNumHiddenNodes()).nOut(numOutputs).build()); .activation(Activation.SOFTMAX).nIn(conf.getNumHiddenNodes()).nOut(numOutputs).build());
} else { } else {
confB2.layer(conf.getNumLayers(), new OutputLayer.Builder(new ActorCriticLoss()) confB2.layer(conf.getNumLayers(), OutputLayer.builder(new ActorCriticLoss())
.activation(Activation.SOFTMAX).nIn(conf.getNumHiddenNodes()).nOut(numOutputs).build()); .activation(Activation.SOFTMAX).nIn(conf.getNumHiddenNodes()).nOut(numOutputs).build());
} }
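These factory classes build their layer stacks imperatively, and that idiom survives the migration unchanged apart from the factories themselves. A sketch with illustrative sizes (var dodges the Lombok-generated builder type name; the loop mirrors the confB pattern above):

    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.layers.DenseLayer;
    import org.deeplearning4j.nn.conf.layers.OutputLayer;
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    public class DenseStackSketch {
        public static void main(String[] args) {
            int nIn = 10, hidden = 16, numLayers = 3; // illustrative values
            var confB = NeuralNetConfiguration.builder()
                    .list()
                    .layer(0, DenseLayer.builder().nIn(nIn).nOut(hidden)
                            .activation(Activation.RELU).build());
            for (int i = 1; i < numLayers; i++) {
                confB.layer(i, DenseLayer.builder().nIn(hidden).nOut(hidden)
                        .activation(Activation.RELU).build());
            }
            confB.layer(numLayers, OutputLayer.builder()
                    .lossFunction(LossFunctions.LossFunction.MSE)
                    .activation(Activation.IDENTITY)
                    .nIn(hidden).nOut(1).build());
            NeuralNetConfiguration conf = confB.build();
        }
    }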


@ -60,15 +60,15 @@ public class DQNFactoryStdConv implements DQNFactory {
.l2(conf.getL2()) .l2(conf.getL2())
.updater(conf.getUpdater() != null ? conf.getUpdater() : new Adam()) .updater(conf.getUpdater() != null ? conf.getUpdater() : new Adam())
.weightInit(WeightInit.XAVIER).l2(conf.getL2()).list() .weightInit(WeightInit.XAVIER).l2(conf.getL2()).list()
.layer(0, new ConvolutionLayer.Builder(8, 8).nIn(shapeInputs[0]).nOut(16).stride(4, 4) .layer(0, ConvolutionLayer.builder(8, 8).nIn(shapeInputs[0]).nOut(16).stride(4, 4)
.activation(Activation.RELU).build()); .activation(Activation.RELU).build());
confB.layer(1, new ConvolutionLayer.Builder(4, 4).nOut(32).stride(2, 2).activation(Activation.RELU).build()); confB.layer(1, ConvolutionLayer.builder(4, 4).nOut(32).stride(2, 2).activation(Activation.RELU).build());
confB.layer(2, new DenseLayer.Builder().nOut(256).activation(Activation.RELU).build()); confB.layer(2, DenseLayer.builder().nOut(256).activation(Activation.RELU).build());
confB.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(numOutputs) confB.layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(numOutputs)
.build()); .build());
confB.inputType(InputType.convolutional(shapeInputs[1], shapeInputs[2], shapeInputs[0])); confB.inputType(InputType.convolutional(shapeInputs[1], shapeInputs[2], shapeInputs[0]));


@ -61,7 +61,7 @@ public class DQNFactoryStdDense implements DQNFactory {
.l2(conf.getL2()) .l2(conf.getL2())
.list() .list()
.layer(0, .layer(0,
new DenseLayer.Builder() DenseLayer.builder()
.nIn(nIn) .nIn(nIn)
.nOut(conf.getNumHiddenNodes()) .nOut(conf.getNumHiddenNodes())
.activation(Activation.RELU).build() .activation(Activation.RELU).build()
@ -69,12 +69,12 @@ public class DQNFactoryStdDense implements DQNFactory {
for (int i = 1; i < conf.getNumLayers(); i++) { for (int i = 1; i < conf.getNumLayers(); i++) {
confB.layer(i, new DenseLayer.Builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes()) confB.layer(i, DenseLayer.builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
.activation(Activation.RELU).build()); .activation(Activation.RELU).build());
} }
confB.layer(conf.getNumLayers(), confB.layer(conf.getNumLayers(),
new OutputLayer.Builder(LossFunctions.LossFunction.MSE) OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
.activation(Activation.IDENTITY) .activation(Activation.IDENTITY)
.nIn(conf.getNumHiddenNodes()) .nIn(conf.getNumHiddenNodes())
.nOut(numOutputs) .nOut(numOutputs)


@ -141,16 +141,16 @@ public class NStepRnn {
.graphBuilder() .graphBuilder()
.addInputs("input") .addInputs("input")
.setInputTypes(InputType.recurrent(NUM_INPUTS)) .setInputTypes(InputType.recurrent(NUM_INPUTS))
.addLayer("lstm", new LSTM.Builder().nOut(lstmLayerSize).activation(Activation.TANH).build(), "input") .addLayer("lstm", LSTM.builder().nOut(lstmLayerSize).activation(Activation.TANH).build(), "input")
.addLayer("dl", new DenseLayer.Builder().nOut(dl1Size).activation(Activation.RELU).build(), "input", "lstm") .addLayer("dl", DenseLayer.builder().nOut(dl1Size).activation(Activation.RELU).build(), "input", "lstm")
.addLayer("dl-1", new DenseLayer.Builder().nOut(dl2Size).activation(Activation.RELU).build(), "dl") .addLayer("dl-1", DenseLayer.builder().nOut(dl2Size).activation(Activation.RELU).build(), "dl")
.addVertex("dl-rnn", new PreprocessorVertex(new FeedForwardToRnnPreProcessor()), "dl-1"); .addVertex("dl-rnn", new PreprocessorVertex(new FeedForwardToRnnPreProcessor()), "dl-1");
} }
private static ITrainableNeuralNet buildActorCriticNetwork() { private static ITrainableNeuralNet buildActorCriticNetwork() {
ComputationGraphConfiguration valueConfiguration = buildBaseNetworkConfiguration(COMBINED_LSTM_LAYER_SIZE, COMBINED_DL1_LAYER_SIZE, COMBINED_DL2_LAYER_SIZE) ComputationGraphConfiguration valueConfiguration = buildBaseNetworkConfiguration(COMBINED_LSTM_LAYER_SIZE, COMBINED_DL1_LAYER_SIZE, COMBINED_DL2_LAYER_SIZE)
.addLayer("value", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(1).build(), "dl-rnn", "lstm") .addLayer("value", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(1).build(), "dl-rnn", "lstm")
.addLayer("softmax", new RnnOutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX).nOut(NUM_ACTIONS).build(), "dl-rnn", "lstm") .addLayer("softmax", RnnOutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX).nOut(NUM_ACTIONS).build(), "dl-rnn", "lstm")
.setOutputs("value", "softmax") .setOutputs("value", "softmax")
.build(); .build();
@ -164,12 +164,12 @@ public class NStepRnn {
private static ITrainableNeuralNet buildSeparateActorCriticNetwork() { private static ITrainableNeuralNet buildSeparateActorCriticNetwork() {
ComputationGraphConfiguration valueConfiguration = buildBaseNetworkConfiguration(SEPARATE_LSTM_LAYER_SIZE, SEPARATE_DL1_LAYER_SIZE, SEPARATE_DL2_LAYER_SIZE) ComputationGraphConfiguration valueConfiguration = buildBaseNetworkConfiguration(SEPARATE_LSTM_LAYER_SIZE, SEPARATE_DL1_LAYER_SIZE, SEPARATE_DL2_LAYER_SIZE)
.addLayer("value", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(1).build(), "dl-rnn", "lstm") .addLayer("value", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(1).build(), "dl-rnn", "lstm")
.setOutputs("value") .setOutputs("value")
.build(); .build();
ComputationGraphConfiguration policyConfiguration = buildBaseNetworkConfiguration(SEPARATE_LSTM_LAYER_SIZE, SEPARATE_DL1_LAYER_SIZE, SEPARATE_DL2_LAYER_SIZE) ComputationGraphConfiguration policyConfiguration = buildBaseNetworkConfiguration(SEPARATE_LSTM_LAYER_SIZE, SEPARATE_DL1_LAYER_SIZE, SEPARATE_DL2_LAYER_SIZE)
.addLayer("softmax", new RnnOutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX).nOut(NUM_ACTIONS).build(), "dl-rnn", "lstm") .addLayer("softmax", RnnOutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX).nOut(NUM_ACTIONS).build(), "dl-rnn", "lstm")
.setOutputs("softmax") .setOutputs("softmax")
.build(); .build();
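Recurrent heads migrate identically; RnnOutputLayer simply stands in for OutputLayer where the graph is recurrent. A sketch of the two-head layout from the hunks above, with illustrative sizes and the custom ActorCriticLoss swapped for plain MCXENT so the example stays self-contained:

    import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.inputs.InputType;
    import org.deeplearning4j.nn.conf.layers.LSTM;
    import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    public class RnnHeadsSketch {
        public static void main(String[] args) {
            int numInputs = 4, lstmSize = 16, numActions = 2; // illustrative
            ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
                    .graphBuilder()
                    .addInputs("input")
                    .setInputTypes(InputType.recurrent(numInputs))
                    .addLayer("lstm", LSTM.builder().nOut(lstmSize)
                            .activation(Activation.TANH).build(), "input")
                    .addLayer("value", RnnOutputLayer.builder()
                            .lossFunction(LossFunctions.LossFunction.MSE)
                            .activation(Activation.IDENTITY).nOut(1).build(), "lstm")
                    .addLayer("softmax", RnnOutputLayer.builder()
                            .lossFunction(LossFunctions.LossFunction.MCXENT)
                            .activation(Activation.SOFTMAX).nOut(numActions).build(), "lstm")
                    .setOutputs("value", "softmax")
                    .build();
        }
    }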


@ -197,13 +197,13 @@ public class RobotLakeExample {
InputType.feedForward(4)) // radar ) InputType.feedForward(4)) // radar )
.addInputs("tracker-in", "radar-in") .addInputs("tracker-in", "radar-in")
.layer("dl_1", new DenseLayer.Builder().activation(Activation.RELU).nOut(40).build(), "tracker-in", "radar-in") .layer("dl_1", DenseLayer.builder().activation(Activation.RELU).nOut(40).build(), "tracker-in", "radar-in")
.layer("dl_out", new DenseLayer.Builder().activation(Activation.RELU).nOut(40).build(), "dl_1"); .layer("dl_out", DenseLayer.builder().activation(Activation.RELU).nOut(40).build(), "dl_1");
} }
private static ITrainableNeuralNet buildQNetwork() { private static ITrainableNeuralNet buildQNetwork() {
ComputationGraphConfiguration conf = buildBaseNetworkConfiguration() ComputationGraphConfiguration conf = buildBaseNetworkConfiguration()
.addLayer("output", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY) .addLayer("output", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
.nOut(RobotLake.NUM_ACTIONS).build(), "dl_out") .nOut(RobotLake.NUM_ACTIONS).build(), "dl_out")
.setOutputs("output") .setOutputs("output")
@ -220,9 +220,9 @@ public class RobotLakeExample {
private static ITrainableNeuralNet buildActorCriticNetwork() { private static ITrainableNeuralNet buildActorCriticNetwork() {
ComputationGraphConfiguration conf = buildBaseNetworkConfiguration() ComputationGraphConfiguration conf = buildBaseNetworkConfiguration()
.addLayer("value", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY) .addLayer("value", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
.nOut(1).build(), "dl_out") .nOut(1).build(), "dl_out")
.addLayer("softmax", new OutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX) .addLayer("softmax", OutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
.nOut(RobotLake.NUM_ACTIONS).build(), "dl_out") .nOut(RobotLake.NUM_ACTIONS).build(), "dl_out")
.setOutputs("value", "softmax") .setOutputs("value", "softmax")
.build(); .build();


@ -181,18 +181,18 @@ public class TMazeExample {
.graphBuilder() .graphBuilder()
.setInputTypes(InputType.recurrent(NUM_INPUTS)) .setInputTypes(InputType.recurrent(NUM_INPUTS))
.addInputs("input") .addInputs("input")
.addLayer("goal", new LSTM.Builder() .addLayer("goal", LSTM.builder()
.nOut(40) .nOut(40)
.activation(Activation.TANH) .activation(Activation.TANH)
.build(), "input") .build(), "input")
.addLayer("corridor", new DenseLayer.Builder().nOut(40).activation(Activation.RELU).build(), "input", "goal") .addLayer("corridor", DenseLayer.builder().nOut(40).activation(Activation.RELU).build(), "input", "goal")
.addLayer("corridor-1", new DenseLayer.Builder().nOut(20).activation(Activation.RELU).build(), "corridor") .addLayer("corridor-1", DenseLayer.builder().nOut(20).activation(Activation.RELU).build(), "corridor")
.addVertex("corridor-rnn", new PreprocessorVertex(new FeedForwardToRnnPreProcessor()), "corridor-1"); .addVertex("corridor-rnn", new PreprocessorVertex(new FeedForwardToRnnPreProcessor()), "corridor-1");
} }
private static ITrainableNeuralNet buildQNetwork() { private static ITrainableNeuralNet buildQNetwork() {
ComputationGraphConfiguration conf = buildBaseNetworkConfiguration() ComputationGraphConfiguration conf = buildBaseNetworkConfiguration()
.addLayer("output", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY) .addLayer("output", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
.nOut(NUM_ACTIONS).build(), "goal", "corridor-rnn") .nOut(NUM_ACTIONS).build(), "goal", "corridor-rnn")
.setOutputs("output") .setOutputs("output")
@ -207,9 +207,9 @@ public class TMazeExample {
private static ITrainableNeuralNet buildActorCriticNetwork() { private static ITrainableNeuralNet buildActorCriticNetwork() {
ComputationGraphConfiguration conf = buildBaseNetworkConfiguration() ComputationGraphConfiguration conf = buildBaseNetworkConfiguration()
.addLayer("value", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY) .addLayer("value", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
.nOut(1).build(), "goal", "corridor-rnn") .nOut(1).build(), "goal", "corridor-rnn")
.addLayer("softmax", new RnnOutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX) .addLayer("softmax", RnnOutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
.nOut(NUM_ACTIONS).build(), "goal", "corridor-rnn") .nOut(NUM_ACTIONS).build(), "goal", "corridor-rnn")
.setOutputs("value", "softmax") .setOutputs("value", "softmax")
.build(); .build();


@ -167,9 +167,9 @@ public class PolicyTest {
@Test @Test
public void testACPolicy() throws Exception { public void testACPolicy() throws Exception {
ComputationGraph cg = new ComputationGraph(NeuralNetConfiguration.builder().seed(444).graphBuilder().addInputs("input") ComputationGraph cg = new ComputationGraph(NeuralNetConfiguration.builder().seed(444).graphBuilder().addInputs("input")
.addLayer("output", new OutputLayer.Builder().nOut(1).lossFunction(LossFunctions.LossFunction.XENT).activation(Activation.SIGMOID).build(), "input").setOutputs("output").build()); .addLayer("output", OutputLayer.builder().nOut(1).lossFunction(LossFunctions.LossFunction.XENT).activation(Activation.SIGMOID).build(), "input").setOutputs("output").build());
MultiLayerNetwork mln = new MultiLayerNetwork(NeuralNetConfiguration.builder().seed(555).list() MultiLayerNetwork mln = new MultiLayerNetwork(NeuralNetConfiguration.builder().seed(555).list()
.layer(0, new OutputLayer.Builder().nOut(1).lossFunction(LossFunctions.LossFunction.XENT).activation(Activation.SIGMOID).build()).build()); .layer(0, OutputLayer.builder().nOut(1).lossFunction(LossFunctions.LossFunction.XENT).activation(Activation.SIGMOID).build()).build());
ACPolicy policy = new ACPolicy(new DummyAC(mln), true, Nd4j.getRandom()); ACPolicy policy = new ACPolicy(new DummyAC(mln), true, Nd4j.getRandom());


@ -95,13 +95,13 @@ public class App {
private static LayerConfiguration[] genLayers() { private static LayerConfiguration[] genLayers() {
return new LayerConfiguration[] { return new LayerConfiguration[] {
new DenseLayer.Builder().nIn(INPUT).nOut(X_DIM*Y_DIM*CHANNELS).weightInit(WeightInit.NORMAL).build(), DenseLayer.builder().nIn(INPUT).nOut(X_DIM*Y_DIM*CHANNELS).weightInit(WeightInit.NORMAL).build(),
new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(), ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
new DenseLayer.Builder().nIn(X_DIM*Y_DIM*CHANNELS).nOut(X_DIM*Y_DIM).build(), DenseLayer.builder().nIn(X_DIM*Y_DIM*CHANNELS).nOut(X_DIM*Y_DIM).build(),
new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(), ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
new DenseLayer.Builder().nIn(X_DIM*Y_DIM).nOut(X_DIM*Y_DIM).build(), DenseLayer.builder().nIn(X_DIM*Y_DIM).nOut(X_DIM*Y_DIM).build(),
new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(), ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
new DenseLayer.Builder().nIn(X_DIM*Y_DIM).nOut(X_DIM*Y_DIM*CHANNELS).activation(Activation.TANH) DenseLayer.builder().nIn(X_DIM*Y_DIM).nOut(X_DIM*Y_DIM*CHANNELS).activation(Activation.TANH)
.build() .build()
}; };
} }
@ -131,19 +131,19 @@ public class App {
private static LayerConfiguration[] disLayers() { private static LayerConfiguration[] disLayers() {
return new LayerConfiguration[]{ return new LayerConfiguration[]{
new DenseLayer.Builder().nOut(X_DIM*Y_DIM*CHANNELS*2).build(), //input is set by setInputType on the network DenseLayer.builder().nOut(X_DIM*Y_DIM*CHANNELS*2).build(), //input is set by setInputType on the network
new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(), ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
new DropoutLayer.Builder(1 - 0.5).build(), DropoutLayer.builder(1 - 0.5).build(),
new DenseLayer.Builder().nIn(X_DIM * Y_DIM*CHANNELS*2).nOut(X_DIM*Y_DIM*CHANNELS*4).build(), //HxBxC DenseLayer.builder().nIn(X_DIM * Y_DIM*CHANNELS*2).nOut(X_DIM*Y_DIM*CHANNELS*4).build(), //HxBxC
new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(), ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
new DropoutLayer.Builder(1 - 0.5).build(), DropoutLayer.builder(1 - 0.5).build(),
new DenseLayer.Builder().nIn(X_DIM*Y_DIM*CHANNELS*4).nOut(X_DIM*Y_DIM*CHANNELS).build(), DenseLayer.builder().nIn(X_DIM*Y_DIM*CHANNELS*4).nOut(X_DIM*Y_DIM*CHANNELS).build(),
new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(), ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
new DropoutLayer.Builder(1 - 0.5).build(), DropoutLayer.builder(1 - 0.5).build(),
new DenseLayer.Builder().nIn(X_DIM*Y_DIM*CHANNELS).nOut(X_DIM*Y_DIM).build(), DenseLayer.builder().nIn(X_DIM*Y_DIM*CHANNELS).nOut(X_DIM*Y_DIM).build(),
new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(), ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
new DropoutLayer.Builder(1 - 0.5).build(), DropoutLayer.builder(1 - 0.5).build(),
new OutputLayer.Builder(LossFunction.XENT).nIn(X_DIM*Y_DIM).nOut(1).activation(Activation.SIGMOID).build() OutputLayer.builder().lossFunction(LossFunction.XENT).nIn(X_DIM*Y_DIM).nOut(1).activation(Activation.SIGMOID).build()
}; };
} }
@ -242,6 +242,7 @@ public class App {
gan.addTrainingListeners(new ScoreToChartListener("gan")); gan.addTrainingListeners(new ScoreToChartListener("gan"));
//dis.setListeners(new ScoreToChartListener("dis")); //dis.setListeners(new ScoreToChartListener("dis"));
System.out.println(gan.toString());
gan.fit(Nd4j.rand(batchSize, CHANNELS, X_DIM, Y_DIM), Nd4j.zeros(batchSize, 1)); gan.fit(Nd4j.rand(batchSize, CHANNELS, X_DIM, Y_DIM), Nd4j.zeros(batchSize, 1));
//gan.fit(new DataSet(trainData.next().getFeatures(), Nd4j.zeros(batchSize, 1))); //gan.fit(new DataSet(trainData.next().getFeatures(), Nd4j.zeros(batchSize, 1)));


@ -101,21 +101,21 @@ public class MnistDCGANExample {
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
Supplier<MultiLayerNetwork> genSupplier = () -> { Supplier<MultiLayerNetwork> genSupplier = () -> {
return new MultiLayerNetwork(NeuralNetConfiguration.builder() return new MultiLayerNetwork(NeuralNetConfiguration.builder()
.layer(0, new DenseLayer.Builder().nIn(latentDim).nOut(width / 2 * height / 2 * 128) .layer(0, DenseLayer.builder().nIn(latentDim).nOut(width / 2 * height / 2 * 128)
.activation(Activation.LEAKYRELU).weightInit(WeightInit.NORMAL).build()) .activation(Activation.LEAKYRELU).weightInit(WeightInit.NORMAL).build())
.layer(1, new Convolution2D.Builder().nIn(128).nOut(128).kernelSize(5, 5) .layer(1, Convolution2D.builder().nIn(128).nOut(128).kernelSize(5, 5)
.convolutionMode(ConvolutionMode.Same).activation(Activation.LEAKYRELU).build()) .convolutionMode(ConvolutionMode.Same).activation(Activation.LEAKYRELU).build())
// Up-sampling to 28x28x256 // Up-sampling to 28x28x256
.layer(2, new Deconvolution2D.Builder().nIn(128).nOut(128).stride(2, 2) .layer(2, Deconvolution2D.builder().nIn(128).nOut(128).stride(2, 2)
.kernelSize(5, 5).convolutionMode(ConvolutionMode.Same) .kernelSize(5, 5).convolutionMode(ConvolutionMode.Same)
.activation(Activation.LEAKYRELU).build()) .activation(Activation.LEAKYRELU).build())
.layer(3, new Convolution2D.Builder().nIn(128).nOut(128).kernelSize(5, 5) .layer(3, Convolution2D.builder().nIn(128).nOut(128).kernelSize(5, 5)
.convolutionMode(ConvolutionMode.Same).activation(Activation.LEAKYRELU).build()) .convolutionMode(ConvolutionMode.Same).activation(Activation.LEAKYRELU).build())
.layer(4, new Convolution2D.Builder().nIn(128).nOut(128).kernelSize(5, 5) .layer(4, Convolution2D.builder().nIn(128).nOut(128).kernelSize(5, 5)
.convolutionMode(ConvolutionMode.Same).activation(Activation.LEAKYRELU).build()) .convolutionMode(ConvolutionMode.Same).activation(Activation.LEAKYRELU).build())
.layer(5, new Convolution2D.Builder().nIn(128).nOut(channels).kernelSize(7, 7) .layer(5, Convolution2D.builder().nIn(128).nOut(channels).kernelSize(7, 7)
.convolutionMode(ConvolutionMode.Same).activation(Activation.LEAKYRELU).build()) .convolutionMode(ConvolutionMode.Same).activation(Activation.LEAKYRELU).build())
.layer(6, new ActivationLayer.Builder().activation(Activation.TANH).build()) .layer(6, ActivationLayer.builder().activation(Activation.TANH).build())
.inputPreProcessor(1, .inputPreProcessor(1,
new FeedForwardToCnnPreProcessor(height / 2, width / 2, 128)) new FeedForwardToCnnPreProcessor(height / 2, width / 2, 128))
.inputPreProcessor(6, new CnnToFeedForwardPreProcessor(height, width, channels)) .inputPreProcessor(6, new CnnToFeedForwardPreProcessor(height, width, channels))
@ -129,17 +129,17 @@ public class MnistDCGANExample {
//.gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue) //.gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
//.gradientNormalizationThreshold(100.0) //.gradientNormalizationThreshold(100.0)
.layer(0, new Convolution2D.Builder().nIn(channels).nOut(64).kernelSize(3, 3) .layer(0, Convolution2D.builder().nIn(channels).nOut(64).kernelSize(3, 3)
.activation(Activation.LEAKYRELU).build()) .activation(Activation.LEAKYRELU).build())
.layer(1, new Convolution2D.Builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2) .layer(1, Convolution2D.builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2)
.activation(Activation.LEAKYRELU).build()) .activation(Activation.LEAKYRELU).build())
.layer(2, new Convolution2D.Builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2) .layer(2, Convolution2D.builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2)
.activation(Activation.LEAKYRELU).build()) .activation(Activation.LEAKYRELU).build())
.layer(3, new Convolution2D.Builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2) .layer(3, Convolution2D.builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2)
.activation(Activation.LEAKYRELU).build()) .activation(Activation.LEAKYRELU).build())
.layer(4, new DropoutLayer.Builder().dropOut(0.5).build()) .layer(4, DropoutLayer.builder().dropOut(0.5).build())
.layer(5, new DenseLayer.Builder().nIn(64 * 2 * 2).nOut(1).activation(Activation.SIGMOID).build()) .layer(5, DenseLayer.builder().nIn(64 * 2 * 2).nOut(1).activation(Activation.SIGMOID).build())
.layer(6, new LossLayer.Builder().lossFunction(LossFunctions.LossFunction.XENT).build()) .layer(6, LossLayer.builder().lossFunction(LossFunctions.LossFunction.XENT.getILossFunction()).build())
.inputPreProcessor(0, new FeedForwardToCnnPreProcessor(height, width, channels)) .inputPreProcessor(0, new FeedForwardToCnnPreProcessor(height, width, channels))
.inputPreProcessor(4, new CnnToFeedForwardPreProcessor(2, 2, 64)) .inputPreProcessor(4, new CnnToFeedForwardPreProcessor(2, 2, 64))
.inputType(InputType.convolutionalFlat(height, width, channels)) .inputType(InputType.convolutionalFlat(height, width, channels))
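One hunk here is not purely mechanical: LossLayer's lossFunction(...) setter takes an ILossFunction rather than the LossFunctions.LossFunction enum, hence the getILossFunction() conversion on the new side. A sketch of just that call:

    import org.deeplearning4j.nn.conf.layers.LossLayer;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    public class LossLayerSketch {
        public static void main(String[] args) {
            // The enum constant is converted to its ILossFunction instance.
            LossLayer loss = LossLayer.builder()
                    .lossFunction(LossFunctions.LossFunction.XENT.getILossFunction())
                    .build();
        }
    }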


@ -62,13 +62,13 @@ public class MnistSimpleGAN {
.gradientNormalization(GradientNormalization.RenormalizeL2PerLayer) .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
.gradientNormalizationThreshold(100) .gradientNormalizationThreshold(100)
.layer(new DenseLayer.Builder().nIn(100).nOut(256).weightInit(WeightInit.NORMAL).build()) .layer(DenseLayer.builder().nIn(100).nOut(256).weightInit(WeightInit.NORMAL).build())
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build()) .layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
.layer(new DenseLayer.Builder().nIn(256).nOut(512).build()) .layer(DenseLayer.builder().nIn(256).nOut(512).build())
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build()) .layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
.layer(new DenseLayer.Builder().nIn(512).nOut(1024).build()) .layer(DenseLayer.builder().nIn(512).nOut(1024).build())
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build()) .layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
.layer(new DenseLayer.Builder().nIn(1024).nOut(784).activation(Activation.TANH).build()) .layer(DenseLayer.builder().nIn(1024).nOut(784).activation(Activation.TANH).build())
.build(); .build();
return new MultiLayerNetwork(genConf); return new MultiLayerNetwork(genConf);
} }
@ -83,16 +83,16 @@ public class MnistSimpleGAN {
.gradientNormalization(GradientNormalization.RenormalizeL2PerLayer) .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
.gradientNormalizationThreshold(100) .gradientNormalizationThreshold(100)
.layer(new DenseLayer.Builder().nIn(784).nOut(1024).updater(updater).build()) .layer(DenseLayer.builder().nIn(784).nOut(1024).updater(updater).build())
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build()) .layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
.layer(new DropoutLayer.Builder(1 - 0.5).build()) .layer(DropoutLayer.builder(1 - 0.5).build())
.layer(new DenseLayer.Builder().nIn(1024).nOut(512).updater(updater).build()) .layer(DenseLayer.builder().nIn(1024).nOut(512).updater(updater).build())
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build()) .layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
.layer(new DropoutLayer.Builder(1 - 0.5).build()) .layer(DropoutLayer.builder(1 - 0.5).build())
.layer(new DenseLayer.Builder().nIn(512).nOut(256).updater(updater).build()) .layer(DenseLayer.builder().nIn(512).nOut(256).updater(updater).build())
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build()) .layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
.layer(new DropoutLayer.Builder(1 - 0.5).build()) .layer(DropoutLayer.builder(1 - 0.5).build())
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.XENT).nIn(256).nOut(1) .layer(OutputLayer.builder(LossFunctions.LossFunction.XENT).nIn(256).nOut(1)
.activation(Activation.SIGMOID).updater(updater).build()) .activation(Activation.SIGMOID).updater(updater).build())
.build(); .build();


@ -288,12 +288,12 @@ public class BrianTest extends BaseSparkSessionTest {
.seed(123) .seed(123)
.updater(new Nesterovs(0.1, 0.9)) .updater(new Nesterovs(0.1, 0.9))
.layer(0, new DenseLayer.Builder().nIn(5).nOut(20).weightInit(WeightInit.XAVIER) .layer(0, DenseLayer.builder().nIn(5).nOut(20).weightInit(WeightInit.XAVIER)
.activation(Activation.RELU).l2(0.001).build()) .activation(Activation.RELU).l2(0.001).build())
.layer(1, new DenseLayer.Builder().nIn(20).nOut(20).weightInit(WeightInit.XAVIER) .layer(1, DenseLayer.builder().nIn(20).nOut(20).weightInit(WeightInit.XAVIER)
.activation(Activation.RELU).build()) .activation(Activation.RELU).build())
//.layer(2, new DenseLayerConfiguration.Builder().nIn(9).nOut(9).weightInit(WeightInit.XAVIER).activation(Activation.RELU).build()) //.layer(2, new DenseLayerConfiguration.Builder().nIn(9).nOut(9).weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.XENT).nIn(20).nOut(4) .layer(2, OutputLayer.builder(LossFunctions.LossFunction.XENT).nIn(20).nOut(4)
.weightInit(WeightInit.XAVIER).activation(Activation.SIGMOID).build()) .weightInit(WeightInit.XAVIER).activation(Activation.SIGMOID).build())
.build(); .build();


@ -298,10 +298,10 @@ public class BrianTest2 /*extends BaseDL4JTest*/ {
.seed(123) .seed(123)
.updater(new Nesterovs(0.1, 0.9)) .updater(new Nesterovs(0.1, 0.9))
.layer(0, new DenseLayer.Builder().nIn(5).nOut(20).weightInit(WeightInit.XAVIER).activation(Activation.RELU).l2(0.001).build()) .layer(0, DenseLayer.builder().nIn(5).nOut(20).weightInit(WeightInit.XAVIER).activation(Activation.RELU).l2(0.001).build())
.layer(1, new DenseLayer.Builder().nIn(20).nOut(20).weightInit(WeightInit.XAVIER).activation(Activation.RELU).build()) .layer(1, DenseLayer.builder().nIn(20).nOut(20).weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
//.layer(2, new DenseLayerConfiguration.Builder().nIn(9).nOut(9).weightInit(WeightInit.XAVIER).activation(Activation.RELU).build()) //.layer(2, new DenseLayerConfiguration.Builder().nIn(9).nOut(9).weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.XENT).nIn(20).nOut(4).weightInit(WeightInit.XAVIER).activation(Activation.SIGMOID).build()) .layer(2, OutputLayer.builder(LossFunctions.LossFunction.XENT).nIn(20).nOut(4).weightInit(WeightInit.XAVIER).activation(Activation.SIGMOID).build())
.build(); .build();
//Define SparkNet //Define SparkNet


@ -87,15 +87,15 @@ public class TestServer {
.activation(Activation.RELU) .activation(Activation.RELU)
.l2(0) .l2(0)
//.layer(0, new ConvolutionLayer.Builder().nIn(1).kernelSize(1, 5).stride(1,1).padding(0,2).nOut(1).name("1st Filter").updater(new Adam.Builder().learningRate(0.2).build()).build()) //.layer(0, ConvolutionLayer.builder().nIn(1).kernelSize(1, 5).stride(1,1).padding(0,2).nOut(1).name("1st Filter").updater(new Adam.Builder().learningRate(0.2).build()).build())
//.layer(1, new ConvolutionLayer.Builder().nIn(1).kernelSize(1, 2).stride(1,2).padding(0,0).nOut(1).name("2nd Filter").updater(new Adam.Builder().learningRate(0.1).build()).build()) //.layer(1, ConvolutionLayer.builder().nIn(1).kernelSize(1, 2).stride(1,2).padding(0,0).nOut(1).name("2nd Filter").updater(new Adam.Builder().learningRate(0.1).build()).build())
// .layer(1, new DenseLayerConfiguration.Builder().nIn(10).nOut(64).activation(Activation.RELU).build()) // .layer(1, new DenseLayerConfiguration.Builder().nIn(10).nOut(64).activation(Activation.RELU).build())
.layer(0, new DenseLayer.Builder().nIn(10).nOut(100).activation(Activation.RELU).l2(0.003).build()) .layer(0, DenseLayer.builder().nIn(10).nOut(100).activation(Activation.RELU).l2(0.003).build())
.layer(1, new LSTM.Builder().nIn(100).nOut(100).activation(Activation.TANH).build()) .layer(1, LSTM.builder().nIn(100).nOut(100).activation(Activation.TANH).build())
.layer(2, new LSTM.Builder().nIn(100).nOut(100).activation(Activation.TANH).build()) .layer(2, LSTM.builder().nIn(100).nOut(100).activation(Activation.TANH).build())
.layer(3, new DenseLayer.Builder().nIn(100).nOut(16).activation(Activation.RELU).l2(0.001).build()) .layer(3, DenseLayer.builder().nIn(100).nOut(16).activation(Activation.RELU).l2(0.001).build())
.layer(4, new OutputLayer.Builder().nIn(16).nOut(numClasses) .layer(4, OutputLayer.builder().nIn(16).nOut(numClasses)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.lossFunction(new LossMCXENT()) .lossFunction(new LossMCXENT())
.build() .build()


@ -127,15 +127,15 @@ public class TestServer2 {
.activation(Activation.RELU) .activation(Activation.RELU)
.l2(0) .l2(0)
//.layer(0, new ConvolutionLayer.Builder().nIn(1).kernelSize(1, 5).stride(1,1).padding(0,2).nOut(1).name("1st Filter").updater(new Adam.Builder().learningRate(0.2).build()).build()) //.layer(0, ConvolutionLayer.builder().nIn(1).kernelSize(1, 5).stride(1,1).padding(0,2).nOut(1).name("1st Filter").updater(new Adam.Builder().learningRate(0.2).build()).build())
//.layer(1, new ConvolutionLayer.Builder().nIn(1).kernelSize(1, 2).stride(1,2).padding(0,0).nOut(1).name("2nd Filter").updater(new Adam.Builder().learningRate(0.1).build()).build()) //.layer(1, ConvolutionLayer.builder().nIn(1).kernelSize(1, 2).stride(1,2).padding(0,0).nOut(1).name("2nd Filter").updater(new Adam.Builder().learningRate(0.1).build()).build())
// .layer(1, new DenseLayerConfiguration.Builder().nIn(10).nOut(64).activation(Activation.RELU).build()) // .layer(1, new DenseLayerConfiguration.Builder().nIn(10).nOut(64).activation(Activation.RELU).build())
.layer(0, new DenseLayer.Builder().nIn(10).nOut(100).activation(Activation.RELU).l2(0.003).build()) .layer(0, DenseLayer.builder().nIn(10).nOut(100).activation(Activation.RELU).l2(0.003).build())
.layer(1, new LSTM.Builder().nIn(100).nOut(100).activation(Activation.TANH).build()) .layer(1, LSTM.builder().nIn(100).nOut(100).activation(Activation.TANH).build())
.layer(2, new LSTM.Builder().nIn(100).nOut(100).activation(Activation.TANH).build()) .layer(2, LSTM.builder().nIn(100).nOut(100).activation(Activation.TANH).build())
.layer(3, new DenseLayer.Builder().nIn(100).nOut(16).activation(Activation.RELU).l2(0.001).build()) .layer(3, DenseLayer.builder().nIn(100).nOut(16).activation(Activation.RELU).l2(0.001).build())
.layer(4, new OutputLayer.Builder().nIn(16).nOut(numClasses) .layer(4, OutputLayer.builder().nIn(16).nOut(numClasses)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.lossFunction(new LossMCXENT()) .lossFunction(new LossMCXENT())
.build() .build()


@ -832,7 +832,7 @@ public class IntegrationTestRunner {
if(m instanceof MultiLayerNetwork){ if(m instanceof MultiLayerNetwork){
paramPrefix = l.getIndex() + "_"; paramPrefix = l.getIndex() + "_";
} else { } else {
paramPrefix = l.getLayerConfiguration().getLayerName() + "_"; paramPrefix = l.getLayerConfiguration().getName() + "_";
} }
Map<String,INDArray> paramTable = l.getParamTable(); Map<String,INDArray> paramTable = l.getParamTable();
for(Map.Entry<String,INDArray> e : paramTable.entrySet()){ for(Map.Entry<String,INDArray> e : paramTable.entrySet()){
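Alongside the builder work, LayerConfiguration's accessor is renamed from getLayerName() to getName(), and callers that build parameter prefixes update accordingly. A sketch mirroring the hunk above:

    import org.deeplearning4j.nn.api.Layer;
    import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;

    public class LayerNameSketch {
        // Per-layer parameter prefix, using the renamed accessor.
        static String paramPrefix(Object model, Layer l) {
            if (model instanceof MultiLayerNetwork) {
                return l.getIndex() + "_";
            }
            return l.getLayerConfiguration().getName() + "_"; // was getLayerName()
        }
    }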


@ -88,11 +88,11 @@ public class CNN1DTestCases {
.convolutionMode(ConvolutionMode.Same)) .convolutionMode(ConvolutionMode.Same))
.graphBuilder() .graphBuilder()
.addInputs("in") .addInputs("in")
.layer("0", new Convolution1DLayer.Builder().nOut(32).activation(Activation.TANH).kernelSize(3).stride(1).build(), "in") .layer("0", Convolution1DLayer.builder().nOut(32).activation(Activation.TANH).kernelSize(3).stride(1).build(), "in")
.layer("1", new Subsampling1DLayer.Builder().kernelSize(2).stride(1).poolingType(SubsamplingLayer.PoolingType.MAX).build(), "0") .layer("1", Subsampling1DLayer.builder().kernelSize(2).stride(1).poolingType(SubsamplingLayer.PoolingType.MAX.toPoolingType()).build(), "0")
.layer("2", new Cropping1D(1), "1") .layer("2", Cropping1D.builder(1).build(), "1")
.layer("3", new ZeroPadding1DLayer(1), "2") .layer("3", ZeroPadding1DLayer.builder(1).build(), "2")
.layer("out", new RnnOutputLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).nOut(nOut).build(), "3") .layer("out", RnnOutputLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).nOut(nOut).build(), "3")
.setInputTypes(InputType.recurrent(nOut)) .setInputTypes(InputType.recurrent(nOut))
.setOutputs("out") .setOutputs("out")
.build(); .build();
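The 1D stack collects several of the smaller API shifts in one place: Subsampling1DLayer's builder now wants the conf-level pooling enum (hence the toPoolingType() conversion), and the cropping/padding layers gain builders of their own instead of bare constructors. A sketch with illustrative sizes (import paths assume the standard DL4J packages):

    import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.inputs.InputType;
    import org.deeplearning4j.nn.conf.layers.Convolution1DLayer;
    import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
    import org.deeplearning4j.nn.conf.layers.Subsampling1DLayer;
    import org.deeplearning4j.nn.conf.layers.SubsamplingLayer;
    import org.deeplearning4j.nn.conf.layers.ZeroPadding1DLayer;
    import org.deeplearning4j.nn.conf.layers.convolutional.Cropping1D;
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    public class Cnn1dSketch {
        public static void main(String[] args) {
            int nOut = 3; // illustrative
            ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
                    .graphBuilder()
                    .addInputs("in")
                    .layer("0", Convolution1DLayer.builder().nOut(32)
                            .activation(Activation.TANH).kernelSize(3).stride(1).build(), "in")
                    // Layer-local pooling enum converted to the shared conf enum:
                    .layer("1", Subsampling1DLayer.builder().kernelSize(2).stride(1)
                            .poolingType(SubsamplingLayer.PoolingType.MAX.toPoolingType())
                            .build(), "0")
                    // Cropping/padding now use builders instead of constructors:
                    .layer("2", Cropping1D.builder(1).build(), "1")
                    .layer("3", ZeroPadding1DLayer.builder(1).build(), "2")
                    .layer("out", RnnOutputLayer.builder().activation(Activation.SOFTMAX)
                            .lossFunction(LossFunctions.LossFunction.MCXENT)
                            .nOut(nOut).build(), "3")
                    .setInputTypes(InputType.recurrent(nOut))
                    .setOutputs("out")
                    .build();
        }
    }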


@ -105,30 +105,30 @@ public class CNN2DTestCases {
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.updater(new Nesterovs(0.01, 0.9)) .updater(new Nesterovs(0.01, 0.9))
.layer(0, new ConvolutionLayer.Builder(5, 5) .layer(0, ConvolutionLayer.builder(5, 5)
//nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied //nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
.nIn(nChannels) .nIn(nChannels)
.stride(1, 1) .stride(1, 1)
.nOut(20) .nOut(20)
.activation(Activation.IDENTITY) .activation(Activation.IDENTITY)
.build()) .build())
.layer(1, new SubsamplingLayer.Builder(PoolingType.MAX) .layer(1, SubsamplingLayer.builder(PoolingType.MAX)
.kernelSize(2, 2) .kernelSize(2, 2)
.stride(2, 2) .stride(2, 2)
.build()) .build())
.layer(2, new ConvolutionLayer.Builder(5, 5) .layer(2, ConvolutionLayer.builder(5, 5)
//Note that nIn need not be specified in later layers //Note that nIn need not be specified in later layers
.stride(1, 1) .stride(1, 1)
.nOut(50) .nOut(50)
.activation(Activation.IDENTITY) .activation(Activation.IDENTITY)
.build()) .build())
.layer(3, new SubsamplingLayer.Builder(PoolingType.MAX) .layer(3, SubsamplingLayer.builder(PoolingType.MAX)
.kernelSize(2, 2) .kernelSize(2, 2)
.stride(2, 2) .stride(2, 2)
.build()) .build())
.layer(4, new DenseLayer.Builder().activation(Activation.RELU) .layer(4, DenseLayer.builder().activation(Activation.RELU)
.nOut(500).build()) .nOut(500).build())
.layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD) .layer(5, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
.nOut(outputNum) .nOut(outputNum)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.build()) .build())
@ -221,7 +221,7 @@ public class CNN2DTestCases {
.seed(12345) .seed(12345)
.build()) .build())
.removeVertexKeepConnections("predictions") .removeVertexKeepConnections("predictions")
.addLayer("predictions", new OutputLayer.Builder() .addLayer("predictions", OutputLayer.builder()
.nIn(4096) .nIn(4096)
.nOut(200) //Tiny imagenet .nOut(200) //Tiny imagenet
.build(), "fc2") .build(), "fc2")
@ -321,7 +321,7 @@ public class CNN2DTestCases {
.removeVertexKeepConnections("conv2d_9") .removeVertexKeepConnections("conv2d_9")
.removeVertexAndConnections("outputs") .removeVertexAndConnections("outputs")
.addLayer("convolution2d_9", .addLayer("convolution2d_9",
new ConvolutionLayer.Builder(1,1) ConvolutionLayer.builder(1,1)
.nIn(1024) .nIn(1024)
.nOut(nBoxes * (5 + nClasses)) .nOut(nBoxes * (5 + nClasses))
.stride(1,1) .stride(1,1)
@ -417,32 +417,32 @@ public class CNN2DTestCases {
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.updater(new Nesterovs(0.01, 0.9)) .updater(new Nesterovs(0.01, 0.9))
.layer(0, new ConvolutionLayer.Builder(5, 5) .layer(0, ConvolutionLayer.builder(5, 5)
//nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied //nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
.nIn(1) .nIn(1)
.stride(1, 1) .stride(1, 1)
.nOut(20) .nOut(20)
.activation(Activation.IDENTITY) .activation(Activation.IDENTITY)
.build()) .build())
.layer(1, new SubsamplingLayer.Builder(PoolingType.MAX) .layer(1, SubsamplingLayer.builder(PoolingType.MAX)
.kernelSize(2, 2) .kernelSize(2, 2)
.stride(2, 2) .stride(2, 2)
.build()) .build())
.layer(2, new ConvolutionLayer.Builder(5, 5) .layer(2, ConvolutionLayer.builder(5, 5)
//Note that nIn need not be specified in later layers //Note that nIn need not be specified in later layers
.stride(1, 1) .stride(1, 1)
.nOut(50) .nOut(50)
.activation(Activation.IDENTITY) .activation(Activation.IDENTITY)
.dropOut(0.5) //**** Dropout on conv layer .dropOut(0.5) //**** Dropout on conv layer
.build()) .build())
.layer(3, new SubsamplingLayer.Builder(PoolingType.MAX) .layer(3, SubsamplingLayer.builder(PoolingType.MAX)
.kernelSize(2, 2) .kernelSize(2, 2)
.stride(2, 2) .stride(2, 2)
.build()) .build())
.layer(4, new DenseLayer.Builder().activation(Activation.RELU) .layer(4, DenseLayer.builder().activation(Activation.RELU)
.dropOut(0.5) //**** Dropout on dense layer .dropOut(0.5) //**** Dropout on dense layer
.nOut(500).build()) .nOut(500).build())
.layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD) .layer(5, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
.nOut(10) .nOut(10)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.build()) .build())


@ -82,18 +82,18 @@ public class CNN3DTestCases {
.updater(new Nesterovs(0.01, 0.9)) .updater(new Nesterovs(0.01, 0.9))
.convolutionMode(ConvolutionMode.Same) .convolutionMode(ConvolutionMode.Same)
.layer(new Convolution3D.Builder(3,3,3) .layer(Convolution3D.builder(3,3,3)
.dataFormat(Convolution3D.DataFormat.NCDHW) .dataFormat(Convolution3D.DataFormat.NCDHW)
.nIn(nChannels) .nIn(nChannels)
.stride(2, 2, 2) .stride(2, 2, 2)
.nOut(8) .nOut(8)
.activation(Activation.IDENTITY) .activation(Activation.IDENTITY)
.build()) .build())
.layer(new Subsampling3DLayer.Builder(PoolingType.MAX) .layer(Subsampling3DLayer.builder(PoolingType.MAX)
.kernelSize(2, 2, 2) .kernelSize(2, 2, 2)
.stride(2, 2, 2) .stride(2, 2, 2)
.build()) .build())
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD) .layer(OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
.nOut(outputNum) .nOut(outputNum)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.build()) .build())


@ -104,8 +104,8 @@ public class MLPTestCases {
.build())) .build()))
.l1(1e-3).l2(1e-3) .l1(1e-3).l2(1e-3)
.layer(new DenseLayer.Builder().activation(Activation.TANH).nOut(64).build()) .layer(DenseLayer.builder().activation(Activation.TANH).nOut(64).build())
.layer(new OutputLayer.Builder().nOut(10) .layer(OutputLayer.builder().nOut(10)
.lossFunction(LossFunctions.LossFunction.MCXENT) .lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.build()) .build())
@ -202,11 +202,11 @@ public class MLPTestCases {
.seed(seed) .seed(seed)
.updater(new Nesterovs(learningRate, 0.9)) .updater(new Nesterovs(learningRate, 0.9))
.layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes) .layer(0, DenseLayer.builder().nIn(numInputs).nOut(numHiddenNodes)
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.activation(Activation.RELU) .activation(Activation.RELU)
.build()) .build())
.layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD) .layer(1, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.nIn(numHiddenNodes).nOut(numOutputs).build()) .nIn(numHiddenNodes).nOut(numOutputs).build())


@ -119,11 +119,11 @@ public class RNNTestCases {
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.updater(new Adam(1e-3)) .updater(new Adam(1e-3))
.layer(0, new LSTM.Builder().nIn(iter.inputColumns()).nOut(lstmLayerSize) .layer(0, LSTM.builder().nIn(iter.inputColumns()).nOut(lstmLayerSize)
.activation(Activation.TANH).build()) .activation(Activation.TANH).build())
.layer(1, new LSTM.Builder().nIn(lstmLayerSize).nOut(lstmLayerSize) .layer(1, LSTM.builder().nIn(lstmLayerSize).nOut(lstmLayerSize)
.activation(Activation.TANH).build()) .activation(Activation.TANH).build())
.layer(2, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX) //MCXENT + softmax for classification .layer(2, RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX) //MCXENT + softmax for classification
.nIn(lstmLayerSize).nOut(nOut).build()) .nIn(lstmLayerSize).nOut(nOut).build())
.backpropType(BackpropType.TruncatedBPTT).tbpttFwdLength(tbpttLength).tbpttBackLength(tbpttLength) .backpropType(BackpropType.TruncatedBPTT).tbpttFwdLength(tbpttLength).tbpttBackLength(tbpttLength)
@ -201,9 +201,9 @@ public class RNNTestCases {
.updater(new Adam(5e-2)) .updater(new Adam(5e-2))
.l1(1e-3).l2(1e-3) .l1(1e-3).l2(1e-3)
.layer(0, new LSTM.Builder().activation(Activation.TANH).nOut(10).build()) .layer(0, LSTM.builder().activation(Activation.TANH).nOut(10).build())
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build()) .layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
.layer(new OutputLayer.Builder().nOut(6) .layer(OutputLayer.builder().nOut(6)
.lossFunction(LossFunctions.LossFunction.MCXENT) .lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.build()) .build())
@ -322,9 +322,9 @@ public class RNNTestCases {
.updater(new Adam(5e-2)) .updater(new Adam(5e-2))
.l1(1e-3).l2(1e-3) .l1(1e-3).l2(1e-3)
.layer(0, new Bidirectional(new LSTM.Builder().activation(Activation.TANH).nOut(10).build())) .layer(0, Bidirectional.builder(LSTM.builder().activation(Activation.TANH).nOut(10).build()))
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build()) .layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
.layer(new OutputLayer.Builder().nOut(6) .layer(OutputLayer.builder().nOut(6)
.lossFunction(LossFunctions.LossFunction.MCXENT) .lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.build()) .build())
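Wrapper layers follow suit: Bidirectional now exposes a builder(Layer) factory taking the wrapped layer, mirroring the old constructor argument. A sketch with the test's sizes; the hunk passes the un-built builder straight to layer(...), while this sketch builds it explicitly, which should be equivalent:

    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.layers.GlobalPoolingLayer;
    import org.deeplearning4j.nn.conf.layers.LSTM;
    import org.deeplearning4j.nn.conf.layers.OutputLayer;
    import org.deeplearning4j.nn.conf.layers.PoolingType;
    import org.deeplearning4j.nn.conf.layers.recurrent.Bidirectional;
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    public class BidirectionalSketch {
        public static void main(String[] args) {
            // Old: new Bidirectional(new LSTM.Builder()...build())
            NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                    .list()
                    .layer(0, Bidirectional.builder(
                            LSTM.builder().activation(Activation.TANH).nOut(10).build())
                            .build())
                    .layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
                    .layer(OutputLayer.builder().nOut(6)
                            .lossFunction(LossFunctions.LossFunction.MCXENT)
                            .activation(Activation.SOFTMAX)
                            .build())
                    .build();
        }
    }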


@ -79,7 +79,7 @@ public class UnsupervisedTestCases {
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.l2(1e-4) .l2(1e-4)
.layer(0, new VariationalAutoencoder.Builder() .layer(0, VariationalAutoencoder.builder()
.activation(Activation.TANH) .activation(Activation.TANH)
.encoderLayerSizes(256, 256) //2 encoder layers, each of size 256 .encoderLayerSizes(256, 256) //2 encoder layers, each of size 256
.decoderLayerSizes(256, 256) //2 decoder layers, each of size 256 .decoderLayerSizes(256, 256) //2 decoder layers, each of size 256


@ -42,9 +42,9 @@ public class RandomTests extends BaseDL4JTest {
final NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp()) final NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(10) .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(10)
.activation(Activation.TANH).build()) .activation(Activation.TANH).build())
.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder( .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder(
LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10) LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10)
.activation(Activation.SOFTMAX).build()) .activation(Activation.SOFTMAX).build())
.build(); .build();


@ -73,7 +73,7 @@ public class TestUtils {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
//Also check the NeuralNetConfiguration is serializable (required by Spark etc) //Also check the NeuralNetConfiguration is serializable (required by Spark etc.)
NeuralNetConfiguration conf = net.getNetConfiguration(); NeuralNetConfiguration conf = net.getNetConfiguration();
serializeDeserializeJava(conf); serializeDeserializeJava(conf);
@ -317,14 +317,14 @@ public class TestUtils {
for(Layer l : layers){ for(Layer l : layers){
//Don't use instanceof here - there are sub conv subclasses //Don't use instanceof here - there are sub conv subclasses
if(l.getClass() == ConvolutionLayer.class || l instanceof SubsamplingLayer || l instanceof BatchNormalization || l instanceof LSTM){ if(l.getClass() == ConvolutionLayer.class || l instanceof SubsamplingLayer || l instanceof BatchNormalization || l instanceof LSTM){
Preconditions.checkNotNull(l.getHelper(), l.getLayerConfiguration().getLayerName()); Preconditions.checkNotNull(l.getHelper(), l.getLayerConfiguration().getName());
} }
} }
} }
public static void assertHelpersAbsent(Layer[] layers) throws Exception { public static void assertHelpersAbsent(Layer[] layers) throws Exception {
for(Layer l : layers){ for(Layer l : layers){
Preconditions.checkState(l.getHelper() == null, l.getLayerConfiguration().getLayerName()); Preconditions.checkState(l.getHelper() == null, l.getLayerConfiguration().getName());
} }
} }
} }


@ -473,9 +473,7 @@ public class RecordReaderDataSetiteratorTest extends BaseDL4JTest {
public Pair<double[][],File> makeRandomCSV(String tempFile, int nLines, int nFeatures) throws IOException { public Pair<double[][],File> makeRandomCSV(String tempFile, int nLines, int nFeatures) throws IOException {
File temp = temporaryFolder; File temp = new File(temporaryFolder, "makeRandomCSV.csv");
temp.mkdirs();
temp.deleteOnExit();
Random rand = new Random(12345); Random rand = new Random(12345);
double[][] dArr = new double[nLines][nFeatures + 1]; double[][] dArr = new double[nLines][nFeatures + 1];
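Unrelated to the builder work, this hunk fixes test infrastructure: the old code reused the temp-folder handle itself as the CSV file (and called mkdirs() on it), whereas the fix creates a named file inside the folder. A sketch of the corrected pattern, assuming temporaryFolder is a JUnit 5 @TempDir-injected directory as is usual in these tests:

    import java.io.File;
    import org.junit.jupiter.api.io.TempDir;

    class TempDirSketch {
        @TempDir
        File temporaryFolder; // directory managed and cleaned up by JUnit

        File makeCsv() {
            // A distinct file inside the managed directory, not the directory itself.
            return new File(temporaryFolder, "makeRandomCSV.csv");
        }
    }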

View File

@@ -774,7 +774,7 @@ public class RecordReaderMultiDataSetIteratorTest extends BaseDL4JTest {
     @Test
     public void testExcludeStringColCSV() throws Exception {
-        File csvFile = temporaryFolder;
+        File csvFile = new File(temporaryFolder, "test.csv");
         StringBuilder sb = new StringBuilder();
         for(int i=1; i<=10; i++ ){
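Both hunks above stop treating the injected temp folder itself as a file and instead create named files inside it. A hedged sketch of the intended pattern with JUnit 5's @TempDir; the test class and file names are invented for illustration:

    import java.io.File;
    import java.nio.file.Files;
    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.io.TempDir;

    class TempDirUsageSketch {
        @TempDir
        File temporaryFolder; // JUnit 5 injects a fresh directory per test

        @Test
        void writeCsvIntoTempDir() throws Exception {
            // Write to a file inside the managed directory, not to the directory path itself
            File csvFile = new File(temporaryFolder, "test.csv");
            Files.writeString(csvFile.toPath(), "1,2,3\n");
        }
    }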

View File

@@ -41,7 +41,9 @@ import org.deeplearning4j.nn.weights.WeightInit;
 import org.deeplearning4j.optimize.listeners.CollectScoresIterationListener;
 import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
+import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 import org.nd4j.linalg.activations.Activation;
 import org.nd4j.linalg.api.ndarray.INDArray;
 import org.nd4j.linalg.dataset.DataSet;
@@ -170,11 +172,11 @@ public class DataSetIteratorTest extends BaseDL4JTest {
         NeuralNetConfiguration.NeuralNetConfigurationBuilder builder = NeuralNetConfiguration.builder().seed(seed)
                 .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
                 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
-                .layer(0, new ConvolutionLayer.Builder(5, 5).nIn(numChannels).nOut(6)
+                .layer(0, ConvolutionLayer.builder(5, 5).nIn(numChannels).nOut(6)
                         .weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
-                .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
+                .layer(1, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                         .stride(1, 1).build())
-                .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
+                .layer(2, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                         .nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
                         .build())
                 .inputType(InputType.convolutionalFlat(numRows, numColumns, numChannels));
@@ -207,7 +209,8 @@ public class DataSetIteratorTest extends BaseDL4JTest {
     }
-    @Test //@Ignore //Ignored for now - CIFAR iterator needs work - https://github.com/eclipse/deeplearning4j/issues/4673
+    @Test @Timeout(1200) @Disabled("Runs quite some time.")
+    //Ignored for now - CIFAR iterator needs work - https://github.com/eclipse/deeplearning4j/issues/4673
     public void testCifarModel() throws Exception {
         // Streaming
         runCifar(false);
@@ -230,11 +233,11 @@ public class DataSetIteratorTest extends BaseDL4JTest {
         NeuralNetConfiguration.NeuralNetConfigurationBuilder builder = NeuralNetConfiguration.builder().seed(seed)
                 .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
                 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
-                .layer(0, new ConvolutionLayer.Builder(5, 5).nIn(channels).nOut(6).weightInit(WeightInit.XAVIER)
+                .layer(0, ConvolutionLayer.builder(5, 5).nIn(channels).nOut(6).weightInit(WeightInit.XAVIER)
                         .activation(Activation.RELU).build())
-                .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
+                .layer(1, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                         .build())
-                .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
+                .layer(2, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                         .nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
                         .build())
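The @@ -207 hunk above turns a commented-out @Ignore into real JUnit 5 annotations. For reference, a minimal sketch of that annotation combination; the class name and body are invented:

    import org.junit.jupiter.api.Disabled;
    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.Timeout;

    class LongRunningTestSketch {
        @Test
        @Timeout(1200) // fails the test if it exceeds 1200 seconds (seconds are the default unit)
        @Disabled("Runs quite some time.") // skipped by default; remove to run
        void cifarStyleTest() {
            // long-running body elided
        }
    }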

View File

@@ -76,10 +76,12 @@ public class TestFileIterators extends BaseDL4JTest {
         assertEquals(exp, act);
         //Test multiple directories
         File f2a = new File(folder2, "f2a");
+        f2a.mkdirs();
         File f2b = new File(folder2, "f2b");
+        f2b.mkdirs();
         File f2c = new File(folder2, "f2c");
+        f2c.mkdirs();
         d1.save(new File(f2a, "d1.bin"));
         d2.save(new File(f2a, "d2.bin"));
         d3.save(new File(f2b, "d3.bin"));
@@ -188,8 +190,11 @@ public class TestFileIterators extends BaseDL4JTest {
         //Test multiple directories
         File f2a = new File(folder2, "2-f2a");
+        f2a.mkdirs();
         File f2b = new File(folder2, "2-f2b");
+        f2b.mkdirs();
         File f2c = new File(folder2, "2-f2C");
+        f2c.mkdirs();
         d1.save(new File(f2a, "d1.bin"));
         d2.save(new File(f2a, "d2.bin"));
         d3.save(new File(f2b, "d3.bin"));
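The added mkdirs() calls make sure each subdirectory exists before DataSet files are saved into it. A small defensive variant of the same idea; the helper name is invented:

    import java.io.File;

    class EnsureDirSketch {
        // Create the directory if needed; tolerate it already existing
        static File ensureDir(File parent, String child) {
            File dir = new File(parent, child);
            if (!dir.mkdirs() && !dir.isDirectory()) {
                throw new IllegalStateException("Could not create " + dir);
            }
            return dir;
        }
    }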

View File

@@ -135,8 +135,8 @@ public class TestEarlyStopping extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                 .seed(12345)
                 .updater(new Sgd(0.5)).weightInit(WeightInit.XAVIER)
-                .layer(new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build())
+                .layer(DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build())
-                .layer(new OutputLayer.Builder().nIn(4).nOut(3)
+                .layer(OutputLayer.builder().nIn(4).nOut(3)
                         .activation(Activation.SOFTMAX)
                         .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                 .build();
@@ -221,7 +221,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                 .updater(new Sgd(0.01)).weightInit(WeightInit.XAVIER)
-                .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+                .layer(0, OutputLayer.builder().nIn(4).nOut(3)
                         .activation(Activation.SOFTMAX)
                         .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                 .build();
@@ -250,7 +250,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                 .updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
-                .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+                .layer(0, OutputLayer.builder().nIn(4).nOut(3)
                         .activation(Activation.SOFTMAX)
                         .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                 .build();
@@ -300,7 +300,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
                 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                 .updater(new Sgd(5.0)) //Intentionally huge LR
                 .weightInit(WeightInit.XAVIER)
-                .layer(0, new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
+                .layer(0, OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
                         .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                 .build();
         MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -338,7 +338,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
                 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                 .updater(new Sgd(1e-6)).weightInit(WeightInit.XAVIER)
-                .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+                .layer(0, OutputLayer.builder().nIn(4).nOut(3)
                         .activation(Activation.SOFTMAX)
                         .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                 .build();
@@ -381,7 +381,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
                 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                 .updater(new Sgd(0.0)).weightInit(WeightInit.XAVIER)
-                .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+                .layer(0, OutputLayer.builder().nIn(4).nOut(3)
                         .activation(Activation.SOFTMAX)
                         .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                 .build();
@@ -421,11 +421,11 @@ public class TestEarlyStopping extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
                 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                 .updater(new Nesterovs(0.0,0.9))
-                .layer(0, new DenseLayer.Builder().nIn(1).nOut(20)
+                .layer(0, DenseLayer.builder().nIn(1).nOut(20)
                         .weightInit(WeightInit.XAVIER).activation(
                                 Activation.TANH)
                         .build())
-                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MSE).weightInit(WeightInit.XAVIER)
+                .layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).weightInit(WeightInit.XAVIER)
                         .activation(Activation.IDENTITY).weightInit(WeightInit.XAVIER).nIn(20).nOut(1)
                         .build())
                 .build();
@@ -468,7 +468,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                 .updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
-                .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+                .layer(0, OutputLayer.builder().nIn(4).nOut(3)
                         .activation(Activation.SOFTMAX)
                         .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                 .build();
@@ -506,7 +506,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                 .updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
-                .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+                .layer(0, OutputLayer.builder().nIn(4).nOut(3)
                         .activation(Activation.SOFTMAX)
                         .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                 .build();
@@ -570,8 +570,8 @@ public class TestEarlyStopping extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
-                .layer(new DenseLayer.Builder().nIn(784).nOut(32).build())
+                .layer(DenseLayer.builder().nIn(784).nOut(32).build())
-                .layer(new OutputLayer.Builder().nIn(32).nOut(784).activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.MSE).build())
+                .layer(OutputLayer.builder().nIn(32).nOut(784).activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.MSE).build())
                 .build();
         MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -613,7 +613,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
-                .layer(new AutoEncoder.Builder().nIn(784).nOut(32).build())
+                .layer(AutoEncoder.builder().nIn(784).nOut(32).build())
                 .build();
@@ -656,7 +656,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
-                .layer(new VariationalAutoencoder.Builder()
+                .layer(VariationalAutoencoder.builder()
                         .nIn(784).nOut(32)
                         .encoderLayerSizes(64)
                         .decoderLayerSizes(64)
@@ -701,7 +701,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
-                .layer(new VariationalAutoencoder.Builder()
+                .layer(VariationalAutoencoder.builder()
                         .nIn(784).nOut(32)
                         .encoderLayerSizes(64)
                         .decoderLayerSizes(64)
@@ -748,8 +748,8 @@ public class TestEarlyStopping extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
-                .layer(new DenseLayer.Builder().nIn(784).nOut(32).build())
+                .layer(DenseLayer.builder().nIn(784).nOut(32).build())
-                .layer(new OutputLayer.Builder().nIn(32).nOut(10).activation(Activation.SOFTMAX).build())
+                .layer(OutputLayer.builder().nIn(32).nOut(10).activation(Activation.SOFTMAX).build())
                 .build();
         MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -785,7 +785,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
     public void testEarlyStoppingListeners() {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                 .updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
-                .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+                .layer(0, OutputLayer.builder().nIn(4).nOut(3)
                         .activation(Activation.SOFTMAX)
                         .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                 .build();
@@ -868,14 +868,14 @@ public class TestEarlyStopping extends BaseDL4JTest {
                         .ClipElementWiseAbsoluteValue)
                 .gradientNormalizationThreshold(1.0)
-                .layer(0, new LSTM.Builder()
+                .layer(0, LSTM.builder()
                         .nIn(10)
                         .nOut(10)
                         .activation(Activation.TANH)
                         .gateActivationFunction(Activation.SIGMOID)
                         .dropOut(0.5)
                         .build())
-                .layer(1, new RnnOutputLayer.Builder()
+                .layer(1, RnnOutputLayer.builder()
                         .nIn(10)
                         .nOut(outputs)
                         .activation(Activation.SOFTMAX)
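Note the two migration shapes visible in this file: most call sites map `new X.Builder()` directly to `X.builder()`, but where the old builder took the loss function as a constructor argument, the new code sometimes moves it to the fluent `lossFunction(...)` setter instead (see the @@ -421 hunk above). A sketch of the two equivalent forms, assuming both entry points exist in this fork as the hunks suggest; imports and the wrapper class are illustrative:

    import org.deeplearning4j.nn.conf.layers.LayerConfiguration;
    import org.deeplearning4j.nn.conf.layers.OutputLayer;
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction;

    class LossFunctionMigrationSketch {
        public static void main(String[] args) {
            // Constructor-argument form, kept where builder(LossFunction) is available
            LayerConfiguration a = OutputLayer.builder(LossFunction.MSE)
                    .nIn(20).nOut(1).activation(Activation.IDENTITY).build();

            // Fluent-setter form, as used in the @@ -421 hunk above
            LayerConfiguration b = OutputLayer.builder()
                    .lossFunction(LossFunction.MSE)
                    .nIn(20).nOut(1).activation(Activation.IDENTITY).build();
        }
    }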

View File

@@ -79,7 +79,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
         ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
                 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                 .updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-                .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+                .addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
                         .activation(Activation.SOFTMAX)
                         .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                 .setOutputs("0").build();
@@ -124,7 +124,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
                 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                 .updater(new Sgd(5.0)) //Intentionally huge LR
                 .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-                .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
+                .addLayer("0", OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
                         .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                 .setOutputs("0").build();
         ComputationGraph net = new ComputationGraph(conf);
@@ -160,7 +160,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
                 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                 .updater(new Sgd(1e-6)).weightInit(WeightInit.XAVIER).graphBuilder()
                 .addInputs("in")
-                .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+                .addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
                         .activation(Activation.SOFTMAX)
                         .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                 .setOutputs("0").build();
@@ -202,7 +202,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
                 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                 .updater(new Sgd(0.0)).weightInit(WeightInit.XAVIER).graphBuilder()
                 .addInputs("in")
-                .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+                .addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
                         .activation(Activation.SOFTMAX)
                         .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                 .setOutputs("0").build();
@@ -236,7 +236,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
         ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
                 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                 .updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-                .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+                .addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
                         .activation(Activation.SOFTMAX)
                         .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                 .setOutputs("0").build();
@@ -300,8 +300,8 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
         ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
                 .graphBuilder()
                 .addInputs("in")
-                .layer("0", new DenseLayer.Builder().nIn(784).nOut(32).build(), "in")
+                .layer("0", DenseLayer.builder().nIn(784).nOut(32).build(), "in")
-                .layer("1", new OutputLayer.Builder().nIn(32).nOut(784).activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.MSE).build(), "0")
+                .layer("1", OutputLayer.builder().nIn(32).nOut(784).activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.MSE).build(), "0")
                 .setOutputs("1")
                 .build();
@@ -346,7 +346,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
         ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
                 .graphBuilder()
                 .addInputs("in")
-                .layer("0", new AutoEncoder.Builder().nIn(784).nOut(32).build(), "in")
+                .layer("0", AutoEncoder.builder().nIn(784).nOut(32).build(), "in")
                 .setOutputs("0")
                 .build();
@@ -391,7 +391,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
         ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
                 .graphBuilder()
                 .addInputs("in")
-                .layer("0", new VariationalAutoencoder.Builder()
+                .layer("0", VariationalAutoencoder.builder()
                         .nIn(784).nOut(32)
                         .encoderLayerSizes(64)
                         .decoderLayerSizes(64)
@@ -439,7 +439,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
                 .updater(new Adam(1e-5))
                 .graphBuilder()
                 .addInputs("in")
-                .layer("0", new VariationalAutoencoder.Builder()
+                .layer("0", VariationalAutoencoder.builder()
                         .nIn(784).nOut(32)
                         .encoderLayerSizes(64)
                         .decoderLayerSizes(64)
@@ -489,8 +489,8 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
         ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
                 .graphBuilder()
                 .addInputs("in")
-                .layer("0", new DenseLayer.Builder().nIn(784).nOut(32).build(), "in")
+                .layer("0", DenseLayer.builder().nIn(784).nOut(32).build(), "in")
-                .layer("1", new OutputLayer.Builder().nIn(32).nOut(10).activation(Activation.SOFTMAX).build(), "0")
+                .layer("1", OutputLayer.builder().nIn(32).nOut(10).activation(Activation.SOFTMAX).build(), "0")
                 .setOutputs("1")
                 .build();
@@ -530,7 +530,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
                 .updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
                 .graphBuilder()
                 .addInputs("in")
-                .layer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+                .layer("0", OutputLayer.builder().nIn(4).nOut(3)
                         .activation(Activation.SOFTMAX)
                         .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                 .setOutputs("0")

View File

@@ -73,9 +73,9 @@ public class EvalTest extends BaseDL4JTest {
                 .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).seed(42)
                 .updater(new Sgd(1e-6)).list()
-                .layer(0, new DenseLayer.Builder().nIn(4).nOut(2).activation(Activation.TANH)
+                .layer(0, DenseLayer.builder().nIn(4).nOut(2).activation(Activation.TANH)
                         .weightInit(WeightInit.XAVIER).build())
-                .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+                .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
                         LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3).weightInit(WeightInit.XAVIER)
                         .activation(Activation.SOFTMAX).build())
@@ -180,7 +180,7 @@ public class EvalTest extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
                 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(new Sgd(0.1))
                 .list()
-                .layer(0, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
+                .layer(0, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                         .activation(Activation.SOFTMAX).nIn(4).nOut(3).build())
                 .build();
@@ -300,8 +300,8 @@ public class EvalTest extends BaseDL4JTest {
                 .trainingWorkspaceMode(ws)
                 .inferenceWorkspaceMode(ws)
                 .list()
-                .layer(new LSTM.Builder().nIn(nIn).nOut(layerSize).build())
+                .layer(LSTM.builder().nIn(nIn).nOut(layerSize).build())
-                .layer(new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut)
+                .layer(RnnOutputLayer.builder().nIn(layerSize).nOut(nOut)
                         .activation(Activation.SOFTMAX)
                         .build())
                 .build();
@@ -311,8 +311,8 @@ public class EvalTest extends BaseDL4JTest {
                 .trainingWorkspaceMode(ws)
                 .inferenceWorkspaceMode(ws)
                 .list()
-                .layer(new LSTM.Builder().nIn(nIn).nOut(layerSize).build())
+                .layer(LSTM.builder().nIn(nIn).nOut(layerSize).build())
-                .layer(new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut)
+                .layer(RnnOutputLayer.builder().nIn(layerSize).nOut(nOut)
                         .activation(Activation.SOFTMAX).build())
                 .tbpttFwdLength(10).tbpttBackLength(10)
                 .backpropType(BackpropType.TruncatedBPTT)
@@ -377,8 +377,8 @@ public class EvalTest extends BaseDL4JTest {
                 .inferenceWorkspaceMode(ws)
                 .graphBuilder()
                 .addInputs("in")
-                .addLayer("0", new LSTM.Builder().nIn(nIn).nOut(layerSize).build(), "in")
+                .addLayer("0", LSTM.builder().nIn(nIn).nOut(layerSize).build(), "in")
-                .addLayer("1", new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut)
+                .addLayer("1", RnnOutputLayer.builder().nIn(layerSize).nOut(nOut)
                         .activation(Activation.SOFTMAX)
                         .build(), "0")
                 .setOutputs("1")
@@ -390,8 +390,8 @@ public class EvalTest extends BaseDL4JTest {
                 .inferenceWorkspaceMode(ws)
                 .graphBuilder()
                 .addInputs("in")
-                .addLayer("0", new LSTM.Builder().nIn(nIn).nOut(layerSize).build(), "in")
+                .addLayer("0", LSTM.builder().nIn(nIn).nOut(layerSize).build(), "in")
-                .addLayer("1", new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut)
+                .addLayer("1", RnnOutputLayer.builder().nIn(layerSize).nOut(nOut)
                         .activation(Activation.SOFTMAX)
                         .build(), "0")
                 .setOutputs("1")
@@ -457,8 +457,8 @@ public class EvalTest extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
                 .list()
-                .layer(0, new LSTM.Builder().activation(Activation.TANH).nIn(3).nOut(3).build())
+                .layer(0, LSTM.builder().activation(Activation.TANH).nIn(3).nOut(3).build())
-                .layer(1, new RnnOutputLayer.Builder().activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.XENT)
+                .layer(1, RnnOutputLayer.builder().activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.XENT)
                         .nIn(3).nOut(1).build())
                 .backpropType(BackpropType.TruncatedBPTT).tbpttFwdLength(10).tbpttBackLength(10)
                 .build();
@@ -477,9 +477,9 @@ public class EvalTest extends BaseDL4JTest {
                 .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).seed(42)
                 .updater(new Sgd(1e-6)).list()
-                .layer(0, new DenseLayer.Builder().nIn(4).nOut(2).activation(Activation.TANH)
+                .layer(0, DenseLayer.builder().nIn(4).nOut(2).activation(Activation.TANH)
                         .weightInit(WeightInit.XAVIER).build())
-                .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+                .layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder(
                         LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3).weightInit(WeightInit.XAVIER)
                         .activation(Activation.SOFTMAX).build())
                 .build();
@@ -507,8 +507,8 @@ public class EvalTest extends BaseDL4JTest {
                 .seed(12345)
                 .graphBuilder()
                 .addInputs("in")
-                .addLayer("out1", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
+                .addLayer("out1", OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
-                .addLayer("out2", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
+                .addLayer("out2", OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
                 .setOutputs("out1", "out2")
                 .build();
@@ -541,11 +541,11 @@ public class EvalTest extends BaseDL4JTest {
         ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
                 .graphBuilder()
                 .addInputs("in")
-                .layer("0", new EmbeddingSequenceLayer.Builder().nIn(10).nOut(10).build(), "in")
+                .layer("0", EmbeddingSequenceLayer.builder().nIn(10).nOut(10).build(), "in")
-                .layer("1", new LSTM.Builder().nIn(10).nOut(10).build(), "0")
+                .layer("1", LSTM.builder().nIn(10).nOut(10).build(), "0")
-                .layer("2", new LSTM.Builder().nIn(10).nOut(10).build(), "0")
+                .layer("2", LSTM.builder().nIn(10).nOut(10).build(), "0")
-                .layer("out1", new RnnOutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "1")
+                .layer("out1", RnnOutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "1")
-                .layer("out2", new RnnOutputLayer.Builder().nIn(10).nOut(20).activation(Activation.SOFTMAX).build(), "2")
+                .layer("out2", RnnOutputLayer.builder().nIn(10).nOut(20).activation(Activation.SOFTMAX).build(), "2")
                 .setOutputs("out1", "out2")
                 .build();
@@ -569,8 +569,8 @@ public class EvalTest extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                 .list()
-                .layer(new DenseLayer.Builder().nIn(4).nOut(10).build())
+                .layer(DenseLayer.builder().nIn(4).nOut(10).build())
-                .layer(new OutputLayer.Builder().nIn(10).nOut(3).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.RELU).build())
+                .layer(OutputLayer.builder().nIn(10).nOut(3).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.RELU).build())
                 .build();
         MultiLayerNetwork net = new MultiLayerNetwork(conf);

View File

@@ -48,8 +48,8 @@ public class EvaluationToolsTests extends BaseDL4JTest {
         DataSetIterator iter = new IrisDataSetIterator(150, 150);
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).list()
-                .layer(0, new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
+                .layer(0, DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
-                        new OutputLayer.Builder().nIn(4).nOut(2).activation(Activation.SOFTMAX)
+                        OutputLayer.builder().nIn(4).nOut(2).activation(Activation.SOFTMAX)
                                 .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                 .build();
         MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -90,8 +90,8 @@ public class EvaluationToolsTests extends BaseDL4JTest {
         DataSetIterator iter = new IrisDataSetIterator(150, 150);
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).list()
-                .layer(0, new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
+                .layer(0, DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
-                        new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
+                        OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
                                 .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                 .build();
         MultiLayerNetwork net = new MultiLayerNetwork(conf);

View File

@@ -84,8 +84,8 @@ public class ROCTest extends BaseDL4JTest {
         Nd4j.getRandom().setSeed(12345);
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).seed(12345)
                 .list()
-                .layer(0, new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
+                .layer(0, DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
-                        new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
+                        OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
                                 .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                 .build();
         MultiLayerNetwork net = new MultiLayerNetwork(conf);

View File

@@ -49,7 +49,7 @@ public class RegressionEvalTest extends BaseDL4JTest {
         //Basic sanity check
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.ZERO).list()
-                .layer(0, new OutputLayer.Builder().activation(Activation.TANH)
+                .layer(0, OutputLayer.builder().activation(Activation.TANH)
                         .lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(5).build())
                 .build();
@@ -71,7 +71,7 @@ public class RegressionEvalTest extends BaseDL4JTest {
         ComputationGraphConfiguration graphConf =
                 NeuralNetConfiguration.builder().weightInit(WeightInit.ZERO).graphBuilder()
-                        .addInputs("in").addLayer("0", new OutputLayer.Builder()
+                        .addInputs("in").addLayer("0", OutputLayer.builder()
                                 .lossFunction(LossFunctions.LossFunction.MSE)
                                 .activation(Activation.TANH).nIn(10).nOut(5).build(), "in")
                         .setOutputs("0").build();

View File

@@ -41,8 +41,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
     public static MultiLayerNetwork getDensePlusOutput(int nIn, int nOut) {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
-                .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(10).build())
+                .layer(0, DenseLayer.builder().nIn(nIn).nOut(10).build())
-                .layer(1, new OutputLayer.Builder().nIn(10).nOut(nOut).build()).build();
+                .layer(1, OutputLayer.builder().nIn(10).nOut(nOut).build()).build();
         MultiLayerNetwork net = new MultiLayerNetwork(conf);
         net.init();
@@ -52,8 +52,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
     public static MultiLayerNetwork getLSTMPlusRnnOutput(int nIn, int nOut) {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
-                .layer(0, new GravesLSTM.Builder().nIn(nIn).nOut(10).build())
+                .layer(0, GravesLSTM.builder().nIn(nIn).nOut(10).build())
-                .layer(1, new RnnOutputLayer.Builder().nIn(10).nOut(nOut).build()).build();
+                .layer(1, RnnOutputLayer.builder().nIn(10).nOut(nOut).build()).build();
         MultiLayerNetwork net = new MultiLayerNetwork(conf);
         net.init();
@@ -63,8 +63,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
     public static MultiLayerNetwork getCnnPlusOutputLayer(int depthIn, int inH, int inW, int nOut) {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
-                .layer(0, new ConvolutionLayer.Builder().nIn(depthIn).nOut(5).build())
+                .layer(0, ConvolutionLayer.builder().nIn(depthIn).nOut(5).build())
-                .layer(1, new OutputLayer.Builder().nOut(nOut).build())
+                .layer(1, OutputLayer.builder().nOut(nOut).build())
                 .inputType(InputType.convolutional(inH, inW, depthIn)).build();
         MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -90,8 +90,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
     public void testDenseNout0() {
         try {
             NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
-                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(0).build())
+                    .layer(0, DenseLayer.builder().nIn(10).nOut(0).build())
-                    .layer(1, new OutputLayer.Builder().nIn(10).nOut(10).build()).build();
+                    .layer(1, OutputLayer.builder().nIn(10).nOut(10).build()).build();
             MultiLayerNetwork net = new MultiLayerNetwork(conf);
             net.init();
@@ -147,8 +147,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
     public void testLSTMNOut0() {
         try {
             NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
-                    .layer(0, new GravesLSTM.Builder().nIn(10).nOut(0).build())
+                    .layer(0, GravesLSTM.builder().nIn(10).nOut(0).build())
-                    .layer(1, new RnnOutputLayer.Builder().nIn(10).nOut(10).build()).build();
+                    .layer(1, RnnOutputLayer.builder().nIn(10).nOut(10).build()).build();
             MultiLayerNetwork net = new MultiLayerNetwork(conf);
             net.init();
@@ -178,8 +178,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
     public void testConvolutionalNOut0() {
         try {
             NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
-                    .layer(0, new ConvolutionLayer.Builder().nIn(5).nOut(0).build())
+                    .layer(0, ConvolutionLayer.builder().nIn(5).nOut(0).build())
-                    .layer(1, new OutputLayer.Builder().nOut(10).build())
+                    .layer(1, OutputLayer.builder().nOut(10).build())
                     .inputType(InputType.convolutional(10, 10, 5)).build();
             MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -208,9 +208,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
         try {
             NeuralNetConfiguration conf = NeuralNetConfiguration.builder().convolutionMode(ConvolutionMode.Strict)
                     .list()
-                    .layer(0, new ConvolutionLayer.Builder().kernelSize(3, 2).stride(2, 2).padding(0, 0).nOut(5)
+                    .layer(0, ConvolutionLayer.builder().kernelSize(3, 2).stride(2, 2).padding(0, 0).nOut(5)
                             .build())
-                    .layer(1, new OutputLayer.Builder().nOut(10).build())
+                    .layer(1, OutputLayer.builder().nOut(10).build())
                     .inputType(InputType.convolutional(hIn, wIn, depthIn)).build();
             MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -234,9 +234,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
         int wIn = 10;
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
-                .layer(0, new ConvolutionLayer.Builder().kernelSize(7, 7).stride(1, 1).padding(0, 0).nOut(5)
+                .layer(0, ConvolutionLayer.builder().kernelSize(7, 7).stride(1, 1).padding(0, 0).nOut(5)
                        .build())
-                .layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build())
+                .layer(1, OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build())
                 .inputType(InputType.convolutional(hIn, wIn, depthIn)).build();
         MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -266,9 +266,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
         NeuralNetConfiguration conf =
                 NeuralNetConfiguration.builder().convolutionMode(ConvolutionMode.Strict).list()
-                        .layer(0, new ConvolutionLayer.Builder().kernelSize(3, 3).stride(2, 2)
+                        .layer(0, ConvolutionLayer.builder().kernelSize(3, 3).stride(2, 2)
                                 .padding(0, 0).nIn(depthIn).nOut(5).build())
-                        .layer(1, new OutputLayer.Builder().nIn(5 * 4 * 4).nOut(10).activation(Activation.SOFTMAX).build())
+                        .layer(1, OutputLayer.builder().nIn(5 * 4 * 4).nOut(10).activation(Activation.SOFTMAX).build())
                         .inputPreProcessor(1, new CnnToFeedForwardPreProcessor()).build();
         MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -299,9 +299,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
         try {
             NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
-                    .layer(0, new ConvolutionLayer.Builder().kernelSize(2, 3).stride(2, 2).padding(0, 0).nOut(5)
+                    .layer(0, ConvolutionLayer.builder().kernelSize(2, 3).stride(2, 2).padding(0, 0).nOut(5)
                            .build())
-                    .layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build())
+                    .layer(1, OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build())
                     .inputType(InputType.convolutional(hIn, wIn, depthIn)).build();
         } catch (Exception e) {
             fail("Did not expect exception with default (truncate)");
@@ -310,9 +310,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
         try {
             NeuralNetConfiguration conf = NeuralNetConfiguration.builder().convolutionMode(ConvolutionMode.Strict)
                     .list()
-                    .layer(0, new ConvolutionLayer.Builder().kernelSize(2, 3).stride(2, 2).padding(0, 0).nOut(5)
+                    .layer(0, ConvolutionLayer.builder().kernelSize(2, 3).stride(2, 2).padding(0, 0).nOut(5)
                            .build())
-                    .layer(1, new OutputLayer.Builder().nOut(10).build())
+                    .layer(1, OutputLayer.builder().nOut(10).build())
                     .inputType(InputType.convolutional(hIn, wIn, depthIn)).build();
             MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -339,9 +339,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
         try {
            NeuralNetConfiguration conf = NeuralNetConfiguration.builder().convolutionMode(ConvolutionMode.Strict)
                     .list()
-                    .layer(0, new SubsamplingLayer.Builder().kernelSize(2, 3).stride(2, 2).padding(0, 0)
+                    .layer(0, SubsamplingLayer.builder().kernelSize(2, 3).stride(2, 2).padding(0, 0)
                            .build())
-                    .layer(1, new OutputLayer.Builder().nOut(10).build())
+                    .layer(1, OutputLayer.builder().nOut(10).build())
                     .inputType(InputType.convolutional(hIn, wIn, depthIn)).build();
             MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -358,84 +358,84 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
     @Test
     public void testCnnInvalidKernel() {
         assertThrows(IllegalStateException.class, () -> {
-            new ConvolutionLayer.Builder().kernelSize(3, 0).build();
+            ConvolutionLayer.builder().kernelSize(3, 0).build();
         });
     }
     @Test
     public void testCnnInvalidKernel2() {
         assertThrows(IllegalStateException.class, () -> {
-            new ConvolutionLayer.Builder().kernelSize(2, 2, 2).build();
+            ConvolutionLayer.builder().kernelSize(2, 2, 2).build();
         });
     }
     @Test
     public void testCnnInvalidStride() {
         assertThrows(IllegalStateException.class, () -> {
-            new ConvolutionLayer.Builder().kernelSize(3, 3).stride(0, 1).build();
+            ConvolutionLayer.builder().kernelSize(3, 3).stride(0, 1).build();
        });
     }
     @Test
     public void testCnnInvalidStride2() {
         assertThrows(IllegalArgumentException.class, () -> {
-            new ConvolutionLayer.Builder().kernelSize(3, 3).stride(1).build();
+            ConvolutionLayer.builder().kernelSize(3, 3).stride(1).build();
         });
     }
     @Test
     public void testCnnInvalidPadding() {
         assertThrows(IllegalArgumentException.class, () -> {
-            new ConvolutionLayer.Builder().kernelSize(3, 3).stride(1, 1).padding(-1, 0).build();
+            ConvolutionLayer.builder().kernelSize(3, 3).stride(1, 1).padding(-1, 0).build();
         });
     }
     @Test
     public void testCnnInvalidPadding2() {
         assertThrows(IllegalArgumentException.class, () -> {
-            new ConvolutionLayer.Builder().kernelSize(3, 3).stride(1, 1).padding(0, 0, 0).build();
+            ConvolutionLayer.builder().kernelSize(3, 3).stride(1, 1).padding(0, 0, 0).build();
         });
     }
     @Test
     public void testSubsamplingInvalidKernel() {
         assertThrows(IllegalStateException.class, () -> {
-            new SubsamplingLayer.Builder().kernelSize(3, 0).build();
+            SubsamplingLayer.builder().kernelSize(3, 0).build();
         });
     }
     @Test
     public void testSubsamplingInvalidKernel2() {
         assertThrows(IllegalArgumentException.class, () -> {
-            new SubsamplingLayer.Builder().kernelSize(2).build();
+            SubsamplingLayer.builder().kernelSize(2).build();
         });
     }
     @Test
     public void testSubsamplingInvalidStride() {
         assertThrows(IllegalStateException.class, () -> {
-            new SubsamplingLayer.Builder().kernelSize(3, 3).stride(0, 1).build();
+            SubsamplingLayer.builder().kernelSize(3, 3).stride(0, 1).build();
         });
     }
     @Test
     public void testSubsamplingInvalidStride2() {
         assertThrows(RuntimeException.class, () -> {
-            new SubsamplingLayer.Builder().kernelSize(3, 3).stride(1, 1, 1).build();
+            SubsamplingLayer.builder().kernelSize(3, 3).stride(1, 1, 1).build();
         });
     }
     @Test
     public void testSubsamplingInvalidPadding() {
         assertThrows(IllegalArgumentException.class, () -> {
-            new SubsamplingLayer.Builder().kernelSize(3, 3).stride(1, 1).padding(-1, 0).build();
+            SubsamplingLayer.builder().kernelSize(3, 3).stride(1, 1).padding(-1, 0).build();
         });
     }
     @Test
     public void testSubsamplingInvalidPadding2() {
         assertThrows(RuntimeException.class, () -> {
-            new SubsamplingLayer.Builder().kernelSize(3, 3).stride(1, 1).padding(0).build();
+            SubsamplingLayer.builder().kernelSize(3, 3).stride(1, 1).padding(0).build();
         });
     }
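These tests also show that input validation still happens inside build(): an invalid kernel, stride, or padding throws at configuration time, now reached through the static builder() entry point instead of the inner Builder constructor. A compact sketch of that check, with JUnit 5 assumed as elsewhere in these tests:

    import static org.junit.jupiter.api.Assertions.assertThrows;

    import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
    import org.junit.jupiter.api.Test;

    class BuilderValidationSketch {
        @Test
        void invalidKernelStillThrows() {
            // A zero kernel dimension is rejected when the configuration is built
            assertThrows(IllegalStateException.class,
                    () -> ConvolutionLayer.builder().kernelSize(3, 0).build());
        }
    }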

View File

@ -43,8 +43,8 @@ public class TestInvalidInput extends BaseDL4JTest {
@Test @Test
public void testInputNinMismatchDense() { public void testInputNinMismatchDense() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list() NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build()) .layer(0, DenseLayer.builder().nIn(10).nOut(10).build())
.layer(1, new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build(); .layer(1, OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@ -64,8 +64,8 @@ public class TestInvalidInput extends BaseDL4JTest {
@Test @Test
public void testLabelsNOutMismatchOutputLayer() { public void testLabelsNOutMismatchOutputLayer() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list() NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build()) .layer(0, DenseLayer.builder().nIn(10).nOut(10).build())
.layer(1, new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build(); .layer(1, OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@ -85,8 +85,8 @@ public class TestInvalidInput extends BaseDL4JTest {
@Test @Test
public void testLabelsNOutMismatchRnnOutputLayer() { public void testLabelsNOutMismatchRnnOutputLayer() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list() NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new LSTM.Builder().nIn(5).nOut(5).build()) .layer(0, LSTM.builder().nIn(5).nOut(5).build())
.layer(1, new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build(); .layer(1, RnnOutputLayer.builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@ -112,8 +112,8 @@ public class TestInvalidInput extends BaseDL4JTest {
int d = 3; int d = 3;
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list() NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new ConvolutionLayer.Builder().nIn(d).nOut(5).build()) .layer(0, ConvolutionLayer.builder().nIn(d).nOut(5).build())
.layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build()) .layer(1, OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build())
.inputType(InputType.convolutional(h, w, d)).build(); .inputType(InputType.convolutional(h, w, d)).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -139,8 +139,8 @@ public class TestInvalidInput extends BaseDL4JTest {
int d = 3; int d = 3;
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list() NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new ConvolutionLayer.Builder().nIn(d).nOut(5).build()) .layer(0, ConvolutionLayer.builder().nIn(d).nOut(5).build())
.layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build()) .layer(1, OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build())
.inputType(InputType.convolutional(h, w, d)).build(); .inputType(InputType.convolutional(h, w, d)).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -165,8 +165,8 @@ public class TestInvalidInput extends BaseDL4JTest {
int d = 3; int d = 3;
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list() NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new SubsamplingLayer.Builder().kernelSize(2, 2).build()) .layer(0, SubsamplingLayer.builder().kernelSize(2, 2).build())
.layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build()) .layer(1, OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build())
.inputType(InputType.convolutional(h, w, d)).build(); .inputType(InputType.convolutional(h, w, d)).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -188,8 +188,8 @@ public class TestInvalidInput extends BaseDL4JTest {
public void testInputNinMismatchLSTM() { public void testInputNinMismatchLSTM() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list() NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new GravesLSTM.Builder().nIn(5).nOut(5).build()) .layer(0, GravesLSTM.builder().nIn(5).nOut(5).build())
.layer(1, new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build(); .layer(1, RnnOutputLayer.builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@ -209,8 +209,8 @@ public class TestInvalidInput extends BaseDL4JTest {
public void testInputNinMismatchBidirectionalLSTM() { public void testInputNinMismatchBidirectionalLSTM() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list() NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new GravesBidirectionalLSTM.Builder().nIn(5).nOut(5).build()) .layer(0, GravesBidirectionalLSTM.builder().nIn(5).nOut(5).build())
.layer(1, new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build(); .layer(1, RnnOutputLayer.builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@ -231,8 +231,8 @@ public class TestInvalidInput extends BaseDL4JTest {
public void testInputNinMismatchEmbeddingLayer() { public void testInputNinMismatchEmbeddingLayer() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list() NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new EmbeddingLayer.Builder().nIn(10).nOut(10).build()) .layer(0, EmbeddingLayer.builder().nIn(10).nOut(10).build())
.layer(1, new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build(); .layer(1, OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@ -259,13 +259,13 @@ public class TestInvalidInput extends BaseDL4JTest {
LayerConfiguration l; LayerConfiguration l;
switch (layerType){ switch (layerType){
case "simple": case "simple":
l = new SimpleRnn.Builder().nIn(5).nOut(5).build(); l = SimpleRnn.builder().nIn(5).nOut(5).build();
break; break;
case "lstm": case "lstm":
l = new LSTM.Builder().nIn(5).nOut(5).build(); l = LSTM.builder().nIn(5).nOut(5).build();
break; break;
case "graves": case "graves":
l = new GravesLSTM.Builder().nIn(5).nOut(5).build(); l = GravesLSTM.builder().nIn(5).nOut(5).build();
break; break;
default: default:
throw new RuntimeException(); throw new RuntimeException();
@ -273,7 +273,7 @@ public class TestInvalidInput extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list() NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(l) .layer(l)
.layer(new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build(); .layer(RnnOutputLayer.builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
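The hunks above all apply the same mechanical rewrite: every new <Layer>.Builder() instantiation of an inner builder class becomes a call to the static <Layer>.builder() factory generated for the @SuperBuilder layer configurations. A minimal before/after sketch of the pattern (sizes are illustrative and the usual DL4J imports from these tests are assumed):

    // Before: inner Builder classes instantiated by hand
    NeuralNetConfiguration before = NeuralNetConfiguration.builder().list()
            .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
            .layer(1, new OutputLayer.Builder().nIn(3).nOut(3).activation(Activation.SOFTMAX).build())
            .build();

    // After: static factory methods generated by @SuperBuilder
    NeuralNetConfiguration after = NeuralNetConfiguration.builder().list()
            .layer(0, DenseLayer.builder().nIn(4).nOut(3).build())
            .layer(1, OutputLayer.builder().nIn(3).nOut(3).activation(Activation.SOFTMAX).build())
            .build();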
View File
@ -88,14 +88,13 @@ public class AttentionLayerTest extends BaseDL4JTest {
.activation(Activation.TANH) .activation(Activation.TANH)
.updater(new NoOp()) .updater(new NoOp())
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.list() .layer(LSTM.builder().nOut(layerSize).build())
.layer(new LSTM.Builder().nOut(layerSize).build())
.layer( projectInput ? .layer( projectInput ?
new SelfAttentionLayer.Builder().nOut(4).nHeads(2).projectInput(true).build() new SelfAttentionLayer.Builder().nOut(4).nHeads(2).projectInput(true).build()
: new SelfAttentionLayer.Builder().nHeads(1).projectInput(false).build() : new SelfAttentionLayer.Builder().nHeads(1).projectInput(false).build()
) )
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build()) .layer(GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build())
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX) .layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build()) .lossFunction(LossFunctions.LossFunction.MCXENT).build())
.inputType(InputType.recurrent(nIn)) .inputType(InputType.recurrent(nIn))
.build(); .build();
@ -150,13 +149,13 @@ public class AttentionLayerTest extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.list() .list()
.layer(new LSTM.Builder().nOut(layerSize).build()) .layer(LSTM.builder().nOut(layerSize).build())
.layer( projectInput ? .layer( projectInput ?
new LearnedSelfAttentionLayer.Builder().nOut(4).nHeads(2).nQueries(numQueries).projectInput(true).build() new LearnedSelfAttentionLayer.Builder().nOut(4).nHeads(2).nQueries(numQueries).projectInput(true).build()
: new LearnedSelfAttentionLayer.Builder().nHeads(1).nQueries(numQueries).projectInput(false).build() : new LearnedSelfAttentionLayer.Builder().nHeads(1).nQueries(numQueries).projectInput(false).build()
) )
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build()) .layer(GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build())
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX) .layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build()) .lossFunction(LossFunctions.LossFunction.MCXENT).build())
.inputType(InputType.recurrent(nIn)) .inputType(InputType.recurrent(nIn))
.build(); .build();
@ -190,13 +189,13 @@ public class AttentionLayerTest extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.list() .list()
.layer(new LSTM.Builder().nOut(layerSize).build()) .layer(LSTM.builder().nOut(layerSize).build())
.layer( projectInput ? .layer( projectInput ?
new LearnedSelfAttentionLayer.Builder().nOut(4).nHeads(2).nQueries(numQueries).projectInput(true).build() new LearnedSelfAttentionLayer.Builder().nOut(4).nHeads(2).nQueries(numQueries).projectInput(true).build()
: new LearnedSelfAttentionLayer.Builder().nHeads(1).nQueries(numQueries).projectInput(false).build() : new LearnedSelfAttentionLayer.Builder().nHeads(1).nQueries(numQueries).projectInput(false).build()
) )
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build()) .layer(GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build())
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX) .layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build()) .lossFunction(LossFunctions.LossFunction.MCXENT).build())
.inputType(InputType.recurrent(nIn)) .inputType(InputType.recurrent(nIn))
.build(); .build();
@ -245,10 +244,10 @@ public class AttentionLayerTest extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.list() .list()
.layer(new LSTM.Builder().nOut(layerSize).build()) .layer(LSTM.builder().nOut(layerSize).build())
.layer(new RecurrentAttentionLayer.Builder().nIn(layerSize).nOut(layerSize).nHeads(1).projectInput(false).hasBias(false).build()) .layer(new RecurrentAttentionLayer.Builder().nIn(layerSize).nOut(layerSize).nHeads(1).projectInput(false).hasBias(false).build())
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build()) .layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX) .layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build()) .lossFunction(LossFunctions.LossFunction.MCXENT).build())
.inputType(InputType.recurrent(nIn)) .inputType(InputType.recurrent(nIn))
.build(); .build();
@ -308,10 +307,10 @@ public class AttentionLayerTest extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.list() .list()
.layer(new LSTM.Builder().nOut(layerSize).build()) .layer(LSTM.builder().nOut(layerSize).build())
.layer(new RecurrentAttentionLayer.Builder().nIn(layerSize).nOut(layerSize).nHeads(1).projectInput(false).hasBias(false).build()) .layer(new RecurrentAttentionLayer.Builder().nIn(layerSize).nOut(layerSize).nHeads(1).projectInput(false).hasBias(false).build())
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build()) .layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX) .layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build()) .lossFunction(LossFunctions.LossFunction.MCXENT).build())
.inputType(InputType.recurrent(nIn)) .inputType(InputType.recurrent(nIn))
.build(); .build();
@ -367,15 +366,15 @@ public class AttentionLayerTest extends BaseDL4JTest {
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.graphBuilder() .graphBuilder()
.addInputs("input") .addInputs("input")
.addLayer("rnnKeys", new SimpleRnn.Builder().nOut(layerSize).build(), "input") .addLayer("rnnKeys", SimpleRnn.builder().nOut(layerSize).build(), "input")
.addLayer("rnnQueries", new SimpleRnn.Builder().nOut(layerSize).build(), "input") .addLayer("rnnQueries", SimpleRnn.builder().nOut(layerSize).build(), "input")
.addLayer("rnnValues", new SimpleRnn.Builder().nOut(layerSize).build(), "input") .addLayer("rnnValues", SimpleRnn.builder().nOut(layerSize).build(), "input")
.addVertex("attention", .addVertex("attention",
projectInput ? projectInput ?
new AttentionVertex.Builder().nOut(4).nHeads(2).projectInput(true).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build() new AttentionVertex.Builder().nOut(4).nHeads(2).projectInput(true).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build()
: new AttentionVertex.Builder().nOut(3).nHeads(1).projectInput(false).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build(), "rnnQueries", "rnnKeys", "rnnValues") : new AttentionVertex.Builder().nOut(3).nHeads(1).projectInput(false).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build(), "rnnQueries", "rnnKeys", "rnnValues")
.addLayer("pooling", new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build(), "attention") .addLayer("pooling", GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build(), "attention")
.addLayer("output", new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling") .addLayer("output", OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
.setOutputs("output") .setOutputs("output")
.setInputTypes(InputType.recurrent(nIn)) .setInputTypes(InputType.recurrent(nIn))
.build(); .build();
@ -431,13 +430,13 @@ public class AttentionLayerTest extends BaseDL4JTest {
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.graphBuilder() .graphBuilder()
.addInputs("input") .addInputs("input")
.addLayer("rnn", new SimpleRnn.Builder().activation(Activation.TANH).nOut(layerSize).build(), "input") .addLayer("rnn", SimpleRnn.builder().activation(Activation.TANH).nOut(layerSize).build(), "input")
.addVertex("attention", .addVertex("attention",
projectInput ? projectInput ?
new AttentionVertex.Builder().nOut(4).nHeads(2).projectInput(true).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build() new AttentionVertex.Builder().nOut(4).nHeads(2).projectInput(true).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build()
: new AttentionVertex.Builder().nOut(4).nHeads(1).projectInput(false).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build(), "rnn", "rnn", "rnn") : new AttentionVertex.Builder().nOut(4).nHeads(1).projectInput(false).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build(), "rnn", "rnn", "rnn")
.addLayer("pooling", new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build(), "attention") .addLayer("pooling", GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build(), "attention")
.addLayer("output", new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling") .addLayer("output", OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
.setOutputs("output") .setOutputs("output")
.setInputTypes(InputType.recurrent(nIn)) .setInputTypes(InputType.recurrent(nIn))
.build(); .build();
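In these attention-layer hunks only LSTM, SimpleRnn, GlobalPoolingLayer and the output layers move to the builder() factories; SelfAttentionLayer, LearnedSelfAttentionLayer, RecurrentAttentionLayer and AttentionVertex are still constructed through their inner Builder classes, so both styles coexist inside a single configuration. A sketch of the mixed form, with illustrative sizes and assuming the same imports as the surrounding tests:

    NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
            .list()
            .layer(LSTM.builder().nOut(8).build())                                         // migrated factory style
            .layer(new SelfAttentionLayer.Builder().nHeads(1).projectInput(false).build()) // legacy inner Builder
            .layer(GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build())
            .layer(OutputLayer.builder().nOut(3).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .inputType(InputType.recurrent(4))
            .build();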
View File
@ -78,11 +78,11 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.seed(12345L) .seed(12345L)
.dist(new NormalDistribution(0, 1)).list() .dist(new NormalDistribution(0, 1)).list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(3) .layer(0, DenseLayer.builder().nIn(4).nOut(3)
.activation(Activation.IDENTITY).build()) .activation(Activation.IDENTITY).build())
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).nOut(3).build()) .layer(1, BatchNormalization.builder().useLogStd(useLogStd).nOut(3).build())
.layer(2, new ActivationLayer.Builder().activation(Activation.TANH).build()) .layer(2, ActivationLayer.builder().activation(Activation.TANH).build())
.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(3).nOut(3).build()); .activation(Activation.SOFTMAX).nIn(3).nOut(3).build());
MultiLayerNetwork mln = new MultiLayerNetwork(builder.build()); MultiLayerNetwork mln = new MultiLayerNetwork(builder.build());
@ -122,11 +122,11 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.updater(new NoOp()).seed(12345L) .updater(new NoOp()).seed(12345L)
.dist(new NormalDistribution(0, 2)).list() .dist(new NormalDistribution(0, 2)).list()
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nIn(depth).nOut(2) .layer(0, ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nIn(depth).nOut(2)
.activation(Activation.IDENTITY).build()) .activation(Activation.IDENTITY).build())
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).build()) .layer(1, BatchNormalization.builder().useLogStd(useLogStd).build())
.layer(2, new ActivationLayer.Builder().activation(Activation.TANH).build()) .layer(2, ActivationLayer.builder().activation(Activation.TANH).build())
.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(nOut).build()) .activation(Activation.SOFTMAX).nOut(nOut).build())
.inputType(InputType.convolutional(hw, hw, depth)); .inputType(InputType.convolutional(hw, hw, depth));
@ -193,14 +193,14 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT)
.updater(new NoOp()) .updater(new NoOp())
.dist(new UniformDistribution(-2, 2)).seed(12345L).list() .dist(new UniformDistribution(-2, 2)).seed(12345L).list()
.layer(0, new ConvolutionLayer.Builder(2, 2).stride(1, 1).nOut(3) .layer(0, ConvolutionLayer.builder(2, 2).stride(1, 1).nOut(3)
.activation(afn).build()) .activation(afn).build())
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).build()) .layer(1, BatchNormalization.builder().useLogStd(useLogStd).build())
.layer(2, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX) .layer(2, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX)
.kernelSize(2, 2).stride(1, 1).build()) .kernelSize(2, 2).stride(1, 1).build())
.layer(3, new BatchNormalization()) .layer(3, BatchNormalization.builder().build())
.layer(4, new ActivationLayer.Builder().activation(afn).build()) .layer(4, ActivationLayer.builder().activation(afn).build())
.layer(5, new OutputLayer.Builder(lf).activation(outputActivation).nOut(nOut) .layer(5, OutputLayer.builder(lf).activation(outputActivation).nOut(nOut)
.build()) .build())
.inputType(InputType.convolutional(hw, hw, depth)); .inputType(InputType.convolutional(hw, hw, depth));
@ -300,12 +300,12 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT) .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
.updater(new NoOp()) .updater(new NoOp())
.dist(new UniformDistribution(-2, 2)).seed(12345L).list() .dist(new UniformDistribution(-2, 2)).seed(12345L).list()
.layer(0, new DenseLayer.Builder().nIn(nIn).nOut(4) .layer(0, DenseLayer.builder().nIn(nIn).nOut(4)
.activation(afn).build()) .activation(afn).build())
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).build()) .layer(1, BatchNormalization.builder().useLogStd(useLogStd).build())
.layer(2, new DenseLayer.Builder().nIn(4).nOut(4).build()) .layer(2, DenseLayer.builder().nIn(4).nOut(4).build())
.layer(3, new BatchNormalization.Builder().useLogStd(useLogStd).build()) .layer(3, BatchNormalization.builder().useLogStd(useLogStd).build())
.layer(4, new OutputLayer.Builder(lf) .layer(4, OutputLayer.builder(lf)
.activation(outputActivation).nOut(nOut) .activation(outputActivation).nOut(nOut)
.build()); .build());
@ -373,11 +373,11 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.seed(12345L) .seed(12345L)
.dist(new NormalDistribution(0, 1)).list() .dist(new NormalDistribution(0, 1)).list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY).build()) .layer(0, DenseLayer.builder().nIn(4).nOut(3).activation(Activation.IDENTITY).build())
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).lockGammaBeta(true).gamma(2.0).beta(0.5).nOut(3) .layer(1, BatchNormalization.builder().useLogStd(useLogStd).lockGammaBeta(true).gamma(2.0).beta(0.5).nOut(3)
.build()) .build())
.layer(2, new ActivationLayer.Builder().activation(Activation.TANH).build()) .layer(2, ActivationLayer.builder().activation(Activation.TANH).build())
.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(3).nOut(3).build()); .activation(Activation.SOFTMAX).nIn(3).nOut(3).build());
MultiLayerNetwork mln = new MultiLayerNetwork(builder.build()); MultiLayerNetwork mln = new MultiLayerNetwork(builder.build());
@ -417,11 +417,11 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.seed(12345L) .seed(12345L)
.dist(new NormalDistribution(0, 2)).list() .dist(new NormalDistribution(0, 2)).list()
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nIn(depth).nOut(2) .layer(0, ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nIn(depth).nOut(2)
.activation(Activation.IDENTITY).build()) .activation(Activation.IDENTITY).build())
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).lockGammaBeta(true).gamma(2.0).beta(0.5).build()) .layer(1, BatchNormalization.builder().useLogStd(useLogStd).lockGammaBeta(true).gamma(2.0).beta(0.5).build())
.layer(2, new ActivationLayer.Builder().activation(Activation.TANH).build()) .layer(2, ActivationLayer.builder().activation(Activation.TANH).build())
.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(nOut).build()) .activation(Activation.SOFTMAX).nOut(nOut).build())
.inputType(InputType.convolutional(hw, hw, depth)); .inputType(InputType.convolutional(hw, hw, depth));
@ -460,8 +460,8 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in") .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
.setInputTypes(InputType.convolutional(height, width, channels)) .setInputTypes(InputType.convolutional(height, width, channels))
.addLayer("bn", new BatchNormalization.Builder().useLogStd(useLogStd).build(), "in") .addLayer("bn",BatchNormalization.builder().useLogStd(useLogStd).build(), "in")
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT) .addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(numClasses).build(), "bn") .activation(Activation.SOFTMAX).nOut(numClasses).build(), "bn")
.setOutputs("out").build(); .setOutputs("out").build();
@ -531,14 +531,14 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.dist(new UniformDistribution(-2, 2)).seed(12345L).graphBuilder() .dist(new UniformDistribution(-2, 2)).seed(12345L).graphBuilder()
.addInputs("in") .addInputs("in")
.addLayer("0", new ConvolutionLayer.Builder(2, 2).stride(1, 1).nOut(3) .addLayer("0", ConvolutionLayer.builder(2, 2).stride(1, 1).nOut(3)
.activation(afn).build(), "in") .activation(afn).build(), "in")
.addLayer("1", new BatchNormalization.Builder().useLogStd(useLogStd).build(), "0") .addLayer("1",BatchNormalization.builder().useLogStd(useLogStd).build(), "0")
.addLayer("2", new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX) .addLayer("2", SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX)
.kernelSize(2, 2).stride(1, 1).build(), "1") .kernelSize(2, 2).stride(1, 1).build(), "1")
.addLayer("3", new BatchNormalization.Builder().useLogStd(useLogStd).build(), "2") .addLayer("3",BatchNormalization.builder().useLogStd(useLogStd).build(), "2")
.addLayer("4", new ActivationLayer.Builder().activation(afn).build(), "3") .addLayer("4", ActivationLayer.builder().activation(afn).build(), "3")
.addLayer("5", new OutputLayer.Builder(lf).activation(outputActivation) .addLayer("5", OutputLayer.builder(lf).activation(outputActivation)
.nOut(nOut).build(), "4") .nOut(nOut).build(), "4")
.setOutputs("5").setInputTypes(InputType.convolutional(hw, hw, depth)) .setOutputs("5").setInputTypes(InputType.convolutional(hw, hw, depth))
.build(); .build();
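Two recurring details in the BatchNormalization hunks: a loss function that used to be an OutputLayer.Builder constructor argument is now supplied through the fluent lossFunction(...) setter, and the bare new BatchNormalization() layer becomes an explicit BatchNormalization.builder().build(). A condensed sketch of both changes, with illustrative sizes:

    NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
            .layer(0, DenseLayer.builder().nIn(4).nOut(3).activation(Activation.IDENTITY).build())
            .layer(1, BatchNormalization.builder().build())            // was: new BatchNormalization()
            .layer(2, OutputLayer.builder()
                    .lossFunction(LossFunctions.LossFunction.MCXENT)   // was: new OutputLayer.Builder(MCXENT)
                    .activation(Activation.SOFTMAX).nIn(3).nOut(3).build())
            .build();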
View File
@ -115,16 +115,16 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
.updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL) .updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.list() .list()
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(kernel) .layer(0, Convolution3D.builder().activation(afn).kernelSize(kernel)
.stride(stride).nIn(convNIn).nOut(convNOut1).hasBias(false) .stride(stride).nIn(convNIn).nOut(convNOut1).hasBias(false)
.convolutionMode(mode).dataFormat(df) .convolutionMode(mode).dataFormat(df)
.build()) .build())
.layer(1, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1) .layer(1, Convolution3D.builder().activation(afn).kernelSize(1, 1, 1)
.nIn(convNOut1).nOut(convNOut2).hasBias(false) .nIn(convNOut1).nOut(convNOut2).hasBias(false)
.convolutionMode(mode).dataFormat(df) .convolutionMode(mode).dataFormat(df)
.build()) .build())
.layer(2, new DenseLayer.Builder().nOut(denseNOut).build()) .layer(2, DenseLayer.builder().nOut(denseNOut).build())
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(finalNOut).build()) .activation(Activation.SOFTMAX).nOut(finalNOut).build())
.inputPreProcessor(2, .inputPreProcessor(2,
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth, new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
@ -218,17 +218,17 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
.updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL) .updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.list() .list()
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(kernel) .layer(0, Convolution3D.builder().activation(afn).kernelSize(kernel)
.nIn(convNIn).nOut(convNOut1).hasBias(false) .nIn(convNIn).nOut(convNOut1).hasBias(false)
.convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW) .convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
.build()) .build())
.layer(1, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1) .layer(1, Convolution3D.builder().activation(afn).kernelSize(1, 1, 1)
.nIn(convNOut1).nOut(convNOut2).hasBias(false) .nIn(convNOut1).nOut(convNOut2).hasBias(false)
.convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW) .convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
.build()) .build())
.layer(2, new ZeroPadding3DLayer.Builder(zeroPadding).build()) .layer(2, ZeroPadding3DLayer.builder(zeroPadding).build())
.layer(3, new DenseLayer.Builder().nOut(denseNOut).build()) .layer(3, DenseLayer.builder().nOut(denseNOut).build())
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(finalNOut).build()) .activation(Activation.SOFTMAX).nOut(finalNOut).build())
.inputPreProcessor(3, .inputPreProcessor(3,
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth, new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
@ -314,14 +314,14 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.list() .list()
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1) .layer(0, Convolution3D.builder().activation(afn).kernelSize(1, 1, 1)
.nIn(convNIn).nOut(convNOut).hasBias(false) .nIn(convNIn).nOut(convNOut).hasBias(false)
.convolutionMode(mode).dataFormat(df) .convolutionMode(mode).dataFormat(df)
.build()) .build())
.layer(1, new Subsampling3DLayer.Builder(kernel) .layer(1, Subsampling3DLayer.builder(kernel)
.poolingType(pool).convolutionMode(mode).dataFormat(df).build()) .poolingType(pool.toPoolingType()).convolutionMode(mode).dataFormat(df).build())
.layer(2, new DenseLayer.Builder().nOut(denseNOut).build()) .layer(2, DenseLayer.builder().nOut(denseNOut).build())
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(finalNOut).build()) .activation(Activation.SOFTMAX).nOut(finalNOut).build())
.inputPreProcessor(2, .inputPreProcessor(2,
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,convNOut, df)) new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,convNOut, df))
@ -401,13 +401,13 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.seed(12345) .seed(12345)
.list() .list()
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1) .layer(0, Convolution3D.builder().activation(afn).kernelSize(1, 1, 1)
.nIn(convNIn).nOut(convNOut).hasBias(false) .nIn(convNIn).nOut(convNOut).hasBias(false)
.convolutionMode(mode).dataFormat(df) .convolutionMode(mode).dataFormat(df)
.build()) .build())
.layer(1, new Upsampling3D.Builder(upsamplingSize[0]).dataFormat(df).build()) .layer(1, Upsampling3D.builder(upsamplingSize[0]).dataFormat(df).build())
.layer(2, new DenseLayer.Builder().nOut(denseNOut).build()) .layer(2, DenseLayer.builder().nOut(denseNOut).build())
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(finalNOut).build()) .activation(Activation.SOFTMAX).nOut(finalNOut).build())
.inputPreProcessor(2, .inputPreProcessor(2,
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth, new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
@ -496,17 +496,17 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
.updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL) .updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.list() .list()
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(kernel) .layer(0, Convolution3D.builder().activation(afn).kernelSize(kernel)
.nIn(convNIn).nOut(convNOut1).hasBias(false) .nIn(convNIn).nOut(convNOut1).hasBias(false)
.convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW) .convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
.build()) .build())
.layer(1, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1) .layer(1, Convolution3D.builder().activation(afn).kernelSize(1, 1, 1)
.nIn(convNOut1).nOut(convNOut2).hasBias(false) .nIn(convNOut1).nOut(convNOut2).hasBias(false)
.convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW) .convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
.build()) .build())
.layer(2, new Cropping3D.Builder(cropping).build()) .layer(2, Cropping3D.builder(cropping).build())
.layer(3, new DenseLayer.Builder().nOut(denseNOut).build()) .layer(3, DenseLayer.builder().nOut(denseNOut).build())
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(finalNOut).build()) .activation(Activation.SOFTMAX).nOut(finalNOut).build())
.inputPreProcessor(3, .inputPreProcessor(3,
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth, new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
@ -595,15 +595,15 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.weightInit(new NormalDistribution(0, 0.1)) .weightInit(new NormalDistribution(0, 0.1))
.list() .list()
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(kernel) .layer(0, Convolution3D.builder().activation(afn).kernelSize(kernel)
.stride(stride).nIn(convNIn).nOut(dOut).hasBias(false) .stride(stride).nIn(convNIn).nOut(dOut).hasBias(false)
.convolutionMode(mode).dataFormat(df) .convolutionMode(mode).dataFormat(df)
.build()) .build())
.layer(1, new Deconvolution3D.Builder().activation(afn).kernelSize(kernel) .layer(1, Deconvolution3D.builder().activation(afn).kernelSize(kernel)
.stride(stride).nOut(dOut).hasBias(false) .stride(stride).nOut(dOut).hasBias(false)
.convolutionMode(mode).dataFormat(df) .convolutionMode(mode).dataFormat(df)
.build()) .build())
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(finalNOut).build()) .activation(Activation.SOFTMAX).nOut(finalNOut).build())
.inputType(InputType.convolutional3D(df, depth, height, width, convNIn)).build(); .inputType(InputType.convolutional3D(df, depth, height, width, convNIn)).build();
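One hunk in this file changes more than the builder call: Subsampling3DLayer.builder(kernel) is now fed pool.toPoolingType() instead of the raw pool value, which suggests the migrated builder's poolingType(...) setter takes the generic pooling-type enum rather than the subsampling-specific one. The new call shape, as used in the hunk (kernel, pool, mode and df are the test's local variables):

    .layer(1, Subsampling3DLayer.builder(kernel)
            .poolingType(pool.toPoolingType())   // was: .poolingType(pool)
            .convolutionMode(mode).dataFormat(df).build())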
View File
@ -86,10 +86,10 @@ public class CapsnetGradientCheckTest extends BaseDL4JTest {
.kernelSize(3, 3) .kernelSize(3, 3)
.stride(2, 2) .stride(2, 2)
.build()) .build())
.layer(new CapsuleLayer.Builder(capsule, capsuleDim, routing).build()) .layer(CapsuleLayer.builder(capsule, capsuleDim, routing).build())
.layer(new CapsuleStrengthLayer.Builder().build()) .layer(CapsuleStrengthLayer.builder().build())
.layer(new ActivationLayer.Builder(new ActivationSoftmax()).build()) .layer(ActivationLayer.builder(new ActivationSoftmax()).build())
.layer(new LossLayer.Builder(new LossNegativeLogLikelihood()).build()) .layer(LossLayer.builder().lossFunction(new LossNegativeLogLikelihood()).build())
.inputType(InputType.convolutional(height, width, inputDepth)) .inputType(InputType.convolutional(height, width, inputDepth))
.build(); .build();
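The CapsNet hunk shows how layers whose old Builder took a constructor argument fare under the migration: ActivationLayer keeps a one-argument builder(...) factory for its activation, while LossLayer moves its loss to the fluent lossFunction(...) setter. The tail of the migrated network, mirroring the hunk:

    .layer(CapsuleStrengthLayer.builder().build())
    .layer(ActivationLayer.builder(new ActivationSoftmax()).build())
    .layer(LossLayer.builder().lossFunction(new LossNegativeLogLikelihood()).build())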
View File
@ -97,24 +97,23 @@ public class DropoutGradientCheck extends BaseDL4JTest {
.convolutionMode(ConvolutionMode.Same) .convolutionMode(ConvolutionMode.Same)
.dropOut(dropout) .dropOut(dropout)
.activation(Activation.TANH) .activation(Activation.TANH)
.updater(new NoOp()) .updater(new NoOp());
.list();
if(cnn){ if(cnn){
builder.layer(new ConvolutionLayer.Builder().kernelSize(3,3).stride(2,2).nOut(2).build()); builder.layer(ConvolutionLayer.builder().kernelSize(3,3).stride(2,2).nOut(2).build());
builder.layer(new ConvolutionLayer.Builder().kernelSize(3,3).stride(2,2).nOut(2).build()); builder.layer(ConvolutionLayer.builder().kernelSize(3,3).stride(2,2).nOut(2).build());
builder.inputType(InputType.convolutional(6,6,2)); builder.inputType(InputType.convolutional(6,6,2));
} else { } else {
builder.layer(new DenseLayer.Builder().nOut(3).build()); builder.layer(DenseLayer.builder().nOut(3).build());
builder.layer(new DenseLayer.Builder().nOut(3).build()); builder.layer(DenseLayer.builder().nOut(3).build());
builder.inputType(InputType.feedForward(6)); builder.inputType(InputType.feedForward(6));
} }
builder.layer(new OutputLayer.Builder().nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunction.MCXENT).build()); builder.layer(OutputLayer.builder().nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunction.MCXENT).build());
NeuralNetConfiguration conf = builder.build(); NeuralNetConfiguration conf = builder.build();
//Remove spatial dropout from output layer - can't be used for 2d input //Remove spatial dropout from output layer - can't be used for 2d input
if(i == 4){ if(i == 4){
conf.getFlattenedLayerConfigurations().get(2).setIDropout(null); conf.getFlattenedLayerConfigurations().get(2).setDropOut(null);
} }
MultiLayerNetwork mln = new MultiLayerNetwork(conf); MultiLayerNetwork mln = new MultiLayerNetwork(conf);
@ -157,11 +156,11 @@ public class DropoutGradientCheck extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.graphBuilder() .graphBuilder()
.addInputs("in") .addInputs("in")
.addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).build(), "in") .addLayer("0", DenseLayer.builder().nIn(5).nOut(5).build(), "in")
.addLayer("1", new DenseLayer.Builder().nIn(5).nOut(5).build(), "0") .addLayer("1", DenseLayer.builder().nIn(5).nOut(5).build(), "0")
.addLayer("2", new DenseLayer.Builder().nIn(5).nOut(5).build(), "0") .addLayer("2", DenseLayer.builder().nIn(5).nOut(5).build(), "0")
.addLayer("3", new DenseLayer.Builder().nIn(5).nOut(5).build(), "0") .addLayer("3", DenseLayer.builder().nIn(5).nOut(5).build(), "0")
.addLayer("out", new OutputLayer.Builder().nIn(15).nOut(5).activation(Activation.SOFTMAX) .addLayer("out", OutputLayer.builder().nIn(15).nOut(5).activation(Activation.SOFTMAX)
.lossFunction(LossFunction.MCXENT).build(), "1", "2", "3") .lossFunction(LossFunction.MCXENT).build(), "1", "2", "3")
.setOutputs("out") .setOutputs("out")
.build(); .build();
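Besides the builder() swap, this file shows two further API adjustments: layers can now be appended directly to the NeuralNetConfiguration builder without an intermediate .list() call, and clearing a layer's dropout goes through setDropOut(null) instead of setIDropout(null). A sketch under those assumptions, with illustrative sizes (var leaves the Lombok builder type to inference):

    var builder = NeuralNetConfiguration.builder()
            .dropOut(dropout)
            .updater(new NoOp());                // no .list() before .layer(...)
    builder.layer(DenseLayer.builder().nOut(3).build());
    builder.layer(DenseLayer.builder().nOut(3).build());
    builder.layer(OutputLayer.builder().nOut(3).activation(Activation.SOFTMAX)
            .lossFunction(LossFunction.MCXENT).build());
    NeuralNetConfiguration conf = builder.build();

    // Spatial dropout can't be used for 2d input, so the test clears it on the output layer:
    conf.getFlattenedLayerConfigurations().get(2).setDropOut(null);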
View File
@ -75,10 +75,10 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.updater(new NoOp()) .updater(new NoOp())
.dist(new NormalDistribution(0, 1.0)).seed(12345L).list() .dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
.layer(0, new SimpleRnn.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH) .layer(0, SimpleRnn.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
.build()) .build())
.layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build()) .layer(1, GlobalPoolingLayer.builder().poolingType(pt).build())
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build()) .activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build())
.build(); .build();
@ -130,12 +130,12 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.updater(new NoOp()) .updater(new NoOp())
.dist(new NormalDistribution(0, 1.0)).seed(12345L).list() .dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1) .layer(0, ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1)
.dataFormat(nchw ? CNN2DFormat.NCHW : CNN2DFormat.NHWC) .dataFormat(nchw ? CNN2DFormat.NCHW : CNN2DFormat.NHWC)
.nOut(layerDepth) .nOut(layerDepth)
.build()) .build())
.layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build()) .layer(1, GlobalPoolingLayer.builder().poolingType(pt).build())
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(nOut).build()) .activation(Activation.SOFTMAX).nOut(nOut).build())
.inputType(InputType.convolutional(inputH, inputW, inputDepth, nchw ? CNN2DFormat.NCHW : CNN2DFormat.NHWC)).build(); .inputType(InputType.convolutional(inputH, inputW, inputDepth, nchw ? CNN2DFormat.NCHW : CNN2DFormat.NHWC)).build();
@ -188,10 +188,10 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.updater(new NoOp()) .updater(new NoOp())
.dist(new NormalDistribution(0, 1.0)).seed(12345L).list() .dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
.layer(0, new LSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH) .layer(0, LSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
.build()) .build())
.layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build()) .layer(1, GlobalPoolingLayer.builder().poolingType(pt).build())
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build()) .activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build())
.build(); .build();
@ -263,10 +263,10 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.dist(new NormalDistribution(0, 1.0)).convolutionMode(ConvolutionMode.Same) .dist(new NormalDistribution(0, 1.0)).convolutionMode(ConvolutionMode.Same)
.seed(12345L).list() .seed(12345L).list()
.layer(0, new ConvolutionLayer.Builder().kernelSize(kernel).stride(stride) .layer(0, ConvolutionLayer.builder().kernelSize(kernel).stride(stride)
.nOut(layerDepth).build()) .nOut(layerDepth).build())
.layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build()) .layer(1, GlobalPoolingLayer.builder().poolingType(pt).build())
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(nOut).build()) .activation(Activation.SOFTMAX).nOut(nOut).build())
.inputType(InputType.convolutional(inputH, inputW, inputDepth)).build(); .inputType(InputType.convolutional(inputH, inputW, inputDepth)).build();
View File
@ -78,11 +78,11 @@ public class GradientCheckTests extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(new NoOp()) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(new NoOp())
.list() .list()
.layer(0, .layer(0,
new DenseLayer.Builder().nIn(4).nOut(3) DenseLayer.builder().nIn(4).nOut(3)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.activation(Activation.TANH) .activation(Activation.TANH)
.build()) .build())
.layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(3).nOut(3).build()) .activation(Activation.SOFTMAX).nIn(3).nOut(3).build())
.build(); .build();
@ -168,11 +168,11 @@ public class GradientCheckTests extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).updater(new NoOp()) .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).updater(new NoOp())
.seed(12345L) .seed(12345L)
.list().layer(0, .list().layer(0,
new DenseLayer.Builder().nIn(4).nOut(3) DenseLayer.builder().nIn(4).nOut(3)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.activation(afn) .activation(afn)
.build()) .build())
.layer(1, new OutputLayer.Builder(lf).activation(outputActivation).nIn(3).nOut(3) .layer(1, OutputLayer.builder(lf).activation(outputActivation).nIn(3).nOut(3)
.dist(new NormalDistribution(0, 1)).build()) .dist(new NormalDistribution(0, 1)).build())
.build(); .build();
@ -259,12 +259,12 @@ public class GradientCheckTests extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT) .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
.seed(12345L) .seed(12345L)
.list().layer(0, .list().layer(0,
new DenseLayer.Builder().nIn(4).nOut(3) DenseLayer.builder().nIn(4).nOut(3)
.dist(new NormalDistribution(0, .dist(new NormalDistribution(0,
1)) 1))
.updater(new NoOp()) .updater(new NoOp())
.activation(afn).build()) .activation(afn).build())
.layer(1, new OutputLayer.Builder(lf).nIn(3).nOut(3) .layer(1, OutputLayer.builder(lf).nIn(3).nOut(3)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.updater(new NoOp()) .updater(new NoOp())
.activation(outputActivation).build()) .activation(outputActivation).build())
@ -327,10 +327,10 @@ public class GradientCheckTests extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().l2(0.2).l1(0.1) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().l2(0.2).l1(0.1)
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345L) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345L)
.list().layer(new EmbeddingLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER) .list().layer(EmbeddingLayer.builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
.updater(new NoOp()).build()) .updater(new NoOp()).build())
.layer(new PReLULayer.Builder().inputShape(3).sharedAxes(1).updater(new NoOp()).build()) .layer(PReLULayer.builder().inputShape(3).sharedAxes(1).updater(new NoOp()).build())
.layer(new OutputLayer.Builder(LossFunction.MCXENT).nIn(3).nOut(3) .layer(OutputLayer.builder().lossFunction(LossFunction.MCXENT).nIn(3).nOut(3)
.weightInit(WeightInit.XAVIER).dist(new NormalDistribution(0, 1)) .weightInit(WeightInit.XAVIER).dist(new NormalDistribution(0, 1))
.updater(new NoOp()).activation(Activation.SOFTMAX).build()) .updater(new NoOp()).activation(Activation.SOFTMAX).build())
.build(); .build();
@ -365,12 +365,12 @@ public class GradientCheckTests extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().l2(0.2).l1(0.1) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().l2(0.2).l1(0.1)
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345L) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345L)
.list().layer(0, .layer(0,
new EmbeddingLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER) EmbeddingLayer.builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
.updater(new NoOp()).activation( .updater(new NoOp()).activation(
Activation.TANH) Activation.TANH)
.build()) .build())
.layer(1, new OutputLayer.Builder(LossFunction.MCXENT).nIn(3).nOut(3) .layer(1, OutputLayer.builder().lossFunction(LossFunction.MCXENT).nIn(3).nOut(3)
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.updater(new NoOp()).activation(Activation.SOFTMAX).build()) .updater(new NoOp()).activation(Activation.SOFTMAX).build())
.build(); .build();
@ -437,9 +437,9 @@ public class GradientCheckTests extends BaseDL4JTest {
.seed(12345L) .seed(12345L)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.list().layer(0, .list().layer(0,
new AutoEncoder.Builder().nIn(4).nOut(3) AutoEncoder.builder().nIn(4).nOut(3)
.activation(afn).build()) .activation(afn).build())
.layer(1, new OutputLayer.Builder(lf).nIn(3).nOut(3) .layer(1, OutputLayer.builder(lf).nIn(3).nOut(3)
.activation(outputActivation).build()) .activation(outputActivation).build())
.build(); .build();
@ -497,13 +497,13 @@ public class GradientCheckTests extends BaseDL4JTest {
.weightInit(new UniformDistribution(0, 1)) .weightInit(new UniformDistribution(0, 1))
.graphBuilder() .graphBuilder()
.addInputs("features") .addInputs("features")
.addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(4) .addLayer("dense", DenseLayer.builder().nIn(4).nOut(4)
.activation(Activation.TANH) .activation(Activation.TANH)
.build(), "features") .build(), "features")
.addLayer("elementWiseMul", new ElementWiseMultiplicationLayer.Builder().nIn(4).nOut(4) .addLayer("elementWiseMul", ElementWiseMultiplicationLayer.builder().nIn(4).nOut(4)
.activation(a) .activation(a)
.build(), "dense") .build(), "dense")
.addLayer("loss", new LossLayer.Builder(LossFunctions.LossFunction.COSINE_PROXIMITY) .addLayer("loss", LossLayer.builder().lossFunction(LossFunctions.LossFunction.COSINE_PROXIMITY.getILossFunction())
.activation(Activation.IDENTITY).build(), "elementWiseMul") .activation(Activation.IDENTITY).build(), "elementWiseMul")
.setOutputs("loss") .setOutputs("loss")
.build(); .build();
@ -566,12 +566,12 @@ public class GradientCheckTests extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.weightInit(new NormalDistribution(0, 1)) .weightInit(new NormalDistribution(0, 1))
.list() .list()
.layer(new EmbeddingSequenceLayer.Builder() .layer(EmbeddingSequenceLayer.builder()
.nIn(8) .nIn(8)
.nOut(4) .nOut(4)
.outputDataFormat(seqOutputFormat) .outputDataFormat(seqOutputFormat)
.build()) .build())
.layer(new RnnOutputLayer.Builder().nIn(4).nOut(3).activation(Activation.TANH) .layer(RnnOutputLayer.builder().nIn(4).nOut(3).activation(Activation.TANH)
.dataFormat(seqOutputFormat) .dataFormat(seqOutputFormat)
.lossFunction(LossFunction.MSE).build()) .lossFunction(LossFunction.MSE).build())
.build(); .build();
@ -679,12 +679,12 @@ public class GradientCheckTests extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT) .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
.seed(12345L) .seed(12345L)
.list().layer(0, .list().layer(0,
new DenseLayer.Builder().nIn(4).nOut(3) DenseLayer.builder().nIn(4).nOut(3)
.dist(new NormalDistribution(0, .dist(new NormalDistribution(0,
1)) 1))
.updater(new NoOp()) .updater(new NoOp())
.activation(afn).build()) .activation(afn).build())
.layer(1, new OutputLayer.Builder(lf).nIn(3).nOut(3) .layer(1, OutputLayer.builder(lf).nIn(3).nOut(3)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.updater(new NoOp()) .updater(new NoOp())
.activation(outputActivation).build()) .activation(outputActivation).build())
@ -740,12 +740,12 @@ public class GradientCheckTests extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).updater(new NoOp()) .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).updater(new NoOp())
.seed(12345L) .seed(12345L)
.list().layer(0, .list().layer(0,
new DenseLayer.Builder().nIn(4).nOut(3) DenseLayer.builder().nIn(4).nOut(3)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.hasLayerNorm(layerNorm) .hasLayerNorm(layerNorm)
.activation(afn) .activation(afn)
.build()) .build())
.layer(1, new OutputLayer.Builder(lf).activation(outputActivation).nIn(3).nOut(3) .layer(1, OutputLayer.builder(lf).activation(outputActivation).nIn(3).nOut(3)
.dist(new NormalDistribution(0, 1)).build()) .dist(new NormalDistribution(0, 1)).build())
.build(); .build();
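Worth noting in these hunks: OutputLayer.builder().lossFunction(...) keeps taking the LossFunction enum directly, while LossLayer.builder().lossFunction(...) is passed LossFunctions.LossFunction.COSINE_PROXIMITY.getILossFunction(), i.e. the enum converted to its ILossFunction instance. The two call shapes side by side, as they appear above:

    // OutputLayer: enum accepted directly
    .layer(1, OutputLayer.builder().lossFunction(LossFunction.MCXENT)
            .nIn(3).nOut(3).activation(Activation.SOFTMAX).build())

    // LossLayer: enum converted to an ILossFunction first
    .addLayer("loss", LossLayer.builder()
            .lossFunction(LossFunctions.LossFunction.COSINE_PROXIMITY.getILossFunction())
            .activation(Activation.IDENTITY).build(), "elementWiseMul")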
View File
@ -76,10 +76,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.dist(new NormalDistribution(0, 1)).updater(new NoOp()) .dist(new NormalDistribution(0, 1)).updater(new NoOp())
.graphBuilder().addInputs("input") .graphBuilder().addInputs("input")
.addLayer("firstLayer", .addLayer("firstLayer",
new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(), DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
"input") "input")
.addLayer("outputLayer", .addLayer("outputLayer",
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT) OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(5).nOut(3).build(), .activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
"firstLayer") "firstLayer")
.setOutputs("outputLayer").build(); .setOutputs("outputLayer").build();
@ -121,13 +121,13 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.dist(new NormalDistribution(0, 1)).updater(new NoOp()) .dist(new NormalDistribution(0, 1)).updater(new NoOp())
.graphBuilder().addInputs("input") .graphBuilder().addInputs("input")
.addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(), .addLayer("l1", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
"input") "input")
.addLayer("l2", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(), .addLayer("l2", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
"input") "input")
.addVertex("merge", new MergeVertex(), "l1", "l2") .addVertex("merge", new MergeVertex(), "l1", "l2")
.addLayer("outputLayer", .addLayer("outputLayer",
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT) OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(5 + 5).nOut(3).build(), .activation(Activation.SOFTMAX).nIn(5 + 5).nOut(3).build(),
"merge") "merge")
.setOutputs("outputLayer").build(); .setOutputs("outputLayer").build();
@ -178,13 +178,13 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.updater(new NoOp()).graphBuilder().addInputs("input") .updater(new NoOp()).graphBuilder().addInputs("input")
.addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(), .addLayer("l1", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
"input") "input")
.addLayer("l2", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.SIGMOID) .addLayer("l2", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.SIGMOID)
.build(), "input") .build(), "input")
.addVertex("elementwise", new ElementWiseVertex(op), "l1", "l2") .addVertex("elementwise", new ElementWiseVertex(op), "l1", "l2")
.addLayer("outputLayer", .addLayer("outputLayer",
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT) OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(5).nOut(3).build(), .activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
"elementwise") "elementwise")
.setOutputs("outputLayer").build(); .setOutputs("outputLayer").build();
@ -236,15 +236,15 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.updater(new NoOp()).graphBuilder().addInputs("input") .updater(new NoOp()).graphBuilder().addInputs("input")
.addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(), .addLayer("l1", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
"input") "input")
.addLayer("l2", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.SIGMOID) .addLayer("l2", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.SIGMOID)
.build(), "input") .build(), "input")
.addLayer("l3", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.RELU).build(), .addLayer("l3", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.RELU).build(),
"input") "input")
.addVertex("elementwise", new ElementWiseVertex(op), "l1", "l2", "l3") .addVertex("elementwise", new ElementWiseVertex(op), "l1", "l2", "l3")
.addLayer("outputLayer", .addLayer("outputLayer",
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT) OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(5).nOut(3).build(), .activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
"elementwise") "elementwise")
.setOutputs("outputLayer").build(); .setOutputs("outputLayer").build();
@ -299,10 +299,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.graphBuilder() .graphBuilder()
.addInputs("in") .addInputs("in")
.setOutputs("out") .setOutputs("out")
.layer("l1", new DenseLayer.Builder().nIn(3).nOut(firstSmaller ? 1 : 3).build(), "in") //[mb,3] .layer("l1", DenseLayer.builder().nIn(3).nOut(firstSmaller ? 1 : 3).build(), "in") //[mb,3]
.layer("l2", new DenseLayer.Builder().nIn(3).nOut(firstSmaller ? 3 : 1).build(), "in") //[mb,1] .layer("l2", DenseLayer.builder().nIn(3).nOut(firstSmaller ? 3 : 1).build(), "in") //[mb,1]
.addVertex("ew", new ElementWiseVertex(op), "l1", "l2") .addVertex("ew", new ElementWiseVertex(op), "l1", "l2")
.layer("out", new OutputLayer.Builder().nIn(3).nOut(2).lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).build(), "ew") .layer("out", OutputLayer.builder().nIn(3).nOut(2).lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).build(), "ew")
.build(); .build();
ComputationGraph graph = new ComputationGraph(conf); ComputationGraph graph = new ComputationGraph(conf);
@ -344,15 +344,15 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.dist(new NormalDistribution(0, 0.1)) .dist(new NormalDistribution(0, 0.1))
.updater(new NoOp()).graphBuilder().addInputs("input") .updater(new NoOp()).graphBuilder().addInputs("input")
.addLayer("l1", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0) .addLayer("l1", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
.dataFormat(format) .dataFormat(format)
.nIn(2).nOut(2).activation(Activation.TANH).build(), "input") .nIn(2).nOut(2).activation(Activation.TANH).build(), "input")
.addLayer("l2", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1) .addLayer("l2", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1)
.padding(0, 0).dataFormat(format) .padding(0, 0).dataFormat(format)
.nIn(2).nOut(2).activation(Activation.TANH).build(), "input") .nIn(2).nOut(2).activation(Activation.TANH).build(), "input")
.addVertex("merge", new MergeVertex(), "l1", "l2") .addVertex("merge", new MergeVertex(), "l1", "l2")
.addLayer("outputLayer", .addLayer("outputLayer",
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT) OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(5 * 5 * (2 + 2)).nOut(3) .activation(Activation.SOFTMAX).nIn(5 * 5 * (2 + 2)).nOut(3)
.build(), .build(),
"merge") "merge")
@ -401,23 +401,23 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.updater(new NoOp()).graphBuilder().addInputs("input") .updater(new NoOp()).graphBuilder().addInputs("input")
.setOutputs("out") .setOutputs("out")
.addLayer("rnn1", .addLayer("rnn1",
new SimpleRnn.Builder().nOut(3) SimpleRnn.builder().nOut(3)
.activation(Activation.TANH).build(), .activation(Activation.TANH).build(),
"input") "input")
.addLayer("rnn2", .addLayer("rnn2",
new SimpleRnn.Builder().nOut(3) SimpleRnn.builder().nOut(3)
.activation(Activation.TANH).build(), .activation(Activation.TANH).build(),
"rnn1") "rnn1")
.addLayer("dense1", .addLayer("dense1",
new DenseLayer.Builder().nOut(3) DenseLayer.builder().nOut(3)
.activation(Activation.SIGMOID).build(), .activation(Activation.SIGMOID).build(),
"rnn1") "rnn1")
.addLayer("rnn3", .addLayer("rnn3",
new SimpleRnn.Builder().nOut(3) SimpleRnn.builder().nOut(3)
.activation(Activation.TANH).build(), .activation(Activation.TANH).build(),
"dense1") "dense1")
.addVertex("merge", new MergeVertex(), "rnn2", "rnn3") .addVertex("merge", new MergeVertex(), "rnn2", "rnn3")
.addLayer("out", new RnnOutputLayer.Builder().nOut(outSize) .addLayer("out", RnnOutputLayer.builder().nOut(outSize)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), .lossFunction(LossFunctions.LossFunction.MCXENT).build(),
@ -457,10 +457,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.weightInit(new NormalDistribution(0, 1)) .weightInit(new NormalDistribution(0, 1))
.updater(new NoOp()).graphBuilder().addInputs("input").setOutputs("out") .updater(new NoOp()).graphBuilder().addInputs("input").setOutputs("out")
.addLayer("lstm1", new LSTM.Builder().nOut(6).activation(Activation.TANH).build(), .addLayer("lstm1", LSTM.builder().nOut(6).activation(Activation.TANH).build(),
"input") "input")
.addVertex("subset", new SubsetVertex(0, 2), "lstm1") .addVertex("subset", new SubsetVertex(0, 2), "lstm1")
.addLayer("out", new RnnOutputLayer.Builder().nOut(2).activation(Activation.SOFTMAX) .addLayer("out", RnnOutputLayer.builder().nOut(2).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "subset") .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "subset")
.setInputTypes(InputType.recurrent(inLength,timeSeriesLength,RNNFormat.NCW)) .setInputTypes(InputType.recurrent(inLength,timeSeriesLength,RNNFormat.NCW))
.build(); .build();
@ -494,10 +494,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.updater(new NoOp()).graphBuilder().addInputs("input").setOutputs("out") .updater(new NoOp()).graphBuilder().addInputs("input").setOutputs("out")
.addLayer("lstm1", new LSTM.Builder().nIn(3).nOut(4).activation(Activation.TANH).build(), .addLayer("lstm1", LSTM.builder().nIn(3).nOut(4).activation(Activation.TANH).build(),
"input") "input")
.addVertex("lastTS", new LastTimeStepVertex("input"), "lstm1") .addVertex("lastTS", new LastTimeStepVertex("input"), "lstm1")
.addLayer("out", new OutputLayer.Builder().nIn(4).nOut(2).activation(Activation.SOFTMAX) .addLayer("out", OutputLayer.builder().nIn(4).nOut(2).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "lastTS") .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "lastTS")
.build(); .build();
@ -548,16 +548,16 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.updater(new NoOp()).graphBuilder() .updater(new NoOp()).graphBuilder()
.addInputs("input1", "input2").setOutputs("out") .addInputs("input1", "input2").setOutputs("out")
.addLayer("lstm1", .addLayer("lstm1",
new LSTM.Builder().nIn(3).nOut(3) LSTM.builder().nIn(3).nOut(3)
.activation(Activation.TANH).build(), .activation(Activation.TANH).build(),
"input1") "input1")
.addLayer("lstm2", .addLayer("lstm2",
new LSTM.Builder().nIn(2).nOut(4) LSTM.builder().nIn(2).nOut(4)
.activation(Activation.SOFTSIGN).build(), .activation(Activation.SOFTSIGN).build(),
"input2") "input2")
.addVertex("lastTS", new LastTimeStepVertex("input2"), "lstm2") .addVertex("lastTS", new LastTimeStepVertex("input2"), "lstm2")
.addVertex("duplicate", new DuplicateToTimeSeriesVertex("input2"), "lastTS") .addVertex("duplicate", new DuplicateToTimeSeriesVertex("input2"), "lastTS")
.addLayer("out", new RnnOutputLayer.Builder().nIn(3+4).nOut(2) .addLayer("out", RnnOutputLayer.builder().nIn(3+4).nOut(2)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), .lossFunction(LossFunctions.LossFunction.MCXENT).build(),
"lstm1", "duplicate") "lstm1", "duplicate")
@ -598,16 +598,16 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.updater(new NoOp()).graphBuilder() .updater(new NoOp()).graphBuilder()
.addInputs("input").setOutputs("out") .addInputs("input").setOutputs("out")
.addLayer("lstm_a", .addLayer("lstm_a",
new LSTM.Builder().nIn(2).nOut(3) LSTM.builder().nIn(2).nOut(3)
.activation(Activation.TANH).build(), .activation(Activation.TANH).build(),
"input") "input")
.addVertex("input_rev", new ReverseTimeSeriesVertex("input"), "input") .addVertex("input_rev", new ReverseTimeSeriesVertex("input"), "input")
.addLayer("lstm_b", .addLayer("lstm_b",
new LSTM.Builder().nIn(2).nOut(3) LSTM.builder().nIn(2).nOut(3)
.activation(Activation.TANH).build(), .activation(Activation.TANH).build(),
"input_rev") "input_rev")
.addVertex("lstm_b_rev", new ReverseTimeSeriesVertex("input"), "lstm_b") .addVertex("lstm_b_rev", new ReverseTimeSeriesVertex("input"), "lstm_b")
.addLayer("out", new RnnOutputLayer.Builder().nIn(3 + 3).nOut(2) .addLayer("out", RnnOutputLayer.builder().nIn(3 + 3).nOut(2)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), .lossFunction(LossFunctions.LossFunction.MCXENT).build(),
"lstm_a", "lstm_b_rev") "lstm_a", "lstm_b_rev")
@ -655,11 +655,11 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.updater(new NoOp()).activation(Activation.TANH).graphBuilder().addInputs("i0", "i1", "i2") .updater(new NoOp()).activation(Activation.TANH).graphBuilder().addInputs("i0", "i1", "i2")
.addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i0") .addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "i0")
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i1") .addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "i1")
.addLayer("d2", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i2") .addLayer("d2", DenseLayer.builder().nIn(2).nOut(2).build(), "i2")
.addLayer("d3", new DenseLayer.Builder().nIn(6).nOut(2).build(), "d0", "d1", "d2") .addLayer("d3", DenseLayer.builder().nIn(6).nOut(2).build(), "d0", "d1", "d2")
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(2) .addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(2)
.nOut(2).build(), "d3") .nOut(2).build(), "d3")
.setOutputs("out").build(); .setOutputs("out").build();
@ -698,11 +698,11 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.updater(new NoOp()).activation(Activation.TANH).graphBuilder().addInputs("i0") .updater(new NoOp()).activation(Activation.TANH).graphBuilder().addInputs("i0")
.addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i0") .addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "i0")
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0") .addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "d0")
.addLayer("d2", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0") .addLayer("d2", DenseLayer.builder().nIn(2).nOut(2).build(), "d0")
.addLayer("d3", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0") .addLayer("d3", DenseLayer.builder().nIn(2).nOut(2).build(), "d0")
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(6) .addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(6)
.nOut(2).build(), "d1", "d2", "d3") .nOut(2).build(), "d1", "d2", "d3")
.setOutputs("out").build(); .setOutputs("out").build();
@ -738,14 +738,14 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.updater(new NoOp()).activation(Activation.TANH).graphBuilder().addInputs("i0", "i1", "i2") .updater(new NoOp()).activation(Activation.TANH).graphBuilder().addInputs("i0", "i1", "i2")
.addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i0") .addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "i0")
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i1") .addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "i1")
.addLayer("d2", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i2") .addLayer("d2", DenseLayer.builder().nIn(2).nOut(2).build(), "i2")
.addVertex("m", new MergeVertex(), "d0", "d1", "d2") .addVertex("m", new MergeVertex(), "d0", "d1", "d2")
.addLayer("D0", new DenseLayer.Builder().nIn(6).nOut(2).build(), "m") .addLayer("D0", DenseLayer.builder().nIn(6).nOut(2).build(), "m")
.addLayer("D1", new DenseLayer.Builder().nIn(6).nOut(2).build(), "m") .addLayer("D1", DenseLayer.builder().nIn(6).nOut(2).build(), "m")
.addLayer("D2", new DenseLayer.Builder().nIn(6).nOut(2).build(), "m") .addLayer("D2", DenseLayer.builder().nIn(6).nOut(2).build(), "m")
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(6) .addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(6)
.nOut(2).build(), "D0", "D1", "D2") .nOut(2).build(), "D0", "D1", "D2")
.setOutputs("out").build(); .setOutputs("out").build();
@ -787,18 +787,18 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.updater(new NoOp()).activation(Activation.TANH).graphBuilder().addInputs("input") .updater(new NoOp()).activation(Activation.TANH).graphBuilder().addInputs("input")
.addLayer("l0", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0) .addLayer("l0", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
.nIn(2).nOut(2).activation(Activation.TANH).build(), "input") .nIn(2).nOut(2).activation(Activation.TANH).build(), "input")
.addLayer("l1", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0) .addLayer("l1", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
.nIn(2).nOut(2).activation(Activation.TANH).build(), "l0") .nIn(2).nOut(2).activation(Activation.TANH).build(), "l0")
.addLayer("l2", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0) .addLayer("l2", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
.nIn(2).nOut(2).activation(Activation.TANH).build(), "l0") .nIn(2).nOut(2).activation(Activation.TANH).build(), "l0")
.addVertex("m", new MergeVertex(), "l1", "l2") .addVertex("m", new MergeVertex(), "l1", "l2")
.addLayer("l3", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0) .addLayer("l3", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
.nIn(4).nOut(2).activation(Activation.TANH).build(), "m") .nIn(4).nOut(2).activation(Activation.TANH).build(), "m")
.addLayer("l4", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0) .addLayer("l4", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
.nIn(4).nOut(2).activation(Activation.TANH).build(), "m") .nIn(4).nOut(2).activation(Activation.TANH).build(), "m")
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE) .addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
.activation(Activation.IDENTITY).nOut(2) .activation(Activation.IDENTITY).nOut(2)
.build(), "l3", "l4") .build(), "l3", "l4")
.setOutputs("out").setInputTypes(InputType.convolutional(inH, inW, 2)) .setOutputs("out").setInputTypes(InputType.convolutional(inH, inW, 2))
@ -839,7 +839,7 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.updater(new NoOp()).graphBuilder() .updater(new NoOp()).graphBuilder()
.addInputs("input1", "input2", "input3") .addInputs("input1", "input2", "input3")
.addVertex("stack1", new StackVertex(), "input1", "input2", "input3") .addVertex("stack1", new StackVertex(), "input1", "input2", "input3")
.addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5) .addLayer("l1", DenseLayer.builder().nIn(4).nOut(5)
.activation(Activation.TANH).build(), "stack1") .activation(Activation.TANH).build(), "stack1")
.addVertex("unstack0", new UnstackVertex(0, 3), "l1") .addVertex("unstack0", new UnstackVertex(0, 3), "l1")
.addVertex("unstack1", new UnstackVertex(1, 3), "l1") .addVertex("unstack1", new UnstackVertex(1, 3), "l1")
@ -847,8 +847,8 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.addVertex("l2-1", new L2Vertex(), "unstack1", "unstack0") // x - x- .addVertex("l2-1", new L2Vertex(), "unstack1", "unstack0") // x - x-
.addVertex("l2-2", new L2Vertex(), "unstack1", "unstack2") // x - x+ .addVertex("l2-2", new L2Vertex(), "unstack1", "unstack2") // x - x+
.addLayer("lossLayer", .addLayer("lossLayer",
new LossLayer.Builder() LossLayer.builder()
.lossFunction(LossFunctions.LossFunction.MCXENT) .lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction())
.activation(Activation.SOFTMAX).build(), .activation(Activation.SOFTMAX).build(),
"l2-1", "l2-2") "l2-1", "l2-2")
.setOutputs("lossLayer").build(); .setOutputs("lossLayer").build();
@ -911,9 +911,9 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.dist(new GaussianDistribution(0, 1)) .dist(new GaussianDistribution(0, 1))
.updater(new NoOp()).graphBuilder().addInputs("input1") .updater(new NoOp()).graphBuilder().addInputs("input1")
.addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH) .addLayer("l1", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH)
.build(), "input1") .build(), "input1")
.addLayer("cl", new CenterLossOutputLayer.Builder() .addLayer("cl",CenterLossOutputLayer.builder()
.lossFunction(LossFunctions.LossFunction.MCXENT).nIn(5).nOut(numLabels) .lossFunction(LossFunctions.LossFunction.MCXENT).nIn(5).nOut(numLabels)
.alpha(1.0).lambda(lambda).gradientCheck(true) .alpha(1.0).lambda(lambda).gradientCheck(true)
.activation(Activation.SOFTMAX).build(), "l1") .activation(Activation.SOFTMAX).build(), "l1")
@ -975,9 +975,9 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.updater(new NoOp()) .updater(new NoOp())
.dist(new NormalDistribution(0, 1.0)).seed(12345L).list() .dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(3).build()) .layer(0, ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(3).build())
.layer(1, new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build()) .layer(1, GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
.layer(2, new CenterLossOutputLayer.Builder() .layer(2, CenterLossOutputLayer.builder())
.lossFunction(LossFunctions.LossFunction.MCXENT).nOut(numLabels) .lossFunction(LossFunctions.LossFunction.MCXENT).nOut(numLabels)
.alpha(1.0).lambda(lambda).gradientCheck(true) .alpha(1.0).lambda(lambda).gradientCheck(true)
.activation(Activation.SOFTMAX).build()) .activation(Activation.SOFTMAX).build())
@ -1030,10 +1030,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.activation(Activation.TANH).updater(new NoOp()).graphBuilder() .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
.addInputs("in1", "in2").addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1") .addInputs("in1", "in2").addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "in1")
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2") .addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "in2")
.addVertex("l2", new L2Vertex(), "d0", "d1") .addVertex("l2", new L2Vertex(), "d0", "d1")
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(1) .addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(1)
.nOut(1).activation(Activation.IDENTITY).build(), "l2") .nOut(1).activation(Activation.IDENTITY).build(), "l2")
.setOutputs("out").build(); .setOutputs("out").build();
@ -1083,14 +1083,14 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.activation(Activation.TANH).updater(new NoOp()).graphBuilder() .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
.addInputs("in1", "in2") .addInputs("in1", "in2")
.addLayer("d0", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in1") .addLayer("d0", DenseLayer.builder().nIn(layerSizes).nOut(layerSizes).build(), "in1")
.addLayer("d1", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in2") .addLayer("d1", DenseLayer.builder().nIn(layerSizes).nOut(layerSizes).build(), "in2")
.addVertex("stack", new StackVertex(), "d0", "d1") .addVertex("stack", new StackVertex(), "d0", "d1")
.addLayer("d2", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "stack") .addLayer("d2", DenseLayer.builder().nIn(layerSizes).nOut(layerSizes).build(), "stack")
.addVertex("u1", new UnstackVertex(0, 2), "d2").addVertex("u2", new UnstackVertex(1, 2), "d2") .addVertex("u1", new UnstackVertex(0, 2), "d2").addVertex("u2", new UnstackVertex(1, 2), "d2")
.addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2) .addLayer("out1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2)
.nIn(layerSizes).nOut(layerSizes).activation(Activation.IDENTITY).build(), "u1") .nIn(layerSizes).nOut(layerSizes).activation(Activation.IDENTITY).build(), "u1")
.addLayer("out2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2) .addLayer("out2", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2)
.nIn(layerSizes).nOut(2).activation(Activation.IDENTITY).build(), "u2") .nIn(layerSizes).nOut(2).activation(Activation.IDENTITY).build(), "u2")
.setOutputs("out1", "out2").build(); .setOutputs("out1", "out2").build();
@ -1137,17 +1137,17 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.activation(Activation.TANH).updater(new NoOp()).graphBuilder() .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
.addInputs("in1", "in2").addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1") .addInputs("in1", "in2").addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "in1")
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2") .addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "in2")
.addVertex("stack", new StackVertex(), "d0", "d1") .addVertex("stack", new StackVertex(), "d0", "d1")
.addVertex("u0", new UnstackVertex(0, 2), "stack") .addVertex("u0", new UnstackVertex(0, 2), "stack")
.addVertex("u1", new UnstackVertex(1, 2), "stack") .addVertex("u1", new UnstackVertex(1, 2), "stack")
.addLayer("out1", .addLayer("out1",
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2) OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
.nOut(2).activation(Activation.IDENTITY).build(), .nOut(2).activation(Activation.IDENTITY).build(),
"u0") "u0")
.addLayer("out2", .addLayer("out2",
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2) OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
.nOut(2).activation(Activation.IDENTITY).build(), .nOut(2).activation(Activation.IDENTITY).build(),
"u1") "u1")
.setOutputs("out1", "out2").build(); .setOutputs("out1", "out2").build();
@ -1198,16 +1198,16 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.activation(Activation.TANH).updater(new NoOp()).graphBuilder() .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
.addInputs("in1", "in2") .addInputs("in1", "in2")
.addLayer("d0", new SimpleRnn.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in1") .addLayer("d0", SimpleRnn.builder().nIn(layerSizes).nOut(layerSizes).build(), "in1")
.addLayer("d1", new SimpleRnn.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in2") .addLayer("d1", SimpleRnn.builder().nIn(layerSizes).nOut(layerSizes).build(), "in2")
.addVertex("stack", new StackVertex(), "d0", "d1") .addVertex("stack", new StackVertex(), "d0", "d1")
.addLayer("d2", new SimpleRnn.Builder().nIn(layerSizes).nOut(layerSizes).build(), "stack") .addLayer("d2", SimpleRnn.builder().nIn(layerSizes).nOut(layerSizes).build(), "stack")
.addVertex("u1", new UnstackVertex(0, 2), "d2").addVertex("u2", new UnstackVertex(1, 2), "d2") .addVertex("u1", new UnstackVertex(0, 2), "d2").addVertex("u2", new UnstackVertex(1, 2), "d2")
.addLayer("p1", new GlobalPoolingLayer.Builder(PoolingType.AVG).build(), "u1") .addLayer("p1", GlobalPoolingLayer.builder(PoolingType.AVG).build(), "u1")
.addLayer("p2", new GlobalPoolingLayer.Builder(PoolingType.AVG).build(), "u2") .addLayer("p2", GlobalPoolingLayer.builder(PoolingType.AVG).build(), "u2")
.addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2) .addLayer("out1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2)
.nIn(layerSizes).nOut(layerSizes).activation(Activation.IDENTITY).build(), "p1") .nIn(layerSizes).nOut(layerSizes).activation(Activation.IDENTITY).build(), "p1")
.addLayer("out2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2) .addLayer("out2", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2)
.nIn(layerSizes).nOut(2).activation(Activation.IDENTITY).build(), "p2") .nIn(layerSizes).nOut(2).activation(Activation.IDENTITY).build(), "p2")
.setOutputs("out1", "out2").build(); .setOutputs("out1", "out2").build();
@ -1260,14 +1260,14 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.activation(Activation.TANH).updater(new NoOp()).graphBuilder() .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
.addInputs("in1", "in2").addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1") .addInputs("in1", "in2").addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "in1")
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2") .addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "in2")
.addLayer("out1", .addLayer("out1",
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2) OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
.nOut(2).activation(Activation.IDENTITY).build(), .nOut(2).activation(Activation.IDENTITY).build(),
"d0") "d0")
.addLayer("out2", .addLayer("out2",
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2) OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
.nOut(2).activation(Activation.IDENTITY).build(), .nOut(2).activation(Activation.IDENTITY).build(),
"d1") "d1")
.setOutputs("out1", "out2").build(); .setOutputs("out1", "out2").build();
@ -1320,10 +1320,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.activation(Activation.TANH).updater(new NoOp()).graphBuilder() .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
.addInputs("in1").addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(3).build(), "in1") .addInputs("in1").addLayer("d1", DenseLayer.builder().nIn(2).nOut(3).build(), "in1")
.addVertex("norm", new L2NormalizeVertex(definition,L2NormalizeVertex.DEFAULT_EPS), "d1") .addVertex("norm", new L2NormalizeVertex(definition,L2NormalizeVertex.DEFAULT_EPS), "d1")
.addLayer("out1", .addLayer("out1",
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(3) OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(3)
.nOut(2).activation(Activation.IDENTITY).build(), .nOut(2).activation(Activation.IDENTITY).build(),
"norm") "norm")
.setOutputs("out1").build(); .setOutputs("out1").build();
@ -1370,11 +1370,11 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.activation(Activation.TANH).updater(new NoOp()).graphBuilder() .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
.addInputs("in1") .addInputs("in1")
.addLayer("d1", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(2).build(), .addLayer("d1", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(2).build(),
"in1") "in1")
.addVertex("norm", new L2NormalizeVertex(), "d1") .addVertex("norm", new L2NormalizeVertex(), "d1")
.addLayer("out1", .addLayer("out1",
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nOut(2) OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nOut(2)
.activation(Activation.IDENTITY).build(), .activation(Activation.IDENTITY).build(),
"norm") "norm")
.setOutputs("out1").setInputTypes(InputType.convolutional(h, w, dIn)).build(); .setOutputs("out1").setInputTypes(InputType.convolutional(h, w, dIn)).build();
@ -1420,9 +1420,9 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345L) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345L)
.updater(new NoOp()).graphBuilder().addInputs("in") .updater(new NoOp()).graphBuilder().addInputs("in")
.addLayer("0", new EmbeddingLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER) .addLayer("0", EmbeddingLayer.builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
.activation(Activation.TANH).build(), "in") .activation(Activation.TANH).build(), "in")
.addLayer("1", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3) .addLayer("1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
.activation(Activation.SOFTMAX).build(), "0") .activation(Activation.SOFTMAX).build(), "0")
.setOutputs("1").build(); .setOutputs("1").build();


@ -119,10 +119,10 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345L) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345L)
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.updater(new NoOp()) .updater(new NoOp())
.list()
.layer(0, new SimpleRnn.Builder().nIn(nIn).nOut(layerSize) .layer(0, SimpleRnn.builder().nIn(nIn).nOut(layerSize)
.weightInit(new NormalDistribution(0, 1)).build()) .weightInit(new NormalDistribution(0, 1)).build())
.layer(1, new RnnOutputLayer.Builder(s.lf).activation(s.act).nIn(layerSize).nOut(s.nOut) .layer(1, RnnOutputLayer.builder().lossFunction(s.lf).activation(s.act).nIn(layerSize).nOut(s.nOut)
.weightInit(new NormalDistribution(0, 1)).build()) .weightInit(new NormalDistribution(0, 1)).build())
.build(); .build();
MultiLayerNetwork mln = new MultiLayerNetwork(conf); MultiLayerNetwork mln = new MultiLayerNetwork(conf);
@ -161,10 +161,10 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.dist(new NormalDistribution(0, 1.0)).seed(12345L).list() .dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
.layer(0, new SimpleRnn.Builder().nIn(nIn).nOut(2).activation(Activation.TANH).build()) .layer(0, SimpleRnn.builder().nIn(nIn).nOut(2).activation(Activation.TANH).build())
.layer(1, new GravesBidirectionalLSTM.Builder().nIn(2).nOut(layerSize) .layer(1, GravesBidirectionalLSTM.builder().nIn(2).nOut(layerSize)
.activation(Activation.TANH).build()) .activation(Activation.TANH).build())
.layer(2, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(2, RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build()) .activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build())
.build(); .build();
@ -241,9 +241,9 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.dist(new NormalDistribution(0, 1)).seed(12345) .dist(new NormalDistribution(0, 1)).seed(12345)
.list() .list()
.layer(0, new DenseLayer.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH) .layer(0, DenseLayer.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
.build()) .build())
.layer(1, new OutputLayer.Builder().nIn(layerSize).nOut(nOut).lossFunction(lf) .layer(1, OutputLayer.builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
.activation(a).build()) .activation(a).build())
.validateOutputLayerConfig(false) .validateOutputLayerConfig(false)
.build(); .build();
@ -335,9 +335,9 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.dist(new NormalDistribution(0, 1)).seed(12345) .dist(new NormalDistribution(0, 1)).seed(12345)
.list() .list()
.layer(0, new SimpleRnn.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH) .layer(0, SimpleRnn.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
.build()) .build())
.layer(1, new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut).lossFunction(lf) .layer(1, RnnOutputLayer.builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
.activation(a).build()) .activation(a).build())
.validateOutputLayerConfig(false) .validateOutputLayerConfig(false)
.inputType(InputType.recurrent(nIn,tsLength, RNNFormat.NCW)) .inputType(InputType.recurrent(nIn,tsLength, RNNFormat.NCW))
@ -368,9 +368,9 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.dist(new NormalDistribution(0, 2)).seed(12345) .dist(new NormalDistribution(0, 2)).seed(12345)
.graphBuilder().addInputs("in") .graphBuilder().addInputs("in")
.addLayer("0", new SimpleRnn.Builder().nOut(layerSize) .addLayer("0", SimpleRnn.builder().nOut(layerSize)
.activation(Activation.TANH).build(), "in") .activation(Activation.TANH).build(), "in")
.addLayer("1", new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut).lossFunction(lf) .addLayer("1", RnnOutputLayer.builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
.activation(a).build(), "0") .activation(a).build(), "0")
.setOutputs("1").validateOutputLayerConfig(false) .setOutputs("1").validateOutputLayerConfig(false)
.setInputTypes(InputType.recurrent(nIn,tsLength,RNNFormat.NCW)) .setInputTypes(InputType.recurrent(nIn,tsLength,RNNFormat.NCW))
@ -401,9 +401,9 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
.weightInit(new NormalDistribution(0,2)) .weightInit(new NormalDistribution(0,2))
.updater(new NoOp()) .updater(new NoOp())
.list() .list()
.layer(new LSTM.Builder().nIn(3).nOut(3).build()) .layer(LSTM.builder().nIn(3).nOut(3).build())
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build()) .layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
.layer(new OutputLayer.Builder().nIn(3).nOut(3).activation(Activation.SOFTMAX).build()) .layer(OutputLayer.builder().nIn(3).nOut(3).activation(Activation.SOFTMAX).build())
.inputType(InputType.recurrent(3)) .inputType(InputType.recurrent(3))
.build(); .build();
@ -457,9 +457,9 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.graphBuilder() .graphBuilder()
.addInputs("in") .addInputs("in")
.layer("0", new LSTM.Builder().nIn(3).nOut(3).build(), "in") .layer("0", LSTM.builder().nIn(3).nOut(3).build(), "in")
.layer("1", new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build(), "0") .layer("1", GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build(), "0")
.layer("out", new OutputLayer.Builder().nIn(3).nOut(3).activation(Activation.SOFTMAX).build(), "1") .layer("out", OutputLayer.builder().nIn(3).nOut(3).activation(Activation.SOFTMAX).build(), "1")
.setOutputs("out") .setOutputs("out")
.setInputTypes(InputType.recurrent(3)) .setInputTypes(InputType.recurrent(3))
.build(); .build();
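
The first hunk in this file also drops a bare .list() call, which suggests that in this branch NeuralNetConfiguration.builder() accepts .layer(index, ...) directly. A sketch of the resulting configuration shape, assuming exactly that (class name hypothetical):

    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
    import org.deeplearning4j.nn.conf.layers.recurrent.SimpleRnn;
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    class ListlessConfigSketch {
        // .layer(int, ...) is assumed to be available on the top-level builder,
        // making the old .list() stage unnecessary.
        NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                .seed(12345L)
                .layer(0, SimpleRnn.builder().nIn(3).nOut(4).activation(Activation.TANH).build())
                .layer(1, RnnOutputLayer.builder()
                        .lossFunction(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX)
                        .nIn(4).nOut(3).build())
                .build();
    }
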


@ -72,10 +72,10 @@ public class LRNGradientCheckTests extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.seed(12345L) .seed(12345L)
.dist(new NormalDistribution(0, 2)).list() .dist(new NormalDistribution(0, 2)).list()
.layer(0, new ConvolutionLayer.Builder().nOut(6).kernelSize(2, 2).stride(1, 1) .layer(0, ConvolutionLayer.builder().nOut(6).kernelSize(2, 2).stride(1, 1)
.activation(Activation.TANH).build()) .activation(Activation.TANH).build())
.layer(1, new LocalResponseNormalization.Builder().build()) .layer(1, LocalResponseNormalization.builder().build())
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(nOut).build()) .activation(Activation.SOFTMAX).nOut(nOut).build())
.inputType(InputType.convolutional(hw, hw, depth)); .inputType(InputType.convolutional(hw, hw, depth));


@ -73,17 +73,17 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
LayerConfiguration l0; LayerConfiguration l0;
LayerConfiguration l1; LayerConfiguration l1;
if (graves) { if (graves) {
l0 = new GravesLSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.SIGMOID) l0 = GravesLSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.SIGMOID)
.dist(new NormalDistribution(0, 1.0)) .dist(new NormalDistribution(0, 1.0))
.updater(new NoOp()).build(); .updater(new NoOp()).build();
l1 = new GravesLSTM.Builder().nIn(layerSize).nOut(layerSize).activation(Activation.SIGMOID) l1 = GravesLSTM.builder().nIn(layerSize).nOut(layerSize).activation(Activation.SIGMOID)
.dist(new NormalDistribution(0, 1.0)) .dist(new NormalDistribution(0, 1.0))
.updater(new NoOp()).build(); .updater(new NoOp()).build();
} else { } else {
l0 = new LSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.SIGMOID) l0 = LSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.SIGMOID)
.dist(new NormalDistribution(0, 1.0)) .dist(new NormalDistribution(0, 1.0))
.updater(new NoOp()).build(); .updater(new NoOp()).build();
l1 = new LSTM.Builder().nIn(layerSize).nOut(layerSize).activation(Activation.SIGMOID) l1 = LSTM.builder().nIn(layerSize).nOut(layerSize).activation(Activation.SIGMOID)
.dist(new NormalDistribution(0, 1.0)) .dist(new NormalDistribution(0, 1.0))
.updater(new NoOp()).build(); .updater(new NoOp()).build();
} }
@ -94,7 +94,7 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
.list() .list()
.layer(0, l0).layer(1, .layer(0, l0).layer(1,
l1) l1)
.layer(2, new RnnOutputLayer.Builder(LossFunction.MCXENT) .layer(2, RnnOutputLayer.builder().lossFunction(LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut) .activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut)
.dist(new NormalDistribution(0, 1.0)).updater(new NoOp()) .dist(new NormalDistribution(0, 1.0)).updater(new NoOp())
@ -196,14 +196,14 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
LayerConfiguration layer; LayerConfiguration layer;
if (graves) { if (graves) {
layer = new GravesLSTM.Builder().nIn(nIn).nOut(layerSize).activation(afn).build(); layer = GravesLSTM.builder().nIn(nIn).nOut(layerSize).activation(afn).build();
} else { } else {
layer = new LSTM.Builder().nIn(nIn).nOut(layerSize).activation(afn).build(); layer = LSTM.builder().nIn(nIn).nOut(layerSize).activation(afn).build();
} }
NeuralNetConfiguration.NeuralNetConfigurationBuilder conf2 = (NeuralNetConfigurationBuilder) conf NeuralNetConfiguration.NeuralNetConfigurationBuilder conf2 = (NeuralNetConfigurationBuilder) conf
.layer(0, layer) .layer(0, layer)
.layer(1, new RnnOutputLayer.Builder(lf).activation(outputActivation) .layer(1, RnnOutputLayer.builder(lf).activation(outputActivation)
.nIn(layerSize).nOut(nOut).build()); .nIn(layerSize).nOut(nOut).build());
MultiLayerNetwork mln = new MultiLayerNetwork(conf2.build()); MultiLayerNetwork mln = new MultiLayerNetwork(conf2.build());
@ -251,16 +251,16 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
LayerConfiguration layer; LayerConfiguration layer;
if (graves) { if (graves) {
layer = new GravesLSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH).build(); layer = GravesLSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH).build();
} else { } else {
layer = new LSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH).build(); layer = LSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH).build();
} }
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345L) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345L)
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.updater(new NoOp()).list().layer(0, layer) .updater(new NoOp()).list().layer(0, layer)
.layer(1, new RnnOutputLayer.Builder(LossFunction.MCXENT).activation(Activation.SOFTMAX) .layer(1, RnnOutputLayer.builder().lossFunction(LossFunction.MCXENT).activation(Activation.SOFTMAX)
.nIn(layerSize).nOut(nOut).build()) .nIn(layerSize).nOut(nOut).build())
.build(); .build();
MultiLayerNetwork mln = new MultiLayerNetwork(conf); MultiLayerNetwork mln = new MultiLayerNetwork(conf);
@ -324,11 +324,11 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.updater(new NoOp()) .updater(new NoOp())
.layer(0, .layer(0,
new GravesBidirectionalLSTM.Builder().nIn(nIn).nOut(layerSize) GravesBidirectionalLSTM.builder().nIn(nIn).nOut(layerSize)
.weightInit(new NormalDistribution(0, 1)) .weightInit(new NormalDistribution(0, 1))
.activation(afn) .activation(afn)
.build()) .build())
.layer(1, new RnnOutputLayer.Builder(lf).activation(outputActivation).nIn(layerSize) .layer(1, RnnOutputLayer.builder(lf).activation(outputActivation).nIn(layerSize)
.nOut(nOut) .nOut(nOut)
.dist(new NormalDistribution(0, 1)).updater(new NoOp()).build()) .dist(new NormalDistribution(0, 1)).updater(new NoOp()).build())
.build(); .build();
@ -383,12 +383,12 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345L) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345L)
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.list() .list()
.layer(0, new GravesBidirectionalLSTM.Builder().nIn(nIn).nOut(layerSize) .layer(0, GravesBidirectionalLSTM.builder().nIn(nIn).nOut(layerSize)
.dist(new NormalDistribution(0, 1)).updater( .dist(new NormalDistribution(0, 1)).updater(
Updater.NONE) Updater.NONE)
.build()) .build())
.layer(1, new RnnOutputLayer.Builder(LossFunction.MCXENT).activation(Activation.SOFTMAX) .layer(1, RnnOutputLayer.builder().lossFunction(LossFunction.MCXENT).activation(Activation.SOFTMAX)
.nIn(layerSize).nOut(nOut) .nIn(layerSize).nOut(nOut)
.dist(new NormalDistribution(0, 1)).updater(new NoOp()).build()) .dist(new NormalDistribution(0, 1)).updater(new NoOp()).build())
.build(); .build();
@ -432,13 +432,13 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new NoOp()).seed(12345) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new NoOp()).seed(12345)
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.dist(new UniformDistribution(-2, 2)).list() .dist(new UniformDistribution(-2, 2)).list()
.layer(0, new ConvolutionLayer.Builder(3, 3).nIn(2).nOut(3).stride(1, 1) .layer(0, ConvolutionLayer.builder(3, 3).nIn(2).nOut(3).stride(1, 1)
.activation(Activation.TANH).build()) //Out: (6-3)/1+1 = 4 -> 4x4x3 .activation(Activation.TANH).build()) //Out: (6-3)/1+1 = 4 -> 4x4x3
.layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2) .layer(1, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
.stride(1, 1).build()) //Out: (4-2)/1+1 = 3 -> 3x3x3 = 27 .stride(1, 1).build()) //Out: (4-2)/1+1 = 3 -> 3x3x3 = 27
.layer(2, new DenseLayer.Builder().nIn(27).nOut(4).activation(Activation.TANH).build()) .layer(2, DenseLayer.builder().nIn(27).nOut(4).activation(Activation.TANH).build())
.layer(3, new GravesLSTM.Builder().nIn(4).nOut(3).activation(Activation.TANH).build()) .layer(3, GravesLSTM.builder().nIn(4).nOut(3).activation(Activation.TANH).build())
.layer(4, new RnnOutputLayer.Builder().lossFunction(LossFunction.MCXENT).nIn(3).nOut(nClasses) .layer(4, RnnOutputLayer.builder().lossFunction(LossFunction.MCXENT).nIn(3).nOut(nClasses)
.activation(Activation.SOFTMAX).build()) .activation(Activation.SOFTMAX).build())
.inputType(InputType.convolutional(6, 6, 2)).build(); .inputType(InputType.convolutional(6, 6, 2)).build();
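
Both LSTM variants migrate identically, so the graves/non-graves branching reduces to a choice of class. Note also that a one-argument RnnOutputLayer.builder(lf) overload survives at some call sites in this file while others spell out .lossFunction(lf). A sketch of the branch (class name hypothetical; LayerConfiguration's package is an assumption, since the diff only shows the simple type name):

    import org.deeplearning4j.nn.conf.layers.GravesLSTM;
    import org.deeplearning4j.nn.conf.layers.LSTM;
    import org.deeplearning4j.nn.conf.layers.LayerConfiguration;
    import org.nd4j.linalg.activations.Activation;

    class LstmVariantSketch {
        // Both recurrent variants expose the same generated entry point, so the
        // conditional selects only the class, not the configuration style.
        LayerConfiguration pick(boolean graves, int nIn, int layerSize) {
            return graves
                    ? GravesLSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH).build()
                    : LSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH).build();
        }
    }
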


@ -187,8 +187,8 @@ public class LossFunctionGradientCheck extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345)
.updater(new NoOp()) .updater(new NoOp())
.dist(new UniformDistribution(-2, 2)).list() .dist(new UniformDistribution(-2, 2)).list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build()) .layer(0, DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build())
.layer(1, new OutputLayer.Builder().lossFunction(lossFunctions[i]) .layer(1, OutputLayer.builder().lossFunction(lossFunctions[i])
.activation(outputActivationFn[i]).nIn(4).nOut(nOut[i]).build()) .activation(outputActivationFn[i]).nIn(4).nOut(nOut[i]).build())
.validateOutputLayerConfig(false) .validateOutputLayerConfig(false)
.build(); .build();
@ -351,9 +351,9 @@ public class LossFunctionGradientCheck extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345)
.updater(new NoOp()) .updater(new NoOp())
.dist(new UniformDistribution(-2, 2)).list() .dist(new UniformDistribution(-2, 2)).list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(nOut[i]).activation(Activation.TANH) .layer(0, DenseLayer.builder().nIn(4).nOut(nOut[i]).activation(Activation.TANH)
.build()) .build())
.layer(1, new LossLayer.Builder().lossFunction(lossFunctions[i]) .layer(1, LossLayer.builder().lossFunction(lossFunctions[i])
.activation(outputActivationFn[i]).build()) .activation(outputActivationFn[i]).build())
.validateOutputLayerConfig(false) .validateOutputLayerConfig(false)
.build(); .build();
@ -361,7 +361,7 @@ public class LossFunctionGradientCheck extends BaseDL4JTest {
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
assertSame(((LossLayer) net.getLayer(1).getLayerConfiguration()).getLossFn().getClass(), lossFunctions[i] assertSame(((LossLayer) net.getLayer(1).getLayerConfiguration()).getLossFunction().getClass(), lossFunctions[i]
.getClass()); .getClass());
INDArray[] inOut = getFeaturesAndLabels(lossFunctions[i], minibatchSizes[j], 4, nOut[i], 12345); INDArray[] inOut = getFeaturesAndLabels(lossFunctions[i], minibatchSizes[j], 4, nOut[i], 12345);
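
The assertion above also picks up an accessor rename: getLossFn() becomes getLossFunction(), consistent with a generated getter over a renamed field. A sketch of the updated access path (class and method names hypothetical; getLayerConfiguration() is taken from the assertion itself):

    import org.deeplearning4j.nn.conf.layers.LossLayer;
    import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;

    class LossAccessorSketch {
        // getLossFunction() is assumed to be the generated getter replacing the
        // old hand-written getLossFn(); the cast mirrors the assertion above.
        static Class<?> lossClassOf(MultiLayerNetwork net) {
            LossLayer cfg = (LossLayer) net.getLayer(1).getLayerConfiguration();
            return cfg.getLossFunction().getClass();
        }
    }
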
@ -655,9 +655,9 @@ public class LossFunctionGradientCheck extends BaseDL4JTest {
// .dist(new UniformDistribution(-3, 3)) // .dist(new UniformDistribution(-3, 3))
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.list() .list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH) .layer(0, DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH)
.build()) .build())
.layer(1, new OutputLayer.Builder().lossFunction(lossFunctions[i]) .layer(1, OutputLayer.builder().lossFunction(lossFunctions[i])
.activation(outputActivationFn[i]).nIn(4).nOut(3).build()) .activation(outputActivationFn[i]).nIn(4).nOut(3).build())
.validateOutputLayerConfig(false) .validateOutputLayerConfig(false)
.build(); .build();


@ -73,19 +73,19 @@ public class NoBiasGradientCheckTests extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.seed(12345L) .seed(12345L)
.list() .list()
.layer(0, new DenseLayer.Builder().nIn(nIn).nOut(layerSize) .layer(0, DenseLayer.builder().nIn(nIn).nOut(layerSize)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.activation(Activation.TANH) .activation(Activation.TANH)
.hasBias(true) //ILayer 0: Always have a bias .hasBias(true) //ILayer 0: Always have a bias
.build()) .build())
.layer(1, new DenseLayer.Builder().nIn(layerSize).nOut(layerSize) .layer(1, DenseLayer.builder().nIn(layerSize).nOut(layerSize)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.activation(Activation.TANH) .activation(Activation.TANH)
.hasBias(denseHasBias) .hasBias(denseHasBias)
.build()) .build())
.layer(2, new OutputLayer.Builder(LossFunction.MCXENT) .layer(2, OutputLayer.builder().lossFunction(LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut) .activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
@ -144,12 +144,12 @@ public class NoBiasGradientCheckTests extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.seed(12345L) .seed(12345L)
.list() .list()
.layer(0, new LSTM.Builder().nIn(nIn).nOut(layerSize) .layer(0, LSTM.builder().nIn(nIn).nOut(layerSize)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.activation(Activation.TANH) .activation(Activation.TANH)
.build()) .build())
.layer(1, new RnnOutputLayer.Builder(LossFunction.MCXENT) .layer(1, RnnOutputLayer.builder().lossFunction(LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut) .activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
@ -205,13 +205,13 @@ public class NoBiasGradientCheckTests extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.seed(12345L) .seed(12345L)
.list() .list()
.layer(0, new EmbeddingLayer.Builder().nIn(nIn).nOut(layerSize) .layer(0, EmbeddingLayer.builder().nIn(nIn).nOut(layerSize)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.activation(Activation.TANH) .activation(Activation.TANH)
.hasBias(embeddingHasBias) .hasBias(embeddingHasBias)
.build()) .build())
.layer(1, new OutputLayer.Builder(LossFunction.MCXENT) .layer(1, OutputLayer.builder().lossFunction(LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut) .activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
@ -271,17 +271,17 @@ public class NoBiasGradientCheckTests extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.list() .list()
.layer(new ConvolutionLayer.Builder(kernel, .layer(ConvolutionLayer.builder(kernel,
stride, padding).nIn(inputDepth) stride, padding).nIn(inputDepth)
.hasBias(false) .hasBias(false)
.nOut(3).build())//output: (5-2+0)/1+1 = 4 .nOut(3).build())//output: (5-2+0)/1+1 = 4
.layer(new SubsamplingLayer.Builder(PoolingType.MAX) .layer(SubsamplingLayer.builder(PoolingType.MAX)
.kernelSize(kernel).stride(stride).padding(padding) .kernelSize(kernel).stride(stride).padding(padding)
.pnorm(pNorm).build()) //output: (4-2+0)/1+1 =3 -> 3x3x3 .pnorm(pNorm).build()) //output: (4-2+0)/1+1 =3 -> 3x3x3
.layer(new ConvolutionLayer.Builder(kernel, stride, padding) .layer(ConvolutionLayer.builder(kernel, stride, padding)
.hasBias(cnnHasBias) .hasBias(cnnHasBias)
.nOut(2).build()) //Output: (3-2+0)/1+1 = 2 .nOut(2).build()) //Output: (3-2+0)/1+1 = 2
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.nOut(4).build()) .nOut(4).build())
.inputType(InputType.convolutionalFlat(height, width, inputDepth)) .inputType(InputType.convolutionalFlat(height, width, inputDepth))
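
The hunks above show that parameterized entry points survive the migration: ConvolutionLayer.builder(kernel, stride, padding) and SubsamplingLayer.builder(PoolingType.MAX) mirror the old Builder constructors. A sketch combining them with the hasBias(...) flag exercised by this test (class name hypothetical; imports assume stock DL4J packages):

    import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
    import org.deeplearning4j.nn.conf.layers.PoolingType;
    import org.deeplearning4j.nn.conf.layers.SubsamplingLayer;

    class ParamBuilderSketch {
        int[] kernel = {2, 2}, stride = {1, 1}, padding = {0, 0};
        // Overloaded builder(...) entry points are assumed to mirror the old
        // Builder constructors, so kernel/stride/padding can still be passed up front.
        ConvolutionLayer conv = ConvolutionLayer.builder(kernel, stride, padding)
                .hasBias(false) // bias-free convolution, as exercised above
                .nOut(3)
                .build();
        SubsamplingLayer pool = SubsamplingLayer.builder(PoolingType.MAX)
                .kernelSize(kernel).stride(stride).padding(padding)
                .build();
    }
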


@ -121,10 +121,10 @@ public class OutputLayerGradientChecks extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.updater(new NoOp()) .updater(new NoOp())
.list() .list()
.layer(new LSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH) .layer(LSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
.dist(new NormalDistribution(0, 1.0)) .dist(new NormalDistribution(0, 1.0))
.updater(new NoOp()).build()) .updater(new NoOp()).build())
.layer(new RnnLossLayer.Builder(lf) .layer(RnnLossLayer.builder().lossFunction(lf)
.activation(oa) .activation(oa)
.build()) .build())
.validateOutputLayerConfig(false).build(); .validateOutputLayerConfig(false).build();
@ -228,10 +228,10 @@ public class OutputLayerGradientChecks extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.convolutionMode(ConvolutionMode.Same) .convolutionMode(ConvolutionMode.Same)
.list() .list()
.layer(new ConvolutionLayer.Builder().nIn(dIn).nOut(dOut).activation(Activation.TANH) .layer(ConvolutionLayer.builder().nIn(dIn).nOut(dOut).activation(Activation.TANH)
.dist(new NormalDistribution(0, 1.0)) .dist(new NormalDistribution(0, 1.0))
.updater(new NoOp()).build()) .updater(new NoOp()).build())
.layer(new CnnLossLayer.Builder(lf) .layer(CnnLossLayer.builder().lossFunction(lf)
.activation(oa) .activation(oa)
.build()) .build())
.validateOutputLayerConfig(false).build(); .validateOutputLayerConfig(false).build();
@ -375,11 +375,11 @@ public class OutputLayerGradientChecks extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.convolutionMode(ConvolutionMode.Same) .convolutionMode(ConvolutionMode.Same)
.list() .list()
.layer(new Convolution3D.Builder().nIn(chIn).nOut(chOut).activation(Activation.TANH) .layer(Convolution3D.builder().nIn(chIn).nOut(chOut).activation(Activation.TANH)
.dist(new NormalDistribution(0, 1.0)) .dist(new NormalDistribution(0, 1.0))
.dataFormat(dataFormat) .dataFormat(dataFormat)
.updater(new NoOp()).build()) .updater(new NoOp()).build())
.layer(new Cnn3DLossLayer.Builder(dataFormat) .layer(Cnn3DLossLayer.builder().dataFormat(dataFormat)
.lossFunction(lf) .lossFunction(lf)
.activation(oa) .activation(oa)
.build()) .build())
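
The loss-layer family follows the same shape, with former constructor arguments such as Cnn3DLossLayer's data format moving to setters like .dataFormat(dataFormat). A sketch (class name hypothetical; whether each lossFunction(...) overload takes the enum or an ILossFunction is an assumption drawn from nearby call sites):

    import org.deeplearning4j.nn.conf.layers.CnnLossLayer;
    import org.deeplearning4j.nn.conf.layers.RnnLossLayer;
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    class LossLayerFamilySketch {
        // Setter-based configuration of the loss-only output layers, mirroring
        // the RnnLossLayer and CnnLossLayer hunks above.
        RnnLossLayer rnnLoss = RnnLossLayer.builder()
                .lossFunction(LossFunctions.LossFunction.MSE)
                .activation(Activation.IDENTITY)
                .build();
        CnnLossLayer cnnLoss = CnnLossLayer.builder()
                .lossFunction(LossFunctions.LossFunction.MSE)
                .activation(Activation.IDENTITY)
                .build();
    }
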


@ -112,12 +112,12 @@ public class RnnGradientChecks extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.list() .list()
.layer(new LSTM.Builder().nIn(nIn).nOut(3).build()) .layer(LSTM.builder().nIn(nIn).nOut(3).build())
.layer(new Bidirectional(m, .layer(Bidirectional.builder(m,
(simple ? (simple ?
new SimpleRnn.Builder().nIn(3).nOut(3).hasLayerNorm(hasLayerNorm).build() : SimpleRnn.builder().nIn(3).nOut(3).hasLayerNorm(hasLayerNorm).build() :
new LSTM.Builder().nIn(3).nOut(3).build()))) LSTM.builder().nIn(3).nOut(3).build())))
.layer(new RnnOutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX).build()) .layer(RnnOutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX).build())
.build(); .build();
@ -194,9 +194,9 @@ public class RnnGradientChecks extends BaseDL4JTest {
.l1(l1s[l]) .l1(l1s[l])
.l2(l2s[l]) .l2(l2s[l])
.list() .list()
.layer(new SimpleRnn.Builder().nIn(nIn).nOut(layerSize).hasLayerNorm(hasLayerNorm).build()) .layer(SimpleRnn.builder().nIn(nIn).nOut(layerSize).hasLayerNorm(hasLayerNorm).build())
.layer(new SimpleRnn.Builder().nIn(layerSize).nOut(layerSize).hasLayerNorm(hasLayerNorm).build()) .layer(SimpleRnn.builder().nIn(layerSize).nOut(layerSize).hasLayerNorm(hasLayerNorm).build())
.layer(new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut) .layer(RnnOutputLayer.builder().nIn(layerSize).nOut(nOut)
.activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT) .activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT)
.build()) .build())
.build(); .build();
@ -268,11 +268,11 @@ public class RnnGradientChecks extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.list() .list()
.layer(simple ? new SimpleRnn.Builder().nOut(layerSize).hasLayerNorm(hasLayerNorm).build() : .layer(simple ? SimpleRnn.builder().nOut(layerSize).hasLayerNorm(hasLayerNorm).build() :
new LSTM.Builder().nOut(layerSize).build()) LSTM.builder().nOut(layerSize).build())
.layer(new LastTimeStep(simple ? new SimpleRnn.Builder().nOut(layerSize).hasLayerNorm(hasLayerNorm).build() : .layer(new LastTimeStep(simple ? SimpleRnn.builder().nOut(layerSize).hasLayerNorm(hasLayerNorm).build() :
new LSTM.Builder().nOut(layerSize).build())) LSTM.builder().nOut(layerSize).build()))
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX) .layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build()) .lossFunction(LossFunctions.LossFunction.MCXENT).build())
.inputType(InputType.recurrent(nIn)) .inputType(InputType.recurrent(nIn))
.build(); .build();
@ -334,9 +334,9 @@ public class RnnGradientChecks extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.list() .list()
.layer(new LSTM.Builder().nOut(layerSize).build()) .layer(LSTM.builder().nOut(layerSize).build())
.layer(new TimeDistributed(new DenseLayer.Builder().nOut(layerSize).activation(Activation.SOFTMAX).build())) .layer(new TimeDistributed(DenseLayer.builder().nOut(layerSize).activation(Activation.SOFTMAX).build()))
.layer(new RnnOutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX) .layer(RnnOutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build()) .lossFunction(LossFunctions.LossFunction.MCXENT).build())
.inputType(InputType.recurrent(nIn)) .inputType(InputType.recurrent(nIn))
.build(); .build();
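
Wrapper layers are treated unevenly in this diff: Bidirectional gains a builder(mode, wrappedLayer) entry point, while LastTimeStep and TimeDistributed are still constructed with new around a generated-builder layer. A sketch (class name hypothetical; since the hunk above passes the Bidirectional builder straight into .layer(...), its return type is not visible, so it is held as Object here):

    import org.deeplearning4j.nn.conf.layers.LSTM;
    import org.deeplearning4j.nn.conf.layers.recurrent.Bidirectional;
    import org.deeplearning4j.nn.conf.layers.recurrent.LastTimeStep;

    class WrapperLayerSketch {
        // Bidirectional takes the mode and the wrapped layer up front, matching
        // Bidirectional.builder(m, ...) in the hunk above.
        Object bi = Bidirectional.builder(Bidirectional.Mode.CONCAT,
                LSTM.builder().nIn(3).nOut(3).build());
        // Simple wrappers keep their constructors in this diff.
        LastTimeStep last = new LastTimeStep(LSTM.builder().nOut(3).build());
    }
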


@ -132,27 +132,27 @@ public class UtilLayerGradientChecks extends BaseDL4JTest {
InputType it; InputType it;
switch (inputRank){ switch (inputRank){
case 2: case 2:
l1 = new DenseLayer.Builder().nOut(3).build(); l1 = DenseLayer.builder().nOut(3).build();
l2 = new DenseLayer.Builder().nOut(3).build(); l2 = DenseLayer.builder().nOut(3).build();
l3 = new OutputLayer.Builder().nOut(3).lossFunction(LossFunctions.LossFunction.MSE) l3 = OutputLayer.builder().nOut(3).lossFunction(LossFunctions.LossFunction.MSE)
.activation(Activation.TANH).build(); .activation(Activation.TANH).build();
it = InputType.feedForward(3); it = InputType.feedForward(3);
break; break;
case 3: case 3:
l1 = new SimpleRnn.Builder().nIn(3).nOut(3).activation(Activation.TANH).build(); l1 = SimpleRnn.builder().nIn(3).nOut(3).activation(Activation.TANH).build();
l2 = new SimpleRnn.Builder().nIn(3).nOut(3).activation(Activation.TANH).build(); l2 = SimpleRnn.builder().nIn(3).nOut(3).activation(Activation.TANH).build();
l3 = new RnnOutputLayer.Builder().nIn(3).nOut(3).lossFunction(LossFunctions.LossFunction.SQUARED_LOSS) l3 = RnnOutputLayer.builder().nIn(3).nOut(3).lossFunction(LossFunctions.LossFunction.SQUARED_LOSS)
.activation(Activation.IDENTITY).build(); .activation(Activation.IDENTITY).build();
it = InputType.recurrent(3); it = InputType.recurrent(3);
break; break;
case 4: case 4:
l1 = new ConvolutionLayer.Builder().nOut(5).convolutionMode(ConvolutionMode.Truncate) l1 = ConvolutionLayer.builder().nOut(5).convolutionMode(ConvolutionMode.Truncate)
.stride(1,1).kernelSize(2,2).padding(0,0) .stride(1,1).kernelSize(2,2).padding(0,0)
.build(); .build();
l2 = new ConvolutionLayer.Builder().nOut(5).convolutionMode(ConvolutionMode.Truncate) l2 = ConvolutionLayer.builder().nOut(5).convolutionMode(ConvolutionMode.Truncate)
.stride(1,1).kernelSize(2,2).padding(0,0) .stride(1,1).kernelSize(2,2).padding(0,0)
.build(); .build();
l3 = new OutputLayer.Builder().nOut(5).lossFunction(LossFunctions.LossFunction.SQUARED_LOSS) l3 = OutputLayer.builder().nOut(5).lossFunction(LossFunctions.LossFunction.SQUARED_LOSS)
.activation(Activation.IDENTITY) .activation(Activation.IDENTITY)
.build(); .build();
it = InputType.convolutional(5,5,1); it = InputType.convolutional(5,5,1);
@ -201,14 +201,14 @@ public class UtilLayerGradientChecks extends BaseDL4JTest {
.seed(12345) .seed(12345)
.updater(Updater.NONE.getIUpdaterWithDefaultConfig()) .updater(Updater.NONE.getIUpdaterWithDefaultConfig())
.list() .list()
.layer(new DenseLayer.Builder().nIn(10).nOut(10) .layer(DenseLayer.builder().nIn(10).nOut(10)
.activation(Activation.TANH).weightInit(WeightInit.XAVIER).build()) .activation(Activation.TANH).weightInit(WeightInit.XAVIER).build())
.layer(new FrozenLayerWithBackprop(new DenseLayer.Builder().nIn(10).nOut(10) .layer(new FrozenLayerWithBackprop(DenseLayer.builder().nIn(10).nOut(10)
.activation(Activation.TANH).weightInit(WeightInit.XAVIER).build())) .activation(Activation.TANH).weightInit(WeightInit.XAVIER).build()))
.layer(new FrozenLayerWithBackprop( .layer(new FrozenLayerWithBackprop(
new DenseLayer.Builder().nIn(10).nOut(10).activation(Activation.TANH) DenseLayer.builder().nIn(10).nOut(10).activation(Activation.TANH)
.weightInit(WeightInit.XAVIER).build())) .weightInit(WeightInit.XAVIER).build()))
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(10).nOut(10).build()) .activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf2); MultiLayerNetwork net = new MultiLayerNetwork(conf2);

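Note in the hunk above that wrapper layers (FrozenLayerWithBackprop here, TimeDistributed earlier) are still instantiated with new; only the concrete layer they wrap moves to builder(). A minimal sketch of that split, assuming this branch's API, with illustrative sizes:

import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.conf.layers.misc.FrozenLayerWithBackprop;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class WrapperLayerSketch {
    public static void main(String[] args) {
        NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                // concrete layers use the generated static factory ...
                .layer(DenseLayer.builder().nIn(10).nOut(10).activation(Activation.TANH).build())
                // ... while the wrapper itself is still a plain constructor call
                .layer(new FrozenLayerWithBackprop(
                        DenseLayer.builder().nIn(10).nOut(10).activation(Activation.TANH).build()))
                .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
                .build();
        System.out.println(conf.toJson());
    }
}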
View File

@@ -99,14 +99,14 @@ public class VaeGradientCheckTests extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.l2Bias(biasL2[i]).l1Bias(biasL1[i]) .l2Bias(biasL2[i]).l1Bias(biasL1[i])
.updater(new NoOp()).seed(12345L).list() .updater(new NoOp()).seed(12345L).list()
.layer(0, new VariationalAutoencoder.Builder().nIn(4) .layer(0, VariationalAutoencoder.builder().nIn(4)
.nOut(3).encoderLayerSizes(encoderSizes) .nOut(3).encoderLayerSizes(encoderSizes)
.decoderLayerSizes(decoderSizes) .decoderLayerSizes(decoderSizes)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
.activation(afn) .activation(afn)
.build()) .build())
.layer(1, new OutputLayer.Builder(lf) .layer(1, OutputLayer.builder(lf)
.activation(outputActivation).nIn(3).nOut(3) .activation(outputActivation).nIn(3).nOut(3)
.dist(new NormalDistribution(0, 1)) .dist(new NormalDistribution(0, 1))
@@ -173,7 +173,7 @@ public class VaeGradientCheckTests extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.l1(l1).l2Bias(biasL2[i]).l1Bias(biasL1[i]).updater(new NoOp()) .l1(l1).l2Bias(biasL2[i]).l1Bias(biasL1[i]).updater(new NoOp())
.seed(12345L).weightInit(WeightInit.XAVIER).list() .seed(12345L).weightInit(WeightInit.XAVIER).list()
.layer(0, new VariationalAutoencoder.Builder().nIn(4).nOut(3) .layer(0, VariationalAutoencoder.builder().nIn(4).nOut(3)
.encoderLayerSizes(encoderSizes).decoderLayerSizes(decoderSizes) .encoderLayerSizes(encoderSizes).decoderLayerSizes(decoderSizes)
.pzxActivationFunction(pzxAfn) .pzxActivationFunction(pzxAfn)
.reconstructionDistribution( .reconstructionDistribution(
@@ -263,7 +263,7 @@ public class VaeGradientCheckTests extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.seed(12345L).dist(new NormalDistribution(0, 1)) .seed(12345L).dist(new NormalDistribution(0, 1))
.list().layer(0, .list().layer(0,
new VariationalAutoencoder.Builder().nIn(inOutSize).nOut(3) VariationalAutoencoder.builder().nIn(inOutSize).nOut(3)
.encoderLayerSizes(4).decoderLayerSizes(3) .encoderLayerSizes(4).decoderLayerSizes(3)
.pzxActivationFunction(Activation.TANH) .pzxActivationFunction(Activation.TANH)
.reconstructionDistribution( .reconstructionDistribution(
@@ -306,7 +306,7 @@ public class VaeGradientCheckTests extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.updater(new NoOp()) .updater(new NoOp())
.seed(12345L).weightInit(WeightInit.XAVIER).list() .seed(12345L).weightInit(WeightInit.XAVIER).list()
.layer(0, new VariationalAutoencoder.Builder().nIn(4).nOut(3).encoderLayerSizes(2, 3) .layer(0, VariationalAutoencoder.builder().nIn(4).nOut(3).encoderLayerSizes(2, 3)
.decoderLayerSizes(4, 3).pzxActivationFunction(Activation.TANH) .decoderLayerSizes(4, 3).pzxActivationFunction(Activation.TANH)
.reconstructionDistribution( .reconstructionDistribution(
new GaussianReconstructionDistribution(Activation.TANH)) new GaussianReconstructionDistribution(Activation.TANH))

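The VariationalAutoencoder hunks show that the varargs configuration methods carry over unchanged to the generated builder. A minimal sketch, assuming this branch's API; the sizes mirror the last hunk above:

import org.deeplearning4j.nn.conf.layers.variational.GaussianReconstructionDistribution;
import org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder;
import org.nd4j.linalg.activations.Activation;

public class VaeBuilderSketch {
    public static void main(String[] args) {
        // encoderLayerSizes/decoderLayerSizes are varargs, as in the hunks above
        VariationalAutoencoder vae = VariationalAutoencoder.builder()
                .nIn(4).nOut(3)
                .encoderLayerSizes(2, 3)
                .decoderLayerSizes(4, 3)
                .pzxActivationFunction(Activation.TANH)
                .reconstructionDistribution(
                        new GaussianReconstructionDistribution(Activation.TANH))
                .build();
        System.out.println(vae);
    }
}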
View File

@@ -116,7 +116,7 @@ public class YoloGradientCheckTests extends BaseDL4JTest {
.l1(l1[i]).l2(l2[i]) .l1(l1[i]).l2(l2[i])
.convolutionMode(ConvolutionMode.Same) .convolutionMode(ConvolutionMode.Same)
.list() .list()
.layer(new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1) .layer(ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1)
.dataFormat(format) .dataFormat(format)
.nIn(depthIn).nOut(yoloDepth).build())//output: (5-2+0)/1+1 = 4 .nIn(depthIn).nOut(yoloDepth).build())//output: (5-2+0)/1+1 = 4
.layer(new Yolo2OutputLayer.Builder() .layer(new Yolo2OutputLayer.Builder()
@@ -234,9 +234,9 @@ public class YoloGradientCheckTests extends BaseDL4JTest {
.dist(new GaussianDistribution(0,0.1)) .dist(new GaussianDistribution(0,0.1))
.seed(12345) .seed(12345)
.list() .list()
.layer(new ConvolutionLayer.Builder().kernelSize(3,3).stride(1,1).nOut(4).build()) .layer(ConvolutionLayer.builder().kernelSize(3,3).stride(1,1).nOut(4).build())
.layer(new SubsamplingLayer.Builder().kernelSize(2,2).stride(2,2).build()) .layer(SubsamplingLayer.builder().kernelSize(2,2).stride(2,2).build())
.layer(new ConvolutionLayer.Builder().activation(Activation.IDENTITY).kernelSize(3,3).stride(1,1).nOut(depthOut).build()) .layer(ConvolutionLayer.builder().activation(Activation.IDENTITY).kernelSize(3,3).stride(1,1).nOut(depthOut).build())
.layer(new Yolo2OutputLayer.Builder() .layer(new Yolo2OutputLayer.Builder()
.boundingBoxPriors(bbPriors) .boundingBoxPriors(bbPriors)
.build()) .build())

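The YOLO hunks are a partial migration: ConvolutionLayer and SubsamplingLayer move to builder(), while Yolo2OutputLayer keeps its hand-written inner Builder in this commit. A sketch of the resulting mixed style, assuming this branch's API; the input shape and bounding-box priors are illustrative:

import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
import org.deeplearning4j.nn.conf.layers.SubsamplingLayer;
import org.deeplearning4j.nn.conf.layers.objdetect.Yolo2OutputLayer;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class MixedBuilderSketch {
    public static void main(String[] args) {
        INDArray bbPriors = Nd4j.create(new double[][]{{1, 1}, {2, 2}}); // illustrative priors
        NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                .layer(ConvolutionLayer.builder().kernelSize(3, 3).stride(1, 1).nOut(4).build())
                .layer(SubsamplingLayer.builder().kernelSize(2, 2).stride(2, 2).build())
                // not yet migrated in this commit: still the inner Builder class
                .layer(new Yolo2OutputLayer.Builder().boundingBoxPriors(bbPriors).build())
                .inputType(InputType.convolutional(5, 5, 1))
                .build();
        System.out.println(conf.toJson());
    }
}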
View File

@@ -62,9 +62,9 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
.dist(new NormalDistribution(0, 1)).updater(new NoOp()) .dist(new NormalDistribution(0, 1)).updater(new NoOp())
.graphBuilder().addInputs("input") .graphBuilder().addInputs("input")
.appendLayer("firstLayer", .appendLayer("firstLayer",
new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build()) DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build())
.addLayer("outputLayer", .addLayer("outputLayer",
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT) OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(5).nOut(3).build(), .activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
"firstLayer") "firstLayer")
.setOutputs("outputLayer").build(); .setOutputs("outputLayer").build();
@@ -83,20 +83,20 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.graphBuilder().addInputs("input") .graphBuilder().addInputs("input")
.addLayer("cnn1", .addLayer("cnn1",
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5) ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
.build(), .build(),
"input") "input")
.addLayer("cnn2", .addLayer("cnn2",
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5) ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
.build(), .build(),
"input") "input")
.addLayer("max1", .addLayer("max1",
new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX) SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX)
.kernelSize(2, 2).build(), .kernelSize(2, 2).build(),
"cnn1", "cnn2") "cnn1", "cnn2")
.addLayer("dnn1", new DenseLayer.Builder().nOut(7).build(), "max1") .addLayer("dnn1", DenseLayer.builder().nOut(7).build(), "max1")
.addLayer("max2", new SubsamplingLayer.Builder().build(), "max1") .addLayer("max2", SubsamplingLayer.builder().build(), "max1")
.addLayer("output", new OutputLayer.Builder().nIn(7).nOut(10).activation(Activation.SOFTMAX).build(), "dnn1", .addLayer("output", OutputLayer.builder().nIn(7).nOut(10).activation(Activation.SOFTMAX).build(), "dnn1",
"max2") "max2")
.setOutputs("output") .setOutputs("output")
.inputPreProcessor("cnn1", new FeedForwardToCnnPreProcessor(32, 32, 3)) .inputPreProcessor("cnn1", new FeedForwardToCnnPreProcessor(32, 32, 3))
@@ -119,20 +119,20 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.graphBuilder().addInputs("input1", "input2") .graphBuilder().addInputs("input1", "input2")
.addLayer("cnn1", .addLayer("cnn1",
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5) ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
.build(), .build(),
"input1") "input1")
.addLayer("cnn2", .addLayer("cnn2",
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5) ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
.build(), .build(),
"input2") "input2")
.addVertex("merge1", new MergeVertex(), "cnn1", "cnn2") .addVertex("merge1", new MergeVertex(), "cnn1", "cnn2")
.addVertex("subset1", new SubsetVertex(0, 1), "merge1") .addVertex("subset1", new SubsetVertex(0, 1), "merge1")
.addLayer("dense1", new DenseLayer.Builder().nIn(20).nOut(5).build(), "subset1") .addLayer("dense1", DenseLayer.builder().nIn(20).nOut(5).build(), "subset1")
.addLayer("dense2", new DenseLayer.Builder().nIn(20).nOut(5).build(), "subset1") .addLayer("dense2", DenseLayer.builder().nIn(20).nOut(5).build(), "subset1")
.addVertex("add", new ElementWiseVertex(ElementWiseVertex.Op.Add), "dense1", .addVertex("add", new ElementWiseVertex(ElementWiseVertex.Op.Add), "dense1",
"dense2") "dense2")
.addLayer("out", new OutputLayer.Builder().nIn(1).nOut(1).activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "add") .addLayer("out", OutputLayer.builder().nIn(1).nOut(1).activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "add")
.setOutputs("out").build(); .setOutputs("out").build();
String json = conf.toJson(); String json = conf.toJson();
@@ -150,8 +150,8 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
//Test no inputs for a layer: //Test no inputs for a layer:
try { try {
NeuralNetConfiguration.builder().graphBuilder().addInputs("input1") NeuralNetConfiguration.builder().graphBuilder().addInputs("input1")
.addLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "input1") .addLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build(), "input1")
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).build()).setOutputs("out") .addLayer("out", OutputLayer.builder().nIn(2).nOut(2).build()).setOutputs("out")
.build(); .build();
fail("No exception thrown for invalid configuration"); fail("No exception thrown for invalid configuration");
} catch (IllegalStateException e) { } catch (IllegalStateException e) {
@@ -162,8 +162,8 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
// Use appendLayer on first layer // Use appendLayer on first layer
try { try {
NeuralNetConfiguration.builder().graphBuilder() NeuralNetConfiguration.builder().graphBuilder()
.appendLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build()) .appendLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build())
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).build()).setOutputs("out") .addLayer("out", OutputLayer.builder().nIn(2).nOut(2).build()).setOutputs("out")
.build(); .build();
fail("No exception thrown for invalid configuration"); fail("No exception thrown for invalid configuration");
} catch (IllegalStateException e) { } catch (IllegalStateException e) {
@@ -174,8 +174,8 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
//Test no network inputs //Test no network inputs
try { try {
NeuralNetConfiguration.builder().graphBuilder() NeuralNetConfiguration.builder().graphBuilder()
.addLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "input1") .addLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build(), "input1")
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).build(), "dense1") .addLayer("out", OutputLayer.builder().nIn(2).nOut(2).build(), "dense1")
.setOutputs("out").build(); .setOutputs("out").build();
fail("No exception thrown for invalid configuration"); fail("No exception thrown for invalid configuration");
} catch (IllegalStateException e) { } catch (IllegalStateException e) {
@@ -186,8 +186,8 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
//Test no network outputs //Test no network outputs
try { try {
NeuralNetConfiguration.builder().graphBuilder().addInputs("input1") NeuralNetConfiguration.builder().graphBuilder().addInputs("input1")
.addLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "input1") .addLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build(), "input1")
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).build(), "dense1").build(); .addLayer("out", OutputLayer.builder().nIn(2).nOut(2).build(), "dense1").build();
fail("No exception thrown for invalid configuration"); fail("No exception thrown for invalid configuration");
} catch (IllegalStateException e) { } catch (IllegalStateException e) {
//OK - exception is good //OK - exception is good
@@ -197,8 +197,8 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
//Test: invalid input //Test: invalid input
try { try {
NeuralNetConfiguration.builder().graphBuilder().addInputs("input1") NeuralNetConfiguration.builder().graphBuilder().addInputs("input1")
.addLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "input1") .addLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build(), "input1")
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).build(), "thisDoesntExist") .addLayer("out", OutputLayer.builder().nIn(2).nOut(2).build(), "thisDoesntExist")
.setOutputs("out").build(); .setOutputs("out").build();
fail("No exception thrown for invalid configuration"); fail("No exception thrown for invalid configuration");
} catch (IllegalStateException e) { } catch (IllegalStateException e) {
@@ -209,10 +209,10 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
//Test: graph with cycles //Test: graph with cycles
try { try {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("input1") ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("input1")
.addLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "input1", "dense3") .addLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build(), "input1", "dense3")
.addLayer("dense2", new DenseLayer.Builder().nIn(2).nOut(2).build(), "dense1") .addLayer("dense2", DenseLayer.builder().nIn(2).nOut(2).build(), "dense1")
.addLayer("dense3", new DenseLayer.Builder().nIn(2).nOut(2).build(), "dense2") .addLayer("dense3", DenseLayer.builder().nIn(2).nOut(2).build(), "dense2")
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).lossFunction(LossFunctions.LossFunction.MSE).build(), "dense1") .addLayer("out", OutputLayer.builder().nIn(2).nOut(2).lossFunction(LossFunctions.LossFunction.MSE).build(), "dense1")
.setOutputs("out").build(); .setOutputs("out").build();
//Cycle detection happens in ComputationGraph.init() //Cycle detection happens in ComputationGraph.init()
ComputationGraph graph = new ComputationGraph(conf); ComputationGraph graph = new ComputationGraph(conf);
@@ -229,20 +229,20 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
NeuralNetConfiguration.builder().graphBuilder().addInputs("input1", "input2") NeuralNetConfiguration.builder().graphBuilder().addInputs("input1", "input2")
.setInputTypes(new InputType.InputTypeRecurrent(10, 12)) .setInputTypes(new InputType.InputTypeRecurrent(10, 12))
.addLayer("cnn1", .addLayer("cnn1",
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5) ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
.build(), .build(),
"input1") "input1")
.addLayer("cnn2", .addLayer("cnn2",
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5) ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
.build(), .build(),
"input2") "input2")
.addVertex("merge1", new MergeVertex(), "cnn1", "cnn2") .addVertex("merge1", new MergeVertex(), "cnn1", "cnn2")
.addVertex("subset1", new SubsetVertex(0, 1), "merge1") .addVertex("subset1", new SubsetVertex(0, 1), "merge1")
.addLayer("dense1", new DenseLayer.Builder().nIn(20).nOut(5).build(), "subset1") .addLayer("dense1", DenseLayer.builder().nIn(20).nOut(5).build(), "subset1")
.addLayer("dense2", new DenseLayer.Builder().nIn(20).nOut(5).build(), "subset1") .addLayer("dense2", DenseLayer.builder().nIn(20).nOut(5).build(), "subset1")
.addVertex("add", new ElementWiseVertex(ElementWiseVertex.Op.Add), "dense1", .addVertex("add", new ElementWiseVertex(ElementWiseVertex.Op.Add), "dense1",
"dense2") "dense2")
.addLayer("out", new OutputLayer.Builder().nIn(1).nOut(1).activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "add") .addLayer("out", OutputLayer.builder().nIn(1).nOut(1).activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "add")
.setOutputs("out").build(); .setOutputs("out").build();
fail("No exception thrown for invalid configuration"); fail("No exception thrown for invalid configuration");
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
@@ -283,9 +283,9 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
@Test @Test
public void testOutputOrderDoesntChangeWhenCloning() { public void testOutputOrderDoesntChangeWhenCloning() {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in") ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
.addLayer("out1", new OutputLayer.Builder().nIn(1).nOut(1).build(), "in") .addLayer("out1", OutputLayer.builder().nIn(1).nOut(1).build(), "in")
.addLayer("out2", new OutputLayer.Builder().nIn(1).nOut(1).build(), "in") .addLayer("out2", OutputLayer.builder().nIn(1).nOut(1).build(), "in")
.addLayer("out3", new OutputLayer.Builder().nIn(1).nOut(1).build(), "in") .addLayer("out3", OutputLayer.builder().nIn(1).nOut(1).build(), "in")
.validateOutputLayerConfig(false) .validateOutputLayerConfig(false)
.setOutputs("out1", "out2", "out3").build(); .setOutputs("out1", "out2", "out3").build();
@@ -301,14 +301,14 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
public void testAllowDisconnectedLayers() { public void testAllowDisconnectedLayers() {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in") ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
.addLayer("bidirectional", .addLayer("bidirectional",
new Bidirectional(new LSTM.Builder().activation(Activation.TANH).nOut(10).build()), Bidirectional.builder(LSTM.builder().activation(Activation.TANH).nOut(10).build()).build(),
"in") "in")
.addLayer("out", new RnnOutputLayer.Builder().nOut(6) .addLayer("out", RnnOutputLayer.builder().nOut(6)
.lossFunction(LossFunctions.LossFunction.MCXENT) .lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.build(), "bidirectional") .build(), "bidirectional")
.addLayer("disconnected_layer", .addLayer("disconnected_layer",
new Bidirectional(new LSTM.Builder().activation(Activation.TANH).nOut(10).build()), Bidirectional.builder(LSTM.builder().activation(Activation.TANH).nOut(10).build()).build(),
"in") "in")
.setOutputs("out") .setOutputs("out")
.setInputTypes(new InputType.InputTypeRecurrent(10, 12)) .setInputTypes(new InputType.InputTypeRecurrent(10, 12))
@@ -323,9 +323,9 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
public void testBidirectionalGraphSummary() { public void testBidirectionalGraphSummary() {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in") ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
.addLayer("bidirectional", .addLayer("bidirectional",
new Bidirectional(new LSTM.Builder().activation(Activation.TANH).nOut(10).build()), Bidirectional.builder(LSTM.builder().activation(Activation.TANH).nOut(10).build()).build(),
"in") "in")
.addLayer("out", new RnnOutputLayer.Builder().nOut(6) .addLayer("out", RnnOutputLayer.builder().nOut(6)
.lossFunction(LossFunctions.LossFunction.MCXENT) .lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.build(), "bidirectional") .build(), "bidirectional")
@@ -411,10 +411,10 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
NeuralNetConfiguration.builder() NeuralNetConfiguration.builder()
.graphBuilder() .graphBuilder()
.addInputs("in") .addInputs("in")
.layer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in") .layer("0", DenseLayer.builder().nIn(10).nOut(10).build(), "in")
.layer("1", .layer("1",
!lossLayer ? new OutputLayer.Builder().nIn(10).nOut(nOut[i]).activation(activations[i]).lossFunction(lf[i]).build() !lossLayer ? OutputLayer.builder().nIn(10).nOut(nOut[i]).activation(activations[i]).lossFunction(lf[i]).build()
: new LossLayer.Builder().activation(activations[i]).lossFunction(lf[i]).build(), "0") : LossLayer.builder().activation(activations[i]).lossFunction(lf[i].getILossFunction()).build(), "0")
.setOutputs("1") .setOutputs("1")
.validateOutputLayerConfig(validate) .validateOutputLayerConfig(validate)
.build(); .build();

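One subtlety in the hunk above: lf[i] is a LossFunctions.LossFunction enum value, and the migrated LossLayer.builder().lossFunction(...) apparently expects an ILossFunction, hence the added getILossFunction() conversion. A minimal sketch of that conversion, assuming that signature on this branch:

import org.deeplearning4j.nn.conf.layers.LossLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.ILossFunction;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class LossLayerBuilderSketch {
    public static void main(String[] args) {
        // enum -> ILossFunction, as required by the generated builder
        ILossFunction mse = LossFunctions.LossFunction.MSE.getILossFunction();
        LossLayer lossLayer = LossLayer.builder()
                .activation(Activation.IDENTITY)
                .lossFunction(mse)
                .build();
        System.out.println(lossLayer);
    }
}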
View File

@@ -99,8 +99,8 @@ public class JsonTest extends BaseDL4JTest {
for (int i = 0; i < lossFunctions.length; i++) { for (int i = 0; i < lossFunctions.length; i++) {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345).updater(Updater.ADAM.getIUpdaterWithDefaultConfig()) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345).updater(Updater.ADAM.getIUpdaterWithDefaultConfig())
.layer(0, new DenseLayer.Builder().nIn(4).nOut(nOut[i]).activation(Activation.TANH).build()) .layer(0, DenseLayer.builder().nIn(4).nOut(nOut[i]).activation(Activation.TANH).build())
.layer(1, new LossLayer.Builder().lossFunction(lossFunctions[i]) .layer(1, LossLayer.builder().lossFunction(lossFunctions[i])
.activation(outputActivationFn[i]).build()) .activation(outputActivationFn[i]).build())
.validateOutputLayerConfig(false).build(); .validateOutputLayerConfig(false).build();

View File

@@ -69,9 +69,9 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
private static NeuralNetConfiguration getConf() { private static NeuralNetConfiguration getConf() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345L) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345L)
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2) .layer(0, DenseLayer.builder().nIn(2).nOut(2)
.dist(new NormalDistribution(0, 1)).build()) .dist(new NormalDistribution(0, 1)).build())
.layer(1, new OutputLayer.Builder().nIn(2).nOut(1) .layer(1, OutputLayer.builder().nIn(2).nOut(1)
.activation(Activation.TANH) .activation(Activation.TANH)
.dist(new NormalDistribution(0, 1)).lossFunction(LossFunctions.LossFunction.MSE) .dist(new NormalDistribution(0, 1)).lossFunction(LossFunctions.LossFunction.MSE)
.build()) .build())
@@ -82,7 +82,7 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
@Test @Test
public void testJson() throws Exception { public void testJson() throws Exception {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder() NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.layer(0, new DenseLayer.Builder().dist(new NormalDistribution(1, 1e-1)).build()) .layer(0, DenseLayer.builder().dist(new NormalDistribution(1, 1e-1)).build())
.inputPreProcessor(0, new CnnToFeedForwardPreProcessor()).build(); .inputPreProcessor(0, new CnnToFeedForwardPreProcessor()).build();
String json = conf.toJson(); String json = conf.toJson();
@@ -123,17 +123,17 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
.l1(1e-1).l2(2e-4).weightNoise(new DropConnect(0.5)).miniBatch(true) .l1(1e-1).l2(2e-4).weightNoise(new DropConnect(0.5)).miniBatch(true)
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT) .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
.layer(0, .layer(0,
new ConvolutionLayer.Builder(5, 5).nOut(5).dropOut(0.5).weightInit(WeightInit.XAVIER) ConvolutionLayer.builder(5, 5).nOut(5).dropOut(0.5).weightInit(WeightInit.XAVIER)
.activation(Activation.RELU).build()) .activation(Activation.RELU).build())
.layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[]{2, 2}) .layer(1, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX, new int[]{2, 2})
.build()) .build())
.layer(2, .layer(2,
new ConvolutionLayer.Builder(3, 3).nOut(10).dropOut(0.5).weightInit(WeightInit.XAVIER) ConvolutionLayer.builder(3, 3).nOut(10).dropOut(0.5).weightInit(WeightInit.XAVIER)
.activation(Activation.RELU).build()) .activation(Activation.RELU).build())
.layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[]{2, 2}) .layer(3, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX, new int[]{2, 2})
.build()) .build())
.layer(4, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build()) .layer(4, DenseLayer.builder().nOut(100).activation(Activation.RELU).build())
.layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD) .layer(5, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
.nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX) .nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
.build()) .build())
@@ -157,15 +157,15 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
NeuralNetConfiguration.NeuralNetConfigurationBuilder builder = NeuralNetConfiguration.builder().seed(seed) NeuralNetConfiguration.NeuralNetConfigurationBuilder builder = NeuralNetConfiguration.builder().seed(seed)
.l1(1e-1).l2(2e-4).dropOut(0.5).miniBatch(true) .l1(1e-1).l2(2e-4).dropOut(0.5).miniBatch(true)
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT) .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
.layer(new ConvolutionLayer.Builder(5, 5).nOut(5).dropOut(0.5).weightInit(WeightInit.XAVIER) .layer(ConvolutionLayer.builder(5, 5).nOut(5).dropOut(0.5).weightInit(WeightInit.XAVIER)
.activation(Activation.RELU).build()) .activation(Activation.RELU).build())
.layer(new Upsampling2D.Builder().size(2).build()) .layer(Upsampling2D.builder().size(2).build())
.layer(2, .layer(2,
new ConvolutionLayer.Builder(3, 3).nOut(10).dropOut(0.5).weightInit(WeightInit.XAVIER) ConvolutionLayer.builder(3, 3).nOut(10).dropOut(0.5).weightInit(WeightInit.XAVIER)
.activation(Activation.RELU).build()) .activation(Activation.RELU).build())
.layer(new Upsampling2D.Builder().size(2).build()) .layer(Upsampling2D.builder().size(2).build())
.layer(4, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build()) .layer(4, DenseLayer.builder().nOut(100).activation(Activation.RELU).build())
.layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD) .layer(5, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
.nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX) .nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
.build()) .build())
@@ -181,9 +181,9 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
public void testGlobalPoolingJson() { public void testGlobalPoolingJson() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new NoOp()) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new NoOp())
.dist(new NormalDistribution(0, 1.0)).seed(12345L) .dist(new NormalDistribution(0, 1.0)).seed(12345L)
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(5).build()) .layer(0, ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(5).build())
.layer(1, new GlobalPoolingLayer.Builder().poolingType(PoolingType.PNORM).pnorm(3).build()) .layer(1, GlobalPoolingLayer.builder().poolingType(PoolingType.PNORM).pnorm(3).build())
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(3).build()) .activation(Activation.SOFTMAX).nOut(3).build())
.inputType(InputType.convolutional(32, 32, 1)).build(); .inputType(InputType.convolutional(32, 32, 1)).build();
@@ -196,7 +196,7 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
@Test @Test
public void testYaml() throws Exception { public void testYaml() throws Exception {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder() NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.layer(0, new DenseLayer.Builder().dist(new NormalDistribution(1, 1e-1)).build()) .layer(0, DenseLayer.builder().dist(new NormalDistribution(1, 1e-1)).build())
.inputPreProcessor(0, new CnnToFeedForwardPreProcessor()).build(); .inputPreProcessor(0, new CnnToFeedForwardPreProcessor()).build();
String json = conf.toYaml(); String json = conf.toYaml();
NeuralNetConfiguration from = NeuralNetConfiguration.fromYaml(json); NeuralNetConfiguration from = NeuralNetConfiguration.fromYaml(json);
@@ -226,8 +226,8 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
@Test @Test
public void testClone() { public void testClone() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder() NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.layer(0, new DenseLayer.Builder().build()) .layer(0, DenseLayer.builder().build())
.layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).build()) .layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).build())
.inputPreProcessor(1, new CnnToFeedForwardPreProcessor()).build(); .inputPreProcessor(1, new CnnToFeedForwardPreProcessor()).build();
NeuralNetConfiguration conf2 = conf.clone(); NeuralNetConfiguration conf2 = conf.clone();
@@ -301,8 +301,8 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
try { try {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
.layer(1, new DenseLayer.Builder().nIn(3).nOut(4).build()) .layer(1, DenseLayer.builder().nIn(3).nOut(4).build())
.layer(2, new OutputLayer.Builder().nIn(4).nOut(5).build()) .layer(2, OutputLayer.builder().nIn(4).nOut(5).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@@ -317,8 +317,8 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
try { try {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
.layer(0, new DenseLayer.Builder().nIn(3).nOut(4).build()) .layer(0, DenseLayer.builder().nIn(3).nOut(4).build())
.layer(2, new OutputLayer.Builder().nIn(4).nOut(5).build()) .layer(2, OutputLayer.builder().nIn(4).nOut(5).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@@ -336,8 +336,8 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
public void testListOverloads() { public void testListOverloads() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
.layer(0, new DenseLayer.Builder().nIn(3).nOut(4).build()) .layer(0, DenseLayer.builder().nIn(3).nOut(4).build())
.layer(1, new OutputLayer.Builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build()) .layer(1, OutputLayer.builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@@ -350,16 +350,16 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
assertEquals(5, ol.getNOut()); assertEquals(5, ol.getNOut());
NeuralNetConfiguration conf2 = NeuralNetConfiguration.builder().seed(12345) NeuralNetConfiguration conf2 = NeuralNetConfiguration.builder().seed(12345)
.layer(0, new DenseLayer.Builder().nIn(3).nOut(4).build()) .layer(0, DenseLayer.builder().nIn(3).nOut(4).build())
.layer(1, new OutputLayer.Builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build()) .layer(1, OutputLayer.builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
.build(); .build();
MultiLayerNetwork net2 = new MultiLayerNetwork(conf2); MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
net2.init(); net2.init();
NeuralNetConfiguration conf3 = NeuralNetConfiguration.builder().seed(12345) NeuralNetConfiguration conf3 = NeuralNetConfiguration.builder().seed(12345)
.layer(new DenseLayer.Builder().nIn(3).nOut(4).build()) .layer(DenseLayer.builder().nIn(3).nOut(4).build())
.layer( .layer(
new OutputLayer.Builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build()) OutputLayer.builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
.build(); .build();
MultiLayerNetwork net3 = new MultiLayerNetwork(conf3); MultiLayerNetwork net3 = new MultiLayerNetwork(conf3);
net3.init(); net3.init();
@@ -375,14 +375,16 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
.updater(new Adam(1e-2)) .updater(new Adam(1e-2))
.biasUpdater(new Adam(0.5)) .biasUpdater(new Adam(0.5))
.layer(0, new ConvolutionLayer.Builder(5, 5).nOut(5).weightInit(WeightInit.XAVIER) .layer(0, ConvolutionLayer.builder(5, 5).nOut(5).weightInit(WeightInit.XAVIER)
.activation(Activation.RELU).build()) .activation(Activation.RELU).build())
.layer(1, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build()) .layer(1, DenseLayer.builder().nOut(100).activation(Activation.RELU).build())
.layer(2, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build()) .layer(2, DenseLayer.builder().nOut(100).activation(Activation.RELU).build())
.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(10) .layer(3, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(10)
.weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).build()) .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).build())
.inputType(InputType.convolutional(28, 28, 1)).build(); .inputType(InputType.convolutional(28, 28, 1)).build();
conf.init();
BaseLayerConfiguration l0 = (BaseLayerConfiguration) conf.getConf(0).getLayer(); BaseLayerConfiguration l0 = (BaseLayerConfiguration) conf.getConf(0).getLayer();
BaseLayerConfiguration l1 = (BaseLayerConfiguration) conf.getConf(1).getLayer(); BaseLayerConfiguration l1 = (BaseLayerConfiguration) conf.getConf(1).getLayer();
BaseLayerConfiguration l2 = (BaseLayerConfiguration) conf.getConf(2).getLayer(); BaseLayerConfiguration l2 = (BaseLayerConfiguration) conf.getConf(2).getLayer();
@@ -432,10 +434,10 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
try { try {
NeuralNetConfiguration.builder() NeuralNetConfiguration.builder()
.layer(new DenseLayer.Builder().nIn(10).nOut(10).build()) .layer(DenseLayer.builder().nIn(10).nOut(10).build())
.layer(!lossLayer ? new OutputLayer.Builder().nIn(10).nOut(nOut[i]) .layer(!lossLayer ? OutputLayer.builder().nIn(10).nOut(nOut[i])
.activation(activations[i]).lossFunction(lf[i]).build() .activation(activations[i]).lossFunction(lf[i]).build()
: new LossLayer.Builder().activation(activations[i]).lossFunction(lf[i]) : LossLayer.builder().activation(activations[i]).lossFunction(lf[i].getILossFunction())
.build()) .build())
.validateOutputLayerConfig(validate) .validateOutputLayerConfig(validate)
.build(); .build();

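The @@ -375 hunk above also inserts a conf.init() call before the per-layer assertions. A sketch of the assumed contract on this branch (import paths and the initialisation behaviour are assumptions, not confirmed by the diff): per-layer configuration, such as inherited updaters and nIn values derived from inputType, is only fully resolved once the configuration is initialised.

import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.BaseLayerConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class ConfInitSketch {
    public static void main(String[] args) {
        NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                .layer(0, DenseLayer.builder().nIn(3).nOut(4).activation(Activation.RELU).build())
                .layer(1, OutputLayer.builder().nIn(4).nOut(2)
                        .activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                .build();
        conf.init(); // resolve defaults before inspecting per-layer configuration
        BaseLayerConfiguration l0 = (BaseLayerConfiguration) conf.getConf(0).getLayer();
        System.out.println(l0.getNOut());
    }
}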
View File

@@ -67,9 +67,9 @@ public class MultiNeuralNetConfLayerBuilderTest extends BaseDL4JTest {
NeuralNetConfiguration multiConf1 = NeuralNetConfiguration multiConf1 =
NeuralNetConfiguration.builder() NeuralNetConfiguration.builder()
.layer(0, new DenseLayer.Builder().nIn(newNumIn).nOut(newNumOut).activation(act) .layer(0, DenseLayer.builder().nIn(newNumIn).nOut(newNumOut).activation(act)
.build()) .build())
.layer(1, new DenseLayer.Builder().nIn(newNumIn + 1).nOut(newNumOut + 1) .layer(1, DenseLayer.builder().nIn(newNumIn + 1).nOut(newNumOut + 1)
.activation(act).build()) .activation(act).build())
.build(); .build();
NeuralNetConfiguration firstLayer = multiConf1.getConf(0).getNetConfiguration(); NeuralNetConfiguration firstLayer = multiConf1.getConf(0).getNetConfiguration();

View File

@@ -113,7 +113,7 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
@Test @Test
public void testRNG() { public void testRNG() {
DenseLayer layer = new DenseLayer.Builder().nIn(trainingSet.numInputs()).nOut(trainingSet.numOutcomes()) DenseLayer layer = DenseLayer.builder().nIn(trainingSet.numInputs()).nOut(trainingSet.numOutcomes())
.weightInit(WeightInit.UNIFORM).activation(Activation.TANH).build(); .weightInit(WeightInit.UNIFORM).activation(Activation.TANH).build();
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
@@ -125,7 +125,7 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
INDArray modelWeights = model.getParam(DefaultParamInitializer.WEIGHT_KEY); INDArray modelWeights = model.getParam(DefaultParamInitializer.WEIGHT_KEY);
DenseLayer layer2 = new DenseLayer.Builder().nIn(trainingSet.numInputs()).nOut(trainingSet.numOutcomes()) DenseLayer layer2 = DenseLayer.builder().nIn(trainingSet.numInputs()).nOut(trainingSet.numOutcomes())
.weightInit(WeightInit.UNIFORM).activation(Activation.TANH).build(); .weightInit(WeightInit.UNIFORM).activation(Activation.TANH).build();
NeuralNetConfiguration conf2 = NeuralNetConfiguration.builder().seed(123) NeuralNetConfiguration conf2 = NeuralNetConfiguration.builder().seed(123)
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).layer(layer2).build(); .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).layer(layer2).build();
@@ -197,7 +197,7 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
private static NeuralNetConfiguration getConfig(int nIn, int nOut, IWeightInit weightInit, boolean pretrain) { private static NeuralNetConfiguration getConfig(int nIn, int nOut, IWeightInit weightInit, boolean pretrain) {
DenseLayer layer = new DenseLayer.Builder().nIn(nIn).nOut(nOut).weightInit(weightInit) DenseLayer layer = DenseLayer.builder().nIn(nIn).nOut(nOut).weightInit(weightInit)
.activation(Activation.TANH).build(); .activation(Activation.TANH).build();
NeuralNetConfiguration conf = NeuralNetConfiguration.builder() NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
@@ -226,10 +226,10 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
INDArray gradientW = Nd4j.ones(nIns[0], nOuts[0]); INDArray gradientW = Nd4j.ones(nIns[0], nOuts[0]);
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.3)) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.3))
.layer(0, new DenseLayer.Builder().nIn(nIns[0]).nOut(nOuts[0]) .layer(0, DenseLayer.builder().nIn(nIns[0]).nOut(nOuts[0])
.updater(new Sgd(lr)).biasUpdater(new Sgd(biasLr)).build()) .updater(new Sgd(lr)).biasUpdater(new Sgd(biasLr)).build())
.layer(1, new BatchNormalization.Builder().nIn(nIns[1]).nOut(nOuts[1]).updater(new Sgd(0.7)).build()) .layer(1, BatchNormalization.builder().nIn(nIns[1]).nOut(nOuts[1]).updater(new Sgd(0.7)).build())
.layer(2, new OutputLayer.Builder().nIn(nIns[2]).nOut(nOuts[2]).lossFunction(LossFunctions.LossFunction.MSE).build()) .layer(2, OutputLayer.builder().nIn(nIns[2]).nOut(nOuts[2]).lossFunction(LossFunctions.LossFunction.MSE).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -287,9 +287,9 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().l1(l1) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().l1(l1)
.l2(l2) .l2(l2)
.layer(0, new DenseLayer.Builder().nIn(nIns[0]).nOut(nOuts[0]).build()) .layer(0, DenseLayer.builder().nIn(nIns[0]).nOut(nOuts[0]).build())
.layer(1, new BatchNormalization.Builder().nIn(nIns[1]).nOut(nOuts[1]).l2(0.5).build()) .layer(1, BatchNormalization.builder().nIn(nIns[1]).nOut(nOuts[1]).l2(0.5).build())
.layer(2, new OutputLayer.Builder().nIn(nIns[2]).nOut(nOuts[2]).lossFunction(LossFunctions.LossFunction.MSE).build()) .layer(2, OutputLayer.builder().nIn(nIns[2]).nOut(nOuts[2]).lossFunction(LossFunctions.LossFunction.MSE).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -318,7 +318,7 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
public void testLayerPretrainConfig() { public void testLayerPretrainConfig() {
boolean pretrain = true; boolean pretrain = true;
VariationalAutoencoder layer = new VariationalAutoencoder.Builder() VariationalAutoencoder layer = VariationalAutoencoder.builder()
.nIn(10).nOut(5).updater(new Sgd(1e-1)) .nIn(10).nOut(5).updater(new Sgd(1e-1))
.lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE).build(); .lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE).build();

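The @@ -226 hunk above shows that per-layer updater and biasUpdater overrides work unchanged through the generated builders. A minimal sketch, assuming this branch's API, with illustrative sizes and learning rates:

import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.BatchNormalization;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.learning.config.Sgd;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class PerLayerUpdaterSketch {
    public static void main(String[] args) {
        NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                .updater(new Sgd(0.3)) // network-wide default
                .layer(0, DenseLayer.builder().nIn(4).nOut(5)
                        .updater(new Sgd(0.1))     // per-layer override
                        .biasUpdater(new Sgd(0.2)) // separate bias schedule
                        .build())
                .layer(1, BatchNormalization.builder().nIn(5).nOut(5).updater(new Sgd(0.7)).build())
                .layer(2, OutputLayer.builder().nIn(5).nOut(3)
                        .lossFunction(LossFunctions.LossFunction.MSE).build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
    }
}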
View File

@@ -71,9 +71,9 @@ public class TestConstraints extends BaseDL4JTest {
.updater(new Sgd(0.0)) .updater(new Sgd(0.0))
.dist(new NormalDistribution(0, 5)) .dist(new NormalDistribution(0, 5))
.layer(new LSTM.Builder().nIn(12).nOut(10) .layer(LSTM.builder().nIn(12).nOut(10)
.constrainRecurrent(lc).build()) .constrainRecurrent(lc).build())
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build()) .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -124,9 +124,9 @@ public class TestConstraints extends BaseDL4JTest {
.dist(new NormalDistribution(0, 5)) .dist(new NormalDistribution(0, 5))
.biasInit(10.0) .biasInit(10.0)
.layer(new DenseLayer.Builder().nIn(12).nOut(10) .layer(DenseLayer.builder().nIn(12).nOut(10)
.constrainBias(lc).build()) .constrainBias(lc).build())
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build()) .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -176,9 +176,9 @@ public class TestConstraints extends BaseDL4JTest {
.updater(new Sgd(0.0)) .updater(new Sgd(0.0))
.dist(new NormalDistribution(0, 5)) .dist(new NormalDistribution(0, 5))
.layer(new DenseLayer.Builder().nIn(12).nOut(10) .layer(DenseLayer.builder().nIn(12).nOut(10)
.constrainWeights(lc).build()) .constrainWeights(lc).build())
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build()) .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -229,9 +229,9 @@ public class TestConstraints extends BaseDL4JTest {
.dist(new NormalDistribution(0, 5)) .dist(new NormalDistribution(0, 5))
.biasInit(0.2) .biasInit(0.2)
.layer(new DenseLayer.Builder().nIn(12).nOut(10) .layer(DenseLayer.builder().nIn(12).nOut(10)
.constrainAllParameters(lc).build()) .constrainAllParameters(lc).build())
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build()) .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -290,9 +290,9 @@ public class TestConstraints extends BaseDL4JTest {
.dist(new NormalDistribution(0, 5)) .dist(new NormalDistribution(0, 5))
.biasInit(0.2) .biasInit(0.2)
.layer(new DenseLayer.Builder().nIn(12).nOut(10) .layer(DenseLayer.builder().nIn(12).nOut(10)
.constrainWeights(lc).constrainBias(lc).build()) .constrainWeights(lc).constrainBias(lc).build())
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build()) .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -351,8 +351,8 @@ public class TestConstraints extends BaseDL4JTest {
.dist(new NormalDistribution(0,5)) .dist(new NormalDistribution(0,5))
.biasInit(1) .biasInit(1)
.layer(new DenseLayer.Builder().nIn(12).nOut(10).build()) .layer(DenseLayer.builder().nIn(12).nOut(10).build())
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build()) .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -406,7 +406,7 @@ public class TestConstraints extends BaseDL4JTest {
.graphBuilder() .graphBuilder()
.addInputs("input_lstm", "input_cpc") .addInputs("input_lstm", "input_cpc")
.addLayer("first_lstm_layer", .addLayer("first_lstm_layer",
new LSTM.Builder() LSTM.builder()
.nIn(nIn) .nIn(nIn)
.nOut(lstmLayerSize) .nOut(lstmLayerSize)
.activation(Activation.RELU) .activation(Activation.RELU)
@@ -417,7 +417,7 @@ public class TestConstraints extends BaseDL4JTest {
.addVertex("merge", new MergeVertex(), .addVertex("merge", new MergeVertex(),
"lastTimeStep", "input_cpc") "lastTimeStep", "input_cpc")
.addLayer("dense", .addLayer("dense",
new DenseLayer.Builder() DenseLayer.builder()
.constrainWeights(new NonNegativeConstraint()) .constrainWeights(new NonNegativeConstraint())
.nIn(lstmLayerSize + 1) .nIn(lstmLayerSize + 1)
.nOut(lstmLayerSize/2) .nOut(lstmLayerSize/2)
@@ -425,7 +425,7 @@ public class TestConstraints extends BaseDL4JTest {
.build(), .build(),
"merge") "merge")
.addLayer("second_dense", .addLayer("second_dense",
new DenseLayer.Builder() DenseLayer.builder()
.constrainWeights(new NonNegativeConstraint()) .constrainWeights(new NonNegativeConstraint())
.nIn(lstmLayerSize/2) .nIn(lstmLayerSize/2)
.nOut(lstmLayerSize/8) .nOut(lstmLayerSize/8)
@@ -433,7 +433,7 @@ public class TestConstraints extends BaseDL4JTest {
.build(), .build(),
"dense") "dense")
.addLayer("output_layer", .addLayer("output_layer",
new OutputLayer.Builder(LossFunctions.LossFunction.MSE) OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
.constrainWeights(new NonNegativeConstraint()) .constrainWeights(new NonNegativeConstraint())
.nIn(lstmLayerSize/8) .nIn(lstmLayerSize/8)
.nOut(1) .nOut(1)

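Constraint setters (constrainWeights, constrainBias, constrainAllParameters, constrainRecurrent) also carry over unchanged to the generated builders, as the TestConstraints hunks above show. A minimal sketch, assuming this branch's API, with illustrative sizes:

import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.constraint.NonNegativeConstraint;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class ConstraintBuilderSketch {
    public static void main(String[] args) {
        NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                .layer(DenseLayer.builder().nIn(12).nOut(10)
                        .constrainWeights(new NonNegativeConstraint()) // applied after each update
                        .build())
                .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
                        .nIn(10).nOut(8).build())
                .build();
        System.out.println(conf.toJson());
    }
}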
View File

@@ -62,29 +62,29 @@ public class TestDropout extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder() NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.dropOut(0.6) .dropOut(0.6)
.layer(new DenseLayer.Builder().nIn(10).nOut(10).build()) .layer(DenseLayer.builder().nIn(10).nOut(10).build())
.layer(new DenseLayer.Builder().nIn(10).nOut(10).dropOut(0.7).build()) .layer(DenseLayer.builder().nIn(10).nOut(10).dropOut(0.7).build())
.layer(new DenseLayer.Builder().nIn(10).nOut(10).dropOut(new AlphaDropout(0.5)).build()) .layer(DenseLayer.builder().nIn(10).nOut(10).dropOut(new AlphaDropout(0.5)).build())
.build(); .build();
assertEquals(new Dropout(0.6), conf.getFlattenedLayerConfigurations().get(0).getIDropout()); assertEquals(new Dropout(0.6), conf.getFlattenedLayerConfigurations().get(0).getDropOut());
assertEquals(new Dropout(0.7), conf.getFlattenedLayerConfigurations().get(1).getIDropout()); assertEquals(new Dropout(0.7), conf.getFlattenedLayerConfigurations().get(1).getDropOut());
assertEquals(new AlphaDropout(0.5), conf.getFlattenedLayerConfigurations().get(2).getIDropout()); assertEquals(new AlphaDropout(0.5), conf.getFlattenedLayerConfigurations().get(2).getDropOut());
ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder() ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder()
.dropOut( new Dropout(0.6)) .dropOut( new Dropout(0.6))
.graphBuilder() .graphBuilder()
.addInputs("in") .addInputs("in")
.addLayer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in") .addLayer("0", DenseLayer.builder().nIn(10).nOut(10).build(), "in")
.addLayer("1", new DenseLayer.Builder().nIn(10).nOut(10).dropOut(0.7).build(), "0") .addLayer("1", DenseLayer.builder().nIn(10).nOut(10).dropOut(0.7).build(), "0")
.addLayer("2", new DenseLayer.Builder().nIn(10).nOut(10).dropOut(new AlphaDropout(0.5)).build(), "1") .addLayer("2", DenseLayer.builder().nIn(10).nOut(10).dropOut(new AlphaDropout(0.5)).build(), "1")
.setOutputs("2") .setOutputs("2")
.build(); .build();
assertEquals(new Dropout(0.6), ((LayerVertex)conf2.getVertices().get("0")).getLayerConfiguration().getIDropout()); assertEquals(new Dropout(0.6), ((LayerVertex)conf2.getVertices().get("0")).getLayerConfiguration().getDropOut());
assertEquals(new Dropout(0.7), ((LayerVertex)conf2.getVertices().get("1")).getLayerConfiguration().getIDropout()); assertEquals(new Dropout(0.7), ((LayerVertex)conf2.getVertices().get("1")).getLayerConfiguration().getDropOut());
assertEquals(new AlphaDropout(0.5), ((LayerVertex)conf2.getVertices().get("2")).getLayerConfiguration().getIDropout()); assertEquals(new AlphaDropout(0.5), ((LayerVertex)conf2.getVertices().get("2")).getLayerConfiguration().getDropOut());
} }
@Test @Test
@@ -95,8 +95,8 @@ public class TestDropout extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder() NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.layer(new DenseLayer.Builder().nIn(4).nOut(3).dropOut(d1).build()) .layer(DenseLayer.builder().nIn(4).nOut(3).dropOut(d1).build())
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MSE).dropOut(d2).nIn(3).nOut(3).build()) .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).dropOut(d2).nIn(3).nOut(3).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@@ -131,8 +131,8 @@ public class TestDropout extends BaseDL4JTest {
ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder() ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder()
.graphBuilder() .graphBuilder()
.addInputs("in") .addInputs("in")
.addLayer("0", new DenseLayer.Builder().nIn(4).nOut(3).dropOut(d1).build(), "in") .addLayer("0", DenseLayer.builder().nIn(4).nOut(3).dropOut(d1).build(), "in")
.addLayer("1", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).dropOut(d2).nIn(3).nOut(3).build(), "0") .addLayer("1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).dropOut(d2).nIn(3).nOut(3).build(), "0")
.setOutputs("1") .setOutputs("1")
.build(); .build();
@@ -188,8 +188,8 @@ public class TestDropout extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder() NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.dropOut(id) .dropOut(id)
.layer(new DenseLayer.Builder().nIn(4).nOut(3).build()) .layer(DenseLayer.builder().nIn(4).nOut(3).build())
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(3).nOut(3).build()) .layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(3).nOut(3).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@@ -200,8 +200,8 @@ public class TestDropout extends BaseDL4JTest {
.dropOut(id) .dropOut(id)
.graphBuilder() .graphBuilder()
.addInputs("in") .addInputs("in")
.addLayer("0", new DenseLayer.Builder().nIn(4).nOut(3).build(), "in") .addLayer("0", DenseLayer.builder().nIn(4).nOut(3).build(), "in")
.addLayer("1", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(3).nOut(3).build(), "0") .addLayer("1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(3).nOut(3).build(), "0")
.setOutputs("1") .setOutputs("1")
.build(); .build();
@@ -602,7 +602,7 @@ public class TestDropout extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder() NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.layer(new DropoutLayer.Builder(new SpatialDropout(0.5)).build()) .layer(DropoutLayer.builder(new SpatialDropout(0.5)).build())
.build(); .build();
String asJson = conf.toJson(); String asJson = conf.toJson();

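Besides the builder migration, the TestDropout hunks rename the dropout accessor from getIDropout() to getDropOut(). A minimal sketch of dropout defaulting and the renamed getter, assuming this branch's API:

import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;

public class DropoutAccessorSketch {
    public static void main(String[] args) {
        NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                .dropOut(0.6) // network-wide default
                .layer(DenseLayer.builder().nIn(10).nOut(10).build())
                .layer(DenseLayer.builder().nIn(10).nOut(10).dropOut(0.7).build()) // per-layer override
                .build();
        // accessor renamed on this branch: getIDropout() -> getDropOut()
        System.out.println(conf.getFlattenedLayerConfigurations().get(0).getDropOut());
        System.out.println(conf.getFlattenedLayerConfigurations().get(1).getDropOut());
    }
}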
View File

@@ -73,7 +73,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder().graphBuilder() ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder().graphBuilder()
.addInputs("input1", "input2", "input3") .addInputs("input1", "input2", "input3")
.addLayer("denselayer", .addLayer("denselayer",
new DenseLayer.Builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY) DenseLayer.builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
.build(), .build(),
"input1") "input1")
/* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get /* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get
@@ -87,7 +87,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
*/ */
.addVertex("elementwiseAdd", new ElementWiseVertex(ElementWiseVertex.Op.Add), "input1", .addVertex("elementwiseAdd", new ElementWiseVertex(ElementWiseVertex.Op.Add), "input1",
"input2", "input3") "input2", "input3")
.addLayer("Add", new ActivationLayer.Builder().activation(Activation.IDENTITY).build(), .addLayer("Add", ActivationLayer.builder().activation(Activation.IDENTITY).build(),
"elementwiseAdd") "elementwiseAdd")
.setOutputs("Add", "denselayer").build(); .setOutputs("Add", "denselayer").build();
@@ -114,7 +114,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder().graphBuilder() ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder().graphBuilder()
.addInputs("input1", "input2", "input3") .addInputs("input1", "input2", "input3")
.addLayer("denselayer", .addLayer("denselayer",
new DenseLayer.Builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY) DenseLayer.builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
.build(), .build(),
"input1") "input1")
/* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get /* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get
@@ -128,7 +128,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
*/ */
.addVertex("elementwiseProduct", new ElementWiseVertex(ElementWiseVertex.Op.Product), "input1", .addVertex("elementwiseProduct", new ElementWiseVertex(ElementWiseVertex.Op.Product), "input1",
"input2", "input3") "input2", "input3")
.addLayer("Product", new ActivationLayer.Builder().activation(Activation.IDENTITY).build(), .addLayer("Product", ActivationLayer.builder().activation(Activation.IDENTITY).build(),
"elementwiseProduct") "elementwiseProduct")
.setOutputs("Product", "denselayer").build(); .setOutputs("Product", "denselayer").build();
@@ -155,7 +155,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder().graphBuilder() ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder().graphBuilder()
.addInputs("input1", "input2") .addInputs("input1", "input2")
.addLayer("denselayer", .addLayer("denselayer",
new DenseLayer.Builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY) DenseLayer.builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
.build(), .build(),
"input1") "input1")
/* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get /* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get
@@ -169,7 +169,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
*/ */
.addVertex("elementwiseSubtract", new ElementWiseVertex(ElementWiseVertex.Op.Subtract), .addVertex("elementwiseSubtract", new ElementWiseVertex(ElementWiseVertex.Op.Subtract),
"input1", "input2") "input1", "input2")
.addLayer("Subtract", new ActivationLayer.Builder().activation(Activation.IDENTITY).build(), .addLayer("Subtract", ActivationLayer.builder().activation(Activation.IDENTITY).build(),
"elementwiseSubtract") "elementwiseSubtract")
.setOutputs("Subtract", "denselayer").build(); .setOutputs("Subtract", "denselayer").build();
@ -200,21 +200,21 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
.addInputs("input1", "input2", "input3") .addInputs("input1", "input2", "input3")
.addLayer("dense1", .addLayer("dense1",
new DenseLayer.Builder().nIn(featuresz).nOut(midsz) DenseLayer.builder().nIn(featuresz).nOut(midsz)
.activation(new ActivationTanH()).build(), .activation(new ActivationTanH()).build(),
"input1") "input1")
.addLayer("dense2", .addLayer("dense2",
new DenseLayer.Builder().nIn(featuresz).nOut(midsz) DenseLayer.builder().nIn(featuresz).nOut(midsz)
.activation(new ActivationTanH()).build(), .activation(new ActivationTanH()).build(),
"input2") "input2")
.addLayer("dense3", .addLayer("dense3",
new DenseLayer.Builder().nIn(featuresz).nOut(midsz) DenseLayer.builder().nIn(featuresz).nOut(midsz)
.activation(new ActivationTanH()).build(), .activation(new ActivationTanH()).build(),
"input3") "input3")
.addVertex("elementwiseAdd", new ElementWiseVertex(ElementWiseVertex.Op.Add), "dense1", .addVertex("elementwiseAdd", new ElementWiseVertex(ElementWiseVertex.Op.Add), "dense1",
"dense2", "dense3") "dense2", "dense3")
.addLayer("output", .addLayer("output",
new OutputLayer.Builder().nIn(midsz).nOut(outputsz) OutputLayer.builder().nIn(midsz).nOut(outputsz)
.activation(new ActivationSigmoid()) .activation(new ActivationSigmoid())
.lossFunction(LossFunction.MSE).build(), .lossFunction(LossFunction.MSE).build(),
"elementwiseAdd") "elementwiseAdd")
@ -376,21 +376,21 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
.addInputs("input1", "input2", "input3") .addInputs("input1", "input2", "input3")
.addLayer("dense1", .addLayer("dense1",
new DenseLayer.Builder().nIn(featuresz).nOut(midsz) DenseLayer.builder().nIn(featuresz).nOut(midsz)
.activation(new ActivationTanH()).build(), .activation(new ActivationTanH()).build(),
"input1") "input1")
.addLayer("dense2", .addLayer("dense2",
new DenseLayer.Builder().nIn(featuresz).nOut(midsz) DenseLayer.builder().nIn(featuresz).nOut(midsz)
.activation(new ActivationTanH()).build(), .activation(new ActivationTanH()).build(),
"input2") "input2")
.addLayer("dense3", .addLayer("dense3",
new DenseLayer.Builder().nIn(featuresz).nOut(midsz) DenseLayer.builder().nIn(featuresz).nOut(midsz)
.activation(new ActivationTanH()).build(), .activation(new ActivationTanH()).build(),
"input3") "input3")
.addVertex("elementwiseProduct", new ElementWiseVertex(ElementWiseVertex.Op.Product), "dense1", .addVertex("elementwiseProduct", new ElementWiseVertex(ElementWiseVertex.Op.Product), "dense1",
"dense2", "dense3") "dense2", "dense3")
.addLayer("output", .addLayer("output",
new OutputLayer.Builder().nIn(midsz).nOut(outputsz) OutputLayer.builder().nIn(midsz).nOut(outputsz)
.activation(new ActivationSigmoid()) .activation(new ActivationSigmoid())
.lossFunction(LossFunction.MSE).build(), .lossFunction(LossFunction.MSE).build(),
"elementwiseProduct") "elementwiseProduct")
@ -551,17 +551,17 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
.addInputs("input1", "input2") .addInputs("input1", "input2")
.addLayer("dense1", .addLayer("dense1",
new DenseLayer.Builder().nIn(featuresz).nOut(midsz) DenseLayer.builder().nIn(featuresz).nOut(midsz)
.activation(new ActivationTanH()).build(), .activation(new ActivationTanH()).build(),
"input1") "input1")
.addLayer("dense2", .addLayer("dense2",
new DenseLayer.Builder().nIn(featuresz).nOut(midsz) DenseLayer.builder().nIn(featuresz).nOut(midsz)
.activation(new ActivationTanH()).build(), .activation(new ActivationTanH()).build(),
"input2") "input2")
.addVertex("elementwiseSubtract", new ElementWiseVertex(ElementWiseVertex.Op.Subtract), .addVertex("elementwiseSubtract", new ElementWiseVertex(ElementWiseVertex.Op.Subtract),
"dense1", "dense2") "dense1", "dense2")
.addLayer("output", .addLayer("output",
new OutputLayer.Builder().nIn(midsz).nOut(outputsz) OutputLayer.builder().nIn(midsz).nOut(outputsz)
.activation(new ActivationSigmoid()) .activation(new ActivationSigmoid())
.lossFunction(LossFunction.MSE).build(), .lossFunction(LossFunction.MSE).build(),
"elementwiseSubtract") "elementwiseSubtract")

View File

@ -86,7 +86,7 @@ public class ShiftVertexTest extends BaseDL4JTest {
double sf = 4.1; double sf = 4.1;
ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder().graphBuilder().addInputs("input") ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder().graphBuilder().addInputs("input")
.addLayer("denselayer", .addLayer("denselayer",
new DenseLayer.Builder().nIn(input.columns()).nOut(1) DenseLayer.builder().nIn(input.columns()).nOut(1)
.activation(Activation.IDENTITY).build(), .activation(Activation.IDENTITY).build(),
"input") "input")
/* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get /* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get
@ -99,10 +99,10 @@ public class ShiftVertexTest extends BaseDL4JTest {
* at org.deeplearning4j.nn.graph.ComputationGraph.init(ComputationGraph.java:341) * at org.deeplearning4j.nn.graph.ComputationGraph.init(ComputationGraph.java:341)
*/ */
.addLayer("identityinputactivation", .addLayer("identityinputactivation",
new ActivationLayer.Builder().activation(Activation.IDENTITY).build(), "input") ActivationLayer.builder().activation(Activation.IDENTITY).build(), "input")
.addVertex("shiftvertex", new ShiftVertex(sf), "identityinputactivation") .addVertex("shiftvertex", new ShiftVertex(sf), "identityinputactivation")
.addLayer("identityshiftvertex", .addLayer("identityshiftvertex",
new ActivationLayer.Builder().activation(Activation.IDENTITY).build(), ActivationLayer.builder().activation(Activation.IDENTITY).build(),
"shiftvertex") "shiftvertex")
.setOutputs("identityshiftvertex", "denselayer").build(); .setOutputs("identityshiftvertex", "denselayer").build();
@ -144,12 +144,12 @@ public class ShiftVertexTest extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
.addInputs("input") .addInputs("input")
.addLayer("denselayer", .addLayer("denselayer",
new DenseLayer.Builder().nIn(input.columns()).nOut(input.columns()) DenseLayer.builder().nIn(input.columns()).nOut(input.columns())
.activation(a1).build(), .activation(a1).build(),
"input") "input")
.addVertex("shiftvertex", new ShiftVertex(sf), "denselayer") .addVertex("shiftvertex", new ShiftVertex(sf), "denselayer")
.addLayer("output", .addLayer("output",
new OutputLayer.Builder().nIn(input.columns()).nOut(target.columns()) OutputLayer.builder().nIn(input.columns()).nOut(target.columns())
.activation(a2).lossFunction(LossFunction.MSE).build(), .activation(a2).lossFunction(LossFunction.MSE).build(),
"shiftvertex") "shiftvertex")
.setOutputs("output").build(); .setOutputs("output").build();

View File

@ -67,7 +67,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
@Test @Test
public void testLayer() throws Exception { public void testLayer() throws Exception {
DenseLayer layer = new DenseLayer.Builder().activation(act).weightInit(weight).dropOut(dropOut) DenseLayer layer = DenseLayer.builder().activation(act).weightInit(weight).dropOut(dropOut)
.updater(updater).gradientNormalization(gradNorm) .updater(updater).gradientNormalization(gradNorm)
.gradientNormalizationThreshold(gradNormThreshold).build(); .gradientNormalizationThreshold(gradNormThreshold).build();
@ -75,7 +75,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
assertEquals(act, layer.getActivationFn()); assertEquals(act, layer.getActivationFn());
assertEquals(weight.getWeightInitFunction(), layer.getWeightInit()); assertEquals(weight.getWeightInitFunction(), layer.getWeightInit());
assertEquals(new Dropout(dropOut), layer.getIDropout()); assertEquals(new Dropout(dropOut), layer.getDropOut());
assertEquals(updater, layer.getIUpdater()); assertEquals(updater, layer.getIUpdater());
assertEquals(gradNorm, layer.getGradientNormalization()); assertEquals(gradNorm, layer.getGradientNormalization());
assertEquals(gradNormThreshold, layer.getGradientNormalizationThreshold(), 0.0); assertEquals(gradNormThreshold, layer.getGradientNormalizationThreshold(), 0.0);
@ -83,7 +83,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
@Test @Test
public void testFeedForwardLayer() throws Exception { public void testFeedForwardLayer() throws Exception {
DenseLayer ff = new DenseLayer.Builder().nIn(numIn).nOut(numOut).build(); DenseLayer ff = DenseLayer.builder().nIn(numIn).nOut(numOut).build();
checkSerialization(ff); checkSerialization(ff);
@ -93,7 +93,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
@Test @Test
public void testConvolutionLayer() throws Exception { public void testConvolutionLayer() throws Exception {
ConvolutionLayer conv = new ConvolutionLayer.Builder(kernelSize, stride, padding).build(); ConvolutionLayer conv = ConvolutionLayer.builder(kernelSize, stride, padding).build();
checkSerialization(conv); checkSerialization(conv);
@ -106,7 +106,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
@Test @Test
public void testSubsamplingLayer() throws Exception { public void testSubsamplingLayer() throws Exception {
SubsamplingLayer sample = SubsamplingLayer sample =
new SubsamplingLayer.Builder(poolType, stride).kernelSize(kernelSize).padding(padding).build(); SubsamplingLayer.builder(poolType, stride).kernelSize(kernelSize).padding(padding).build();
checkSerialization(sample); checkSerialization(sample);
@ -118,21 +118,21 @@ public class LayerBuilderTest extends BaseDL4JTest {
@Test @Test
public void testOutputLayer() throws Exception { public void testOutputLayer() throws Exception {
OutputLayer out = new OutputLayer.Builder(loss).build(); OutputLayer out = OutputLayer.builder(loss).build();
checkSerialization(out); checkSerialization(out);
} }
@Test @Test
public void testRnnOutputLayer() throws Exception { public void testRnnOutputLayer() throws Exception {
RnnOutputLayer out = new RnnOutputLayer.Builder(loss).build(); RnnOutputLayer out = RnnOutputLayer.builder(loss).build();
checkSerialization(out); checkSerialization(out);
} }
@Test @Test
public void testAutoEncoder() throws Exception { public void testAutoEncoder() throws Exception {
AutoEncoder enc = new AutoEncoder.Builder().corruptionLevel(corruptionLevel).sparsity(sparsity).build(); AutoEncoder enc = AutoEncoder.builder().corruptionLevel(corruptionLevel).sparsity(sparsity).build();
checkSerialization(enc); checkSerialization(enc);
@ -142,7 +142,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
@Test @Test
public void testGravesLSTM() throws Exception { public void testGravesLSTM() throws Exception {
GravesLSTM glstm = new GravesLSTM.Builder().forgetGateBiasInit(1.5).activation(Activation.TANH).nIn(numIn) GravesLSTM glstm = GravesLSTM.builder().forgetGateBiasInit(1.5).activation(Activation.TANH).nIn(numIn)
.nOut(numOut).build(); .nOut(numOut).build();
checkSerialization(glstm); checkSerialization(glstm);
@ -155,7 +155,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
@Test @Test
public void testGravesBidirectionalLSTM() throws Exception { public void testGravesBidirectionalLSTM() throws Exception {
final GravesBidirectionalLSTM glstm = new GravesBidirectionalLSTM.Builder().forgetGateBiasInit(1.5) final GravesBidirectionalLSTM glstm = GravesBidirectionalLSTM.builder().forgetGateBiasInit(1.5)
.activation(Activation.TANH).nIn(numIn).nOut(numOut).build(); .activation(Activation.TANH).nIn(numIn).nOut(numOut).build();
checkSerialization(glstm); checkSerialization(glstm);
@ -168,7 +168,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
@Test @Test
public void testEmbeddingLayer() throws Exception { public void testEmbeddingLayer() throws Exception {
EmbeddingLayer el = new EmbeddingLayer.Builder().nIn(10).nOut(5).build(); EmbeddingLayer el = EmbeddingLayer.builder().nIn(10).nOut(5).build();
checkSerialization(el); checkSerialization(el);
assertEquals(10, el.getNIn()); assertEquals(10, el.getNIn());
@ -177,7 +177,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
@Test @Test
public void testBatchNormLayer() throws Exception { public void testBatchNormLayer() throws Exception {
BatchNormalization bN = new BatchNormalization.Builder().nIn(numIn).nOut(numOut).gamma(2).beta(1).decay(0.5) BatchNormalization bN = BatchNormalization.builder().nIn(numIn).nOut(numOut).gamma(2).beta(1).decay(0.5)
.lockGammaBeta(true).build(); .lockGammaBeta(true).build();
checkSerialization(bN); checkSerialization(bN);
@ -192,11 +192,11 @@ public class LayerBuilderTest extends BaseDL4JTest {
@Test @Test
public void testActivationLayer() throws Exception { public void testActivationLayer() throws Exception {
ActivationLayer activationLayer = new ActivationLayer.Builder().activation(act).build(); ActivationLayer activationLayer = ActivationLayer.builder().activation(act).build();
checkSerialization(activationLayer); checkSerialization(activationLayer);
assertEquals(act, activationLayer.activationFn); assertEquals(act, activationLayer.getActivation());
} }
private void checkSerialization(LayerConfiguration layer) throws Exception { private void checkSerialization(LayerConfiguration layer) throws Exception {
@ -225,7 +225,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
assertEquals(confExpected.getFlattenedLayerConfigurations().get(0), confActual.getFlattenedLayerConfigurations().get(0), "unequal YAML serialization"); assertEquals(confExpected.getFlattenedLayerConfigurations().get(0), confActual.getFlattenedLayerConfigurations().get(0), "unequal YAML serialization");
// check the layer's use of callSuper on equals method // check the layer's use of callSuper on equals method
confActual.getFlattenedLayerConfigurations().get(0).setIDropout(new Dropout(new java.util.Random().nextDouble())); confActual.getFlattenedLayerConfigurations().get(0).setDropOut(new Dropout(new java.util.Random().nextDouble()));
assertNotEquals( confExpected, confActual, "broken equals method (missing callSuper?)"); assertNotEquals( confExpected, confActual, "broken equals method (missing callSuper?)");
} }
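Besides the builder swap, this file picks up two accessor renames: getIDropout() becomes getDropOut(), and the raw activationFn field read on ActivationLayer becomes getActivation(). A sketch of the renamed surface, assuming only the getters visible in the updated assertions:

    import org.deeplearning4j.nn.conf.layers.ActivationLayer;
    import org.deeplearning4j.nn.conf.layers.DenseLayer;
    import org.nd4j.linalg.activations.Activation;

    DenseLayer layer = DenseLayer.builder().nIn(4).nOut(3)
            .activation(Activation.TANH).dropOut(0.8).build();
    System.out.println(layer.getDropOut());    // was layer.getIDropout()

    ActivationLayer al = ActivationLayer.builder().activation(Activation.RELU).build();
    System.out.println(al.getActivation());    // was the raw field al.activationFn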

View File

@ -53,13 +53,13 @@ public class LayerConfigTest extends BaseDL4JTest {
String name2 = "bill"; String name2 = "bill";
NeuralNetConfiguration conf = NeuralNetConfiguration.builder() NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).name(name1).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).name(name1).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).name(name2).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).name(name2).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
assertEquals(name1, conf.getConf(0).getLayer().getLayerName()); assertEquals(name1, conf.getConf(0).getLayer().getName());
assertEquals(name2, conf.getConf(1).getLayer().getLayerName()); assertEquals(name2, conf.getConf(1).getLayer().getName());
} }
@ -67,8 +67,8 @@ public class LayerConfigTest extends BaseDL4JTest {
public void testActivationLayerwiseOverride() { public void testActivationLayerwiseOverride() {
//Without layerwise override: //Without layerwise override:
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().activation(Activation.RELU) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().activation(Activation.RELU)
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@ -77,8 +77,8 @@ public class LayerConfigTest extends BaseDL4JTest {
//With //With
conf = NeuralNetConfiguration.builder().activation(Activation.RELU) conf = NeuralNetConfiguration.builder().activation(Activation.RELU)
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).activation(Activation.TANH).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).activation(Activation.TANH).build()).build();
net = new MultiLayerNetwork(conf); net = new MultiLayerNetwork(conf);
net.init(); net.init();
@ -94,8 +94,8 @@ public class LayerConfigTest extends BaseDL4JTest {
final Distribution defaultDistribution = new NormalDistribution(0, 1.0); final Distribution defaultDistribution = new NormalDistribution(0, 1.0);
NeuralNetConfiguration conf = NeuralNetConfiguration.builder() NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.dist(defaultDistribution).biasInit(1) .dist(defaultDistribution).biasInit(1)
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@ -109,8 +109,8 @@ public class LayerConfigTest extends BaseDL4JTest {
final Distribution overriddenDistribution = new UniformDistribution(0, 1); final Distribution overriddenDistribution = new UniformDistribution(0, 1);
conf = NeuralNetConfiguration.builder() conf = NeuralNetConfiguration.builder()
.dist(defaultDistribution).biasInit(1) .dist(defaultDistribution).biasInit(1)
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()).layer(1, .layer(0, DenseLayer.builder().nIn(2).nOut(2).build()).layer(1,
new DenseLayer.Builder().nIn(2).nOut(2) DenseLayer.builder().nIn(2).nOut(2)
.dist(overriddenDistribution).biasInit(0).build()) .dist(overriddenDistribution).biasInit(0).build())
.build(); .build();
@ -181,23 +181,23 @@ public class LayerConfigTest extends BaseDL4JTest {
@Test @Test
public void testDropoutLayerwiseOverride() { public void testDropoutLayerwiseOverride() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().dropOut(1.0) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().dropOut(1.0)
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
assertEquals(new Dropout(1.0), conf.getConf(0).getLayer().getIDropout()); assertEquals(new Dropout(1.0), conf.getConf(0).getLayer().getDropOut());
assertEquals(new Dropout(1.0), conf.getConf(1).getLayer().getIDropout()); assertEquals(new Dropout(1.0), conf.getConf(1).getLayer().getDropOut());
conf = NeuralNetConfiguration.builder().dropOut(1.0) conf = NeuralNetConfiguration.builder().dropOut(1.0)
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).dropOut(2.0).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).dropOut(2.0).build()).build();
net = new MultiLayerNetwork(conf); net = new MultiLayerNetwork(conf);
net.init(); net.init();
assertEquals(new Dropout(1.0), conf.getConf(0).getLayer().getIDropout()); assertEquals(new Dropout(1.0), conf.getConf(0).getLayer().getDropOut());
assertEquals(new Dropout(2.0), conf.getConf(1).getLayer().getIDropout()); assertEquals(new Dropout(2.0), conf.getConf(1).getLayer().getDropOut());
} }
@Test @Test
@ -208,8 +208,8 @@ public class LayerConfigTest extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder() NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.updater(new Nesterovs(1.0, new MapSchedule(ScheduleType.ITERATION, testMomentumAfter))) .updater(new Nesterovs(1.0, new MapSchedule(ScheduleType.ITERATION, testMomentumAfter)))
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@ -221,7 +221,7 @@ public class LayerConfigTest extends BaseDL4JTest {
conf = NeuralNetConfiguration.builder().updater(new Nesterovs(1.0, new MapSchedule(ScheduleType.ITERATION, testMomentumAfter) )) conf = NeuralNetConfiguration.builder().updater(new Nesterovs(1.0, new MapSchedule(ScheduleType.ITERATION, testMomentumAfter) ))
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()).layer(1, new DenseLayer.Builder() .layer(0, DenseLayer.builder().nIn(2).nOut(2).build()).layer(1, DenseLayer.builder()
.nIn(2).nOut(2).updater(new Nesterovs(1.0, new MapSchedule(ScheduleType.ITERATION, testMomentumAfter2))).build()) .nIn(2).nOut(2).updater(new Nesterovs(1.0, new MapSchedule(ScheduleType.ITERATION, testMomentumAfter2))).build())
.build(); .build();
@ -234,8 +234,8 @@ public class LayerConfigTest extends BaseDL4JTest {
@Test @Test
public void testUpdaterRhoRmsDecayLayerwiseOverride() { public void testUpdaterRhoRmsDecayLayerwiseOverride() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new AdaDelta(0.5, 0.9)) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new AdaDelta(0.5, 0.9))
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).updater(new AdaDelta(0.01,0.9)).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).updater(new AdaDelta(0.01,0.9)).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@ -245,8 +245,8 @@ public class LayerConfigTest extends BaseDL4JTest {
assertEquals(0.01, ((AdaDelta)((BaseLayerConfiguration) conf.getConf(1).getLayer()).getIUpdater()).getRho(), 0.0); assertEquals(0.01, ((AdaDelta)((BaseLayerConfiguration) conf.getConf(1).getLayer()).getIUpdater()).getRho(), 0.0);
conf = NeuralNetConfiguration.builder().updater(new RmsProp(1.0, 2.0, RmsProp.DEFAULT_RMSPROP_EPSILON)) conf = NeuralNetConfiguration.builder().updater(new RmsProp(1.0, 2.0, RmsProp.DEFAULT_RMSPROP_EPSILON))
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).updater(new RmsProp(1.0, 1.0, RmsProp.DEFAULT_RMSPROP_EPSILON)).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).updater(new RmsProp(1.0, 1.0, RmsProp.DEFAULT_RMSPROP_EPSILON)).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).updater(new AdaDelta(0.5,AdaDelta.DEFAULT_ADADELTA_EPSILON)).build()) .layer(1, DenseLayer.builder().nIn(2).nOut(2).updater(new AdaDelta(0.5,AdaDelta.DEFAULT_ADADELTA_EPSILON)).build())
.build(); .build();
net = new MultiLayerNetwork(conf); net = new MultiLayerNetwork(conf);
@ -264,8 +264,8 @@ public class LayerConfigTest extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder() NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.updater(new Adam(1.0, 0.5, 0.5, 1e-8)) .updater(new Adam(1.0, 0.5, 0.5, 1e-8))
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).updater(new Adam(1.0, 0.6, 0.7, 1e-8)).build()) .layer(1, DenseLayer.builder().nIn(2).nOut(2).updater(new Adam(1.0, 0.6, 0.7, 1e-8)).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@ -283,8 +283,8 @@ public class LayerConfigTest extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder() NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue) .gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
.gradientNormalizationThreshold(10) .gradientNormalizationThreshold(10)
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
BaseLayerConfiguration bconf = (BaseLayerConfiguration) conf.getConf(0).getLayer(); BaseLayerConfiguration bconf = (BaseLayerConfiguration) conf.getConf(0).getLayer();
@ -297,8 +297,8 @@ public class LayerConfigTest extends BaseDL4JTest {
conf = NeuralNetConfiguration.builder() conf = NeuralNetConfiguration.builder()
.gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue) .gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
.gradientNormalizationThreshold(10) .gradientNormalizationThreshold(10)
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2) .layer(1, DenseLayer.builder().nIn(2).nOut(2)
.gradientNormalization(GradientNormalization.None) .gradientNormalization(GradientNormalization.None)
.gradientNormalizationThreshold(2.5).build()) .gradientNormalizationThreshold(2.5).build())
.build(); .build();
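The override semantics exercised here are unchanged by the migration: a value set on NeuralNetConfiguration.builder() is the global default, and a value set on an individual layer builder wins for that layer. A sketch of the dropout case from the hunk above:

    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.dropout.Dropout;
    import org.deeplearning4j.nn.conf.layers.DenseLayer;
    import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;

    NeuralNetConfiguration conf = NeuralNetConfiguration.builder().dropOut(1.0)
            .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())              // inherits 1.0
            .layer(1, DenseLayer.builder().nIn(2).nOut(2).dropOut(2.0).build()) // local override
            .build();
    new MultiLayerNetwork(conf).init();
    // renamed accessor, as in the assertions above
    assert new Dropout(2.0).equals(conf.getConf(1).getLayer().getDropOut());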

View File

@ -56,8 +56,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
public void testDropConnect() { public void testDropConnect() {
// Warning thrown only since some layers may not have l1 or l2 // Warning thrown only since some layers may not have l1 or l2
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.1)).weightNoise(new DropConnect(0.5)) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.1)).weightNoise(new DropConnect(0.5))
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
} }
@ -67,8 +67,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
public void testL1L2NotSet() { public void testL1L2NotSet() {
// Warning thrown only since some layers may not have l1 or l2 // Warning thrown only since some layers may not have l1 or l2
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.3)) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.3))
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
} }
@ -78,8 +78,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
public void testRegNotSetL1Global() { public void testRegNotSetL1Global() {
assertThrows(IllegalStateException.class, () -> { assertThrows(IllegalStateException.class, () -> {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.3)).l1(0.5) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.3)).l1(0.5)
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
}); });
@ -90,8 +90,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
public void testRegNotSetL2Local() { public void testRegNotSetL2Local() {
assertThrows(IllegalStateException.class, () -> { assertThrows(IllegalStateException.class, () -> {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.3)) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.3))
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).l2(0.5).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).l2(0.5).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
}); });
@ -102,8 +102,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
// Warning thrown only since global dist can be set with a different weight init locally // Warning thrown only since global dist can be set with a different weight init locally
NeuralNetConfiguration conf = NeuralNetConfiguration conf =
NeuralNetConfiguration.builder().updater(new Sgd(0.3)).dist(new GaussianDistribution(1e-3, 2)) NeuralNetConfiguration.builder().updater(new Sgd(0.3)).dist(new GaussianDistribution(1e-3, 2))
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
} }
@ -116,8 +116,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration conf =
NeuralNetConfiguration.builder().updater(new Nesterovs(1.0, new MapSchedule(ScheduleType.ITERATION, testMomentumAfter))) NeuralNetConfiguration.builder().updater(new Nesterovs(1.0, new MapSchedule(ScheduleType.ITERATION, testMomentumAfter)))
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
} }
@ -130,12 +130,12 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
/* Graph Builder */ /* Graph Builder */
.updater(Updater.RMSPROP.getIUpdaterWithDefaultConfig()).graphBuilder().addInputs("in") .updater(Updater.RMSPROP.getIUpdaterWithDefaultConfig()).graphBuilder().addInputs("in")
.addLayer("L" + 1, .addLayer("L" + 1,
new GravesLSTM.Builder().nIn(20).updater(Updater.RMSPROP).nOut(10) GravesLSTM.builder().nIn(20).updater(Updater.RMSPROP).nOut(10)
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.dropOut(0.4).l1(0.3).activation(Activation.SIGMOID).build(), .dropOut(0.4).l1(0.3).activation(Activation.SIGMOID).build(),
"in") "in")
.addLayer("output", .addLayer("output",
new RnnOutputLayer.Builder().nIn(20).nOut(10).activation(Activation.SOFTMAX) RnnOutputLayer.builder().nIn(20).nOut(10).activation(Activation.SOFTMAX)
.weightInit(WeightInit.RELU_UNIFORM).build(), .weightInit(WeightInit.RELU_UNIFORM).build(),
"L" + 1) "L" + 1)
.setOutputs("output"); .setOutputs("output");
@ -157,8 +157,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
// Nesterovs Updater // Nesterovs Updater
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Nesterovs(0.9)) NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Nesterovs(0.9))
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).l2(0.5).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).l2(0.5).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).updater(new Nesterovs(0.3, 0.4)).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).updater(new Nesterovs(0.3, 0.4)).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
@ -173,8 +173,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
// Adam Updater // Adam Updater
conf = NeuralNetConfiguration.builder().updater(new Adam(0.3)) conf = NeuralNetConfiguration.builder().updater(new Adam(0.3))
.weightInit(expectedDist) .weightInit(expectedDist)
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).l2(0.5).l1(0.3).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).l2(0.5).l1(0.3).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
net = new MultiLayerNetwork(conf); net = new MultiLayerNetwork(conf);
net.init(); net.init();
@ -191,8 +191,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
//RMSProp Updater //RMSProp Updater
conf = NeuralNetConfiguration.builder().updater(new RmsProp(0.3)) conf = NeuralNetConfiguration.builder().updater(new RmsProp(0.3))
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()) .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).updater(new RmsProp(0.3, 0.4, RmsProp.DEFAULT_RMSPROP_EPSILON)).build()).build(); .layer(1, DenseLayer.builder().nIn(2).nOut(2).updater(new RmsProp(0.3, 0.4, RmsProp.DEFAULT_RMSPROP_EPSILON)).build()).build();
net = new MultiLayerNetwork(conf); net = new MultiLayerNetwork(conf);
net.init(); net.init();
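Per-layer updater overrides follow the same rule. A sketch of the Nesterovs case from this hunk, with illustrative learning rates:

    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.layers.DenseLayer;
    import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
    import org.nd4j.linalg.learning.config.Nesterovs;

    NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Nesterovs(0.9))
            .layer(0, DenseLayer.builder().nIn(2).nOut(2).build())  // global Nesterovs
            .layer(1, DenseLayer.builder().nIn(2).nOut(2)
                    .updater(new Nesterovs(0.3, 0.4)).build())      // layer-local lr and momentum
            .build();
    new MultiLayerNetwork(conf).init();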

View File

@ -249,7 +249,7 @@ public class CNNProcessorTest extends BaseDL4JTest {
.gradientNormalization(GradientNormalization.RenormalizeL2PerLayer) .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
// Building the DL4J network // Building the DL4J network
.layer(0, new ConvolutionLayer.Builder(kernelArray, strideArray, zeroPaddingArray) .layer(0, ConvolutionLayer.builder(kernelArray, strideArray, zeroPaddingArray)
.name("cnn1") .name("cnn1")
.convolutionMode(ConvolutionMode.Strict) .convolutionMode(ConvolutionMode.Strict)
.nIn(2) // 2 input channels .nIn(2) // 2 input channels
@ -258,7 +258,7 @@ public class CNNProcessorTest extends BaseDL4JTest {
.activation(Activation.RELU) .activation(Activation.RELU)
.biasInit(1e-2).build()) .biasInit(1e-2).build())
.layer(1, new ConvolutionLayer.Builder(kernelArray, strideArray, zeroPaddingArray) .layer(1, ConvolutionLayer.builder(kernelArray, strideArray, zeroPaddingArray)
.name("cnn2") .name("cnn2")
.convolutionMode(ConvolutionMode.Strict) .convolutionMode(ConvolutionMode.Strict)
.nOut(processWidth) .nOut(processWidth)
@ -267,21 +267,21 @@ public class CNNProcessorTest extends BaseDL4JTest {
.biasInit(1e-2) .biasInit(1e-2)
.build()) .build())
.layer(2, new ConvolutionLayer.Builder(kernelArray, strideArray, zeroPaddingArray) .layer(2, ConvolutionLayer.builder(kernelArray, strideArray, zeroPaddingArray)
.name("cnn3") .name("cnn3")
.convolutionMode(ConvolutionMode.Strict) .convolutionMode(ConvolutionMode.Strict)
.nOut(processWidth) .nOut(processWidth)
.weightInit(WeightInit.XAVIER_UNIFORM) .weightInit(WeightInit.XAVIER_UNIFORM)
.activation(Activation.RELU).build()) .activation(Activation.RELU).build())
.layer(3, new ConvolutionLayer.Builder(kernelArray, strideArray, zeroPaddingArray) .layer(3, ConvolutionLayer.builder(kernelArray, strideArray, zeroPaddingArray)
.name("cnn4") .name("cnn4")
.convolutionMode(ConvolutionMode.Strict) .convolutionMode(ConvolutionMode.Strict)
.nOut(processWidth) .nOut(processWidth)
.weightInit(WeightInit.XAVIER_UNIFORM) .weightInit(WeightInit.XAVIER_UNIFORM)
.activation(Activation.RELU).build()) .activation(Activation.RELU).build())
.layer(4, new OutputLayer.Builder(LossFunctions.LossFunction.MSE) .layer(4, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
.name("output") .name("output")
.nOut(1) .nOut(1)
.activation(Activation.TANH) .activation(Activation.TANH)
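Two builder forms coexist after this change: ConvolutionLayer.builder(kernel, stride, padding) keeps the positional arguments of the old constructor (assumed here to be the same int[] arrays), while OutputLayer drops its loss-function argument in favour of an explicit .lossFunction(...) call. A reduced sketch with illustrative shapes:

    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.inputs.InputType;
    import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
    import org.deeplearning4j.nn.conf.layers.OutputLayer;
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
            .layer(0, ConvolutionLayer.builder(new int[]{4, 4}, new int[]{2, 2}, new int[]{1, 1})
                    .nIn(1).nOut(10).activation(Activation.RELU).build())
            .layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
                    .nOut(1).activation(Activation.TANH).build())
            .inputType(InputType.convolutionalFlat(28, 28, 1))
            .build();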

View File

@ -39,8 +39,8 @@ public class CustomPreprocessorTest extends BaseDL4JTest {
//Second: let's create a MultiLayerConfiguration with one, and check JSON and YAML config actually works... //Second: let's create a MultiLayerConfiguration with one, and check JSON and YAML config actually works...
NeuralNetConfiguration conf = NeuralNetConfiguration conf =
NeuralNetConfiguration.builder() NeuralNetConfiguration.builder()
.layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build()) .layer(0, DenseLayer.builder().nIn(10).nOut(10).build())
.layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10) .layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(10)
.activation(Activation.SOFTMAX).nOut(10).build()) .activation(Activation.SOFTMAX).nOut(10).build())
.inputPreProcessor(0, new MyCustomPreprocessor()) .inputPreProcessor(0, new MyCustomPreprocessor())
.build(); .build();

View File

@ -58,7 +58,7 @@ public class TestPreProcessors extends BaseDL4JTest {
RnnToFeedForwardPreProcessor proc = new RnnToFeedForwardPreProcessor(); RnnToFeedForwardPreProcessor proc = new RnnToFeedForwardPreProcessor();
NeuralNetConfiguration nnc = NeuralNetConfiguration.builder() NeuralNetConfiguration nnc = NeuralNetConfiguration.builder()
.layer(new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(layerSize) .layer(org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(layerSize)
.nOut(layerSize).build()) .nOut(layerSize).build())
.build(); .build();
@ -143,7 +143,7 @@ public class TestPreProcessors extends BaseDL4JTest {
FeedForwardToRnnPreProcessor proc = new FeedForwardToRnnPreProcessor(); FeedForwardToRnnPreProcessor proc = new FeedForwardToRnnPreProcessor();
NeuralNetConfiguration nnc = NeuralNetConfiguration.builder() NeuralNetConfiguration nnc = NeuralNetConfiguration.builder()
.layer(new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(layerSize) .layer(org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(layerSize)
.nOut(layerSize).build()) .nOut(layerSize).build())
.build(); .build();
@ -227,7 +227,7 @@ public class TestPreProcessors extends BaseDL4JTest {
NeuralNetConfiguration nnc = NeuralNetConfiguration nnc =
NeuralNetConfiguration.builder() NeuralNetConfiguration.builder()
.layer(new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder( .layer(org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder(
inputWidth, inputHeight).nIn(cnnNChannelsIn) inputWidth, inputHeight).nIn(cnnNChannelsIn)
.nOut(nChannels).build()) .nOut(nChannels).build())
.build(); .build();
@ -309,7 +309,7 @@ public class TestPreProcessors extends BaseDL4JTest {
NeuralNetConfiguration nnc = NeuralNetConfiguration nnc =
NeuralNetConfiguration.builder() NeuralNetConfiguration.builder()
.layer(new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder( .layer(org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder(
inputWidth, inputHeight).nIn(cnnNChannelsIn) inputWidth, inputHeight).nIn(cnnNChannelsIn)
.nOut(nChannels).build()) .nOut(nChannels).build())
.build(); .build();
@ -397,12 +397,12 @@ public class TestPreProcessors extends BaseDL4JTest {
//FF->RNN and RNN->FF //FF->RNN and RNN->FF
NeuralNetConfiguration conf1 = NeuralNetConfiguration conf1 =
NeuralNetConfiguration.builder() NeuralNetConfiguration.builder()
.layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(5) .layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(5)
.nOut(6).build()) .nOut(6).build())
.layer(1, new GravesLSTM.Builder().nIn(6).nOut(7).build()) .layer(1, GravesLSTM.builder().nIn(6).nOut(7).build())
.layer(2, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(7) .layer(2, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(7)
.nOut(8).build()) .nOut(8).build())
.layer(3, new RnnOutputLayer.Builder().nIn(8).nOut(9).activation(Activation.SOFTMAX).build()).build(); .layer(3, RnnOutputLayer.builder().nIn(8).nOut(9).activation(Activation.SOFTMAX).build()).build();
//Expect preprocessors: layer1: FF->RNN; 2: RNN->FF; 3: FF->RNN //Expect preprocessors: layer1: FF->RNN; 2: RNN->FF; 3: FF->RNN
assertEquals(3, conf1.getInputPreProcessors().size()); assertEquals(3, conf1.getInputPreProcessors().size());
assertTrue(conf1.getInputPreProcess(1) instanceof FeedForwardToRnnPreProcessor); assertTrue(conf1.getInputPreProcess(1) instanceof FeedForwardToRnnPreProcessor);
@ -412,10 +412,10 @@ public class TestPreProcessors extends BaseDL4JTest {
//FF-> CNN, CNN-> FF, FF->RNN //FF-> CNN, CNN-> FF, FF->RNN
NeuralNetConfiguration conf2 = NeuralNetConfiguration.builder() NeuralNetConfiguration conf2 = NeuralNetConfiguration.builder()
.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder().nOut(10) .layer(0, org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder().nOut(10)
.kernelSize(5, 5).stride(1, 1).build()) .kernelSize(5, 5).stride(1, 1).build())
.layer(1, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nOut(6).build()) .layer(1, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nOut(6).build())
.layer(2, new RnnOutputLayer.Builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build()) .layer(2, RnnOutputLayer.builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build())
.inputType(InputType.convolutionalFlat(28, 28, 1)).build(); .inputType(InputType.convolutionalFlat(28, 28, 1)).build();
//Expect preprocessors: 0: FF->CNN; 1: CNN->FF; 2: FF->RNN //Expect preprocessors: 0: FF->CNN; 1: CNN->FF; 2: FF->RNN
assertEquals(3, conf2.getInputPreProcessors().size()); assertEquals(3, conf2.getInputPreProcessors().size());
@ -425,10 +425,10 @@ public class TestPreProcessors extends BaseDL4JTest {
//CNN-> FF, FF->RNN - InputType.convolutional instead of convolutionalFlat //CNN-> FF, FF->RNN - InputType.convolutional instead of convolutionalFlat
NeuralNetConfiguration conf2a = NeuralNetConfiguration.builder() NeuralNetConfiguration conf2a = NeuralNetConfiguration.builder()
.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder().nOut(10) .layer(0, org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder().nOut(10)
.kernelSize(5, 5).stride(1, 1).build()) .kernelSize(5, 5).stride(1, 1).build())
.layer(1, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nOut(6).build()) .layer(1, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nOut(6).build())
.layer(2, new RnnOutputLayer.Builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build()) .layer(2, RnnOutputLayer.builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build())
.inputType(InputType.convolutional(28, 28, 1)).build(); .inputType(InputType.convolutional(28, 28, 1)).build();
//Expect preprocessors: 1: CNN->FF; 2: FF->RNN //Expect preprocessors: 1: CNN->FF; 2: FF->RNN
assertEquals(2, conf2a.getInputPreProcessors().size()); assertEquals(2, conf2a.getInputPreProcessors().size());
@ -438,10 +438,10 @@ public class TestPreProcessors extends BaseDL4JTest {
//FF->CNN and CNN->RNN: //FF->CNN and CNN->RNN:
NeuralNetConfiguration conf3 = NeuralNetConfiguration.builder().list() NeuralNetConfiguration conf3 = NeuralNetConfiguration.builder().list()
.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder().nOut(10) .layer(0, org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder().nOut(10)
.kernelSize(5, 5).stride(1, 1).build()) .kernelSize(5, 5).stride(1, 1).build())
.layer(1, new GravesLSTM.Builder().nOut(6).build()) .layer(1, GravesLSTM.builder().nOut(6).build())
.layer(2, new RnnOutputLayer.Builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build()) .layer(2, RnnOutputLayer.builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build())
.inputType(InputType.convolutionalFlat(28, 28, 1)).build(); .inputType(InputType.convolutionalFlat(28, 28, 1)).build();
//Expect preprocessors: 0: FF->CNN, 1: CNN->RNN; //Expect preprocessors: 0: FF->CNN, 1: CNN->RNN;
assertEquals(2, conf3.getInputPreProcessors().size()); assertEquals(2, conf3.getInputPreProcessors().size());
@ -454,16 +454,16 @@ public class TestPreProcessors extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration conf =
NeuralNetConfiguration.builder() NeuralNetConfiguration.builder()
.list().layer(0, .list().layer(0,
new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder( org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder(
4, 4) // 28*28*1 => 15*15*10 4, 4) // 28*28*1 => 15*15*10
.nIn(1).nOut(10).padding(2, 2) .nIn(1).nOut(10).padding(2, 2)
.stride(2, 2) .stride(2, 2)
.weightInit(WeightInit.RELU) .weightInit(WeightInit.RELU)
.activation(Activation.RELU) .activation(Activation.RELU)
.build()) .build())
.layer(1, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder() .layer(1, org.deeplearning4j.nn.conf.layers.DenseLayer.builder()
.activation(Activation.RELU).nOut(200).build()) .activation(Activation.RELU).nOut(200).build())
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(200) .layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(200)
.nOut(5).weightInit(WeightInit.RELU) .nOut(5).weightInit(WeightInit.RELU)
.activation(Activation.SOFTMAX).build()) .activation(Activation.SOFTMAX).build())
.inputType(InputType.convolutionalFlat(28, 28, 1)) .inputType(InputType.convolutionalFlat(28, 28, 1))
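The automatic preprocessor insertion under test is driven purely by inputType(...) plus the layer sequence, so it survives the builder migration unchanged. A sketch of the CNN-to-RNN case from the hunks above:

    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.inputs.InputType;
    import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
    import org.deeplearning4j.nn.conf.layers.DenseLayer;
    import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
    import org.nd4j.linalg.activations.Activation;

    // Expected preprocessors: 0: FF->CNN, 1: CNN->FF, 2: FF->RNN
    NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
            .layer(0, ConvolutionLayer.builder().nOut(10).kernelSize(5, 5).stride(1, 1).build())
            .layer(1, DenseLayer.builder().nOut(6).build())
            .layer(2, RnnOutputLayer.builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build())
            .inputType(InputType.convolutionalFlat(28, 28, 1))
            .build();
    assert conf.getInputPreProcessors().size() == 3;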

View File

@ -67,9 +67,9 @@ public class TestWeightNoise extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder() NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.weightNoise(wn) .weightNoise(wn)
.layer(new DenseLayer.Builder().nIn(10).nOut(10).build()) .layer(DenseLayer.builder().nIn(10).nOut(10).build())
.layer(new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(new DropConnect(0.25)).build()) .layer(DenseLayer.builder().nIn(10).nOut(10).weightNoise(new DropConnect(0.25)).build())
.layer(new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()) .layer(OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -86,9 +86,9 @@ public class TestWeightNoise extends BaseDL4JTest {
.weightNoise(wn) .weightNoise(wn)
.graphBuilder() .graphBuilder()
.addInputs("in") .addInputs("in")
.layer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in") .layer("0", DenseLayer.builder().nIn(10).nOut(10).build(), "in")
.layer("1", new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(new DropConnect(0.25)).build(), "0") .layer("1", DenseLayer.builder().nIn(10).nOut(10).weightNoise(new DropConnect(0.25)).build(), "0")
.layer("2", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "1") .layer("2", OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "1")
.setOutputs("2") .setOutputs("2")
.build(); .build();
@ -145,9 +145,9 @@ public class TestWeightNoise extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder() NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.layer(new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(wn1).build()) .layer(DenseLayer.builder().nIn(10).nOut(10).weightNoise(wn1).build())
.layer(new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(wn2).build()) .layer(DenseLayer.builder().nIn(10).nOut(10).weightNoise(wn2).build())
.layer(new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).weightNoise(wn3).build()) .layer(OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).weightNoise(wn3).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -170,9 +170,9 @@ public class TestWeightNoise extends BaseDL4JTest {
ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder() ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder()
.graphBuilder() .graphBuilder()
.addInputs("in") .addInputs("in")
.layer("0", new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(wn1).build(), "in") .layer("0", DenseLayer.builder().nIn(10).nOut(10).weightNoise(wn1).build(), "in")
.layer("1", new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(wn2).build(), "0") .layer("1", DenseLayer.builder().nIn(10).nOut(10).weightNoise(wn2).build(), "0")
.layer("2", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).weightNoise(wn3).build(), "1") .layer("2", OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).weightNoise(wn3).build(), "1")
.setOutputs("2") .setOutputs("2")
.build(); .build();
@ -249,7 +249,7 @@ public class TestWeightNoise extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder() NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.weightInit(WeightInit.ONES) .weightInit(WeightInit.ONES)
.layer(new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()) .layer(OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init(); net.init();
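Weight noise follows the same global-default-plus-local-override pattern as dropout. A sketch of the DropConnect case from this file, with illustrative probabilities:

    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.layers.DenseLayer;
    import org.deeplearning4j.nn.conf.layers.OutputLayer;
    import org.deeplearning4j.nn.conf.weightnoise.DropConnect;
    import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
    import org.nd4j.linalg.activations.Activation;

    NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
            .weightNoise(new DropConnect(0.5))                       // global default
            .layer(DenseLayer.builder().nIn(10).nOut(10).build())
            .layer(DenseLayer.builder().nIn(10).nOut(10)
                    .weightNoise(new DropConnect(0.25)).build())     // per-layer override
            .layer(OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build())
            .build();
    new MultiLayerNetwork(conf).init();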

View File

@ -305,9 +305,9 @@ public class DTypeTests extends BaseDL4JTest {
.updater(new Adam(0.01)) .updater(new Adam(0.01))
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.list() .list()
.layer(new DenseLayer.Builder().activation(Activation.TANH).nIn(10).nOut(10).build()) .layer(DenseLayer.builder().activation(Activation.TANH).nIn(10).nOut(10).build())
.layer(new DenseLayer.Builder().activation(Activation.TANH).nIn(10).nOut(10).build()) .layer(DenseLayer.builder().activation(Activation.TANH).nIn(10).nOut(10).build())
.layer(new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build()) .layer(OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
.build(); .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf); MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -389,9 +389,9 @@ public class DTypeTests extends BaseDL4JTest {
.dataType(DataType.DOUBLE) .dataType(DataType.DOUBLE)
.graphBuilder() .graphBuilder()
.addInputs("in") .addInputs("in")
.layer("l0", new DenseLayer.Builder().activation(Activation.TANH).nIn(10).nOut(10).build(), "in") .layer("l0", DenseLayer.builder().activation(Activation.TANH).nIn(10).nOut(10).build(), "in")
.layer("l1", new DenseLayer.Builder().activation(Activation.TANH).nIn(10).nOut(10).build(), "l0") .layer("l1", DenseLayer.builder().activation(Activation.TANH).nIn(10).nOut(10).build(), "l0")
.layer("out", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "l1") .layer("out", OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "l1")
.setOutputs("out") .setOutputs("out")
.build(); .build();
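Only the construction style changes in the dtype coverage; dataType(...) still fixes the parameter and activation types for the whole network. A sketch of the DOUBLE-typed MultiLayerNetwork from the first hunk:

    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.layers.DenseLayer;
    import org.deeplearning4j.nn.conf.layers.OutputLayer;
    import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.api.buffer.DataType;
    import org.nd4j.linalg.learning.config.Adam;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
            .updater(new Adam(0.01))
            .dataType(DataType.DOUBLE)   // params and activations created as double
            .layer(DenseLayer.builder().activation(Activation.TANH).nIn(10).nOut(10).build())
            .layer(OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .build();
    new MultiLayerNetwork(conf).init();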
@ -477,24 +477,24 @@ public class DTypeTests extends BaseDL4JTest {
LayerConfiguration secondLast; LayerConfiguration secondLast;
switch (outputLayer) { switch (outputLayer) {
case 0: case 0:
ol = new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(); ol = OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = new GlobalPoolingLayer(PoolingType.MAX); secondLast = GlobalPoolingLayer.builder(PoolingType.MAX).build();
break; break;
case 1: case 1:
ol = new LossLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(); ol = LossLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction()).build();
secondLast = new FrozenLayerWithBackprop(new DenseLayer.Builder().nOut(10).activation(Activation.SIGMOID).build()); secondLast = new FrozenLayerWithBackprop(DenseLayer.builder().nOut(10).activation(Activation.SIGMOID).build());
break; break;
case 2: case 2:
ol = new CenterLossOutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(); ol = CenterLossOutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = new VariationalAutoencoder.Builder().encoderLayerSizes(10).decoderLayerSizes(10).nOut(10).activation(Activation.SIGMOID).build(); secondLast = VariationalAutoencoder.builder().encoderLayerSizes(10).decoderLayerSizes(10).nOut(10).activation(Activation.SIGMOID).build();
break; break;
case 3: case 3:
ol = new CnnLossLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(); ol = CnnLossLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction()).build();
secondLast = new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(3).activation(Activation.TANH).build(); secondLast = ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(3).activation(Activation.TANH).build();
break; break;
case 4: case 4:
ol = new Yolo2OutputLayer.Builder().boundingBoxPriors(Nd4j.create(new double[][]{{1.0, 1.0}, {2.0, 2.0}}).castTo(networkDtype)).build(); ol = new Yolo2OutputLayer.Builder().boundingBoxPriors(Nd4j.create(new double[][]{{1.0, 1.0}, {2.0, 2.0}}).castTo(networkDtype)).build();
secondLast = new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(14).activation(Activation.TANH).build(); secondLast = ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(14).activation(Activation.TANH).build();
break; break;
default: default:
throw new RuntimeException(); throw new RuntimeException();
@ -505,28 +505,28 @@ public class DTypeTests extends BaseDL4JTest {
.dataType(networkDtype) .dataType(networkDtype)
.convolutionMode(ConvolutionMode.Same) .convolutionMode(ConvolutionMode.Same)
.updater(new Adam(1e-2)) .updater(new Adam(1e-2))
.list()
.layer(new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(3).activation(Activation.TANH).build()) .layer(ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(3).activation(Activation.TANH).build())
.layer(new LocalResponseNormalization()) .layer(new LocalResponseNormalization())
.layer(new DropoutLayer(0.5)) .layer(DropoutLayer.builder(0.5).build())
.layer(new DropoutLayer(new AlphaDropout(0.5))) .layer(DropoutLayer.builder(new AlphaDropout(0.5)).build())
.layer(new DropoutLayer(new GaussianDropout(0.5))) .layer(DropoutLayer.builder(new GaussianDropout(0.5)).build())
.layer(new DropoutLayer(new GaussianNoise(0.1))) .layer(DropoutLayer.builder(new GaussianNoise(0.1)).build())
.layer(new DropoutLayer(new SpatialDropout(0.5))) .layer(DropoutLayer.builder(new SpatialDropout(0.5)).build())
.layer(new SubsamplingLayer.Builder().poolingType(SubsamplingLayer.PoolingType.AVG).kernelSize(3, 3).stride(2, 2).build()) .layer(SubsamplingLayer.builder().poolingType(SubsamplingLayer.PoolingType.AVG.toPoolingType()).kernelSize(3, 3).stride(2, 2).build())
.layer(new Pooling2D.Builder().poolingType(SubsamplingLayer.PoolingType.AVG).kernelSize(2, 2).stride(1, 1).build()) .layer(Pooling2D.builder().poolingType(SubsamplingLayer.PoolingType.AVG.toPoolingType()).kernelSize(2, 2).stride(1, 1).build())
.layer(new Deconvolution2D.Builder().kernelSize(2, 2).stride(2, 2).nOut(3).activation(Activation.TANH).build()) .layer(Deconvolution2D.builder().kernelSize(2, 2).stride(2, 2).nOut(3).activation(Activation.TANH).build())
// .layer(new LocallyConnected2D.Builder().nOut(3).kernelSize(2,2).stride(1,1).activation(Activation.SIGMOID).build()) //EXCEPTION // .layer(LocallyConnected2D.builder().nOut(3).kernelSize(2,2).stride(1,1).activation(Activation.SIGMOID).build()) //EXCEPTION
.layer(new ZeroPaddingLayer(1, 1)) .layer(ZeroPaddingLayer.builder(1, 1).build())
.layer(new Cropping2D(1, 1)) .layer(Cropping2D.builder(1, 1).build())
.layer(new IdentityLayer()) .layer(new IdentityLayer())
.layer(new Upsampling2D.Builder().size(2).build()) .layer(Upsampling2D.builder().size(2).build())
.layer(new SubsamplingLayer.Builder().kernelSize(2, 2).stride(2, 2).build()) .layer(SubsamplingLayer.builder().kernelSize(2, 2).stride(2, 2).build())
.layer(new DepthwiseConvolution2D.Builder().nOut(3).activation(Activation.RELU).build()) .layer(DepthwiseConvolution2D.builder().nOut(3).activation(Activation.RELU).build())
.layer(new SeparableConvolution2D.Builder().nOut(3).activation(Activation.HARDTANH).build()) .layer(SeparableConvolution2D.builder().nOut(3).activation(Activation.HARDTANH).build())
.layer(new MaskLayer()) .layer(new MaskLayer())
.layer(new BatchNormalization.Builder().build()) .layer(BatchNormalization.builder().build())
.layer(new ActivationLayer(Activation.LEAKYRELU)) .layer(ActivationLayer.builder().activation(Activation.LEAKYRELU).build())
.layer(secondLast) .layer(secondLast)
.layer(ol) .layer(ol)
.inputType(InputType.convolutionalFlat(8, 8, 1)) .inputType(InputType.convolutionalFlat(8, 8, 1))
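
Two conversion idioms recur in the hunks above. The generated .poolingType(...) setter is typed on the shared PoolingType enum, so the layer-local SubsamplingLayer.PoolingType is converted with .toPoolingType(); similarly, loss-layer builders such as CnnLossLayer's now take an ILossFunction, so the LossFunction enum is unwrapped via getILossFunction(). A condensed sketch of both conversions, restating only calls that appear in this diff (import paths assumed from stock DL4J):

    import org.deeplearning4j.nn.conf.layers.PoolingType;
    import org.deeplearning4j.nn.conf.layers.SubsamplingLayer;
    import org.nd4j.linalg.lossfunctions.ILossFunction;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    public class EnumConversionSketch {
        public static void main(String[] args) {
            // Layer-local pooling enum -> shared PoolingType expected by
            // the generated poolingType(...) setter.
            PoolingType avg = SubsamplingLayer.PoolingType.AVG.toPoolingType();

            // LossFunction enum -> ILossFunction instance expected by
            // loss-layer builders such as CnnLossLayer.builder().
            ILossFunction mcxent = LossFunctions.LossFunction.MCXENT.getILossFunction();

            System.out.println(avg + " / " + mcxent);
        }
    }
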
@@ -603,16 +603,16 @@ public class DTypeTests extends BaseDL4JTest {
LayerConfiguration secondLast; LayerConfiguration secondLast;
switch (outputLayer) { switch (outputLayer) {
case 0: case 0:
ol = new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(); ol = OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = new GlobalPoolingLayer(PoolingType.AVG); secondLast = GlobalPoolingLayer.builder(PoolingType.AVG).build();
break; break;
case 1: case 1:
ol = new Cnn3DLossLayer.Builder(Convolution3D.DataFormat.NCDHW).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(); ol = Cnn3DLossLayer.builder().dataFormat(Convolution3D.DataFormat.NCDHW).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction()).build();
secondLast = new Convolution3D.Builder().nOut(3).activation(Activation.ELU).build(); secondLast = Convolution3D.builder().nOut(3).activation(Activation.ELU).build();
break; break;
case 2: case 2:
ol = new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(); ol = OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = new Convolution3D.Builder().nOut(3).activation(Activation.ELU).build(); secondLast = Convolution3D.builder().nOut(3).activation(Activation.ELU).build();
break; break;
default: default:
throw new RuntimeException(); throw new RuntimeException();
@@ -623,15 +623,15 @@ public class DTypeTests extends BaseDL4JTest {
.dataType(networkDtype) .dataType(networkDtype)
.convolutionMode(ConvolutionMode.Same) .convolutionMode(ConvolutionMode.Same)
.updater(new Nesterovs(1e-2, 0.9)) .updater(new Nesterovs(1e-2, 0.9))
.list()
.layer(new Convolution3D.Builder().kernelSize(2, 2, 2).stride(1, 1, 1).nOut(3).activation(Activation.TANH).build()) .layer(Convolution3D.builder().kernelSize(2, 2, 2).stride(1, 1, 1).nOut(3).activation(Activation.TANH).build())
.layer(new Convolution3D.Builder().kernelSize(2, 2, 2).stride(1, 1, 1).nOut(3).activation(Activation.TANH).build()) .layer(Convolution3D.builder().kernelSize(2, 2, 2).stride(1, 1, 1).nOut(3).activation(Activation.TANH).build())
.layer(new Subsampling3DLayer.Builder().poolingType(PoolingType.AVG).kernelSize(2, 2, 2).stride(2, 2, 2).build()) .layer(Subsampling3DLayer.builder().poolingType(PoolingType.AVG).kernelSize(2, 2, 2).stride(2, 2, 2).build())
.layer(new Deconvolution3D.Builder().kernelSize(2,2,2).stride(1,1,1).nIn(3).nOut(3).activation(Activation.TANH).build()) .layer(Deconvolution3D.builder().kernelSize(2,2,2).stride(1,1,1).nIn(3).nOut(3).activation(Activation.TANH).build())
.layer(new Cropping3D.Builder(1, 1, 1, 1, 1, 1).build()) .layer(Cropping3D.builder(1, 1, 1, 1, 1, 1).build())
.layer(new ZeroPadding3DLayer.Builder(1, 1, 1, 1, 1, 1).build()) .layer(ZeroPadding3DLayer.builder(1, 1, 1, 1, 1, 1).build())
.layer(new ActivationLayer(Activation.LEAKYRELU)) .layer(ActivationLayer.builder(Activation.LEAKYRELU).build())
.layer(new Upsampling3D.Builder().size(2).build()) .layer(Upsampling3D.builder().size(2).build())
.layer(secondLast) .layer(secondLast)
.layer(ol) .layer(ol)
.inputType(InputType.convolutional3D(Convolution3D.DataFormat.NCDHW, 8, 8, 8, 1)) .inputType(InputType.convolutional3D(Convolution3D.DataFormat.NCDHW, 8, 8, 8, 1))
@@ -714,16 +714,16 @@ public class DTypeTests extends BaseDL4JTest {
LayerConfiguration secondLast; LayerConfiguration secondLast;
switch (outputLayer) { switch (outputLayer) {
case 0: case 0:
ol = new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(); ol = OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = new GlobalPoolingLayer(PoolingType.MAX); secondLast = GlobalPoolingLayer.builder(PoolingType.MAX).build();
break; break;
case 1: case 1:
ol = new RnnOutputLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).nOut(5).build(); ol = RnnOutputLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).nOut(5).build();
secondLast = new Convolution1D.Builder().kernelSize(2).nOut(5).build(); secondLast = Convolution1D.builder().kernelSize(2).nOut(5).build();
break; break;
case 2: case 2:
ol = new RnnLossLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(); ol = RnnLossLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction()).build();
secondLast = new Convolution1D.Builder().kernelSize(2).nOut(5).build(); secondLast = Convolution1D.builder().kernelSize(2).nOut(5).build();
break; break;
default: default:
throw new RuntimeException(); throw new RuntimeException();
@@ -737,14 +737,14 @@ public class DTypeTests extends BaseDL4JTest {
.convolutionMode(ConvolutionMode.Same) .convolutionMode(ConvolutionMode.Same)
.updater(new Adam(1e-2)) .updater(new Adam(1e-2))
.list() .list()
.layer(new Convolution1D.Builder() .layer(Convolution1D.builder()
.kernelSize(2) .kernelSize(2)
.stride(1).nOut(3). .stride(1).nOut(3).
activation(Activation.TANH).build()) activation(Activation.TANH).build())
.layer(new Subsampling1DLayer.Builder().poolingType(PoolingType.MAX).kernelSize(5).stride(1).build()) .layer(Subsampling1DLayer.builder().poolingType(PoolingType.MAX).kernelSize(5).stride(1).build())
.layer(new Cropping1D.Builder(1).build()) .layer(Cropping1D.builder(1).build())
.layer(new ZeroPadding1DLayer(1)) .layer(ZeroPadding1DLayer.builder(1).build())
.layer(new Upsampling1D.Builder(2).build()) .layer(Upsampling1D.builder(2).build())
.layer(secondLast) .layer(secondLast)
.layer(ol) .layer(ol)
.inputType(InputType.recurrent(5, 10,RNNFormat.NCW)) .inputType(InputType.recurrent(5, 10,RNNFormat.NCW))
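
Several 1D layers above are built with positional arguments, e.g. Cropping1D.builder(1) and Upsampling1D.builder(2). Lombok's @SuperBuilder only generates the no-argument builder(); overloads like these are hand-written static methods that pre-populate the generated builder. A purely hypothetical sketch of that pattern (not the actual DL4J sources; assumes Lombok on the classpath):

    import lombok.experimental.SuperBuilder;
    import lombok.experimental.Tolerate;

    // Hypothetical stand-in for a layer such as Cropping1D.
    @SuperBuilder
    class CroppingSketch {
        private int cropLeft;
        private int cropRight;

        // Hand-written convenience overload next to the generated
        // builder(); @Tolerate makes Lombok ignore this method when it
        // checks for an existing builder method, so generation proceeds.
        @Tolerate
        public static CroppingSketchBuilder<?, ?> builder(int crop) {
            return builder().cropLeft(crop).cropRight(crop);
        }
    }
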
@@ -819,7 +819,7 @@ public class DTypeTests extends BaseDL4JTest {
.list() .list()
.layer(new SpaceToBatchLayer.Builder().blocks(1, 1).build()) .layer(new SpaceToBatchLayer.Builder().blocks(1, 1).build())
.layer(new SpaceToDepthLayer.Builder().blocks(2).build()) .layer(new SpaceToDepthLayer.Builder().blocks(2).build())
.layer(new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build()) .layer(OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
.inputType(InputType.convolutional(28, 28, 5)) .inputType(InputType.convolutional(28, 28, 5))
.build(); .build();
@@ -880,16 +880,16 @@ public class DTypeTests extends BaseDL4JTest {
LayerConfiguration secondLast; LayerConfiguration secondLast;
switch (outputLayer) { switch (outputLayer) {
case 0: case 0:
ol = new RnnOutputLayer.Builder().nOut(5).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(); ol = RnnOutputLayer.builder().nOut(5).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = new SimpleRnn.Builder().nOut(5).activation(Activation.TANH).build(); secondLast = SimpleRnn.builder().nOut(5).activation(Activation.TANH).build();
break; break;
case 1: case 1:
ol = new RnnLossLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(); ol = RnnLossLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = new SimpleRnn.Builder().nOut(5).activation(Activation.TANH).build(); secondLast = SimpleRnn.builder().nOut(5).activation(Activation.TANH).build();
break; break;
case 2: case 2:
ol = new OutputLayer.Builder().nOut(5).build(); ol = OutputLayer.builder().nOut(5).build();
secondLast = new LastTimeStep(new SimpleRnn.Builder().nOut(5).activation(Activation.TANH).build()); secondLast = new LastTimeStep(SimpleRnn.builder().nOut(5).activation(Activation.TANH).build());
break; break;
default: default:
throw new RuntimeException(); throw new RuntimeException();
@@ -899,15 +899,15 @@ public class DTypeTests extends BaseDL4JTest {
.dataType(networkDtype) .dataType(networkDtype)
.convolutionMode(ConvolutionMode.Same) .convolutionMode(ConvolutionMode.Same)
.updater(new Adam(1e-2)) .updater(new Adam(1e-2))
.list()
.layer(new LSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build()) .layer(LSTM.builder().nIn(5).nOut(5).activation(Activation.TANH).build())
.layer(new GravesLSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build()) .layer(GravesLSTM.builder().nIn(5).nOut(5).activation(Activation.TANH).build())
.layer(new DenseLayer.Builder().nOut(5).build()) .layer(DenseLayer.builder().nOut(5).build())
.layer(new GravesBidirectionalLSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build()) .layer(GravesBidirectionalLSTM.builder().nIn(5).nOut(5).activation(Activation.TANH).build())
.layer(new Bidirectional(new LSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build())) .layer(Bidirectional.builder(LSTM.builder().nIn(5).nOut(5).activation(Activation.TANH).build()).build())
.layer(new TimeDistributed(new DenseLayer.Builder().nIn(10).nOut(5).activation(Activation.TANH).build())) .layer(new TimeDistributed(DenseLayer.builder().nIn(10).nOut(5).activation(Activation.TANH).build()))
.layer(new SimpleRnn.Builder().nIn(5).nOut(5).build()) .layer(SimpleRnn.builder().nIn(5).nOut(5).build())
.layer(new MaskZeroLayer.Builder().underlying(new SimpleRnn.Builder().nIn(5).nOut(5).build()).maskValue(0.0).build()) .layer(new MaskZeroLayer.Builder().underlying(SimpleRnn.builder().nIn(5).nOut(5).build()).maskValue(0.0).build())
.layer(secondLast) .layer(secondLast)
.layer(ol) .layer(ol)
.build(); .build();
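
The hunk above mixes two wrapper styles: Bidirectional has been migrated to a builder that takes the wrapped layer, while TimeDistributed, LastTimeStep, and MaskZeroLayer.Builder keep their constructor-based APIs in this commit. A condensed contrast restating calls from the hunk (import paths and builder return types assumed from the surrounding test code):

    import org.deeplearning4j.nn.conf.layers.DenseLayer;
    import org.deeplearning4j.nn.conf.layers.LSTM;
    import org.deeplearning4j.nn.conf.layers.recurrent.Bidirectional;
    import org.deeplearning4j.nn.conf.layers.recurrent.LastTimeStep;
    import org.deeplearning4j.nn.conf.layers.recurrent.SimpleRnn;
    import org.deeplearning4j.nn.conf.layers.recurrent.TimeDistributed;
    import org.nd4j.linalg.activations.Activation;

    public class WrapperStyleSketch {
        public static void main(String[] args) {
            // Builder-style wrapper (migrated in this commit):
            Bidirectional bi = Bidirectional.builder(
                    LSTM.builder().nIn(5).nOut(5).activation(Activation.TANH).build()).build();

            // Constructor-style wrappers (unchanged here):
            TimeDistributed td = new TimeDistributed(
                    DenseLayer.builder().nIn(10).nOut(5).activation(Activation.TANH).build());
            LastTimeStep lts = new LastTimeStep(
                    SimpleRnn.builder().nIn(5).nOut(5).build());

            System.out.println(bi + " " + td + " " + lts);
        }
    }
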
@@ -990,10 +990,10 @@ public class DTypeTests extends BaseDL4JTest {
.kernelSize(3, 3) .kernelSize(3, 3)
.stride(2, 2) .stride(2, 2)
.build()) .build())
.layer(new CapsuleLayer.Builder(capsule, capsuleDim, routing).build()) .layer(CapsuleLayer.builder(capsule, capsuleDim, routing).build())
.layer(new CapsuleStrengthLayer.Builder().build()) .layer(CapsuleStrengthLayer.builder().build())
.layer(new ActivationLayer.Builder(new ActivationSoftmax()).build()) .layer(ActivationLayer.builder(new ActivationSoftmax()).build())
.layer(new LossLayer.Builder(new LossNegativeLogLikelihood()).build()) .layer(LossLayer.builder().lossFunction(new LossNegativeLogLikelihood()).build())
.inputType(InputType.convolutional(height, width, inputDepth)) .inputType(InputType.convolutional(height, width, inputDepth))
.build(); .build();
@@ -1062,33 +1062,33 @@ public class DTypeTests extends BaseDL4JTest {
INDArray input; INDArray input;
if (test == 0) { if (test == 0) {
if (frozen) { if (frozen) {
conf.layer("0", new FrozenLayer(new EmbeddingLayer.Builder().nIn(5).nOut(5).build()), "in"); conf.layer("0", new FrozenLayer(EmbeddingLayer.builder().nIn(5).nOut(5).build()), "in");
} else { } else {
conf.layer("0", new EmbeddingLayer.Builder().nIn(5).nOut(5).build(), "in"); conf.layer("0", EmbeddingLayer.builder().nIn(5).nOut(5).build(), "in");
} }
input = Nd4j.zeros(networkDtype, 10, 1).muli(5).castTo(DataType.INT); input = Nd4j.zeros(networkDtype, 10, 1).muli(5).castTo(DataType.INT);
conf.setInputTypes(InputType.feedForward(1)); conf.setInputTypes(InputType.feedForward(1));
} else if (test == 1) { } else if (test == 1) {
if (frozen) { if (frozen) {
conf.layer("0", new FrozenLayer(new EmbeddingSequenceLayer.Builder().nIn(5).nOut(5).build()), "in"); conf.layer("0", new FrozenLayer(EmbeddingSequenceLayer.builder().nIn(5).nOut(5).build()), "in");
} else { } else {
conf.layer("0", new EmbeddingSequenceLayer.Builder().nIn(5).nOut(5).build(), "in"); conf.layer("0", EmbeddingSequenceLayer.builder().nIn(5).nOut(5).build(), "in");
} }
conf.layer("gp", new GlobalPoolingLayer.Builder(PoolingType.PNORM).pnorm(2).poolingDimensions(2).build(), "0"); conf.layer("gp", GlobalPoolingLayer.builder(PoolingType.PNORM).pnorm(2).poolingDimensions(2).build(), "0");
input = Nd4j.zeros(networkDtype, 10, 1, 5).muli(5).castTo(DataType.INT); input = Nd4j.zeros(networkDtype, 10, 1, 5).muli(5).castTo(DataType.INT);
conf.setInputTypes(InputType.recurrent(1)); conf.setInputTypes(InputType.recurrent(1));
} else { } else {
conf.layer("0", new RepeatVector.Builder().repetitionFactor(5).nOut(5).build(), "in"); conf.layer("0", RepeatVector.builder().repetitionFactor(5).nOut(5).build(), "in");
conf.layer("gp", new GlobalPoolingLayer.Builder(PoolingType.SUM).build(), "0"); conf.layer("gp", GlobalPoolingLayer.builder(PoolingType.SUM).build(), "0");
input = Nd4j.zeros(networkDtype, 10, 5); input = Nd4j.zeros(networkDtype, 10, 5);
conf.setInputTypes(InputType.feedForward(5)); conf.setInputTypes(InputType.feedForward(5));
} }
conf.appendLayer("el", new ElementWiseMultiplicationLayer.Builder().nOut(5).build()) conf.appendLayer("el", ElementWiseMultiplicationLayer.builder().nOut(5).build())
.appendLayer("ae", new AutoEncoder.Builder().nOut(5).build()) .appendLayer("ae", AutoEncoder.builder().nOut(5).build())
.appendLayer("prelu", new PReLULayer.Builder().nOut(5).inputShape(5).build()) .appendLayer("prelu", PReLULayer.builder().nOut(5).inputShape(5).build())
.appendLayer("out", new OutputLayer.Builder().nOut(10).build()); .appendLayer("out", OutputLayer.builder().nOut(10).build());
ComputationGraph net = new ComputationGraph(conf.build()); ComputationGraph net = new ComputationGraph(conf.build());
net.init(); net.init();
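
Also visible above: freezing wrappers such as FrozenLayer still receive the finished configuration through their constructors, so the migration only changes the inner builder expression. Restated from the hunk (import paths assumed from stock DL4J):

    import org.deeplearning4j.nn.conf.layers.EmbeddingLayer;
    import org.deeplearning4j.nn.conf.layers.misc.FrozenLayer;

    public class FrozenWrapperSketch {
        public static void main(String[] args) {
            // Before: new FrozenLayer(new EmbeddingLayer.Builder().nIn(5).nOut(5).build())
            // After:  only the wrapped builder call changes.
            FrozenLayer frozen = new FrozenLayer(
                    EmbeddingLayer.builder().nIn(5).nOut(5).build());
            System.out.println(frozen);
        }
    }
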
@@ -1153,34 +1153,34 @@ public class DTypeTests extends BaseDL4JTest {
switch (test) { switch (test) {
case 0: case 0:
b.addInputs("in") b.addInputs("in")
.addLayer("l", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(1).build(), "in") .addLayer("l", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(1).build(), "in")
.addVertex("preproc", new PreprocessorVertex(new CnnToRnnPreProcessor(28, 28, 1)), "l") .addVertex("preproc", new PreprocessorVertex(new CnnToRnnPreProcessor(28, 28, 1)), "l")
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "preproc") .addLayer("out", OutputLayer.builder().nOut(10).build(), "preproc")
.setInputTypes(InputType.convolutional(28, 28, 1)) .setInputTypes(InputType.convolutional(28, 28, 1))
.setOutputs("out"); .setOutputs("out");
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 28, 28)}; in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 28, 28)};
break; break;
case 1: case 1:
b.addInputs("in") b.addInputs("in")
.addLayer("l", new DenseLayer.Builder().nOut(16).build(), "in") .addLayer("l", DenseLayer.builder().nOut(16).build(), "in")
.addVertex("preproc", new PreprocessorVertex(new FeedForwardToCnn3DPreProcessor(2, 2, 2, 2, true)), "l") .addVertex("preproc", new PreprocessorVertex(new FeedForwardToCnn3DPreProcessor(2, 2, 2, 2, true)), "l")
.addVertex("preproc2", new PreprocessorVertex(new PermutePreprocessor(0, 2, 3, 4, 1)), "preproc") .addVertex("preproc2", new PreprocessorVertex(new PermutePreprocessor(0, 2, 3, 4, 1)), "preproc")
.addVertex("preproc3", new PreprocessorVertex(new ReshapePreprocessor(new long[]{2, 2, 2, 2}, new long[]{16}, false)), "preproc2") .addVertex("preproc3", new PreprocessorVertex(new ReshapePreprocessor(new long[]{2, 2, 2, 2}, new long[]{16}, false)), "preproc2")
.addLayer("out", new OutputLayer.Builder().nIn(16).nOut(10).build(), "preproc3") .addLayer("out", OutputLayer.builder().nIn(16).nOut(10).build(), "preproc3")
.setInputTypes(InputType.feedForward(5)) .setInputTypes(InputType.feedForward(5))
.setOutputs("out"); .setOutputs("out");
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5)}; in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5)};
break; break;
case 2: case 2:
b.addInputs("in") b.addInputs("in")
.addLayer("1", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(1).build(), "in") .addLayer("1", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(1).build(), "in")
.addVertex("1a", new PoolHelperVertex(), "1") .addVertex("1a", new PoolHelperVertex(), "1")
.addVertex("2", new ShiftVertex(1), "1a") .addVertex("2", new ShiftVertex(1), "1a")
.addVertex("3", new ScaleVertex(2), "2") .addVertex("3", new ScaleVertex(2), "2")
.addVertex("4", new ReshapeVertex(2, -1), "3") .addVertex("4", new ReshapeVertex(2, -1), "3")
.addVertex("5", new SubsetVertex(0, 99), "4") .addVertex("5", new SubsetVertex(0, 99), "4")
.addVertex("6", new L2NormalizeVertex(), "5") .addVertex("6", new L2NormalizeVertex(), "5")
.addLayer("out", new OCNNOutputLayer.Builder().hiddenLayerSize(10).nIn(100).build(), "6") .addLayer("out",OCNNOutputLayer.builder().hiddenLayerSize(10).nIn(100).build(), "6")
.setInputTypes(InputType.convolutional(28, 28, 1)) .setInputTypes(InputType.convolutional(28, 28, 1))
.setOutputs("out"); .setOutputs("out");
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 28, 28)}; in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 28, 28)};
@@ -1193,23 +1193,23 @@ public class DTypeTests extends BaseDL4JTest {
.addVertex("3", new StackVertex(), "2a", "2b") .addVertex("3", new StackVertex(), "2a", "2b")
.addVertex("4", new DuplicateToTimeSeriesVertex("in3"), "3") .addVertex("4", new DuplicateToTimeSeriesVertex("in3"), "3")
.addVertex("5", new ReverseTimeSeriesVertex(), "4") .addVertex("5", new ReverseTimeSeriesVertex(), "4")
.addLayer("6", new GlobalPoolingLayer(PoolingType.AVG), "5") .addLayer("6", GlobalPoolingLayer.builder(PoolingType.AVG).build(), "5")
.addVertex("7", new LastTimeStepVertex("in3"), "in3") .addVertex("7", new LastTimeStepVertex("in3"), "in3")
.addVertex("8", new MergeVertex(), "6", "7") .addVertex("8", new MergeVertex(), "6", "7")
.addVertex("9", new PreprocessorVertex(new ComposableInputPreProcessor()), "8") .addVertex("9", new PreprocessorVertex(new ComposableInputPreProcessor()), "8")
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "9") .addLayer("out", OutputLayer.builder().nOut(10).build(), "9")
.setInputTypes(InputType.feedForward(8), InputType.feedForward(8), InputType.recurrent(8)) .setInputTypes(InputType.feedForward(8), InputType.feedForward(8), InputType.recurrent(8))
.setOutputs("out"); .setOutputs("out");
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 8), Nd4j.rand(networkDtype, 2, 8), Nd4j.rand(networkDtype, 2, 8, 5)}; in = new INDArray[]{Nd4j.rand(networkDtype, 2, 8), Nd4j.rand(networkDtype, 2, 8), Nd4j.rand(networkDtype, 2, 8, 5)};
break; break;
case 4: case 4:
b.addInputs("in1", "in2") b.addInputs("in1", "in2")
.addLayer("1", new LSTM.Builder().nOut(8).build(), "in1") .addLayer("1", LSTM.builder().nOut(8).build(), "in1")
.addVertex("preproc1", new PreprocessorVertex(new RnnToCnnPreProcessor(2, 2, 2)), "1") .addVertex("preproc1", new PreprocessorVertex(new RnnToCnnPreProcessor(2, 2, 2)), "1")
.addVertex("preproc2", new PreprocessorVertex(new CnnToRnnPreProcessor(2, 2, 2)), "preproc1") .addVertex("preproc2", new PreprocessorVertex(new CnnToRnnPreProcessor(2, 2, 2)), "preproc1")
.addLayer("pool", new GlobalPoolingLayer(), "preproc2") .addLayer("pool", GlobalPoolingLayer.builder().build(), "preproc2")
.addLayer("pool2", new GlobalPoolingLayer(), "in2") .addLayer("pool2", GlobalPoolingLayer.builder().build(), "in2")
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "pool", "pool2") .addLayer("out", OutputLayer.builder().nOut(10).build(), "pool", "pool2")
.setInputTypes(InputType.recurrent(8), InputType.convolutional(28, 28, 1)) .setInputTypes(InputType.recurrent(8), InputType.convolutional(28, 28, 1))
.setOutputs("out"); .setOutputs("out");
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 8, 5), Nd4j.rand(networkDtype, 2, 1, 28, 28)}; in = new INDArray[]{Nd4j.rand(networkDtype, 2, 8, 5), Nd4j.rand(networkDtype, 2, 1, 28, 28)};
@@ -1217,28 +1217,28 @@ public class DTypeTests extends BaseDL4JTest {
case 5: case 5:
b.addInputs("in1", "in2") b.addInputs("in1", "in2")
.addVertex("fv", new FrozenVertex(new ScaleVertex(2.0)), "in1") .addVertex("fv", new FrozenVertex(new ScaleVertex(2.0)), "in1")
.addLayer("1", new DenseLayer.Builder().nOut(5).build(), "fv") .addLayer("1", DenseLayer.builder().nOut(5).build(), "fv")
.addLayer("2", new DenseLayer.Builder().nOut(5).build(), "in2") .addLayer("2", DenseLayer.builder().nOut(5).build(), "in2")
.addVertex("v", new L2Vertex(), "1", "2") .addVertex("v", new L2Vertex(), "1", "2")
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "v") .addLayer("out", OutputLayer.builder().nOut(10).build(), "v")
.setInputTypes(InputType.feedForward(5), InputType.feedForward(5)) .setInputTypes(InputType.feedForward(5), InputType.feedForward(5))
.setOutputs("out"); .setOutputs("out");
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5), Nd4j.rand(networkDtype, 2, 5)}; in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5), Nd4j.rand(networkDtype, 2, 5)};
break; break;
case 6: case 6:
b.addInputs("in") b.addInputs("in")
.addLayer("1", new LSTM.Builder().nOut(5).build(), "in") .addLayer("1", LSTM.builder().nOut(5).build(), "in")
.addVertex("2", new PreprocessorVertex(new KerasFlattenRnnPreprocessor(5, 4)), "1") .addVertex("2", new PreprocessorVertex(new KerasFlattenRnnPreprocessor(5, 4)), "1")
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "2") .addLayer("out", OutputLayer.builder().nOut(10).build(), "2")
.setOutputs("out") .setOutputs("out")
.setInputTypes(InputType.recurrent(5, 4)); .setInputTypes(InputType.recurrent(5, 4));
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5, 4)}; in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5, 4)};
break; break;
case 7: case 7:
b.addInputs("in") b.addInputs("in")
.addLayer("1", new ConvolutionLayer.Builder().kernelSize(2, 2).nOut(5).convolutionMode(ConvolutionMode.Same).build(), "in") .addLayer("1", ConvolutionLayer.builder().kernelSize(2, 2).nOut(5).convolutionMode(ConvolutionMode.Same).build(), "in")
.addVertex("2", new PreprocessorVertex(new CnnToFeedForwardPreProcessor(28, 28, 5)), "1") .addVertex("2", new PreprocessorVertex(new CnnToFeedForwardPreProcessor(28, 28, 5)), "1")
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "2") .addLayer("out", OutputLayer.builder().nOut(10).build(), "2")
.setOutputs("out") .setOutputs("out")
.setInputTypes(InputType.convolutional(28, 28, 1)); .setInputTypes(InputType.convolutional(28, 28, 1));
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 28, 28)}; in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 28, 28)};
@@ -1311,9 +1311,9 @@ public class DTypeTests extends BaseDL4JTest {
switch (test) { switch (test) {
case 0: case 0:
b.addInputs("in") b.addInputs("in")
.addLayer("1", new LSTM.Builder().nOut(5).build(), "in") .addLayer("1", LSTM.builder().nOut(5).build(), "in")
.addLayer("2", new LocallyConnected1D.Builder().kernelSize(2).nOut(4).build(), "1") .addLayer("2", LocallyConnected1D.builder().kernelSize(2).nOut(4).build(), "1")
.addLayer("out", new RnnOutputLayer.Builder().nOut(10).build(), "2") .addLayer("out", RnnOutputLayer.builder().nOut(10).build(), "2")
.setOutputs("out") .setOutputs("out")
.setInputTypes(InputType.recurrent(5, 2)); .setInputTypes(InputType.recurrent(5, 2));
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5, 2)}; in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5, 2)};
@@ -1321,9 +1321,9 @@ public class DTypeTests extends BaseDL4JTest {
break; break;
case 1: case 1:
b.addInputs("in") b.addInputs("in")
.addLayer("1", new ConvolutionLayer.Builder().kernelSize(2, 2).nOut(5).convolutionMode(ConvolutionMode.Same).build(), "in") .addLayer("1", ConvolutionLayer.builder().kernelSize(2, 2).nOut(5).convolutionMode(ConvolutionMode.Same).build(), "in")
.addLayer("2", new LocallyConnected2D.Builder().kernelSize(2, 2).nOut(5).build(), "1") .addLayer("2", LocallyConnected2D.builder().kernelSize(2, 2).nOut(5).build(), "1")
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "2") .addLayer("out", OutputLayer.builder().nOut(10).build(), "2")
.setOutputs("out") .setOutputs("out")
.setInputTypes(InputType.convolutional(8, 8, 1)); .setInputTypes(InputType.convolutional(8, 8, 1));
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 8, 8)}; in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 8, 8)};
@@ -1399,12 +1399,12 @@ public class DTypeTests extends BaseDL4JTest {
.updater(new NoOp()) .updater(new NoOp())
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.list() .list()
.layer(new LSTM.Builder().nOut(layerSize).build()) .layer(LSTM.builder().nOut(layerSize).build())
.layer(new SelfAttentionLayer.Builder().nOut(8).nHeads(2).projectInput(true).build()) .layer(new SelfAttentionLayer.Builder().nOut(8).nHeads(2).projectInput(true).build())
.layer(new LearnedSelfAttentionLayer.Builder().nOut(8).nHeads(2).nQueries(numQueries).projectInput(true).build()) .layer(new LearnedSelfAttentionLayer.Builder().nOut(8).nHeads(2).nQueries(numQueries).projectInput(true).build())
.layer(new RecurrentAttentionLayer.Builder().nIn(layerSize).nOut(layerSize).nHeads(1).projectInput(false).hasBias(false).build()) .layer(new RecurrentAttentionLayer.Builder().nIn(layerSize).nOut(layerSize).nHeads(1).projectInput(false).hasBias(false).build())
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build()) .layer(GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build())
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX) .layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build()) .lossFunction(LossFunctions.LossFunction.MCXENT).build())
.inputType(InputType.recurrent(nIn)) .inputType(InputType.recurrent(nIn))
.build(); .build();
@@ -1487,12 +1487,12 @@ public class DTypeTests extends BaseDL4JTest {
.weightInit(WeightInit.XAVIER) .weightInit(WeightInit.XAVIER)
.graphBuilder() .graphBuilder()
.addInputs("input") .addInputs("input")
.addLayer("lstmKeys", new LSTM.Builder().nOut(layerSize).build(), "input") .addLayer("lstmKeys", LSTM.builder().nOut(layerSize).build(), "input")
.addLayer("lstmQueries", new LSTM.Builder().nOut(layerSize).build(), "input") .addLayer("lstmQueries", LSTM.builder().nOut(layerSize).build(), "input")
.addLayer("lstmValues", new LSTM.Builder().nOut(layerSize).build(), "input") .addLayer("lstmValues", LSTM.builder().nOut(layerSize).build(), "input")
.addVertex("attention", new AttentionVertex.Builder().nOut(8).nHeads(2).projectInput(true).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build(), "lstmQueries", "lstmKeys", "lstmValues") .addVertex("attention", new AttentionVertex.Builder().nOut(8).nHeads(2).projectInput(true).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build(), "lstmQueries", "lstmKeys", "lstmValues")
.addLayer("pooling", new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build(), "attention") .addLayer("pooling", GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build(), "attention")
.addLayer("output", new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling") .addLayer("output", OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
.setOutputs("output") .setOutputs("output")
.setInputTypes(InputType.recurrent(nIn)) .setInputTypes(InputType.recurrent(nIn))
.build(); .build();
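
Stepping back from the individual hunks: the mechanical change throughout these tests is new SomeLayer.Builder()... becoming SomeLayer.builder()..., following the switch of the layer-configuration hierarchy to Lombok's @SuperBuilder. A minimal, self-contained sketch of what @SuperBuilder provides (hypothetical classes, assuming Lombok on the classpath; not the actual DL4J hierarchy):

    import lombok.experimental.SuperBuilder;

    // Hypothetical stand-in for the LayerConfiguration base class.
    @SuperBuilder
    abstract class LayerConfSketch {
        private String name;
    }

    // Hypothetical stand-in for a concrete layer. @SuperBuilder generates
    // a static builder() whose setters cover this class's fields *and*
    // the parent's, replacing the hand-written inner Builder classes.
    @SuperBuilder
    class DenseConfSketch extends LayerConfSketch {
        private int nIn;
        private int nOut;
    }

    public class SuperBuilderDemo {
        public static void main(String[] args) {
            // Before: new DenseLayer.Builder().nIn(4).nOut(10).build()
            // After:  DenseLayer.builder().nIn(4).nOut(10).build()
            DenseConfSketch dense = DenseConfSketch.builder()
                    .name("dense0")   // inherited field, still chainable
                    .nIn(4)
                    .nOut(10)
                    .build();
            System.out.println(dense != null);
        }
    }
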


@@ -68,18 +68,18 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
//4 layer network: 2 GravesLSTM + DenseLayerConfiguration + RnnOutputLayer. Hence also tests preprocessors. //4 layer network: 2 GravesLSTM + DenseLayerConfiguration + RnnOutputLayer. Hence also tests preprocessors.
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345).graphBuilder() ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345).graphBuilder()
.addInputs("in") .addInputs("in")
.addLayer("0", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(5).nOut(7) .addLayer("0", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(5).nOut(7)
.activation(Activation.TANH) .activation(Activation.TANH)
.dist(new NormalDistribution(0, 0.5)).build(), "in") .dist(new NormalDistribution(0, 0.5)).build(), "in")
.addLayer("1", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(7).nOut(8) .addLayer("1", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(7).nOut(8)
.activation(Activation.TANH) .activation(Activation.TANH)
.dist(new NormalDistribution(0, 0.5)).build(), "0") .dist(new NormalDistribution(0, 0.5)).build(), "0")
.addLayer("2", new DenseLayer.Builder().nIn(8).nOut(9).activation(Activation.TANH) .addLayer("2", DenseLayer.builder().nIn(8).nOut(9).activation(Activation.TANH)
.dist(new NormalDistribution(0, .dist(new NormalDistribution(0,
0.5)) 0.5))
.build(), "1") .build(), "1")
.addLayer("3", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .addLayer("3", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.nIn(9).nOut(4) .nIn(9).nOut(4)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.dist(new NormalDistribution(0, 0.5)).build(), "2") .dist(new NormalDistribution(0, 0.5)).build(), "2")
@@ -157,15 +157,15 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
int timeSeriesLength = 6; int timeSeriesLength = 6;
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in") ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
.addLayer("0", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(5).nOut(7) .addLayer("0", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(5).nOut(7)
.activation(Activation.TANH) .activation(Activation.TANH)
.dist(new NormalDistribution(0, 0.5)).build(), "in") .dist(new NormalDistribution(0, 0.5)).build(), "in")
.addLayer("1", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(7).nOut(8) .addLayer("1", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(7).nOut(8)
.activation(Activation.TANH) .activation(Activation.TANH)
.dist(new NormalDistribution(0, .dist(new NormalDistribution(0,
0.5)) 0.5))
.build(), "0") .build(), "0")
.addLayer("2", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .addLayer("2", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.nIn(8).nOut(4) .nIn(8).nOut(4)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.dist(new NormalDistribution(0, 0.5)).build(), "1") .dist(new NormalDistribution(0, 0.5)).build(), "1")
@@ -214,27 +214,27 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345).graphBuilder() ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345).graphBuilder()
.addInputs("in0", "in1") .addInputs("in0", "in1")
.addLayer("lstm0", .addLayer("lstm0",
new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(5).nOut(6) org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(5).nOut(6)
.activation(Activation.TANH) .activation(Activation.TANH)
.dist(new NormalDistribution(0, 0.5)).build(), .dist(new NormalDistribution(0, 0.5)).build(),
"in0") "in0")
.addLayer("lstm1", .addLayer("lstm1",
new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(4).nOut(5) org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(4).nOut(5)
.activation(Activation.TANH) .activation(Activation.TANH)
.dist(new NormalDistribution(0, 0.5)).build(), .dist(new NormalDistribution(0, 0.5)).build(),
"in1") "in1")
.addLayer("dense", new DenseLayer.Builder().nIn(6 + 5).nOut(9).activation(Activation.TANH) .addLayer("dense", DenseLayer.builder().nIn(6 + 5).nOut(9).activation(Activation.TANH)
.dist(new NormalDistribution(0, .dist(new NormalDistribution(0,
0.5)) 0.5))
.build(), "lstm0", "lstm1") .build(), "lstm0", "lstm1")
.addLayer("out0", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .addLayer("out0", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.nIn(9).nOut(3) .nIn(9).nOut(3)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.dist(new NormalDistribution(0, .dist(new NormalDistribution(0,
0.5)) 0.5))
.build(), "dense") .build(), "dense")
.addLayer("out1", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .addLayer("out1", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.nIn(9).nOut(4) .nIn(9).nOut(4)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.dist(new NormalDistribution(0, 0.5)).build(), "dense") .dist(new NormalDistribution(0, 0.5)).build(), "dense")
@@ -344,15 +344,15 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
.trainingWorkspaceMode(WorkspaceMode.NONE).inferenceWorkspaceMode(WorkspaceMode.NONE) .trainingWorkspaceMode(WorkspaceMode.NONE).inferenceWorkspaceMode(WorkspaceMode.NONE)
.graphBuilder() .graphBuilder()
.addInputs("in") .addInputs("in")
.addLayer("0", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(nIn).nOut(7) .addLayer("0", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(nIn).nOut(7)
.activation(Activation.TANH) .activation(Activation.TANH)
.dist(new NormalDistribution(0, 0.5)).build(), "in") .dist(new NormalDistribution(0, 0.5)).build(), "in")
.addLayer("1", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(7).nOut(8) .addLayer("1", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(7).nOut(8)
.activation(Activation.TANH) .activation(Activation.TANH)
.dist(new NormalDistribution(0, .dist(new NormalDistribution(0,
0.5)) 0.5))
.build(), "0") .build(), "0")
.addLayer("out", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .addLayer("out", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.nIn(8).nOut(nOut) .nIn(8).nOut(nOut)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.dist(new NormalDistribution(0, 0.5)).build(), "1") .dist(new NormalDistribution(0, 0.5)).build(), "1")
@@ -364,15 +364,15 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
.trainingWorkspaceMode(WorkspaceMode.NONE).inferenceWorkspaceMode(WorkspaceMode.NONE) .trainingWorkspaceMode(WorkspaceMode.NONE).inferenceWorkspaceMode(WorkspaceMode.NONE)
.graphBuilder() .graphBuilder()
.addInputs("in") .addInputs("in")
.addLayer("0", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(nIn).nOut(7) .addLayer("0", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(nIn).nOut(7)
.activation(Activation.TANH) .activation(Activation.TANH)
.dist(new NormalDistribution(0, 0.5)).build(), "in") .dist(new NormalDistribution(0, 0.5)).build(), "in")
.addLayer("1", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(7).nOut(8) .addLayer("1", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(7).nOut(8)
.activation(Activation.TANH) .activation(Activation.TANH)
.dist(new NormalDistribution(0, .dist(new NormalDistribution(0,
0.5)) 0.5))
.build(), "0") .build(), "0")
.addLayer("out", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .addLayer("out", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.nIn(8).nOut(nOut) .nIn(8).nOut(nOut)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.dist(new NormalDistribution(0, 0.5)).build(), "1") .dist(new NormalDistribution(0, 0.5)).build(), "1")
@@ -459,15 +459,15 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345) ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
.addInputs("in") .addInputs("in")
.addLayer("0", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(nIn).nOut(7) .addLayer("0", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(nIn).nOut(7)
.activation(Activation.TANH) .activation(Activation.TANH)
.dist(new NormalDistribution(0, 0.5)).build(), "in") .dist(new NormalDistribution(0, 0.5)).build(), "in")
.addLayer("1", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(7).nOut(8) .addLayer("1", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(7).nOut(8)
.activation(Activation.TANH) .activation(Activation.TANH)
.dist(new NormalDistribution(0, .dist(new NormalDistribution(0,
0.5)) 0.5))
.build(), "0") .build(), "0")
.addLayer("out", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .addLayer("out", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.nIn(8).nOut(nOut) .nIn(8).nOut(nOut)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.dist(new NormalDistribution(0, 0.5)).build(), "1") .dist(new NormalDistribution(0, 0.5)).build(), "1")
@@ -496,15 +496,15 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345) ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
.addInputs("in") .addInputs("in")
.addLayer("0", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(nIn).nOut(7) .addLayer("0", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(nIn).nOut(7)
.activation(Activation.TANH) .activation(Activation.TANH)
.dist(new NormalDistribution(0, 0.5)).build(), "in") .dist(new NormalDistribution(0, 0.5)).build(), "in")
.addLayer("1", new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(7).nOut(8) .addLayer("1", org.deeplearning4j.nn.conf.layers.GravesLSTM.builder().nIn(7).nOut(8)
.activation(Activation.TANH) .activation(Activation.TANH)
.dist(new NormalDistribution(0, .dist(new NormalDistribution(0,
0.5)) 0.5))
.build(), "0") .build(), "0")
.addLayer("out", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .addLayer("out", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.nIn(8).nOut(nOut) .nIn(8).nOut(nOut)
.activation(Activation.SOFTMAX) .activation(Activation.SOFTMAX)
.dist(new NormalDistribution(0, 0.5)).build(), "1") .dist(new NormalDistribution(0, 0.5)).build(), "1")
@@ -532,7 +532,7 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
//Simple "does it throw an exception" type test... //Simple "does it throw an exception" type test...
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345) ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
.graphBuilder().addInputs("in") .graphBuilder().addInputs("in")
.addLayer("out", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE) .addLayer("out", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
.activation(Activation.IDENTITY).nIn(1).nOut(1).build(), "in") .activation(Activation.IDENTITY).nIn(1).nOut(1).build(), "in")
.setOutputs("out").backpropType(BackpropType.TruncatedBPTT).tbpttFwdLength(8) .setOutputs("out").backpropType(BackpropType.TruncatedBPTT).tbpttFwdLength(8)
.setInputTypes(InputType.recurrent(1,1,RNNFormat.NCW)) .setInputTypes(InputType.recurrent(1,1,RNNFormat.NCW))
@@ -555,7 +555,7 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
//Simple "does it throw an exception" type test... //Simple "does it throw an exception" type test...
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345) ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
.graphBuilder().addInputs("in") .graphBuilder().addInputs("in")
.addLayer("out", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE) .addLayer("out", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
.activation(Activation.IDENTITY).nIn(1).nOut(1).build(), "in") .activation(Activation.IDENTITY).nIn(1).nOut(1).build(), "in")
.setOutputs("out").backpropType(tbptt ? BackpropType.TruncatedBPTT : BackpropType.Standard) .setOutputs("out").backpropType(tbptt ? BackpropType.TruncatedBPTT : BackpropType.Standard)
.tbpttFwdLength(8).tbpttBackLength(8).build(); .tbpttFwdLength(8).tbpttBackLength(8).build();
@@ -619,9 +619,9 @@ public class ComputationGraphTestRNN extends BaseDL4JTest {
NeuralNetConfiguration.builder() NeuralNetConfiguration.builder()
.graphBuilder() .graphBuilder()
.addInputs("in") .addInputs("in")
.layer("0", new org.deeplearning4j.nn.conf.layers.LSTM.Builder().nIn(nIn).nOut(nHiddenUnits).build(), "in") .layer("0", org.deeplearning4j.nn.conf.layers.LSTM.builder().nIn(nIn).nOut(nHiddenUnits).build(), "in")
.layer("1", new GlobalPoolingLayer(), "0") .layer("1", GlobalPoolingLayer.builder().build(), "0")
.layer("2", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(nHiddenUnits) .layer("2", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(nHiddenUnits)
.nOut(nOut) .nOut(nOut)
.activation(Activation.TANH).build(), "1") .activation(Activation.TANH).build(), "1")
.setOutputs("2") .setOutputs("2")


@@ -62,19 +62,19 @@ public class TestCompGraphCNN extends BaseDL4JTest {
.graphBuilder().addInputs("input") .graphBuilder().addInputs("input")
.setInputTypes(InputType.convolutional(32, 32, 3)) .setInputTypes(InputType.convolutional(32, 32, 3))
.addLayer("cnn1", .addLayer("cnn1",
new ConvolutionLayer.Builder(4, 4).stride(2, 2).nIn(3).nOut(3) ConvolutionLayer.builder(4, 4).stride(2, 2).nIn(3).nOut(3)
.build(), .build(),
"input") "input")
.addLayer("cnn2", .addLayer("cnn2",
new ConvolutionLayer.Builder(4, 4).stride(2, 2).nIn(3).nOut(3) ConvolutionLayer.builder(4, 4).stride(2, 2).nIn(3).nOut(3)
.build(), .build(),
"input") "input")
.addLayer("max1", .addLayer("max1",
new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX) SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX)
.stride(1, 1).kernelSize(2, 2).build(), .stride(1, 1).kernelSize(2, 2).build(),
"cnn1", "cnn2") "cnn1", "cnn2")
.addLayer("dnn1", new DenseLayer.Builder().nOut(7).build(), "max1") .addLayer("dnn1", DenseLayer.builder().nOut(7).build(), "max1")
.addLayer("output", new OutputLayer.Builder().nIn(7).nOut(10).activation(Activation.SOFTMAX).build(), "dnn1") .addLayer("output", OutputLayer.builder().nIn(7).nOut(10).activation(Activation.SOFTMAX).build(), "dnn1")
.setOutputs("output").build(); .setOutputs("output").build();
return conf; return conf;
@@ -159,19 +159,19 @@ public class TestCompGraphCNN extends BaseDL4JTest {
.seed(123).graphBuilder().addInputs("input") .seed(123).graphBuilder().addInputs("input")
.setInputTypes(InputType.convolutional(nChannels, imageWidth, .setInputTypes(InputType.convolutional(nChannels, imageWidth,
imageHeight)) imageHeight))
.addLayer("conv1", new ConvolutionLayer.Builder() .addLayer("conv1", ConvolutionLayer.builder()
.kernelSize(kernelHeight, kernelWidth).stride(1, 1) .kernelSize(kernelHeight, kernelWidth).stride(1, 1)
.dataFormat(CNN2DFormat.NCHW) .dataFormat(CNN2DFormat.NCHW)
.nIn(nChannels).nOut(2).weightInit(WeightInit.XAVIER) .nIn(nChannels).nOut(2).weightInit(WeightInit.XAVIER)
.activation(Activation.RELU).build(), "input") .activation(Activation.RELU).build(), "input")
.addLayer("pool1", .addLayer("pool1",
new SubsamplingLayer.Builder() SubsamplingLayer.builder()
.dataFormat(CNN2DFormat.NCHW) .dataFormat(CNN2DFormat.NCHW)
.poolingType(SubsamplingLayer.PoolingType.MAX) .poolingType(SubsamplingLayer.PoolingType.MAX.toPoolingType())
.kernelSize(imageHeight - kernelHeight + 1, 1) .kernelSize(imageHeight - kernelHeight + 1, 1)
.stride(1, 1).build(), .stride(1, 1).build(),
"conv1") "conv1")
.addLayer("output", new OutputLayer.Builder().nOut(classes).activation(Activation.SOFTMAX).build(), "pool1") .addLayer("output", OutputLayer.builder().nOut(classes).activation(Activation.SOFTMAX).build(), "pool1")
.setOutputs("output").build(); .setOutputs("output").build();


@@ -67,7 +67,7 @@ public class TestCompGraphUnsupervised extends BaseDL4JTest {
.trainingWorkspaceMode(wsm) .trainingWorkspaceMode(wsm)
.graphBuilder() .graphBuilder()
.addInputs("in") .addInputs("in")
.addLayer("vae1", new VariationalAutoencoder.Builder() .addLayer("vae1", VariationalAutoencoder.builder()
.nIn(784) .nIn(784)
.nOut(32) .nOut(32)
.encoderLayerSizes(16) .encoderLayerSizes(16)
@@ -76,7 +76,7 @@ public class TestCompGraphUnsupervised extends BaseDL4JTest {
.pzxActivationFunction(Activation.SIGMOID) .pzxActivationFunction(Activation.SIGMOID)
.reconstructionDistribution(new BernoulliReconstructionDistribution(Activation.SIGMOID)) .reconstructionDistribution(new BernoulliReconstructionDistribution(Activation.SIGMOID))
.build(), "in") .build(), "in")
.addLayer("vae2", new VariationalAutoencoder.Builder() .addLayer("vae2", VariationalAutoencoder.builder()
.nIn(32) .nIn(32)
.nOut(8) .nOut(8)
.encoderLayerSizes(16) .encoderLayerSizes(16)
@@ -142,7 +142,7 @@ public class TestCompGraphUnsupervised extends BaseDL4JTest {
.inferenceWorkspaceMode(wsm) .inferenceWorkspaceMode(wsm)
.trainingWorkspaceMode(wsm) .trainingWorkspaceMode(wsm)
.layer(new VariationalAutoencoder.Builder() .layer(VariationalAutoencoder.builder()
.nIn(784) .nIn(784)
.nOut(32) .nOut(32)
.encoderLayerSizes(16) .encoderLayerSizes(16)
@@ -151,7 +151,7 @@ public class TestCompGraphUnsupervised extends BaseDL4JTest {
.pzxActivationFunction(Activation.SIGMOID) .pzxActivationFunction(Activation.SIGMOID)
.reconstructionDistribution(new BernoulliReconstructionDistribution(Activation.SIGMOID)) .reconstructionDistribution(new BernoulliReconstructionDistribution(Activation.SIGMOID))
.build()) .build())
.layer(new VariationalAutoencoder.Builder() .layer(VariationalAutoencoder.builder()
.nIn(32) .nIn(32)
.nOut(8) .nOut(8)
.encoderLayerSizes(16) .encoderLayerSizes(16)


@@ -101,16 +101,16 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
return NeuralNetConfiguration.builder().seed(12345) return NeuralNetConfiguration.builder().seed(12345)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
.addInputs("input") .addInputs("input")
.addLayer("firstLayer", new DenseLayer.Builder().nIn(4).nOut(5).build(), "input") .addLayer("firstLayer", DenseLayer.builder().nIn(4).nOut(5).build(), "input")
.addLayer("outputLayer", new OutputLayer.Builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build(), "firstLayer") .addLayer("outputLayer", OutputLayer.builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build(), "firstLayer")
.setOutputs("outputLayer").build(); .setOutputs("outputLayer").build();
} }
private static NeuralNetConfiguration getIrisMLNConfiguration() { private static NeuralNetConfiguration getIrisMLNConfiguration() {
return NeuralNetConfiguration.builder().seed(12345) return NeuralNetConfiguration.builder().seed(12345)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.layer(0, new DenseLayer.Builder().nIn(4).nOut(5).build()) .layer(0, DenseLayer.builder().nIn(4).nOut(5).build())
.layer(1, new OutputLayer.Builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build()).build(); .layer(1, OutputLayer.builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build()).build();
} }
private static int getNumParams() { private static int getNumParams() {
@@ -335,8 +335,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration config = NeuralNetConfiguration.builder() ComputationGraphConfiguration config = NeuralNetConfiguration.builder()
.updater(new Sgd(0.1)) .updater(new Sgd(0.1))
.graphBuilder().addInputs("in") .graphBuilder().addInputs("in")
.addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(2).build(), "in").addLayer("out", .addLayer("dense", DenseLayer.builder().nIn(4).nOut(2).build(), "in").addLayer("out",
new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(2).nOut(3) OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(2).nOut(3)
.build(), .build(),
"dense") "dense")
.setOutputs("out").build(); .setOutputs("out").build();
@@ -403,8 +403,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
//First: check FF -> RNN //First: check FF -> RNN
ComputationGraphConfiguration conf1 = NeuralNetConfiguration.builder().graphBuilder().addInputs("in") ComputationGraphConfiguration conf1 = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
.setInputTypes(InputType.feedForward(5)) .setInputTypes(InputType.feedForward(5))
.addLayer("rnn", new GravesLSTM.Builder().nOut(5).build(), "in") .addLayer("rnn", GravesLSTM.builder().nOut(5).build(), "in")
.addLayer("out", new RnnOutputLayer.Builder().nOut(5).activation(Activation.SOFTMAX).build(), "rnn").setOutputs("out").build(); .addLayer("out", RnnOutputLayer.builder().nOut(5).activation(Activation.SOFTMAX).build(), "rnn").setOutputs("out").build();
assertEquals(5, ((FeedForwardLayer) ((LayerVertex) conf1.getVertices().get("rnn")).getNetConfiguration().getFlattenedLayerConfigurations().get(0)) assertEquals(5, ((FeedForwardLayer) ((LayerVertex) conf1.getVertices().get("rnn")).getNetConfiguration().getFlattenedLayerConfigurations().get(0))
.getNIn()); .getNIn());
@@ -419,8 +419,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
//Check RNN -> FF -> RNN //Check RNN -> FF -> RNN
ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder().graphBuilder().addInputs("in") ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
.setInputTypes(InputType.recurrent(5)) .setInputTypes(InputType.recurrent(5))
.addLayer("ff", new DenseLayer.Builder().nOut(5).build(), "in") .addLayer("ff", DenseLayer.builder().nOut(5).build(), "in")
.addLayer("out", new RnnOutputLayer.Builder().nOut(5).activation(Activation.SOFTMAX).build(), "ff") .addLayer("out", RnnOutputLayer.builder().nOut(5).activation(Activation.SOFTMAX).build(), "ff")
.setOutputs("out").build(); .setOutputs("out").build();
assertEquals(5, ((FeedForwardLayer) ((LayerVertex) conf2.getVertices().get("ff")).getNetConfiguration().getFlattenedLayerConfigurations().get(0)) assertEquals(5, ((FeedForwardLayer) ((LayerVertex) conf2.getVertices().get("ff")).getNetConfiguration().getFlattenedLayerConfigurations().get(0))
@@ -436,14 +436,14 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
//CNN -> Dense //CNN -> Dense
ComputationGraphConfiguration conf3 = NeuralNetConfiguration.builder().graphBuilder().addInputs("in") ComputationGraphConfiguration conf3 = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
.setInputTypes(InputType.convolutional(28, 28, 1)) .setInputTypes(InputType.convolutional(28, 28, 1))
.addLayer("cnn", new ConvolutionLayer.Builder().kernelSize(2, 2).padding(0, 0).stride(2, 2) .addLayer("cnn", ConvolutionLayer.builder().kernelSize(2, 2).padding(0, 0).stride(2, 2)
.nOut(3).build(), "in") //(28-2+0)/2+1 = 14 .nOut(3).build(), "in") //(28-2+0)/2+1 = 14
.addLayer("pool", .addLayer("pool",
new SubsamplingLayer.Builder().kernelSize(2, 2).padding(0, 0).stride(2, 2) SubsamplingLayer.builder().kernelSize(2, 2).padding(0, 0).stride(2, 2)
.build(), .build(),
"cnn") //(14-2+0)/2+1=7 "cnn") //(14-2+0)/2+1=7
.addLayer("dense", new DenseLayer.Builder().nOut(10).build(), "pool") .addLayer("dense", DenseLayer.builder().nOut(10).build(), "pool")
.addLayer("out", new OutputLayer.Builder().nIn(10).nOut(5).activation(Activation.SOFTMAX).build(), "dense").setOutputs("out") .addLayer("out", OutputLayer.builder().nIn(10).nOut(5).activation(Activation.SOFTMAX).build(), "dense").setOutputs("out")
.build(); .build();
//Check preprocessors: //Check preprocessors:
lv1 = (LayerVertex) conf3.getVertices().get("cnn"); lv1 = (LayerVertex) conf3.getVertices().get("cnn");
@@ -466,16 +466,16 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration conf4 = ComputationGraphConfiguration conf4 =
NeuralNetConfiguration.builder().graphBuilder().addInputs("inCNN", "inRNN") NeuralNetConfiguration.builder().graphBuilder().addInputs("inCNN", "inRNN")
.setInputTypes(InputType.convolutional(28, 28, 1), InputType.recurrent(5)) .setInputTypes(InputType.convolutional(28, 28, 1), InputType.recurrent(5))
.addLayer("cnn", new ConvolutionLayer.Builder().kernelSize(2, 2).padding(0, 0) .addLayer("cnn", ConvolutionLayer.builder().kernelSize(2, 2).padding(0, 0)
.stride(2, 2).nOut(3).build(), "inCNN") //(28-2+0)/2+1 = 14 .stride(2, 2).nOut(3).build(), "inCNN") //(28-2+0)/2+1 = 14
.addLayer("pool", .addLayer("pool",
new SubsamplingLayer.Builder().kernelSize(2, 2).padding(0, 0) SubsamplingLayer.builder().kernelSize(2, 2).padding(0, 0)
.stride(2, 2).build(), .stride(2, 2).build(),
"cnn") //(14-2+0)/2+1=7 "cnn") //(14-2+0)/2+1=7
.addLayer("dense", new DenseLayer.Builder().nOut(10).build(), "pool") .addLayer("dense", DenseLayer.builder().nOut(10).build(), "pool")
.addLayer("dense2", new DenseLayer.Builder().nOut(10).build(), "inRNN") .addLayer("dense2", DenseLayer.builder().nOut(10).build(), "inRNN")
.addVertex("merge", new MergeVertex(), "dense", "dense2") .addVertex("merge", new MergeVertex(), "dense", "dense2")
.addLayer("out", new RnnOutputLayer.Builder().nOut(5).activation(Activation.SOFTMAX).build(), "merge") .addLayer("out", RnnOutputLayer.builder().nOut(5).activation(Activation.SOFTMAX).build(), "merge")
.setOutputs("out").build(); .setOutputs("out").build();
//Check preprocessors: //Check preprocessors:
@@ -507,18 +507,18 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
.graphBuilder().addInputs("input") .graphBuilder().addInputs("input")
.setInputTypes(InputType.convolutional(28, 28, 1)) .setInputTypes(InputType.convolutional(28, 28, 1))
.addLayer("cnn_1", .addLayer("cnn_1",
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(3) ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(3)
.build(), .build(),
"input") "input")
.addLayer("cnn_2", .addLayer("cnn_2",
new ConvolutionLayer.Builder(4, 4).stride(2, 2).padding(1, 1) ConvolutionLayer.builder(4, 4).stride(2, 2).padding(1, 1)
.nIn(1).nOut(3).build(), .nIn(1).nOut(3).build(),
"input") "input")
.addLayer("max_1", .addLayer("max_1",
new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX) SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX)
.kernelSize(2, 2).build(), .kernelSize(2, 2).build(),
"cnn_1", "cnn_2") "cnn_1", "cnn_2")
.addLayer("output", new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build(), "max_1") //.nIn(7 * 7 * 6) .addLayer("output", OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build(), "max_1") //.nIn(7 * 7 * 6)
.setOutputs("output").build(); .setOutputs("output").build();
lv1 = (LayerVertex) conf5.getVertices().get("cnn_1"); lv1 = (LayerVertex) conf5.getVertices().get("cnn_1");
assertNull(lv1.getPreProcessor()); //Expect no preprocessor: cnn data -> cnn layer assertNull(lv1.getPreProcessor()); //Expect no preprocessor: cnn data -> cnn layer
@@ -578,8 +578,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder() ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
.addInputs("input") .addInputs("input")
.addLayer("first_layer", new DenseLayer.Builder().nIn(4).nOut(5).build(), "input") .addLayer("first_layer", DenseLayer.builder().nIn(4).nOut(5).build(), "input")
.addLayer("output_layer", new OutputLayer.Builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build(), "first_layer") .addLayer("output_layer", OutputLayer.builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build(), "first_layer")
.setOutputs("output_layer").build(); .setOutputs("output_layer").build();
ComputationGraph net = new ComputationGraph(conf); ComputationGraph net = new ComputationGraph(conf);
@@ -599,7 +599,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .updater(new Sgd(1e-6))
 .l2(2e-4).graphBuilder().addInputs("in")
 .addLayer("layer0",
-new VariationalAutoencoder.Builder().nIn(4).nOut(3)
+VariationalAutoencoder.builder().nIn(4).nOut(3)
 .dist(new UniformDistribution(0,
 1))
@@ -608,7 +608,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .build(),
 "in")
 .addLayer("layer1",
-new VariationalAutoencoder.Builder().nIn(4).nOut(3)
+VariationalAutoencoder.builder().nIn(4).nOut(3)
 .dist(new UniformDistribution(0,
 1))
@@ -617,7 +617,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .build(),
 "in")
 .addLayer("layer2",
-new VariationalAutoencoder.Builder().nIn(3).nOut(3)
+VariationalAutoencoder.builder().nIn(3).nOut(3)
 .dist(new UniformDistribution(0,
 1))
@@ -625,7 +625,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE)
 .build(),
 "layer1")
-.addLayer("out", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.addLayer("out", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(3 + 3).nOut(3)
 .dist(new UniformDistribution(0, 1))
@@ -652,9 +652,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .updater(new Sgd(0.1))
 .activation(Activation.TANH).weightInit(WeightInit.XAVIER)
 .graphBuilder().addInputs("in")
-.addLayer("0", new DenseLayer.Builder().nIn(nIn).nOut(20).build(), "in")
+.addLayer("0", DenseLayer.builder().nIn(nIn).nOut(20).build(), "in")
-.addLayer("1", new DenseLayer.Builder().nIn(20).nOut(30).build(), "0")
+.addLayer("1", DenseLayer.builder().nIn(20).nOut(30).build(), "0")
-.addLayer("2", new OutputLayer.Builder()
+.addLayer("2", OutputLayer.builder()
 .lossFunction(LossFunctions.LossFunction.MSE).nIn(30).nOut(nOut)
 .build(), "1")
 .setOutputs("2").build();
@@ -662,9 +662,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration confNoReg =
 NeuralNetConfiguration.builder().seed(12345).updater(new Sgd(0.1)).activation(Activation.TANH)
 .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-.addLayer("0", new DenseLayer.Builder().nIn(nIn).nOut(20).build(), "in")
+.addLayer("0", DenseLayer.builder().nIn(nIn).nOut(20).build(), "in")
-.addLayer("1", new DenseLayer.Builder().nIn(20).nOut(30).build(), "0")
+.addLayer("1", DenseLayer.builder().nIn(20).nOut(30).build(), "0")
-.addLayer("2", new OutputLayer.Builder()
+.addLayer("2", OutputLayer.builder()
 .lossFunction(LossFunctions.LossFunction.MSE).nIn(30).nOut(nOut)
 .build(), "1")
 .setOutputs("2").build();
@@ -720,8 +720,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration standard = NeuralNetConfiguration.builder().updater(new Sgd(0.1))
 .trainingWorkspaceMode(ws).inferenceWorkspaceMode(ws)
 .seed(12345).graphBuilder().addInputs("in")
-.addLayer("l0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
+.addLayer("l0", DenseLayer.builder().nIn(10).nOut(10).build(), "in")
-.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10)
+.addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10)
 .nOut(10).build(), "l0")
 .setOutputs("out").build();
 ComputationGraph s = new ComputationGraph(standard);
@@ -732,7 +732,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration external = NeuralNetConfiguration.builder().updater(new Sgd(0.1))
 .trainingWorkspaceMode(ws).inferenceWorkspaceMode(ws)
 .seed(12345).graphBuilder().addInputs("in")
-.addLayer("l0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in").setOutputs("l0")
+.addLayer("l0", DenseLayer.builder().nIn(10).nOut(10).build(), "in").setOutputs("l0")
 .build();
 ComputationGraph e = new ComputationGraph(external);
@@ -778,9 +778,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .graphBuilder()
 .addInputs("features")
 .addVertex("rnn2ffn", new PreprocessorVertex(new RnnToFeedForwardPreProcessor()), "features")
-.addLayer("predict", new DenseLayer.Builder().nIn(nIn).nOut(nOut).activation(Activation.RELU).build(), "rnn2ffn")
+.addLayer("predict", DenseLayer.builder().nIn(nIn).nOut(nOut).activation(Activation.RELU).build(), "rnn2ffn")
 .addVertex("ffn2rnn", new PreprocessorVertex(new FeedForwardToRnnPreProcessor()), "predict")
-.addLayer("output", new ActivationLayer.Builder().activation(Activation.IDENTITY).build(), "ffn2rnn")
+.addLayer("output", ActivationLayer.builder().activation(Activation.IDENTITY).build(), "ffn2rnn")
 .setOutputs("output")
 .build();
@@ -822,9 +822,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
 .graphBuilder()
 .addInputs("in")
-.addLayer("0", new DenseLayer.Builder().nIn(nIn).nOut(4).activation(Activation.RELU).build(), "in")
+.addLayer("0", DenseLayer.builder().nIn(nIn).nOut(4).activation(Activation.RELU).build(), "in")
-.addLayer("1", new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.RELU).build(), "0")
+.addLayer("1", DenseLayer.builder().nIn(4).nOut(4).activation(Activation.RELU).build(), "0")
-.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(nOut).build(), "1")
+.addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(nOut).build(), "1")
 .setOutputs("out")
 .setInputTypes(InputType.feedForward(nIn))
 .build();
@@ -859,8 +859,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
-.addInputs("input").addLayer("first", new DenseLayer.Builder().nIn(4).nOut(5).build(), "input")
+.addInputs("input").addLayer("first", DenseLayer.builder().nIn(4).nOut(5).build(), "input")
-.addLayer("output", new OutputLayer.Builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build(), "first")
+.addLayer("output", OutputLayer.builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build(), "first")
 .setOutputs("output").build();
 ComputationGraph net = new ComputationGraph(conf);
@@ -896,10 +896,10 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
 .setInputTypes(InputType.convolutional(10, 8, 3))
 .addLayer("layer",
-new ConvolutionLayer.Builder().kernelSize(2, 2).padding(0, 0).stride(1, 1)
+ConvolutionLayer.builder().kernelSize(2, 2).padding(0, 0).stride(1, 1)
 .build(),
 "in")
-.addLayer("out", new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build(), "layer").setOutputs("out")
+.addLayer("out", OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build(), "layer").setOutputs("out")
 .build();
 LayerVertex lv = (LayerVertex) conf.getVertices().get("layer");
@@ -913,10 +913,10 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
 .setInputTypes(InputType.convolutionalFlat(10, 8, 3))
 .addLayer("layer",
-new ConvolutionLayer.Builder().kernelSize(2, 2).padding(0, 0).stride(1, 1)
+ConvolutionLayer.builder().kernelSize(2, 2).padding(0, 0).stride(1, 1)
 .build(),
 "in")
-.addLayer("out", new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build(), "layer").setOutputs("out")
+.addLayer("out", OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build(), "layer").setOutputs("out")
 .build();
 lv = (LayerVertex) conf.getVertices().get("layer");
@@ -934,13 +934,13 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 //Finally, check configuration with a subsampling layer
 conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
 .setInputTypes(InputType.convolutionalFlat(10, 8, 3))
-.addLayer("l0", new SubsamplingLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
+.addLayer("l0", SubsamplingLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
 .build(), "in")
 .addLayer("layer",
-new ConvolutionLayer.Builder().kernelSize(2, 2).padding(0, 0).stride(1, 1)
+ConvolutionLayer.builder().kernelSize(2, 2).padding(0, 0).stride(1, 1)
 .build(),
 "l0")
-.addLayer("out", new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build(), "layer").setOutputs("out")
+.addLayer("out", OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build(), "layer").setOutputs("out")
 .build();
 //Check subsampling layer:
@@ -1001,8 +1001,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration conf =
 NeuralNetConfiguration.builder().optimizationAlgo(oa).graphBuilder()
 .addInputs("input")
-.addLayer("first", new DenseLayer.Builder().nIn(4).nOut(5).build(), "input")
+.addLayer("first", DenseLayer.builder().nIn(4).nOut(5).build(), "input")
-.addLayer("output", new OutputLayer.Builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build(),
+.addLayer("output", OutputLayer.builder().nIn(5).nOut(3).activation(Activation.SOFTMAX).build(),
 "first")
 .setOutputs("output").build();
@@ -1019,9 +1019,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(123)
 .graphBuilder().addInputs("in")
-.addLayer("0", new DenseLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
+.addLayer("0", DenseLayer.builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
 .activation(Activation.TANH).build(), "in")
-.addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder(
 LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
 .build(),
 "0")
@@ -1058,24 +1058,24 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .activation(Activation.IDENTITY);
 ComputationGraphConfiguration conf = overallConf.graphBuilder().addInputs("inCentre", "inRight")
-.addLayer("denseCentre0", new DenseLayer.Builder().nIn(10).nOut(9).build(), "inCentre")
+.addLayer("denseCentre0", DenseLayer.builder().nIn(10).nOut(9).build(), "inCentre")
-.addLayer("denseCentre1", new DenseLayer.Builder().nIn(9).nOut(8).build(), "denseCentre0")
+.addLayer("denseCentre1", DenseLayer.builder().nIn(9).nOut(8).build(), "denseCentre0")
-.addLayer("denseCentre2", new DenseLayer.Builder().nIn(8).nOut(7).build(), "denseCentre1")
+.addLayer("denseCentre2", DenseLayer.builder().nIn(8).nOut(7).build(), "denseCentre1")
-.addLayer("denseCentre3", new DenseLayer.Builder().nIn(7).nOut(7).build(), "denseCentre2")
+.addLayer("denseCentre3", DenseLayer.builder().nIn(7).nOut(7).build(), "denseCentre2")
 .addLayer("outCentre",
-new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(7).nOut(4).build(),
+OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(7).nOut(4).build(),
 "denseCentre3")
 .addVertex("subsetLeft", new SubsetVertex(0, 3), "denseCentre1")
-.addLayer("denseLeft0", new DenseLayer.Builder().nIn(4).nOut(5).build(), "subsetLeft")
+.addLayer("denseLeft0", DenseLayer.builder().nIn(4).nOut(5).build(), "subsetLeft")
 .addLayer("outLeft",
-new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(5).nOut(6).build(),
+OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(5).nOut(6).build(),
 "denseLeft0")
-.addLayer("denseRight", new DenseLayer.Builder().nIn(7).nOut(7).build(), "denseCentre2")
+.addLayer("denseRight", DenseLayer.builder().nIn(7).nOut(7).build(), "denseCentre2")
-.addLayer("denseRight0", new DenseLayer.Builder().nIn(2).nOut(3).build(), "inRight")
+.addLayer("denseRight0", DenseLayer.builder().nIn(2).nOut(3).build(), "inRight")
 .addVertex("mergeRight", new MergeVertex(), "denseRight", "denseRight0")
-.addLayer("denseRight1", new DenseLayer.Builder().nIn(10).nOut(5).build(), "mergeRight")
+.addLayer("denseRight1", DenseLayer.builder().nIn(10).nOut(5).build(), "mergeRight")
 .addLayer("outRight",
-new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(5).nOut(5).build(),
+OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(5).nOut(5).build(),
 "denseRight1")
 .setOutputs("outLeft", "outCentre", "outRight").build();
@@ -1096,10 +1096,10 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 public void testFeedForwardIncludeNonLayerVertices() {
 ComputationGraphConfiguration c = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
-.addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).build(), "in")
+.addLayer("0", DenseLayer.builder().nIn(5).nOut(5).build(), "in")
-.addLayer("1", new DenseLayer.Builder().nIn(5).nOut(5).build(), "in")
+.addLayer("1", DenseLayer.builder().nIn(5).nOut(5).build(), "in")
 .addVertex("merge", new MergeVertex(), "0", "1")
-.addLayer("out", new OutputLayer.Builder().nIn(10).nOut(5).activation(Activation.SOFTMAX).build(), "merge").setOutputs("out")
+.addLayer("out", OutputLayer.builder().nIn(10).nOut(5).activation(Activation.SOFTMAX).build(), "merge").setOutputs("out")
 .build();
 ComputationGraph cg = new ComputationGraph(c);
@@ -1124,7 +1124,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 //Users generally shouldn't do this, but multiple setOutputs calls should *replace* not *add* outputs
 ComputationGraphConfiguration c = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
-.addLayer("out", new OutputLayer.Builder().nIn(10).nOut(5).activation(Activation.SOFTMAX).build(), "in").setOutputs("out")
+.addLayer("out", OutputLayer.builder().nIn(10).nOut(5).activation(Activation.SOFTMAX).build(), "in").setOutputs("out")
 .setOutputs("out").build();
 List<String> l = c.getNetworkOutputs();
@@ -1138,7 +1138,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 NeuralNetConfiguration.builder().weightNoise(new DropConnect(0.5))
 .graphBuilder().setInputTypes(InputType.feedForward(1)).addInputs("input1")
 .addLayer("output",
-new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(1).nOut(1)
+OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(1).nOut(1)
 .activation(Activation.SIGMOID).build(),
 "input1")
 .setOutputs("output").backpropType(BackpropType.Standard)
@@ -1153,17 +1153,17 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration c =
 NeuralNetConfiguration.builder().l1(0.5).l2(0.6).graphBuilder()
 .addInputs("in")
-.addLayer("sub1", new SubsamplingLayer.Builder(2, 2).build(), "in")
+.addLayer("sub1", SubsamplingLayer.builder(2, 2).build(), "in")
-.addLayer("sub2", new Subsampling1DLayer.Builder(2).build(), "sub1")
+.addLayer("sub2", Subsampling1DLayer.builder(2).build(), "sub1")
-.addLayer("act", new ActivationLayer.Builder().activation(Activation.TANH)
+.addLayer("act", ActivationLayer.builder().activation(Activation.TANH)
 .build(), "sub2")
-.addLayer("pad", new ZeroPaddingLayer.Builder(2, 3).build(), "act")
+.addLayer("pad", ZeroPaddingLayer.builder(2, 3).build(), "act")
-.addLayer("lrn", new LocalResponseNormalization.Builder().build(), "pad")
+.addLayer("lrn", LocalResponseNormalization.builder().build(), "pad")
-.addLayer("pool", new GlobalPoolingLayer.Builder(PoolingType.AVG).build(),
+.addLayer("pool", GlobalPoolingLayer.builder(PoolingType.AVG).build(),
 "act")
-.addLayer("drop", new DropoutLayer.Builder(0.5).build(), "pool")
+.addLayer("drop", DropoutLayer.builder(0.5).build(), "pool")
-.addLayer("dense", new DenseLayer.Builder().nIn(1).nOut(1).build(), "drop")
+.addLayer("dense", DenseLayer.builder().nIn(1).nOut(1).build(), "drop")
-.addLayer("loss", new LossLayer.Builder(LossFunctions.LossFunction.MCXENT)
+.addLayer("loss", LossLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
 .build(), "dense")
 .allowDisconnected(true)
 .setOutputs("loss").build();
@@ -1179,7 +1179,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 public void testErrorNoOutputLayer() {
 ComputationGraphConfiguration c = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
-.addLayer("dense", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in").setOutputs("dense")
+.addLayer("dense", DenseLayer.builder().nIn(10).nOut(10).build(), "in").setOutputs("dense")
 .build();
 ComputationGraph cg = new ComputationGraph(c);
@@ -1203,7 +1203,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 //vertex
 NeuralNetConfiguration nnc = NeuralNetConfiguration.builder().build();
-nnc.setLayer(new DenseLayer.Builder().build());
+nnc.setLayer(DenseLayer.builder().build());
 GraphVertex[] singleInputVertices = new GraphVertex[]{new L2NormalizeVertex(), new LayerVertex(nnc, null),
 new PoolHelperVertex(), new PreprocessorVertex(), new ReshapeVertex(1, 1),
 new ScaleVertex(1.0), new ShiftVertex(1.0), new SubsetVertex(1, 1), new UnstackVertex(0, 2),
@@ -1241,7 +1241,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
 .graphBuilder()
 .addInputs("input")
-.addLayer("L1", new ConvolutionLayer.Builder(new int[]{1, 1}, new int[]{1, 1}, new int[]{0, 0}).nIn(depth).nOut(depth)
+.addLayer("L1", ConvolutionLayer.builder(new int[]{1, 1}, new int[]{1, 1}, new int[]{0, 0}).nIn(depth).nOut(depth)
 .build(), "input")
 .addVertex("L2", new ReshapeVertex(minibatch, 1, 36, 48), "L1")
 .setOutputs("L2")
@@ -1265,7 +1265,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
 .graphBuilder()
 .addInputs("in")
-.addLayer("out", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
+.addLayer("out", OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
 .setOutputs("out")
 .build();
@@ -1305,23 +1305,23 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 NeuralNetConfiguration.builder().seed(12345).l2(0.001) //l2 regularization on all layers
 .updater(new AdaGrad(0.4)).graphBuilder()
 .addInputs("in")
-.addLayer("layer0", new ConvolutionLayer.Builder(10, 10).nIn(3) //3 channels: RGB
+.addLayer("layer0", ConvolutionLayer.builder(10, 10).nIn(3) //3 channels: RGB
 .nOut(30).stride(4, 4).activation(Activation.RELU).weightInit(
 WeightInit.RELU).build(),"in") //Output: (130-10+0)/4+1 = 31 -> 31*31*30
-.addLayer("layer1", new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
+.addLayer("layer1", SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX)
 .kernelSize(3, 3).stride(2, 2).build(),"layer0") //(31-3+0)/2+1 = 15
-.addLayer("layer2", new ConvolutionLayer.Builder(3, 3).nIn(30).nOut(10).stride(2, 2)
+.addLayer("layer2", ConvolutionLayer.builder(3, 3).nIn(30).nOut(10).stride(2, 2)
 .activation(Activation.RELU).weightInit(WeightInit.RELU)
 .updater(Updater.ADAGRAD).build(), "layer1") //Output: (15-3+0)/2+1 = 7 -> 7*7*10 = 490
-.addLayer("layer3", new DenseLayer.Builder().activation(Activation.RELU).nIn(490).nOut(50)
+.addLayer("layer3", DenseLayer.builder().activation(Activation.RELU).nIn(490).nOut(50)
 .weightInit(WeightInit.RELU).gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
 .gradientNormalizationThreshold(10).build(), "layer2")
-.addLayer("layer4", new GravesLSTM.Builder().activation(Activation.SOFTSIGN).nIn(50)
+.addLayer("layer4", GravesLSTM.builder().activation(Activation.SOFTSIGN).nIn(50)
 .nOut(50).weightInit(WeightInit.XAVIER).updater(Updater.ADAGRAD)
 .gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
 .gradientNormalizationThreshold(10)
 .build(), "layer3")
-.addLayer("layer5", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
+.addLayer("layer5", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
 .activation(Activation.SOFTMAX).nIn(50).nOut(4) //4 possible shapes: circle, square, arc, line
 .weightInit(WeightInit.XAVIER)
 .gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
@@ -1351,10 +1351,10 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .convolutionMode(ConvolutionMode.Same)
 .graphBuilder()
 .addInputs("in")
-.addLayer("0", new ConvolutionLayer.Builder().kernelSize(2,2).stride(1,1).nIn(1).nOut(1).build(), "in")
+.addLayer("0", ConvolutionLayer.builder().kernelSize(2,2).stride(1,1).nIn(1).nOut(1).build(), "in")
-.addLayer("1", new SubsamplingLayer.Builder().kernelSize(2,2).stride(1,1).build(), "0")
+.addLayer("1", SubsamplingLayer.builder().kernelSize(2,2).stride(1,1).build(), "0")
-.addLayer("2", new DenseLayer.Builder().nOut(10).build(), "1")
+.addLayer("2", DenseLayer.builder().nOut(10).build(), "1")
-.addLayer("3", new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build(), "2")
+.addLayer("3", OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build(), "2")
 .setOutputs("3")
 .setInputTypes(InputType.convolutional(28,28,1))
 .build();
@@ -1386,9 +1386,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration.GraphBuilder b = NeuralNetConfiguration.builder()
 .graphBuilder()
 .addInputs("in")
-.addLayer("0", new DenseLayer.Builder().activation(Activation.SIGMOID).nOut(8).build(), "in")
+.addLayer("0", DenseLayer.builder().activation(Activation.SIGMOID).nOut(8).build(), "in")
-.addLayer("1", new DenseLayer.Builder().activation(Activation.SIGMOID).nOut(8).build(), "in") //Disconnected
+.addLayer("1", DenseLayer.builder().activation(Activation.SIGMOID).nOut(8).build(), "in") //Disconnected
-.addLayer("O", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nOut(10).build(), "0")
+.addLayer("O", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nOut(10).build(), "0")
 .setOutputs("O")
 .setInputTypes(InputType.feedForward(8));
@@ -1418,10 +1418,10 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .graphBuilder()
 .addInputs("in")
-.layer("0", new ConvolutionLayer.Builder().kernelSize(2,2).nOut(6).build(), "in")
+.layer("0", ConvolutionLayer.builder().kernelSize(2,2).nOut(6).build(), "in")
-.layer("1", new SubsamplingLayer.Builder().kernelSize(2,2).build(), "0")
+.layer("1", SubsamplingLayer.builder().kernelSize(2,2).build(), "0")
-.layer("2", new DenseLayer.Builder().nOut(30).build(), "1")
+.layer("2", DenseLayer.builder().nOut(30).build(), "1")
-.layer("3", new OutputLayer.Builder().nOut(13).activation(Activation.SOFTMAX).build(), "2")
+.layer("3", OutputLayer.builder().nOut(13).activation(Activation.SOFTMAX).build(), "2")
 .setOutputs("3")
 .setInputTypes(InputType.convolutional(28,28,3))
 .build();
@@ -1456,8 +1456,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
 .graphBuilder()
 .addInputs("in")
-.layer("0", new SubsamplingLayer.Builder().kernelSize(2,2).stride(2,2).build(), "in")
+.layer("0", SubsamplingLayer.builder().kernelSize(2,2).stride(2,2).build(), "in")
-.layer("1", new LossLayer.Builder().activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.MSE).build(), "0")
+.layer("1", LossLayer.builder().activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.MSE).build(), "0")
 .setOutputs("1")
 .setInputTypes(InputType.convolutionalFlat(28,28,1))
 .build();
@@ -1501,7 +1501,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .setOutputs(outputName)
 .setInputTypes(InputType.inferInputType(input))
 .addVertex(scaleName, new ScaleVertex(scaleFactor), inputName)
-.addLayer(outputName, new OutputLayer.Builder()
+.addLayer(outputName, OutputLayer.builder()
 .activation(new ActivationIdentity())
 .lossFunction(LossFunctions.LossFunction.MSE)
 .nOut(input.length())
@@ -1539,7 +1539,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .seed(12345)
 .graphBuilder()
 .addInputs("in")
-.layer("layer", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
+.layer("layer", OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
 .setOutputs("layer")
 .build();
 ComputationGraph cg = new ComputationGraph(conf);
@@ -1561,11 +1561,11 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration.GraphBuilder builder = NeuralNetConfiguration.builder()
 .graphBuilder()
 .addInputs("in1", "in2")
-.layer("0", new DenseLayer.Builder().nOut(10).build(), "in1")
+.layer("0", DenseLayer.builder().nOut(10).build(), "in1")
-.layer("1", new DenseLayer.Builder().nOut(9).build(), "in1", "in2")
+.layer("1", DenseLayer.builder().nOut(9).build(), "in1", "in2")
-.layer("2", new DenseLayer.Builder().nOut(8).build(), "in2")
+.layer("2", DenseLayer.builder().nOut(8).build(), "in2")
-.layer("3", new DenseLayer.Builder().nOut(7).build(), "0")
+.layer("3", DenseLayer.builder().nOut(7).build(), "0")
-.layer("4", new DenseLayer.Builder().nOut(6).build(), "1", "2")
+.layer("4", DenseLayer.builder().nOut(6).build(), "1", "2")
 .setInputTypes(InputType.feedForward(5), InputType.feedForward(6))
 .allowNoOutput(true);
@@ -1598,14 +1598,14 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
 .graphBuilder()
 .addInputs("in1", "in2")
-.addLayer("l0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in1")
+.addLayer("l0", DenseLayer.builder().nIn(10).nOut(10).build(), "in1")
-.addLayer("l1", new DenseLayer.Builder().nIn(20).nOut(10).build(), "in1", "in2")
+.addLayer("l1", DenseLayer.builder().nIn(20).nOut(10).build(), "in1", "in2")
-.addLayer("l2", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in2")
+.addLayer("l2", DenseLayer.builder().nIn(10).nOut(10).build(), "in2")
-.addLayer("l3", new DenseLayer.Builder().nIn(10).nOut(10).build(), "l0")
+.addLayer("l3", DenseLayer.builder().nIn(10).nOut(10).build(), "l0")
-.addLayer("l4", new DenseLayer.Builder().nIn(10).nOut(10).build(), "l1")
+.addLayer("l4", DenseLayer.builder().nIn(10).nOut(10).build(), "l1")
-.addLayer("l5", new DenseLayer.Builder().nIn(10).nOut(10).build(), "l2")
+.addLayer("l5", DenseLayer.builder().nIn(10).nOut(10).build(), "l2")
-.addLayer("l6", new OutputLayer.Builder().nIn(20).nOut(10).activation(Activation.SOFTMAX).build(), "l3", "l5")
+.addLayer("l6", OutputLayer.builder().nIn(20).nOut(10).activation(Activation.SOFTMAX).build(), "l3", "l5")
-.addLayer("l7", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "l4")
+.addLayer("l7", OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "l4")
 .setOutputs("l6", "l7")
 .build();
@@ -1698,9 +1698,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .graphBuilder()
 .addInputs("in")
-.layer("0", new VariationalAutoencoder.Builder()
+.layer("0", VariationalAutoencoder.builder()
 .nIn(10).nOut(10).encoderLayerSizes(10).decoderLayerSizes(10).build(), "in")
-.layer("1", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "0")
+.layer("1", OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "0")
 .setOutputs("1")
 .build();
@@ -1746,13 +1746,13 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .graphBuilder()
 .addInputs("in1", "in2")
-.layer("0", new DenseLayer.Builder().nOut(10).build(), "in1") //Modification should not be allowed on input
+.layer("0", DenseLayer.builder().nOut(10).build(), "in1") //Modification should not be allowed on input
-.layer("1", new DenseLayer.Builder().nOut(10).build(), "in2") //Modification should not be allowed on input
+.layer("1", DenseLayer.builder().nOut(10).build(), "in2") //Modification should not be allowed on input
-.layer("2", new DenseLayer.Builder().nOut(10).build(), "0") //Modification SHOULD be allowed
+.layer("2", DenseLayer.builder().nOut(10).build(), "0") //Modification SHOULD be allowed
-.layer("3", new DenseLayer.Builder().nOut(10).build(), "1") //First in topo sort for using this input - not allowed
+.layer("3", DenseLayer.builder().nOut(10).build(), "1") //First in topo sort for using this input - not allowed
-.layer("4", new DenseLayer.Builder().nOut(10).build(), "1") //Second in topo sort - not allowed
+.layer("4", DenseLayer.builder().nOut(10).build(), "1") //Second in topo sort - not allowed
-.layer("5", new DenseLayer.Builder().nOut(10).build(), "1") //Last in topo sort - allowed
+.layer("5", DenseLayer.builder().nOut(10).build(), "1") //Last in topo sort - allowed
-.layer("6", new DenseLayer.Builder().nOut(10).build(), "2", "3", "4", "5") //Input from merge vertex - allowed
+.layer("6", DenseLayer.builder().nOut(10).build(), "2", "3", "4", "5") //Input from merge vertex - allowed
 .setOutputs("6")
 .setInputTypes(InputType.feedForward(10), InputType.feedForward(10))
 .build();
@@ -1787,19 +1787,19 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .addInputs("in1", "in2")
 .addVertex("merge", new MergeVertex(), "in1", "in2")
 .addLayer("lstm",
-new Bidirectional(Bidirectional.Mode.CONCAT, new LSTM.Builder()
+Bidirectional.builder(Bidirectional.Mode.CONCAT, LSTM.builder()
 .nIn(10).nOut(5)
 .activation(Activation.TANH)
 .dropOut(new GaussianNoise(0.05))
 .build())
 ,"merge")
 .addLayer("out1",
-new RnnOutputLayer.Builder().activation(Activation.SOFTMAX)
+RnnOutputLayer.builder().activation(Activation.SOFTMAX)
 .lossFunction(LossFunctions.LossFunction.MCXENT).nIn(10)
 .nOut(6).build(),
 "lstm")
 .addLayer("out2",
-new RnnOutputLayer.Builder().activation(Activation.SOFTMAX)
+RnnOutputLayer.builder().activation(Activation.SOFTMAX)
 .lossFunction(LossFunctions.LossFunction.MCXENT).nIn(10)
 .nOut(4).build(),
 "lstm")
@@ -1825,18 +1825,18 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .addInputs("in1", "in2")
 .addVertex("merge", new MergeVertex(), "in1", "in2")
 .addLayer("dense",
-new DenseLayer.Builder()
+DenseLayer.builder()
 .nIn(10).nOut(5)
 .activation(Activation.TANH)
 .dropOut(new GaussianNoise(0.05))
 .build(),"merge")
 .addLayer("out1",
-new OutputLayer.Builder().activation(Activation.SOFTMAX)
+OutputLayer.builder().activation(Activation.SOFTMAX)
 .lossFunction(LossFunctions.LossFunction.MCXENT).nIn(5)
 .nOut(6).build(),
 "dense")
 .addLayer("out2",
-new OutputLayer.Builder().activation(Activation.SOFTMAX)
+OutputLayer.builder().activation(Activation.SOFTMAX)
 .lossFunction(LossFunctions.LossFunction.MCXENT).nIn(5)
 .nOut(4).build(),
 "dense")
@@ -1867,8 +1867,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
 .graphBuilder()
 .addInputs("in")
-.layer("layer_zero", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
+.layer("layer_zero", DenseLayer.builder().nIn(10).nOut(10).build(), "in")
-.layer("layer_one", new OutputLayer.Builder().nIn(10).nOut(10).build(), "layer_zero")
+.layer("layer_one", OutputLayer.builder().nIn(10).nOut(10).build(), "layer_zero")
 .setOutputs("layer_one")
 .build();
@@ -1894,10 +1894,10 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .seed(12345)
 .graphBuilder()
 .addInputs("in")
-.layer("0", new DenseLayer.Builder().nIn(10).nOut(9).build(), "in")
+.layer("0", DenseLayer.builder().nIn(10).nOut(9).build(), "in")
-.layer("1", new DenseLayer.Builder().nIn(9).nOut(8).build(), "0")
+.layer("1", DenseLayer.builder().nIn(9).nOut(8).build(), "0")
-.layer("2", new DenseLayer.Builder().nIn(8).nOut(7).build(), "1")
+.layer("2", DenseLayer.builder().nIn(8).nOut(7).build(), "1")
-.layer("3", new OutputLayer.Builder().nIn(7).nOut(6).build(), "2")
+.layer("3", OutputLayer.builder().nIn(7).nOut(6).build(), "2")
 .setOutputs("3")
 .build();
@@ -1923,7 +1923,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .setInputTypes(inputType)
 .addInputs("input")
 .setOutputs("output")
-.addLayer("0", new ConvolutionLayer.Builder().nOut(5).convolutionMode(ConvolutionMode.Same).build(),"input" )
+.addLayer("0", ConvolutionLayer.builder().nOut(5).convolutionMode(ConvolutionMode.Same).build(),"input" )
 .addVertex("dummyAdd", new ElementWiseVertex(ElementWiseVertex.Op.Add), "0")
 .addLayer("output", new CnnLossLayer(), "dummyAdd")
 .build());
@@ -1943,7 +1943,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .addInputs("input")
 .addLayer(
 "dense",
-new DenseLayer.Builder()
+DenseLayer.builder()
 .nIn(10)
 .nOut(10)
 .activation(Activation.RELU)
@@ -1952,7 +1952,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .build(),
 "input")
 .addLayer("output",
-new OutputLayer.Builder()
+OutputLayer.builder()
 .nIn(10)
 .nOut(1)
 .lossFunction(LossFunctions.LossFunction.XENT)
@@ -1968,8 +1968,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraph cg2 = model.clone();
-IDropout d1 = model.getLayer(0).getLayerConfiguration().getIDropout();
+IDropout d1 = model.getLayer(0).getLayerConfiguration().getDropOut();
-IDropout d2 = cg2.getLayer(0).getLayerConfiguration().getIDropout();
+IDropout d2 = cg2.getLayer(0).getLayerConfiguration().getDropOut();
 assertNotSame(d1, d2); //Should not be same object!
 assertEquals(d1, d2); //But should be equal
@@ -1986,15 +1986,15 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .updater(new Adam())
 .graphBuilder()
 .addInputs("x_emb")
-.addLayer("agg_lstm", new Bidirectional(CONCAT, new LSTM.Builder().nOut(hiddenSize/2).build()), "x_emb")
+.addLayer("agg_lstm", Bidirectional.builder(CONCAT, LSTM.builder().nOut(hiddenSize/2).build()), "x_emb")
-.addLayer("agg_att", new DenseLayer.Builder().nIn(100).nOut(1).activation(Activation.SOFTMAX).build(), "agg_lstm")
+.addLayer("agg_att", DenseLayer.builder().nIn(100).nOut(1).activation(Activation.SOFTMAX).build(), "agg_lstm")
 .addVertex("att", new PreprocessorVertex(new ComposableInputPreProcessor(new FeedForwardToRnnPreProcessor(), new PermutePreprocessor(0,2,1), new RnnToFeedForwardPreProcessor())), "agg_att")
 .addLayer("att_repeat", new RepeatVector.Builder(hiddenSize).build(),"att")
 .addVertex("att_trans", new PreprocessorVertex(new PermutePreprocessor(0, 2, 1)), "att_repeat")
 .addVertex("mult", new ElementWiseVertex(ElementWiseVertex.Op.Product), "agg_lstm", "att_trans")
-.addLayer("sum", new GlobalPoolingLayer.Builder().build(), "mult")
+.addLayer("sum", GlobalPoolingLayer.builder().build(), "mult")
-.addLayer("agg_out", new DenseLayer.Builder().nIn(100).nOut(6).activation(Activation.TANH).build(), "sum")
+.addLayer("agg_out", DenseLayer.builder().nIn(100).nOut(6).activation(Activation.TANH).build(), "sum")
-.addLayer("output", new OutputLayer.Builder().nIn(6).nOut(6).lossFunction(LossFunctions.LossFunction.RECONSTRUCTION_CROSSENTROPY).build(), "agg_out")
+.addLayer("output", OutputLayer.builder().nIn(6).nOut(6).lossFunction(LossFunctions.LossFunction.RECONSTRUCTION_CROSSENTROPY).build(), "agg_out")
 .setOutputs("output")
 .setInputTypes(InputType.recurrent(inputSize,seqLen,RNNFormat.NCW))
 .build();
@@ -2029,9 +2029,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .backpropType(BackpropType.Standard)
 .addInputs("in")
 .setOutputs("out")
-.addLayer("0",new DenseLayer.Builder().nIn(5).nOut(3).build(),"in")
+.addLayer("0",DenseLayer.builder().nIn(5).nOut(3).build(),"in")
-.addLayer("1",new DenseLayer.Builder().nIn(3).nOut(2).build(),"0")
+.addLayer("1",DenseLayer.builder().nIn(3).nOut(2).build(),"0")
-.addLayer("out",new OutputLayer.Builder(LossFunctions.LossFunction.XENT).nIn(2).nOut(1)
+.addLayer("out",OutputLayer.builder(LossFunctions.LossFunction.XENT).nIn(2).nOut(1)
 .activation(Activation.SIGMOID).build(),"1")
 .build();
@@ -2129,9 +2129,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .graphBuilder()
 .addInputs("in")
 .setOutputs("out")
-.addLayer("0",new DenseLayer.Builder().nIn(inputSize).nOut(layerSize).build(),"in")
+.addLayer("0",DenseLayer.builder().nIn(inputSize).nOut(layerSize).build(),"in")
 .addVertex("combine", new MergeVertex(), "0", "0", "0")
-.addLayer("out",new OutputLayer.Builder(LossFunctions.LossFunction.XENT).nIn(3*layerSize).nOut(outputSize)
+.addLayer("out",OutputLayer.builder(LossFunctions.LossFunction.XENT).nIn(3*layerSize).nOut(outputSize)
 .activation(Activation.SIGMOID).build(),"combine")
 .build();
@@ -2155,8 +2155,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
 .graphBuilder()
-.addLayer("l0", new Convolution3D.Builder().kernelSize(2,2,2).stride(1,1,1).nIn(3).nOut(3).dataFormat(Convolution3D.DataFormat.NCDHW).build(), "in")
+.addLayer("l0", Convolution3D.builder().kernelSize(2,2,2).stride(1,1,1).nIn(3).nOut(3).dataFormat(Convolution3D.DataFormat.NCDHW).build(), "in")
-.addLayer("l1", new Convolution3D.Builder().kernelSize(2,2,2).stride(1,1,1).nIn(3).nOut(3).dataFormat(Convolution3D.DataFormat.NCDHW).build(), "in")
+.addLayer("l1", Convolution3D.builder().kernelSize(2,2,2).stride(1,1,1).nIn(3).nOut(3).dataFormat(Convolution3D.DataFormat.NCDHW).build(), "in")
 .addVertex("out", new MergeVertex(), "l0", "l1")
 .setInputTypes(InputType.convolutional3D(Convolution3D.DataFormat.NCDHW, 16, 16, 16, 3))
 .addInputs("in")
@@ -2175,9 +2175,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
 .graphBuilder()
 .addInputs("in")
-.addLayer("e1", new EmbeddingLayer.Builder().nIn(10).nOut(5).build(), "in")
+.addLayer("e1", EmbeddingLayer.builder().nIn(10).nOut(5).build(), "in")
-.addLayer("e2", new EmbeddingLayer.Builder().nIn(10).nOut(5).build(), "in")
+.addLayer("e2", EmbeddingLayer.builder().nIn(10).nOut(5).build(), "in")
-.addLayer("out", new OutputLayer.Builder().nIn(10).nOut(2).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "e1", "e2")
+.addLayer("out", OutputLayer.builder().nIn(10).nOut(2).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "e1", "e2")
 .setOutputs("out")
 .build();
@@ -2195,18 +2195,18 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
 .convolutionMode(ConvolutionMode.Same)
 .graphBuilder()
 .addInputs("in")
-.layer("l0", new ConvolutionLayer.Builder()
+.layer("l0", ConvolutionLayer.builder()
 .nOut(16)
 .dataFormat(CNN2DFormat.NHWC)
 .kernelSize(2,2).stride(1,1)
 .build(), "in")
-.layer("l1", new ConvolutionLayer.Builder()
+.layer("l1", ConvolutionLayer.builder()
 .nOut(8)
 .dataFormat(CNN2DFormat.NHWC)
 .kernelSize(2,2).stride(1,1)
 .build(), "in")
 .addVertex("merge", new MergeVertex(), "l0", "l1")
-.layer("out", new CnnLossLayer.Builder().activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "merge")
+.layer("out", CnnLossLayer.builder().activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "merge")
 .setOutputs("out")
 .setInputTypes(InputType.convolutional(32, 32, 3, CNN2DFormat.NHWC))
 .build();
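
The hunks above all apply the same mechanical migration: every hand-written inner Builder class is replaced by the static builder() factory that Lombok's @SuperBuilder generates on the layer-configuration classes, and loss functions that used to be Builder constructor arguments usually move to an explicit lossFunction(...) call. A minimal consolidated sketch of the new style follows. It assumes the DL4J fork in this commit (stock Deeplearning4j package and class names); the class name SuperBuilderMigrationSketch is hypothetical and only for illustration.

import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class SuperBuilderMigrationSketch {
    public static void main(String[] args) {
        // Old style:
        //   new DenseLayer.Builder().nIn(4).nOut(5).build()
        //   new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)...build()
        ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
                .graphBuilder()
                .addInputs("in")
                // static builder() replaces the inner Builder class
                .addLayer("dense", DenseLayer.builder().nIn(4).nOut(5).build(), "in")
                // the loss function moves from a constructor argument to a fluent setter
                .addLayer("out", OutputLayer.builder()
                        .lossFunction(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX)
                        .nIn(5).nOut(3)
                        .build(), "dense")
                .setOutputs("out")
                .build();

        ComputationGraph net = new ComputationGraph(conf);
        net.init();
    }
}

Note that some hunks instead keep the loss function as a builder(...) argument, e.g. OutputLayer.builder(LossFunctions.LossFunction.XENT); both forms appear in this diff.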
Some files were not shown because too many files have changed in this diff.