Using @SuperBuilder for LayerConfigurations
parent 4482113f23
commit 396dbec24e
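Every hunk in this commit applies the same mechanical change: call sites stop instantiating the hand-written nested Builder classes (new OutputLayer.Builder(...)) and instead use the static builder() factories now generated for the layer configurations (OutputLayer.builder()), with constructor arguments such as the loss function moved onto explicit setters like .lossFunction(...). A minimal before/after sketch of the call-site pattern, using classes and setters that appear in the hunks below (the sketch itself is not part of the commit):

import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

class BuilderMigrationSketch {
    static void sketch() {
        // Before: nested Builder class, loss function passed to the constructor.
        OutputLayer legacy = new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                .nIn(4).nOut(3)
                .activation(Activation.SOFTMAX)
                .build();

        // After: generated static builder(); the loss function becomes a setter.
        OutputLayer migrated = OutputLayer.builder()
                .lossFunction(LossFunctions.LossFunction.MCXENT)
                .nIn(4).nOut(3)
                .activation(Activation.SOFTMAX)
                .build();
    }
}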
@@ -207,7 +207,7 @@ public class TupleStreamDataSetIteratorTest extends SolrCloudTestCase {
 final MultiLayerNetwork model = new MultiLayerNetwork(
 NeuralNetConfiguration.builder()
 .list(
-new OutputLayer.Builder(LossFunction.MSE)
+OutputLayer.builder(LossFunction.MSE)
 .nIn(3)
 .nOut(1)
 .weightInit(WeightInit.ONES)
@@ -155,7 +155,7 @@ public class ModelTupleStreamIntegrationTest extends SolrCloudTestCase {
 final NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
 .list(
-new OutputLayer.Builder()
+OutputLayer.builder()
 .nIn(numInputs)
 .nOut(numOutputs)
 .activation(Activation.IDENTITY)
@@ -244,7 +244,7 @@ public class ModelTupleStreamTest {
 final NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
 .list(
-new OutputLayer.Builder()
+OutputLayer.builder()
 .nIn(numInputs)
 .nOut(numOutputs)
 .activation(Activation.IDENTITY)

@@ -278,7 +278,7 @@ public class ModelTupleStreamTest {
 .graphBuilder()
 .addInputs("inputLayer")
 .addLayer("outputLayer",
-new OutputLayer.Builder()
+OutputLayer.builder()
 .nIn(numInputs)
 .nOut(numOutputs)
 .activation(Activation.IDENTITY)
@@ -194,7 +194,7 @@ public class ScoringModelTest {
 final NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
 .list(
-new OutputLayer.Builder().nIn(numFeatures).nOut(1).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).build()
+OutputLayer.builder().nIn(numFeatures).nOut(1).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).build()
 )
 .build();

@@ -221,7 +221,7 @@ public class ScoringModelTest {
 .graphBuilder()
 .addInputs("inputLayer")
 .addLayer("outputLayer",
-new OutputLayer.Builder().nIn(numFeatures).nOut(1).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).build(),
+OutputLayer.builder().nIn(numFeatures).nOut(1).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).build(),
 "inputLayer")
 .setOutputs("outputLayer")
 .build();
@@ -75,8 +75,8 @@ public class JsonModelServerTest extends BaseDL4JTest {
 .updater(new Adam(0.119f))
 .weightInit(WeightInit.XAVIER)
 .list()
-.layer(0, new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(10).build())
-.layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.SQUARED_LOSS).activation(Activation.SIGMOID).nIn(10).nOut(1).build())
+.layer(0, DenseLayer.builder().activation(Activation.TANH).nIn(4).nOut(10).build())
+.layer(1, OutputLayer.builder(LossFunctions.LossFunction.SQUARED_LOSS).activation(Activation.SIGMOID).nIn(10).nOut(1).build())
 .build();

 model = new MultiLayerNetwork(conf);

@@ -543,8 +543,8 @@ public class JsonModelServerTest extends BaseDL4JTest {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
 .list()
-.layer(new DenseLayer.Builder().nIn(784).nOut(10).build())
-.layer(new LossLayer.Builder().activation(Activation.SOFTMAX).build())
+.layer(DenseLayer.builder().nIn(784).nOut(10).build())
+.layer(LossLayer.builder().lossFunction().activation(Activation.SOFTMAX).build())
 .build();

 MultiLayerNetwork net = new MultiLayerNetwork(conf);

@@ -600,10 +600,10 @@ public class JsonModelServerTest extends BaseDL4JTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
 .graphBuilder()
 .addInputs("input1", "input2")
-.addLayer("L1", new DenseLayer.Builder().nIn(3).nOut(4).build(), "input1")
-.addLayer("L2", new DenseLayer.Builder().nIn(3).nOut(4).build(), "input2")
+.addLayer("L1", DenseLayer.builder().nIn(3).nOut(4).build(), "input1")
+.addLayer("L2", DenseLayer.builder().nIn(3).nOut(4).build(), "input2")
 .addVertex("merge", new MergeVertex(), "L1", "L2")
-.addLayer("out", new OutputLayer.Builder().nIn(4+4).nOut(3).build(), "merge")
+.addLayer("out", OutputLayer.builder().nIn(4+4).nOut(3).build(), "merge")
 .setOutputs("out")
 .build();

@@ -656,11 +656,11 @@ public class JsonModelServerTest extends BaseDL4JTest {
 .updater(new Sgd(0.01))
 .graphBuilder()
 .addInputs("input")
-.addLayer("L1", new DenseLayer.Builder().nIn(8).nOut(4).build(), "input")
-.addLayer("out1", new OutputLayer.Builder()
+.addLayer("L1", DenseLayer.builder().nIn(8).nOut(4).build(), "input")
+.addLayer("out1", OutputLayer.builder()
 .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
 .nIn(4).nOut(3).build(), "L1")
-.addLayer("out2", new OutputLayer.Builder()
+.addLayer("out2", OutputLayer.builder()
 .lossFunction(LossFunctions.LossFunction.MSE)
 .nIn(4).nOut(2).build(), "L1")
 .setOutputs("out1","out2")
@@ -129,9 +129,9 @@ public abstract class BaseSparkTest extends BaseDL4JTest implements Serializable
 protected NeuralNetConfiguration getBasicConf() {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
 .updater(new Nesterovs(0.1, 0.9)).list()
-.layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(nIn).nOut(3)
+.layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(nIn).nOut(3)
 .activation(Activation.TANH).build())
-.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(3).nOut(nOut)
 .activation(Activation.SOFTMAX).build())
 .build();
@@ -137,7 +137,7 @@ public class GradientSharingTrainingTest extends BaseSparkTest {
 .updater(new AMSGrad(0.1))
 .graphBuilder()
 .addInputs("in")
-.layer("out", new OutputLayer.Builder().nIn(784).nOut(10).activation(Activation.SOFTMAX)
+.layer("out", OutputLayer.builder().nIn(784).nOut(10).activation(Activation.SOFTMAX)
 .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
 .setOutputs("out")
 .build();

@@ -272,15 +272,15 @@ public class GradientSharingTrainingTest extends BaseSparkTest {
 .weightInit(WeightInit.XAVIER)
 .seed(12345)
 .list()
-.layer(new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
+.layer(OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
 .build();
 } else {
 conf = NeuralNetConfiguration.builder()
 .weightInit(WeightInit.XAVIER)
 .seed(12345)
 .list()
-.layer(new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build())
-.layer(new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
+.layer(DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build())
+.layer(OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
 .build();
 }
 MultiLayerNetwork net = new MultiLayerNetwork(conf);

@@ -358,7 +358,7 @@ public class GradientSharingTrainingTest extends BaseSparkTest {
 .updater(new AMSGrad(0.001))
 .graphBuilder()
 .addInputs("in")
-.layer("out", new OutputLayer.Builder().nIn(784).nOut(10).activation(Activation.SOFTMAX)
+.layer("out", OutputLayer.builder().nIn(784).nOut(10).activation(Activation.SOFTMAX)
 .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
 .setOutputs("out")
 .build();
@@ -132,9 +132,9 @@ public abstract class BaseSparkTest extends BaseDL4JTest implements Serializable
 protected NeuralNetConfiguration getBasicConf() {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
 .updater(new Nesterovs(0.1, 0.9)).list()
-.layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(nIn).nOut(3)
+.layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(nIn).nOut(3)
 .activation(Activation.TANH).build())
-.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(3).nOut(nOut)
 .activation(Activation.SOFTMAX).build())
 .build();
@@ -71,7 +71,7 @@ public class TestEarlyStoppingSpark extends BaseSparkTest {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .updater(new Sgd()).weightInit(WeightInit.XAVIER).list()
-.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+.layer(0, OutputLayer.builder().nIn(4).nOut(3)
 .lossFunction(LossFunctions.LossFunction.MCXENT).build())
 .build();
 MultiLayerNetwork net = new MultiLayerNetwork(conf);

@@ -127,7 +127,7 @@ public class TestEarlyStoppingSpark extends BaseSparkTest {
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .updater(new Sgd(10.0)) //Intentionally huge LR
 .weightInit(WeightInit.XAVIER).list()
-.layer(0, new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
+.layer(0, OutputLayer.builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
 .lossFunction(LossFunctions.LossFunction.MSE).build())
 .build();
 MultiLayerNetwork net = new MultiLayerNetwork(conf);

@@ -166,7 +166,7 @@ public class TestEarlyStoppingSpark extends BaseSparkTest {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .updater(new Sgd(1e-6)).weightInit(WeightInit.XAVIER).list()
-.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+.layer(0, OutputLayer.builder().nIn(4).nOut(3)
 .lossFunction(LossFunctions.LossFunction.MCXENT).build())
 .build();
 MultiLayerNetwork net = new MultiLayerNetwork(conf);

@@ -212,7 +212,7 @@ public class TestEarlyStoppingSpark extends BaseSparkTest {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .updater(new Sgd(0.0)).weightInit(WeightInit.XAVIER).list()
-.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+.layer(0, OutputLayer.builder().nIn(4).nOut(3)
 .lossFunction(LossFunctions.LossFunction.MCXENT).build())
 .build();
 MultiLayerNetwork net = new MultiLayerNetwork(conf);

@@ -249,7 +249,7 @@ public class TestEarlyStoppingSpark extends BaseSparkTest {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .updater(new Sgd()).weightInit(WeightInit.XAVIER).list()
-.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
+.layer(0, OutputLayer.builder().nIn(4).nOut(3)
 .lossFunction(LossFunctions.LossFunction.MCXENT).build())
 .build();
 MultiLayerNetwork net = new MultiLayerNetwork(conf);
@@ -74,7 +74,7 @@ public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .updater(new Sgd()).weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-.addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+.addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
 .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
 .setOutputs("0").build();
 ComputationGraph net = new ComputationGraph(conf);

@@ -128,7 +128,7 @@ public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest {
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .updater(new Sgd(2.0)) //Intentionally huge LR
 .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-.addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
+.addLayer("0", OutputLayer.builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
 .lossFunction(LossFunctions.LossFunction.MSE).build(), "in")
 .setOutputs("0").build();
 ComputationGraph net = new ComputationGraph(conf);

@@ -169,7 +169,7 @@ public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest {
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .updater(new Sgd(1e-6)).weightInit(WeightInit.XAVIER).graphBuilder()
 .addInputs("in")
-.addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+.addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
 .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
 .setOutputs("0").build();
 ComputationGraph net = new ComputationGraph(conf);

@@ -217,7 +217,7 @@ public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest {
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .updater(new Sgd(0.0)).weightInit(WeightInit.XAVIER).graphBuilder()
 .addInputs("in")
-.addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+.addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
 .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
 .setOutputs("0").build();
 ComputationGraph net = new ComputationGraph(conf);

@@ -256,7 +256,7 @@ public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .updater(new Sgd()).weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-.addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
+.addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
 .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
 .setOutputs("0").build();
 ComputationGraph net = new ComputationGraph(conf);
@@ -69,7 +69,7 @@ public class TestKryo extends BaseSparkKryoTest {
 m.put(0, 0.5);
 m.put(10, 0.1);
 NeuralNetConfiguration mlc = NeuralNetConfiguration.builder()
-.updater(new Nadam(new MapSchedule(ScheduleType.ITERATION,m))).list().layer(0, new OutputLayer.Builder().nIn(10).nOut(10).build())
+.updater(new Nadam(new MapSchedule(ScheduleType.ITERATION,m))).list().layer(0, OutputLayer.builder().nIn(10).nOut(10).build())
 .build();

 testSerialization(mlc, si);

@@ -79,23 +79,23 @@ public class TestKryo extends BaseSparkKryoTest {
 .dist(new UniformDistribution(-1, 1))
 .updater(new Adam(new MapSchedule(ScheduleType.ITERATION,m)))
 .graphBuilder()
-.addInputs("in").addLayer("out", new OutputLayer.Builder().nIn(10).nOut(10).build(), "in")
+.addInputs("in").addLayer("out", OutputLayer.builder().nIn(10).nOut(10).build(), "in")
 .setOutputs("out").build();

 testSerialization(cgc, si);

 //Check main layers:
-Layer[] layers = new Layer[] {new OutputLayer.Builder().nIn(10).nOut(10).build(),
-new RnnOutputLayer.Builder().nIn(10).nOut(10).build(), new LossLayer.Builder().build(),
-new CenterLossOutputLayer.Builder().nIn(10).nOut(10).build(),
-new DenseLayer.Builder().nIn(10).nOut(10).build(),
-new ConvolutionLayer.Builder().nIn(10).nOut(10).build(), new SubsamplingLayer.Builder().build(),
+Layer[] layers = new Layer[] {OutputLayer.builder().nIn(10).nOut(10).build(),
+RnnOutputLayer.builder().nIn(10).nOut(10).build(), LossLayer.builder().lossFunction().build(),
+CenterLossOutputLayer.builder().nIn(10).nOut(10).build(),
+DenseLayer.builder().nIn(10).nOut(10).build(),
+ConvolutionLayer.builder().nIn(10).nOut(10).build(), SubsamplingLayer.builder().build(),
 new Convolution1DLayer.Builder(2, 2).nIn(10).nOut(10).build(),
-new ActivationLayer.Builder().activation(Activation.TANH).build(),
-new GlobalPoolingLayer.Builder().build(), new GravesLSTM.Builder().nIn(10).nOut(10).build(),
-new LSTM.Builder().nIn(10).nOut(10).build(), new DropoutLayer.Builder(0.5).build(),
-new BatchNormalization.Builder().build(), new LocalResponseNormalization.Builder().build()};
+ActivationLayer.builder().activation(Activation.TANH).build(),
+GlobalPoolingLayer.builder().build(), GravesLSTM.builder().nIn(10).nOut(10).build(),
+LSTM.builder().nIn(10).nOut(10).build(), DropoutLayer.builder(0.5).build(),
+BatchNormalization.builder().build(), LocalResponseNormalization.builder().build()};

 for (Layer l : layers) {
 testSerialization(l, si);
@@ -86,9 +86,9 @@ public class TestPreProcessedData extends BaseSparkTest {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(Updater.RMSPROP)
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-.layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(3)
+.layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(4).nOut(3)
 .activation(Activation.TANH).build())
-.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3).activation(Activation.SOFTMAX)
 .build())
 .build();

@@ -137,9 +137,9 @@ public class TestPreProcessedData extends BaseSparkTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().updater(Updater.RMSPROP)
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .graphBuilder().addInputs("in")
-.addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(3)
+.addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(4).nOut(3)
 .activation(Activation.TANH).build(), "in")
-.addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3).activation(Activation.SOFTMAX)
 .build(),
 "0")

@@ -191,9 +191,9 @@ public class TestPreProcessedData extends BaseSparkTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().updater(Updater.RMSPROP)
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .graphBuilder().addInputs("in")
-.addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(3)
+.addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(4).nOut(3)
 .activation(Activation.TANH).build(), "in")
-.addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3).activation(Activation.SOFTMAX)
 .build(),
 "0")
@@ -41,7 +41,7 @@ public class TestKryoWarning {
 try {

 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
-.layer(0, new OutputLayer.Builder().nIn(10).nOut(10).build())
+.layer(0, OutputLayer.builder().nIn(10).nOut(10).build())
 .build();

 TrainingMaster tm = new ParameterAveragingTrainingMaster.Builder(1).build();

@@ -58,7 +58,7 @@ public class TestKryoWarning {
 try {

 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
-.addLayer("0", new OutputLayer.Builder().nIn(10).nOut(10).build(), "in").setOutputs("0")
+.addLayer("0", OutputLayer.builder().nIn(10).nOut(10).build(), "in").setOutputs("0")
 .build();

 TrainingMaster tm = new ParameterAveragingTrainingMaster.Builder(1).build();
@@ -53,9 +53,9 @@ public class TestCustomLayer extends BaseSparkTest {
 //Custom layers are tested more extensively in dl4j core
 NeuralNetConfiguration conf =
 NeuralNetConfiguration.builder().updater(new Sgd(0.1)).list()
-.layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
+.layer(0, DenseLayer.builder().nIn(10).nOut(10).build())
 .layer(1, new CustomLayer(3.14159)).layer(2,
-new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
+OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
 .nIn(10).nOut(10).build())
 .build();
@@ -79,8 +79,8 @@ public class TestSparkComputationGraph extends BaseSparkTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER)
 .graphBuilder().addInputs("in")
-.addLayer("l0", new DenseLayer.Builder().nIn(4).nOut(10).build(), "in")
-.addLayer("l1", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
+.addLayer("l0", DenseLayer.builder().nIn(4).nOut(10).build(), "in")
+.addLayer("l1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
 .activation(Activation.SOFTMAX).nIn(10).nOut(2).build(), "l0")
 .setOutputs("l1").build();

@@ -107,8 +107,8 @@ public class TestSparkComputationGraph extends BaseSparkTest {
 ComputationGraphConfiguration config = NeuralNetConfiguration.builder()
 .updater(new Sgd(0.1))
 .graphBuilder().addInputs("in")
-.addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(2).build(), "in").addLayer("out",
-new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3)
+.addLayer("dense", DenseLayer.builder().nIn(4).nOut(2).build(), "in").addLayer("out",
+OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3)
 .build(),
 "dense")
 .setOutputs("out").build();

@@ -141,9 +141,9 @@ public class TestSparkComputationGraph extends BaseSparkTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().l1(0.1).l2(0.1)
 .seed(123).updater(new Nesterovs(0.1, 0.9)).graphBuilder()
 .addInputs("in")
-.addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(nIn).nOut(3)
+.addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(nIn).nOut(3)
 .activation(Activation.TANH).build(), "in")
-.addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(3).nOut(nOut)
 .activation(Activation.SOFTMAX).build(),
 "0")

@@ -220,9 +220,9 @@ public class TestSparkComputationGraph extends BaseSparkTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345).updater(Updater.RMSPROP)
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-.addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(4)
+.addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(4).nOut(4)
 .activation(Activation.TANH).build(), "in")
-.addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(4).nOut(3).activation(Activation.SOFTMAX)
 .build(),
 "0")

@@ -421,8 +421,8 @@ public class TestSparkComputationGraph extends BaseSparkTest {
 .graphBuilder()
 .addInputs("input1", "input2")
 .addVertex("avg",new ElementWiseVertex(ElementWiseVertex.Op.Average),"input1","input2")
-.addLayer("dense",new DenseLayer.Builder().dropOut(0.9).nIn(featSize).nOut(featSize / 2).build(),"avg")
-.addLayer("output",new OutputLayer.Builder().nIn(featSize / 2).nOut(2).lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).hasBias(false).build(),"dense")
+.addLayer("dense",DenseLayer.builder().dropOut(0.9).nIn(featSize).nOut(featSize / 2).build(),"avg")
+.addLayer("output",OutputLayer.builder().nIn(featSize / 2).nOut(2).lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).hasBias(false).build(),"dense")
 .setOutputs("output")
 .build();
@@ -62,10 +62,10 @@ public class TestFrozenLayers extends BaseSparkTest {
 int nOut = 3;

 MultiLayerNetwork origModel = new MultiLayerNetwork(overallConf.clone().list()
-.layer(0, new DenseLayer.Builder().nIn(6).nOut(5).build())
-.layer(1, new DenseLayer.Builder().nIn(5).nOut(4).build())
-.layer(2, new DenseLayer.Builder().nIn(4).nOut(3).build())
-.layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.layer(0, DenseLayer.builder().nIn(6).nOut(5).build())
+.layer(1, DenseLayer.builder().nIn(5).nOut(4).build())
+.layer(2, DenseLayer.builder().nIn(4).nOut(3).build())
+.layer(3, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
 .build())
 .build());

@@ -138,10 +138,10 @@ public class TestFrozenLayers extends BaseSparkTest {
 ComputationGraph origModel = new ComputationGraph(NeuralNetConfiguration.builder().updater(new Sgd(0.1))
 .activation(Activation.TANH).graphBuilder().addInputs("in")
-.addLayer("0", new DenseLayer.Builder().nIn(6).nOut(5).build(), "in")
-.addLayer("1", new DenseLayer.Builder().nIn(5).nOut(4).build(), "0")
-.addLayer("2", new DenseLayer.Builder().nIn(4).nOut(3).build(), "1")
-.addLayer("3", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.addLayer("0", DenseLayer.builder().nIn(6).nOut(5).build(), "in")
+.addLayer("1", DenseLayer.builder().nIn(5).nOut(4).build(), "0")
+.addLayer("2", DenseLayer.builder().nIn(4).nOut(3).build(), "1")
+.addLayer("3", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
 .build(),
 "2")
@@ -58,8 +58,8 @@ public class TestMiscFunctions extends BaseSparkTest {
 public void testFeedForwardWithKey() {

 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).list()
-.layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
-.layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
+.layer(0, DenseLayer.builder().nIn(4).nOut(3).build())
+.layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
 .activation(Activation.SOFTMAX).build())
 .build();

@@ -109,9 +109,9 @@ public class TestMiscFunctions extends BaseSparkTest {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER)
 .list()
-.layer( new LSTM.Builder().nIn(4).nOut(3).build())
-.layer(new GlobalPoolingLayer(PoolingType.AVG))
-.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
+.layer( LSTM.builder().nIn(4).nOut(3).build())
+.layer(GlobalPoolingLayer.builder(PoolingType.AVG))
+.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
 .activation(Activation.SOFTMAX).build())
 .build();

@@ -164,9 +164,9 @@ public class TestMiscFunctions extends BaseSparkTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER)
 .graphBuilder().addInputs("in1", "in2")
-.addLayer("0", new DenseLayer.Builder().nIn(4).nOut(3).build(), "in1")
-.addLayer("1", new DenseLayer.Builder().nIn(4).nOut(3).build(), "in2").addLayer("2",
-new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(6).nOut(3)
+.addLayer("0", DenseLayer.builder().nIn(4).nOut(3).build(), "in1")
+.addLayer("1", DenseLayer.builder().nIn(4).nOut(3).build(), "in2").addLayer("2",
+OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(6).nOut(3)
 .activation(Activation.SOFTMAX).build(),
 "0", "1")
 .setOutputs("2").build();

@@ -221,7 +221,7 @@ public class TestMiscFunctions extends BaseSparkTest {
 int nIn = 10;

 NeuralNetConfiguration mlc = NeuralNetConfiguration.builder().list()
-.layer(0, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
+.layer(0, org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.builder()
 .reconstructionDistribution(
 new GaussianReconstructionDistribution(Activation.IDENTITY))
 .nIn(nIn).nOut(5).encoderLayerSizes(12).decoderLayerSizes(13).build())

@@ -261,7 +261,7 @@ public class TestMiscFunctions extends BaseSparkTest {
 NeuralNetConfiguration mlc = NeuralNetConfiguration.builder()
 .list().layer(0,
-new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
+org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.builder()
 .reconstructionDistribution(new LossFunctionWrapper(
 Activation.IDENTITY, new LossMSE()))
 .nIn(nIn).nOut(5).encoderLayerSizes(12).decoderLayerSizes(13)
@@ -111,9 +111,9 @@ public class TestSparkDl4jMultiLayer extends BaseSparkTest {
 .updater(new Adam(1e-3))
 .l2(1e-5)
 .list()
-.layer(0, new DenseLayer.Builder().nIn(28 * 28).nOut(500).build())
-.layer(1, new DenseLayer.Builder().nIn(500).nOut(100).build())
-.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
+.layer(0, DenseLayer.builder().nIn(28 * 28).nOut(500).build())
+.layer(1, DenseLayer.builder().nIn(500).nOut(100).build())
+.layer(2, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
 .activation(Activation.SOFTMAX).nIn(100).nOut(10).build())
 .build();
@@ -68,7 +68,7 @@ public class TestCompareParameterAveragingSparkVsSingleMachine {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .weightInit(WeightInit.XAVIER).updater(updater).seed(seed).list()
-.layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build()).layer(1, new OutputLayer.Builder()
+.layer(0, DenseLayer.builder().nIn(10).nOut(10).build()).layer(1, OutputLayer.builder()
 .lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(10).build())
 .build();
 return conf;

@@ -79,11 +79,11 @@ public class TestCompareParameterAveragingSparkVsSingleMachine {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .weightInit(WeightInit.XAVIER).updater(updater).seed(seed).list()
-.layer(0, new ConvolutionLayer.Builder().nOut(3).kernelSize(2, 2).stride(1, 1).padding(0, 0)
+.layer(0, ConvolutionLayer.builder().nOut(3).kernelSize(2, 2).stride(1, 1).padding(0, 0)
 .activation(Activation.TANH).build())
-.layer(1, new ConvolutionLayer.Builder().nOut(3).kernelSize(2, 2).stride(1, 1).padding(0, 0)
+.layer(1, ConvolutionLayer.builder().nOut(3).kernelSize(2, 2).stride(1, 1).padding(0, 0)
 .activation(Activation.TANH).build())
-.layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(10)
+.layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(10)
 .build())
 .inputType(InputType.convolutional(10, 10, 3)).build();
 return conf;

@@ -95,8 +95,8 @@ public class TestCompareParameterAveragingSparkVsSingleMachine {
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .weightInit(WeightInit.XAVIER).updater(updater).seed(seed).graphBuilder()
 .addInputs("in")
-.addLayer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in").addLayer("1",
-new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10)
+.addLayer("0", DenseLayer.builder().nIn(10).nOut(10).build(), "in").addLayer("1",
+OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10)
 .nOut(10).build(),
 "0")
 .setOutputs("1").build();

@@ -109,11 +109,11 @@ public class TestCompareParameterAveragingSparkVsSingleMachine {
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .weightInit(WeightInit.XAVIER).updater(updater).seed(seed).graphBuilder()
 .addInputs("in")
-.addLayer("0", new ConvolutionLayer.Builder().nOut(3).kernelSize(2, 2).stride(1, 1)
+.addLayer("0", ConvolutionLayer.builder().nOut(3).kernelSize(2, 2).stride(1, 1)
 .padding(0, 0).activation(Activation.TANH).build(), "in")
-.addLayer("1", new ConvolutionLayer.Builder().nOut(3).kernelSize(2, 2).stride(1, 1)
+.addLayer("1", ConvolutionLayer.builder().nOut(3).kernelSize(2, 2).stride(1, 1)
 .padding(0, 0).activation(Activation.TANH).build(), "0")
-.addLayer("2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(10)
+.addLayer("2", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(10)
 .build(), "1")
 .setOutputs("2").setInputTypes(InputType.convolutional(10, 10, 3))
 .build();
@@ -129,9 +129,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 DataSet d = new IrisDataSetIterator(150, 150).next();
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-.layer(0, new DenseLayer.Builder().nIn(4).nOut(100).weightInit(WeightInit.XAVIER)
+.layer(0, DenseLayer.builder().nIn(4).nOut(100).weightInit(WeightInit.XAVIER)
 .activation(Activation.RELU).build())
-.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(100).nOut(3)
 .activation(Activation.SOFTMAX).weightInit(WeightInit.XAVIER)
 .build())

@@ -167,9 +167,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 .updater(new Adam(1e-6))
 .weightInit(WeightInit.XAVIER)
 .list()
-.layer(new BatchNormalization.Builder().nIn(4).nOut(4).build())
-.layer(new DenseLayer.Builder().nIn(4).nOut(32).activation(Activation.RELU).build())
-.layer(new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(32).nOut(3)
+.layer(BatchNormalization.builder().nIn(4).nOut(4).build())
+.layer(DenseLayer.builder().nIn(4).nOut(32).activation(Activation.RELU).build())
+.layer(org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(32).nOut(3)
 .activation(Activation.SOFTMAX).build())
 .build();

@@ -277,9 +277,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 }
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-.layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(nIn).nOut(3)
+.layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(nIn).nOut(3)
 .activation(Activation.TANH).build())
-.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder(
 LossFunctions.LossFunction.MSE).nIn(3).nOut(nOut).activation(Activation.SOFTMAX)
 .build())
 .build();

@@ -302,9 +302,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().l1(0.1).l2(0.1)
 .seed(123).updater(new Nesterovs(0.1, 0.9)).list()
-.layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(nIn).nOut(3)
+.layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(nIn).nOut(3)
 .activation(Activation.TANH).build())
-.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(3).nOut(nOut)
 .activation(Activation.SOFTMAX).build())
 .build();

@@ -391,9 +391,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-.layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+.layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
 .activation(Activation.TANH).build())
-.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
 .activation(Activation.SOFTMAX).build())
 .build();

@@ -455,9 +455,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-.layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+.layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
 .activation(Activation.TANH).build())
-.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
 .activation(Activation.SOFTMAX).build())
 .build();

@@ -525,9 +525,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-.layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+.layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
 .activation(Activation.TANH).build())
-.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
 .activation(Activation.SOFTMAX).build())
 .build();

@@ -614,9 +614,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .graphBuilder().addInputs("in")
-.addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+.addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
 .activation(Activation.TANH).build(), "in")
-.addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
 .activation(Activation.SOFTMAX).build(),
 "0")

@@ -687,9 +687,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345).updater(new RmsProp())
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .weightInit(WeightInit.XAVIER).list()
-.layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(4)
+.layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(4).nOut(4)
 .activation(Activation.TANH).build())
-.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(4).nOut(3).activation(Activation.SOFTMAX)
 .build())
 .build();

@@ -771,9 +771,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-.layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+.layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
 .activation(Activation.TANH).build())
-.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
 .activation(Activation.SOFTMAX).build())
 .build();

@@ -822,9 +822,9 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
 .graphBuilder().addInputs("in")
-.addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50)
+.addLayer("0", org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(50)
 .activation(Activation.TANH).build(), "in")
-.addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.addLayer("1", org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10)
 .activation(Activation.SOFTMAX).build(),
 "0")

@@ -862,7 +862,7 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 Nd4j.getRandom().setSeed(12345);
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345).updater(new RmsProp())
 .weightInit(WeightInit.XAVIER).list()
-.layer(0, new VariationalAutoencoder.Builder().nIn(8).nOut(10).encoderLayerSizes(12)
+.layer(0, VariationalAutoencoder.builder().nIn(8).nOut(10).encoderLayerSizes(12)
 .decoderLayerSizes(13).reconstructionDistribution(
 new GaussianReconstructionDistribution(Activation.IDENTITY))
 .build())

@@ -898,7 +898,7 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 Nd4j.getRandom().setSeed(12345);
 ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().seed(12345).updater(new RmsProp())
 .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
-.addLayer("0", new VariationalAutoencoder.Builder().nIn(8).nOut(10).encoderLayerSizes(12)
+.addLayer("0", VariationalAutoencoder.builder().nIn(8).nOut(10).encoderLayerSizes(12)
 .decoderLayerSizes(13).reconstructionDistribution(
 new GaussianReconstructionDistribution(Activation.IDENTITY))
 .build(), "in")

@@ -938,8 +938,8 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 NeuralNetConfiguration conf =
 NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).list()
-.layer(0, new DenseLayer.Builder().nIn(nIn).nOut(layerSize).build())
-.layer(1, new OutputLayer.Builder().nIn(layerSize).nOut(nOut)
+.layer(0, DenseLayer.builder().nIn(nIn).nOut(layerSize).build())
+.layer(1, OutputLayer.builder().nIn(layerSize).nOut(nOut)
 .activation(Activation.SOFTMAX).lossFunction(
 LossFunctions.LossFunction.MCXENT)
 .build())

@@ -993,8 +993,8 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 NeuralNetConfiguration conf =
 NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).list()
-.layer(0, new DenseLayer.Builder().nIn(nIn).nOut(layerSize).build())
-.layer(1, new OutputLayer.Builder().nIn(layerSize).nOut(nOut)
+.layer(0, DenseLayer.builder().nIn(nIn).nOut(layerSize).build())
+.layer(1, OutputLayer.builder().nIn(layerSize).nOut(nOut)
 .activation(Activation.SOFTMAX).lossFunction(
 LossFunctions.LossFunction.MCXENT)
 .build())

@@ -1047,13 +1047,13 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 }
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
 .list()
-.layer(new OutputLayer.Builder().nIn(4).nOut(3).build())
+.layer(OutputLayer.builder().nIn(4).nOut(3).build())
 .build();

 ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder()
 .graphBuilder()
 .addInputs("in")
-.addLayer("out", new OutputLayer.Builder().nIn(4).nOut(3).build(), "in")
+.addLayer("out", OutputLayer.builder().nIn(4).nOut(3).build(), "in")
 .setOutputs("out")
 .build();
@@ -69,8 +69,8 @@ public class TestTrainingStatsCollection extends BaseSparkTest {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-.layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
-.layer(1, new OutputLayer.Builder().nIn(10).nOut(10).build())
+.layer(0, DenseLayer.builder().nIn(10).nOut(10).build())
+.layer(1, OutputLayer.builder().nIn(10).nOut(10).build())
 .build();

 int miniBatchSizePerWorker = 10;
@@ -62,9 +62,9 @@ public class TestListeners extends BaseSparkTest {
 NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
 .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
-.layer(0, new DenseLayer.Builder().nIn(4).nOut(100).weightInit(WeightInit.XAVIER)
+.layer(0, DenseLayer.builder().nIn(4).nOut(100).weightInit(WeightInit.XAVIER)
 .activation(Activation.RELU).build())
-.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
+.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
 LossFunctions.LossFunction.MCXENT).nIn(100).nOut(3)
 .activation(Activation.SOFTMAX).weightInit(WeightInit.XAVIER)
 .build())
@@ -65,27 +65,27 @@ public class ActorCriticFactoryCompGraphStdConv implements ActorCriticFactoryCom
 .weightInit(WeightInit.XAVIER)
 .l2(conf.getL2()).graphBuilder()
 .addInputs("input").addLayer("0",
-new ConvolutionLayer.Builder(8, 8).nIn(shapeInputs[0]).nOut(16)
+ConvolutionLayer.builder(8, 8).nIn(shapeInputs[0]).nOut(16)
 .stride(4, 4).activation(Activation.RELU).build(),
 "input");

-confB.addLayer("1", new ConvolutionLayer.Builder(4, 4).nIn(16).nOut(32).stride(2, 2).activation(Activation.RELU).build(), "0");
+confB.addLayer("1", ConvolutionLayer.builder(4, 4).nIn(16).nOut(32).stride(2, 2).activation(Activation.RELU).build(), "0");

-confB.addLayer("2", new DenseLayer.Builder().nIn(w * h * 32).nOut(256).activation(Activation.RELU).build(), "1");
+confB.addLayer("2", DenseLayer.builder().nIn(w * h * 32).nOut(256).activation(Activation.RELU).build(), "1");

 if (conf.isUseLSTM()) {
-confB.addLayer("3", new LSTM.Builder().nIn(256).nOut(256).activation(Activation.TANH).build(), "2");
+confB.addLayer("3", LSTM.builder().nIn(256).nOut(256).activation(Activation.TANH).build(), "2");

-confB.addLayer("value", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
+confB.addLayer("value", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
 .nIn(256).nOut(1).build(), "3");

-confB.addLayer("softmax", new RnnOutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
+confB.addLayer("softmax", RnnOutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
 .nIn(256).nOut(numOutputs).build(), "3");
 } else {
-confB.addLayer("value", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
+confB.addLayer("value", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
 .nIn(256).nOut(1).build(), "2");

-confB.addLayer("softmax", new OutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
+confB.addLayer("softmax", OutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
 .nIn(256).nOut(numOutputs).build(), "2");
 }
@@ -56,31 +56,31 @@ public class ActorCriticFactoryCompGraphStdDense implements ActorCriticFactoryCo
 .l2(conf.getL2()).graphBuilder()
 .setInputTypes(conf.isUseLSTM() ? InputType.recurrent(nIn)
 : InputType.feedForward(nIn)).addInputs("input")
-.addLayer("0", new DenseLayer.Builder().nIn(nIn)
+.addLayer("0", DenseLayer.builder().nIn(nIn)
 .nOut(conf.getNumHiddenNodes()).activation(Activation.RELU).build(),
 "input");

 for (int i = 1; i < conf.getNumLayers(); i++) {
-confB.addLayer(i + "", new DenseLayer.Builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
+confB.addLayer(i + "", DenseLayer.builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
 .activation(Activation.RELU).build(), (i - 1) + "");
 }

 if (conf.isUseLSTM()) {
-confB.addLayer(getConf().getNumLayers() + "", new LSTM.Builder().activation(Activation.TANH)
+confB.addLayer(getConf().getNumLayers() + "", LSTM.builder().activation(Activation.TANH)
 .nOut(conf.getNumHiddenNodes()).build(), (getConf().getNumLayers() - 1) + "");

-confB.addLayer("value", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
+confB.addLayer("value", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
 .nOut(1).build(), getConf().getNumLayers() + "");

-confB.addLayer("softmax", new RnnOutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
+confB.addLayer("softmax", RnnOutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
 .nOut(numOutputs).build(), getConf().getNumLayers() + "");
 } else {
-confB.addLayer("value", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
+confB.addLayer("value", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
 .nOut(1).build(), (getConf().getNumLayers() - 1) + "");

-confB.addLayer("softmax", new OutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
+confB.addLayer("softmax", OutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
 .nOut(numOutputs).build(), (getConf().getNumLayers() - 1) + "");
 }
@@ -61,22 +61,22 @@ public class ActorCriticFactorySeparateStdDense implements ActorCriticFactorySep
 .updater(conf.getUpdater() != null ? conf.getUpdater() : new Adam())
 .weightInit(WeightInit.XAVIER)
 .l2(conf.getL2())
-.list().layer(0, new DenseLayer.Builder().nIn(nIn).nOut(conf.getNumHiddenNodes())
+.list().layer(0, DenseLayer.builder().nIn(nIn).nOut(conf.getNumHiddenNodes())
 .activation(Activation.RELU).build());

 for (int i = 1; i < conf.getNumLayers(); i++) {
-confB.layer(i, new DenseLayer.Builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
+confB.layer(i, DenseLayer.builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
 .activation(Activation.RELU).build());
 }

 if (conf.isUseLSTM()) {
-confB.layer(conf.getNumLayers(), new LSTM.Builder().nOut(conf.getNumHiddenNodes()).activation(Activation.TANH).build());
+confB.layer(conf.getNumLayers(), LSTM.builder().nOut(conf.getNumHiddenNodes()).activation(Activation.TANH).build());

-confB.layer(conf.getNumLayers() + 1, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
+confB.layer(conf.getNumLayers() + 1, RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
 .nIn(conf.getNumHiddenNodes()).nOut(1).build());
 } else {
-confB.layer(conf.getNumLayers(), new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
+confB.layer(conf.getNumLayers(), OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
 .nIn(conf.getNumHiddenNodes()).nOut(1).build());
 }

@@ -96,22 +96,22 @@ public class ActorCriticFactorySeparateStdDense implements ActorCriticFactorySep
 .weightInit(WeightInit.XAVIER)
 //.regularization(true)
 //.l2(conf.getL2())
-.list().layer(0, new DenseLayer.Builder().nIn(nIn).nOut(conf.getNumHiddenNodes())
+.list().layer(0, DenseLayer.builder().nIn(nIn).nOut(conf.getNumHiddenNodes())
 .activation(Activation.RELU).build());

 for (int i = 1; i < conf.getNumLayers(); i++) {
-confB2.layer(i, new DenseLayer.Builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
+confB2.layer(i, DenseLayer.builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
 .activation(Activation.RELU).build());
 }

 if (conf.isUseLSTM()) {
-confB2.layer(conf.getNumLayers(), new LSTM.Builder().nOut(conf.getNumHiddenNodes()).activation(Activation.TANH).build());
+confB2.layer(conf.getNumLayers(), LSTM.builder().nOut(conf.getNumHiddenNodes()).activation(Activation.TANH).build());

-confB2.layer(conf.getNumLayers() + 1, new RnnOutputLayer.Builder(new ActorCriticLoss())
+confB2.layer(conf.getNumLayers() + 1, RnnOutputLayer.builder(new ActorCriticLoss())
 .activation(Activation.SOFTMAX).nIn(conf.getNumHiddenNodes()).nOut(numOutputs).build());
 } else {
-confB2.layer(conf.getNumLayers(), new OutputLayer.Builder(new ActorCriticLoss())
+confB2.layer(conf.getNumLayers(), OutputLayer.builder(new ActorCriticLoss())
 .activation(Activation.SOFTMAX).nIn(conf.getNumHiddenNodes()).nOut(numOutputs).build());
 }
@@ -60,15 +60,15 @@ public class DQNFactoryStdConv implements DQNFactory {
  .l2(conf.getL2())
  .updater(conf.getUpdater() != null ? conf.getUpdater() : new Adam())
  .weightInit(WeightInit.XAVIER).l2(conf.getL2()).list()
- .layer(0, new ConvolutionLayer.Builder(8, 8).nIn(shapeInputs[0]).nOut(16).stride(4, 4)
+ .layer(0, ConvolutionLayer.builder(8, 8).nIn(shapeInputs[0]).nOut(16).stride(4, 4)
  .activation(Activation.RELU).build());

- confB.layer(1, new ConvolutionLayer.Builder(4, 4).nOut(32).stride(2, 2).activation(Activation.RELU).build());
+ confB.layer(1, ConvolutionLayer.builder(4, 4).nOut(32).stride(2, 2).activation(Activation.RELU).build());

- confB.layer(2, new DenseLayer.Builder().nOut(256).activation(Activation.RELU).build());
+ confB.layer(2, DenseLayer.builder().nOut(256).activation(Activation.RELU).build());

- confB.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(numOutputs)
+ confB.layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(numOutputs)
  .build());

  confB.inputType(InputType.convolutional(shapeInputs[1], shapeInputs[2], shapeInputs[0]));
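For the convolutional factory the kernel size still travels as arguments of the static method, i.e. ConvolutionLayer.builder(8, 8) mirrors the old new ConvolutionLayer.Builder(8, 8). A sketch of such a configuration under the same assumptions (the 84x84x4 input shape and the six outputs are placeholders, not values from the factory above):

    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.inputs.InputType;
    import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
    import org.deeplearning4j.nn.conf.layers.DenseLayer;
    import org.deeplearning4j.nn.conf.layers.OutputLayer;
    import org.deeplearning4j.nn.weights.WeightInit;
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.learning.config.Adam;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    class ConvDqnSketch {
        static NeuralNetConfiguration build() {
            return NeuralNetConfiguration.builder()
                    .updater(new Adam())
                    .weightInit(WeightInit.XAVIER)
                    // placeholder shapes: 4 input channels, 6 Q-values out
                    .layer(0, ConvolutionLayer.builder(8, 8).nIn(4).nOut(16).stride(4, 4)
                            .activation(Activation.RELU).build())
                    .layer(1, ConvolutionLayer.builder(4, 4).nOut(32).stride(2, 2)
                            .activation(Activation.RELU).build())
                    .layer(2, DenseLayer.builder().nOut(256).activation(Activation.RELU).build())
                    .layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
                            .activation(Activation.IDENTITY).nOut(6).build())
                    .inputType(InputType.convolutional(84, 84, 4)) // height, width, channels
                    .build();
        }
    }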
@@ -61,7 +61,7 @@ public class DQNFactoryStdDense implements DQNFactory {
  .l2(conf.getL2())
  .list()
  .layer(0,
- new DenseLayer.Builder()
+ DenseLayer.builder()
  .nIn(nIn)
  .nOut(conf.getNumHiddenNodes())
  .activation(Activation.RELU).build()

@@ -69,12 +69,12 @@ public class DQNFactoryStdDense implements DQNFactory {

  for (int i = 1; i < conf.getNumLayers(); i++) {
- confB.layer(i, new DenseLayer.Builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
+ confB.layer(i, DenseLayer.builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
  .activation(Activation.RELU).build());
  }

  confB.layer(conf.getNumLayers(),
- new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
+ OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
  .activation(Activation.IDENTITY)
  .nIn(conf.getNumHiddenNodes())
  .nOut(numOutputs)
@@ -141,16 +141,16 @@ public class NStepRnn {
  .graphBuilder()
  .addInputs("input")
  .setInputTypes(InputType.recurrent(NUM_INPUTS))
- .addLayer("lstm", new LSTM.Builder().nOut(lstmLayerSize).activation(Activation.TANH).build(), "input")
- .addLayer("dl", new DenseLayer.Builder().nOut(dl1Size).activation(Activation.RELU).build(), "input", "lstm")
- .addLayer("dl-1", new DenseLayer.Builder().nOut(dl2Size).activation(Activation.RELU).build(), "dl")
+ .addLayer("lstm", LSTM.builder().nOut(lstmLayerSize).activation(Activation.TANH).build(), "input")
+ .addLayer("dl", DenseLayer.builder().nOut(dl1Size).activation(Activation.RELU).build(), "input", "lstm")
+ .addLayer("dl-1", DenseLayer.builder().nOut(dl2Size).activation(Activation.RELU).build(), "dl")
  .addVertex("dl-rnn", new PreprocessorVertex(new FeedForwardToRnnPreProcessor()), "dl-1");
  }

  private static ITrainableNeuralNet buildActorCriticNetwork() {
  ComputationGraphConfiguration valueConfiguration = buildBaseNetworkConfiguration(COMBINED_LSTM_LAYER_SIZE, COMBINED_DL1_LAYER_SIZE, COMBINED_DL2_LAYER_SIZE)
- .addLayer("value", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(1).build(), "dl-rnn", "lstm")
- .addLayer("softmax", new RnnOutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX).nOut(NUM_ACTIONS).build(), "dl-rnn", "lstm")
+ .addLayer("value", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(1).build(), "dl-rnn", "lstm")
+ .addLayer("softmax", RnnOutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX).nOut(NUM_ACTIONS).build(), "dl-rnn", "lstm")
  .setOutputs("value", "softmax")
  .build();

@@ -164,12 +164,12 @@ public class NStepRnn {

  private static ITrainableNeuralNet buildSeparateActorCriticNetwork() {
  ComputationGraphConfiguration valueConfiguration = buildBaseNetworkConfiguration(SEPARATE_LSTM_LAYER_SIZE, SEPARATE_DL1_LAYER_SIZE, SEPARATE_DL2_LAYER_SIZE)
- .addLayer("value", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(1).build(), "dl-rnn", "lstm")
+ .addLayer("value", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(1).build(), "dl-rnn", "lstm")
  .setOutputs("value")
  .build();

  ComputationGraphConfiguration policyConfiguration = buildBaseNetworkConfiguration(SEPARATE_LSTM_LAYER_SIZE, SEPARATE_DL1_LAYER_SIZE, SEPARATE_DL2_LAYER_SIZE)
- .addLayer("softmax", new RnnOutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX).nOut(NUM_ACTIONS).build(), "dl-rnn", "lstm")
+ .addLayer("softmax", RnnOutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX).nOut(NUM_ACTIONS).build(), "dl-rnn", "lstm")
  .setOutputs("softmax")
  .build();
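The same substitution applies to ComputationGraph configurations: graph layers are supplied via LSTM.builder() and RnnOutputLayer.builder(), with RnnOutputLayer.builder(new ActorCriticLoss()) kept as an overload that takes a loss object directly. A compact sketch of a recurrent two-headed graph in the new style (a sketch only; layer names and sizes are illustrative, and a standard MCXENT loss stands in for the RL4J ActorCriticLoss used above):

    import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.inputs.InputType;
    import org.deeplearning4j.nn.conf.layers.LSTM;
    import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    class RnnGraphSketch {
        static ComputationGraphConfiguration build(int numInputs, int numActions) {
            return NeuralNetConfiguration.builder()
                    .graphBuilder()
                    .addInputs("input")
                    .setInputTypes(InputType.recurrent(numInputs))
                    .addLayer("lstm", LSTM.builder().nOut(32).activation(Activation.TANH).build(), "input")
                    // value head: regression output, loss set via lossFunction(...)
                    .addLayer("value", RnnOutputLayer.builder()
                            .lossFunction(LossFunctions.LossFunction.MSE)
                            .activation(Activation.IDENTITY).nOut(1).build(), "lstm")
                    // policy head: softmax over actions (MCXENT here only for illustration)
                    .addLayer("softmax", RnnOutputLayer.builder()
                            .lossFunction(LossFunctions.LossFunction.MCXENT)
                            .activation(Activation.SOFTMAX).nOut(numActions).build(), "lstm")
                    .setOutputs("value", "softmax")
                    .build();
        }
    }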
@@ -197,13 +197,13 @@ public class RobotLakeExample {
  InputType.feedForward(4)) // radar )
  .addInputs("tracker-in", "radar-in")

- .layer("dl_1", new DenseLayer.Builder().activation(Activation.RELU).nOut(40).build(), "tracker-in", "radar-in")
- .layer("dl_out", new DenseLayer.Builder().activation(Activation.RELU).nOut(40).build(), "dl_1");
+ .layer("dl_1", DenseLayer.builder().activation(Activation.RELU).nOut(40).build(), "tracker-in", "radar-in")
+ .layer("dl_out", DenseLayer.builder().activation(Activation.RELU).nOut(40).build(), "dl_1");
  }

  private static ITrainableNeuralNet buildQNetwork() {
  ComputationGraphConfiguration conf = buildBaseNetworkConfiguration()
- .addLayer("output", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
+ .addLayer("output", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
  .nOut(RobotLake.NUM_ACTIONS).build(), "dl_out")

  .setOutputs("output")

@@ -220,9 +220,9 @@ public class RobotLakeExample {

  private static ITrainableNeuralNet buildActorCriticNetwork() {
  ComputationGraphConfiguration conf = buildBaseNetworkConfiguration()
- .addLayer("value", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
+ .addLayer("value", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
  .nOut(1).build(), "dl_out")
- .addLayer("softmax", new OutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
+ .addLayer("softmax", OutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
  .nOut(RobotLake.NUM_ACTIONS).build(), "dl_out")
  .setOutputs("value", "softmax")
  .build();
@@ -181,18 +181,18 @@ public class TMazeExample {
  .graphBuilder()
  .setInputTypes(InputType.recurrent(NUM_INPUTS))
  .addInputs("input")
- .addLayer("goal", new LSTM.Builder()
+ .addLayer("goal", LSTM.builder()
  .nOut(40)
  .activation(Activation.TANH)
  .build(), "input")
- .addLayer("corridor", new DenseLayer.Builder().nOut(40).activation(Activation.RELU).build(), "input", "goal")
- .addLayer("corridor-1", new DenseLayer.Builder().nOut(20).activation(Activation.RELU).build(), "corridor")
+ .addLayer("corridor", DenseLayer.builder().nOut(40).activation(Activation.RELU).build(), "input", "goal")
+ .addLayer("corridor-1", DenseLayer.builder().nOut(20).activation(Activation.RELU).build(), "corridor")
  .addVertex("corridor-rnn", new PreprocessorVertex(new FeedForwardToRnnPreProcessor()), "corridor-1");
  }

  private static ITrainableNeuralNet buildQNetwork() {
  ComputationGraphConfiguration conf = buildBaseNetworkConfiguration()
- .addLayer("output", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
+ .addLayer("output", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
  .nOut(NUM_ACTIONS).build(), "goal", "corridor-rnn")

  .setOutputs("output")

@@ -207,9 +207,9 @@ public class TMazeExample {

  private static ITrainableNeuralNet buildActorCriticNetwork() {
  ComputationGraphConfiguration conf = buildBaseNetworkConfiguration()
- .addLayer("value", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
+ .addLayer("value", RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
  .nOut(1).build(), "goal", "corridor-rnn")
- .addLayer("softmax", new RnnOutputLayer.Builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
+ .addLayer("softmax", RnnOutputLayer.builder(new ActorCriticLoss()).activation(Activation.SOFTMAX)
  .nOut(NUM_ACTIONS).build(), "goal", "corridor-rnn")
  .setOutputs("value", "softmax")
  .build();
@@ -167,9 +167,9 @@ public class PolicyTest {
  @Test
  public void testACPolicy() throws Exception {
  ComputationGraph cg = new ComputationGraph(NeuralNetConfiguration.builder().seed(444).graphBuilder().addInputs("input")
- .addLayer("output", new OutputLayer.Builder().nOut(1).lossFunction(LossFunctions.LossFunction.XENT).activation(Activation.SIGMOID).build(), "input").setOutputs("output").build());
+ .addLayer("output", OutputLayer.builder().nOut(1).lossFunction(LossFunctions.LossFunction.XENT).activation(Activation.SIGMOID).build(), "input").setOutputs("output").build());
  MultiLayerNetwork mln = new MultiLayerNetwork(NeuralNetConfiguration.builder().seed(555).list()
- .layer(0, new OutputLayer.Builder().nOut(1).lossFunction(LossFunctions.LossFunction.XENT).activation(Activation.SIGMOID).build()).build());
+ .layer(0, OutputLayer.builder().nOut(1).lossFunction(LossFunctions.LossFunction.XENT).activation(Activation.SIGMOID).build()).build());

  ACPolicy policy = new ACPolicy(new DummyAC(mln), true, Nd4j.getRandom());
@@ -95,13 +95,13 @@ public class App {

  private static LayerConfiguration[] genLayers() {
  return new LayerConfiguration[] {
- new DenseLayer.Builder().nIn(INPUT).nOut(X_DIM*Y_DIM*CHANNELS).weightInit(WeightInit.NORMAL).build(),
- new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
- new DenseLayer.Builder().nIn(X_DIM*Y_DIM*CHANNELS).nOut(X_DIM*Y_DIM).build(),
- new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
- new DenseLayer.Builder().nIn(X_DIM*Y_DIM).nOut(X_DIM*Y_DIM).build(),
- new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
- new DenseLayer.Builder().nIn(X_DIM*Y_DIM).nOut(X_DIM*Y_DIM*CHANNELS).activation(Activation.TANH)
+ DenseLayer.builder().nIn(INPUT).nOut(X_DIM*Y_DIM*CHANNELS).weightInit(WeightInit.NORMAL).build(),
+ ActivationLayer.builder(Activation.LEAKYRELU).build(),
+ DenseLayer.builder().nIn(X_DIM*Y_DIM*CHANNELS).nOut(X_DIM*Y_DIM).build(),
+ ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
+ DenseLayer.builder().nIn(X_DIM*Y_DIM).nOut(X_DIM*Y_DIM).build(),
+ ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
+ DenseLayer.builder().nIn(X_DIM*Y_DIM).nOut(X_DIM*Y_DIM*CHANNELS).activation(Activation.TANH)
  .build()
  };
  }
@@ -131,19 +131,19 @@ public class App {

  private static LayerConfiguration[] disLayers() {
  return new LayerConfiguration[]{
- new DenseLayer.Builder().nOut(X_DIM*Y_DIM*CHANNELS*2).build(), //input is set by setInputType on the network
- new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
- new DropoutLayer.Builder(1 - 0.5).build(),
- new DenseLayer.Builder().nIn(X_DIM * Y_DIM*CHANNELS*2).nOut(X_DIM*Y_DIM*CHANNELS*4).build(), //HxBxC
- new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
- new DropoutLayer.Builder(1 - 0.5).build(),
- new DenseLayer.Builder().nIn(X_DIM*Y_DIM*CHANNELS*4).nOut(X_DIM*Y_DIM*CHANNELS).build(),
- new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
- new DropoutLayer.Builder(1 - 0.5).build(),
- new DenseLayer.Builder().nIn(X_DIM*Y_DIM*CHANNELS).nOut(X_DIM*Y_DIM).build(),
- new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
- new DropoutLayer.Builder(1 - 0.5).build(),
- new OutputLayer.Builder(LossFunction.XENT).nIn(X_DIM*Y_DIM).nOut(1).activation(Activation.SIGMOID).build()
+ DenseLayer.builder().nOut(X_DIM*Y_DIM*CHANNELS*2).build(), //input is set by setInputType on the network
+ ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
+ DropoutLayer.builder(1 - 0.5).build(),
+ DenseLayer.builder().nIn(X_DIM * Y_DIM*CHANNELS*2).nOut(X_DIM*Y_DIM*CHANNELS*4).build(), //HxBxC
+ ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
+ DropoutLayer.builder(1 - 0.5).build(),
+ DenseLayer.builder().nIn(X_DIM*Y_DIM*CHANNELS*4).nOut(X_DIM*Y_DIM*CHANNELS).build(),
+ ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
+ DropoutLayer.builder(1 - 0.5).build(),
+ DenseLayer.builder().nIn(X_DIM*Y_DIM*CHANNELS).nOut(X_DIM*Y_DIM).build(),
+ ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
+ DropoutLayer.builder(1 - 0.5).build(),
+ OutputLayer.builder().lossFunction(LossFunction.XENT).nIn(X_DIM*Y_DIM).nOut(1).activation(Activation.SIGMOID).build()
  };
  }
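Builders that used to take a constructor argument keep that argument on the static method: ActivationLayer.builder(new ActivationLReLU(0.2)) and DropoutLayer.builder(1 - 0.5) correspond one-to-one to the old new ...Builder(...) calls (the 1 - 0.5 expression in the hunk above suggests the argument is a retain probability). A small sketch of one such discriminator block in the new style; import paths and the LayerConfiguration array type are assumed from the hunk, not verified:

    import org.deeplearning4j.nn.conf.layers.ActivationLayer;
    import org.deeplearning4j.nn.conf.layers.DenseLayer;
    import org.deeplearning4j.nn.conf.layers.DropoutLayer;
    import org.deeplearning4j.nn.conf.layers.LayerConfiguration;
    import org.nd4j.linalg.activations.impl.ActivationLReLU;

    class DiscriminatorBlockSketch {
        // One dense -> leaky-ReLU -> dropout block of a GAN discriminator, new builder style.
        static LayerConfiguration[] block(int nIn, int nOut) {
            return new LayerConfiguration[] {
                    DenseLayer.builder().nIn(nIn).nOut(nOut).build(),
                    ActivationLayer.builder(new ActivationLReLU(0.2)).build(),
                    DropoutLayer.builder(1 - 0.5).build()
            };
        }
    }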
@@ -171,7 +171,7 @@ public class App {
  LayerConfiguration[] disLayers = Arrays.stream(disLayers())
  .map((layer) -> {
  if (layer instanceof DenseLayer || layer instanceof OutputLayer) {
- return new FrozenLayerWithBackprop(layer);
+ return FrozenLayerWithBackprop.builder(layer);
  } else {
  return layer;
  }
@@ -242,6 +242,7 @@ public class App {
  gan.addTrainingListeners(new ScoreToChartListener("gan"));
  //dis.setListeners(new ScoreToChartListener("dis"));

  System.out.println(gan.toString());
  gan.fit(Nd4j.rand(batchSize, CHANNELS, X_DIM, Y_DIM), Nd4j.zeros(batchSize, 1));

  //gan.fit(new DataSet(trainData.next().getFeatures(), Nd4j.zeros(batchSize, 1)));
@ -101,21 +101,21 @@ public class MnistDCGANExample {
|
|||
public static void main(String[] args) throws Exception {
|
||||
Supplier<MultiLayerNetwork> genSupplier = () -> {
|
||||
return new MultiLayerNetwork(NeuralNetConfiguration.builder()
|
||||
.layer(0, new DenseLayer.Builder().nIn(latentDim).nOut(width / 2 * height / 2 * 128)
|
||||
.layer(0, DenseLayer.builder().nIn(latentDim).nOut(width / 2 * height / 2 * 128)
|
||||
.activation(Activation.LEAKYRELU).weightInit(WeightInit.NORMAL).build())
|
||||
.layer(1, new Convolution2D.Builder().nIn(128).nOut(128).kernelSize(5, 5)
|
||||
.layer(1, Convolution2D.builder().nIn(128).nOut(128).kernelSize(5, 5)
|
||||
.convolutionMode(ConvolutionMode.Same).activation(Activation.LEAKYRELU).build())
|
||||
// Up-sampling to 28x28x256
|
||||
.layer(2, new Deconvolution2D.Builder().nIn(128).nOut(128).stride(2, 2)
|
||||
.layer(2, Deconvolution2D.builder().nIn(128).nOut(128).stride(2, 2)
|
||||
.kernelSize(5, 5).convolutionMode(ConvolutionMode.Same)
|
||||
.activation(Activation.LEAKYRELU).build())
|
||||
.layer(3, new Convolution2D.Builder().nIn(128).nOut(128).kernelSize(5, 5)
|
||||
.layer(3, Convolution2D.builder().nIn(128).nOut(128).kernelSize(5, 5)
|
||||
.convolutionMode(ConvolutionMode.Same).activation(Activation.LEAKYRELU).build())
|
||||
.layer(4, new Convolution2D.Builder().nIn(128).nOut(128).kernelSize(5, 5)
|
||||
.layer(4, Convolution2D.builder().nIn(128).nOut(128).kernelSize(5, 5)
|
||||
.convolutionMode(ConvolutionMode.Same).activation(Activation.LEAKYRELU).build())
|
||||
.layer(5, new Convolution2D.Builder().nIn(128).nOut(channels).kernelSize(7, 7)
|
||||
.layer(5, Convolution2D.builder().nIn(128).nOut(channels).kernelSize(7, 7)
|
||||
.convolutionMode(ConvolutionMode.Same).activation(Activation.LEAKYRELU).build())
|
||||
.layer(6, new ActivationLayer.Builder().activation(Activation.TANH).build())
|
||||
.layer(6, ActivationLayer.builder().activation(Activation.TANH).build())
|
||||
.inputPreProcessor(1,
|
||||
new FeedForwardToCnnPreProcessor(height / 2, width / 2, 128))
|
||||
.inputPreProcessor(6, new CnnToFeedForwardPreProcessor(height, width, channels))
|
||||
|
@ -129,17 +129,17 @@ public class MnistDCGANExample {
|
|||
//.gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
|
||||
//.gradientNormalizationThreshold(100.0)
|
||||
|
||||
.layer(0, new Convolution2D.Builder().nIn(channels).nOut(64).kernelSize(3, 3)
|
||||
.layer(0, Convolution2D.builder().nIn(channels).nOut(64).kernelSize(3, 3)
|
||||
.activation(Activation.LEAKYRELU).build())
|
||||
.layer(1, new Convolution2D.Builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2)
|
||||
.layer(1, Convolution2D.builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2)
|
||||
.activation(Activation.LEAKYRELU).build())
|
||||
.layer(2, new Convolution2D.Builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2)
|
||||
.layer(2, Convolution2D.builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2)
|
||||
.activation(Activation.LEAKYRELU).build())
|
||||
.layer(3, new Convolution2D.Builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2)
|
||||
.layer(3, Convolution2D.builder().nIn(64).nOut(64).kernelSize(3, 3).stride(2, 2)
|
||||
.activation(Activation.LEAKYRELU).build())
|
||||
.layer(4, new DropoutLayer.Builder().dropOut(0.5).build())
|
||||
.layer(5, new DenseLayer.Builder().nIn(64 * 2 * 2).nOut(1).activation(Activation.SIGMOID).build())
|
||||
.layer(6, new LossLayer.Builder().lossFunction(LossFunctions.LossFunction.XENT).build())
|
||||
.layer(4, DropoutLayer.builder().dropOut(0.5).build())
|
||||
.layer(5, DenseLayer.builder().nIn(64 * 2 * 2).nOut(1).activation(Activation.SIGMOID).build())
|
||||
.layer(6, LossLayer.builder().lossFunction(LossFunctions.LossFunction.XENT.getILossFunction()).build())
|
||||
.inputPreProcessor(0, new FeedForwardToCnnPreProcessor(height, width, channels))
|
||||
.inputPreProcessor(4, new CnnToFeedForwardPreProcessor(2, 2, 64))
|
||||
.inputType(InputType.convolutionalFlat(height, width, channels))
|
||||
|
|
|
@ -62,13 +62,13 @@ public class MnistSimpleGAN {
|
|||
.gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
|
||||
.gradientNormalizationThreshold(100)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(100).nOut(256).weightInit(WeightInit.NORMAL).build())
|
||||
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(new DenseLayer.Builder().nIn(256).nOut(512).build())
|
||||
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(new DenseLayer.Builder().nIn(512).nOut(1024).build())
|
||||
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(new DenseLayer.Builder().nIn(1024).nOut(784).activation(Activation.TANH).build())
|
||||
.layer(DenseLayer.builder().nIn(100).nOut(256).weightInit(WeightInit.NORMAL).build())
|
||||
.layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(DenseLayer.builder().nIn(256).nOut(512).build())
|
||||
.layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(DenseLayer.builder().nIn(512).nOut(1024).build())
|
||||
.layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(DenseLayer.builder().nIn(1024).nOut(784).activation(Activation.TANH).build())
|
||||
.build();
|
||||
return new MultiLayerNetwork(genConf);
|
||||
}
|
||||
|
@ -83,16 +83,16 @@ public class MnistSimpleGAN {
|
|||
.gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
|
||||
.gradientNormalizationThreshold(100)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(784).nOut(1024).updater(updater).build())
|
||||
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(new DropoutLayer.Builder(1 - 0.5).build())
|
||||
.layer(new DenseLayer.Builder().nIn(1024).nOut(512).updater(updater).build())
|
||||
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(new DropoutLayer.Builder(1 - 0.5).build())
|
||||
.layer(new DenseLayer.Builder().nIn(512).nOut(256).updater(updater).build())
|
||||
.layer(new ActivationLayer.Builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(new DropoutLayer.Builder(1 - 0.5).build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.XENT).nIn(256).nOut(1)
|
||||
.layer(DenseLayer.builder().nIn(784).nOut(1024).updater(updater).build())
|
||||
.layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(DropoutLayer.builder(1 - 0.5).build())
|
||||
.layer(DenseLayer.builder().nIn(1024).nOut(512).updater(updater).build())
|
||||
.layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(DropoutLayer.builder(1 - 0.5).build())
|
||||
.layer(DenseLayer.builder().nIn(512).nOut(256).updater(updater).build())
|
||||
.layer(ActivationLayer.builder(new ActivationLReLU(0.2)).build())
|
||||
.layer(DropoutLayer.builder(1 - 0.5).build())
|
||||
.layer(OutputLayer.builder(LossFunctions.LossFunction.XENT).nIn(256).nOut(1)
|
||||
.activation(Activation.SIGMOID).updater(updater).build())
|
||||
.build();
|
||||
|
||||
|
|
|
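The GAN discriminator hunk above also shows that the one-argument overload survives: OutputLayer.builder(LossFunctions.LossFunction.XENT) is accepted alongside the explicit .lossFunction(...) form, and per-layer settings such as .updater(updater) stay on the layer builder. A brief sketch under those assumptions (the Adam learning rate is a placeholder):

    import org.deeplearning4j.nn.conf.layers.OutputLayer;
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.learning.config.Adam;
    import org.nd4j.linalg.learning.config.IUpdater;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    class DiscriminatorOutputSketch {
        static OutputLayer output(int nIn) {
            IUpdater updater = new Adam(2e-4); // placeholder learning rate
            // Equivalent to the old: new OutputLayer.Builder(LossFunctions.LossFunction.XENT)...
            return OutputLayer.builder(LossFunctions.LossFunction.XENT)
                    .nIn(nIn).nOut(1)
                    .activation(Activation.SIGMOID)
                    .updater(updater)
                    .build();
        }
    }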
@ -288,12 +288,12 @@ public class BrianTest extends BaseSparkSessionTest {
|
|||
.seed(123)
|
||||
.updater(new Nesterovs(0.1, 0.9))
|
||||
|
||||
.layer(0, new DenseLayer.Builder().nIn(5).nOut(20).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, DenseLayer.builder().nIn(5).nOut(20).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU).l2(0.001).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(20).nOut(20).weightInit(WeightInit.XAVIER)
|
||||
.layer(1, DenseLayer.builder().nIn(20).nOut(20).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU).build())
|
||||
//.layer(2, new DenseLayerConfiguration.Builder().nIn(9).nOut(9).weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.XENT).nIn(20).nOut(4)
|
||||
.layer(2, OutputLayer.builder(LossFunctions.LossFunction.XENT).nIn(20).nOut(4)
|
||||
.weightInit(WeightInit.XAVIER).activation(Activation.SIGMOID).build())
|
||||
.build();
|
||||
|
||||
|
|
|
@ -298,10 +298,10 @@ public class BrianTest2 /*extends BaseDL4JTest*/ {
|
|||
.seed(123)
|
||||
.updater(new Nesterovs(0.1, 0.9))
|
||||
|
||||
.layer(0, new DenseLayer.Builder().nIn(5).nOut(20).weightInit(WeightInit.XAVIER).activation(Activation.RELU).l2(0.001).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(20).nOut(20).weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
|
||||
.layer(0, DenseLayer.builder().nIn(5).nOut(20).weightInit(WeightInit.XAVIER).activation(Activation.RELU).l2(0.001).build())
|
||||
.layer(1, DenseLayer.builder().nIn(20).nOut(20).weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
|
||||
//.layer(2, new DenseLayerConfiguration.Builder().nIn(9).nOut(9).weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.XENT).nIn(20).nOut(4).weightInit(WeightInit.XAVIER).activation(Activation.SIGMOID).build())
|
||||
.layer(2, OutputLayer.builder(LossFunctions.LossFunction.XENT).nIn(20).nOut(4).weightInit(WeightInit.XAVIER).activation(Activation.SIGMOID).build())
|
||||
.build();
|
||||
|
||||
//Define SparkNet
|
||||
|
|
|
@ -87,15 +87,15 @@ public class TestServer {
|
|||
.activation(Activation.RELU)
|
||||
.l2(0)
|
||||
|
||||
//.layer(0, new ConvolutionLayer.Builder().nIn(1).kernelSize(1, 5).stride(1,1).padding(0,2).nOut(1).name("1st Filter").updater(new Adam.Builder().learningRate(0.2).build()).build())
|
||||
//.layer(1, new ConvolutionLayer.Builder().nIn(1).kernelSize(1, 2).stride(1,2).padding(0,0).nOut(1).name("2nd Filter").updater(new Adam.Builder().learningRate(0.1).build()).build())
|
||||
//.layer(0, ConvolutionLayer.builder().nIn(1).kernelSize(1, 5).stride(1,1).padding(0,2).nOut(1).name("1st Filter").updater(new Adam.Builder().learningRate(0.2).build()).build())
|
||||
//.layer(1, ConvolutionLayer.builder().nIn(1).kernelSize(1, 2).stride(1,2).padding(0,0).nOut(1).name("2nd Filter").updater(new Adam.Builder().learningRate(0.1).build()).build())
|
||||
// .layer(1, new DenseLayerConfiguration.Builder().nIn(10).nOut(64).activation(Activation.RELU).build())
|
||||
.layer(0, new DenseLayer.Builder().nIn(10).nOut(100).activation(Activation.RELU).l2(0.003).build())
|
||||
.layer(1, new LSTM.Builder().nIn(100).nOut(100).activation(Activation.TANH).build())
|
||||
.layer(2, new LSTM.Builder().nIn(100).nOut(100).activation(Activation.TANH).build())
|
||||
.layer(3, new DenseLayer.Builder().nIn(100).nOut(16).activation(Activation.RELU).l2(0.001).build())
|
||||
.layer(0, DenseLayer.builder().nIn(10).nOut(100).activation(Activation.RELU).l2(0.003).build())
|
||||
.layer(1, LSTM.builder().nIn(100).nOut(100).activation(Activation.TANH).build())
|
||||
.layer(2, LSTM.builder().nIn(100).nOut(100).activation(Activation.TANH).build())
|
||||
.layer(3, DenseLayer.builder().nIn(100).nOut(16).activation(Activation.RELU).l2(0.001).build())
|
||||
|
||||
.layer(4, new OutputLayer.Builder().nIn(16).nOut(numClasses)
|
||||
.layer(4, OutputLayer.builder().nIn(16).nOut(numClasses)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(new LossMCXENT())
|
||||
.build()
|
||||
|
|
|
@ -127,15 +127,15 @@ public class TestServer2 {
|
|||
.activation(Activation.RELU)
|
||||
.l2(0)
|
||||
|
||||
//.layer(0, new ConvolutionLayer.Builder().nIn(1).kernelSize(1, 5).stride(1,1).padding(0,2).nOut(1).name("1st Filter").updater(new Adam.Builder().learningRate(0.2).build()).build())
|
||||
//.layer(1, new ConvolutionLayer.Builder().nIn(1).kernelSize(1, 2).stride(1,2).padding(0,0).nOut(1).name("2nd Filter").updater(new Adam.Builder().learningRate(0.1).build()).build())
|
||||
//.layer(0, ConvolutionLayer.builder().nIn(1).kernelSize(1, 5).stride(1,1).padding(0,2).nOut(1).name("1st Filter").updater(new Adam.Builder().learningRate(0.2).build()).build())
|
||||
//.layer(1, ConvolutionLayer.builder().nIn(1).kernelSize(1, 2).stride(1,2).padding(0,0).nOut(1).name("2nd Filter").updater(new Adam.Builder().learningRate(0.1).build()).build())
|
||||
// .layer(1, new DenseLayerConfiguration.Builder().nIn(10).nOut(64).activation(Activation.RELU).build())
|
||||
.layer(0, new DenseLayer.Builder().nIn(10).nOut(100).activation(Activation.RELU).l2(0.003).build())
|
||||
.layer(1, new LSTM.Builder().nIn(100).nOut(100).activation(Activation.TANH).build())
|
||||
.layer(2, new LSTM.Builder().nIn(100).nOut(100).activation(Activation.TANH).build())
|
||||
.layer(3, new DenseLayer.Builder().nIn(100).nOut(16).activation(Activation.RELU).l2(0.001).build())
|
||||
.layer(0, DenseLayer.builder().nIn(10).nOut(100).activation(Activation.RELU).l2(0.003).build())
|
||||
.layer(1, LSTM.builder().nIn(100).nOut(100).activation(Activation.TANH).build())
|
||||
.layer(2, LSTM.builder().nIn(100).nOut(100).activation(Activation.TANH).build())
|
||||
.layer(3, DenseLayer.builder().nIn(100).nOut(16).activation(Activation.RELU).l2(0.001).build())
|
||||
|
||||
.layer(4, new OutputLayer.Builder().nIn(16).nOut(numClasses)
|
||||
.layer(4, OutputLayer.builder().nIn(16).nOut(numClasses)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(new LossMCXENT())
|
||||
.build()
|
||||
|
|
|
@ -832,7 +832,7 @@ public class IntegrationTestRunner {
|
|||
if(m instanceof MultiLayerNetwork){
|
||||
paramPrefix = l.getIndex() + "_";
|
||||
} else {
|
||||
paramPrefix = l.getLayerConfiguration().getLayerName() + "_";
|
||||
paramPrefix = l.getLayerConfiguration().getName() + "_";
|
||||
}
|
||||
Map<String,INDArray> paramTable = l.getParamTable();
|
||||
for(Map.Entry<String,INDArray> e : paramTable.entrySet()){
|
||||
|
|
|
@ -88,11 +88,11 @@ public class CNN1DTestCases {
|
|||
.convolutionMode(ConvolutionMode.Same))
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.layer("0", new Convolution1DLayer.Builder().nOut(32).activation(Activation.TANH).kernelSize(3).stride(1).build(), "in")
|
||||
.layer("1", new Subsampling1DLayer.Builder().kernelSize(2).stride(1).poolingType(SubsamplingLayer.PoolingType.MAX).build(), "0")
|
||||
.layer("2", new Cropping1D(1), "1")
|
||||
.layer("3", new ZeroPadding1DLayer(1), "2")
|
||||
.layer("out", new RnnOutputLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).nOut(nOut).build(), "3")
|
||||
.layer("0", Convolution1DLayer.builder().nOut(32).activation(Activation.TANH).kernelSize(3).stride(1).build(), "in")
|
||||
.layer("1", Subsampling1DLayer.builder().kernelSize(2).stride(1).poolingType(SubsamplingLayer.PoolingType.MAX.toPoolingType()).build(), "0")
|
||||
.layer("2", Cropping1D.builder(1).build(), "1")
|
||||
.layer("3", ZeroPadding1DLayer.builder(1).build(), "2")
|
||||
.layer("out", RnnOutputLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).nOut(nOut).build(), "3")
|
||||
.setInputTypes(InputType.recurrent(nOut))
|
||||
.setOutputs("out")
|
||||
.build();
|
||||
|
|
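In the 1D CNN test case the vertex-style layers move to builders as well: Cropping1D.builder(1).build() replaces new Cropping1D(1), ZeroPadding1DLayer.builder(1).build() replaces the bare constructor, and the subsampling pooling type is apparently bridged with SubsamplingLayer.PoolingType.MAX.toPoolingType(). That conversion call is specific to this branch, so treat the following one-layer sketch as an assumption rather than settled API:

    import org.deeplearning4j.nn.conf.layers.Subsampling1DLayer;
    import org.deeplearning4j.nn.conf.layers.SubsamplingLayer;

    class Pooling1DSketch {
        static Subsampling1DLayer maxPool() {
            return Subsampling1DLayer.builder()
                    .kernelSize(2).stride(1)
                    // toPoolingType() assumed to map the layer-local enum to the shared PoolingType
                    .poolingType(SubsamplingLayer.PoolingType.MAX.toPoolingType())
                    .build();
        }
    }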
|
@ -105,30 +105,30 @@ public class CNN2DTestCases {
|
|||
.weightInit(WeightInit.XAVIER)
|
||||
.updater(new Nesterovs(0.01, 0.9))
|
||||
|
||||
.layer(0, new ConvolutionLayer.Builder(5, 5)
|
||||
.layer(0, ConvolutionLayer.builder(5, 5)
|
||||
//nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
|
||||
.nIn(nChannels)
|
||||
.stride(1, 1)
|
||||
.nOut(20)
|
||||
.activation(Activation.IDENTITY)
|
||||
.build())
|
||||
.layer(1, new SubsamplingLayer.Builder(PoolingType.MAX)
|
||||
.layer(1, SubsamplingLayer.builder(PoolingType.MAX)
|
||||
.kernelSize(2, 2)
|
||||
.stride(2, 2)
|
||||
.build())
|
||||
.layer(2, new ConvolutionLayer.Builder(5, 5)
|
||||
.layer(2, ConvolutionLayer.builder(5, 5)
|
||||
//Note that nIn need not be specified in later layers
|
||||
.stride(1, 1)
|
||||
.nOut(50)
|
||||
.activation(Activation.IDENTITY)
|
||||
.build())
|
||||
.layer(3, new SubsamplingLayer.Builder(PoolingType.MAX)
|
||||
.layer(3, SubsamplingLayer.builder(PoolingType.MAX)
|
||||
.kernelSize(2, 2)
|
||||
.stride(2, 2)
|
||||
.build())
|
||||
.layer(4, new DenseLayer.Builder().activation(Activation.RELU)
|
||||
.layer(4, DenseLayer.builder().activation(Activation.RELU)
|
||||
.nOut(500).build())
|
||||
.layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.layer(5, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.nOut(outputNum)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
@ -221,7 +221,7 @@ public class CNN2DTestCases {
|
|||
.seed(12345)
|
||||
.build())
|
||||
.removeVertexKeepConnections("predictions")
|
||||
.addLayer("predictions", new OutputLayer.Builder()
|
||||
.addLayer("predictions", OutputLayer.builder()
|
||||
.nIn(4096)
|
||||
.nOut(200) //Tiny imagenet
|
||||
.build(), "fc2")
|
||||
|
@ -321,7 +321,7 @@ public class CNN2DTestCases {
|
|||
.removeVertexKeepConnections("conv2d_9")
|
||||
.removeVertexAndConnections("outputs")
|
||||
.addLayer("convolution2d_9",
|
||||
new ConvolutionLayer.Builder(1,1)
|
||||
ConvolutionLayer.builder(1,1)
|
||||
.nIn(1024)
|
||||
.nOut(nBoxes * (5 + nClasses))
|
||||
.stride(1,1)
|
||||
|
@ -331,10 +331,10 @@ public class CNN2DTestCases {
|
|||
.build(),
|
||||
"leaky_re_lu_8")
|
||||
.addLayer("outputs",
|
||||
new Yolo2OutputLayer.Builder()
|
||||
Yolo2OutputLayer.builder()
|
||||
.lambdaNoObj(lambdaNoObj)
|
||||
.lambdaCoord(lambdaCoord)
|
||||
.boundingBoxPriors(priors)
|
||||
.boundingBoxes(priors)
|
||||
.build(),
|
||||
"convolution2d_9")
|
||||
.setOutputs("outputs")
|
||||
|
@ -417,32 +417,32 @@ public class CNN2DTestCases {
|
|||
.weightInit(WeightInit.XAVIER)
|
||||
.updater(new Nesterovs(0.01, 0.9))
|
||||
|
||||
.layer(0, new ConvolutionLayer.Builder(5, 5)
|
||||
.layer(0, ConvolutionLayer.builder(5, 5)
|
||||
//nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
|
||||
.nIn(1)
|
||||
.stride(1, 1)
|
||||
.nOut(20)
|
||||
.activation(Activation.IDENTITY)
|
||||
.build())
|
||||
.layer(1, new SubsamplingLayer.Builder(PoolingType.MAX)
|
||||
.layer(1, SubsamplingLayer.builder(PoolingType.MAX)
|
||||
.kernelSize(2, 2)
|
||||
.stride(2, 2)
|
||||
.build())
|
||||
.layer(2, new ConvolutionLayer.Builder(5, 5)
|
||||
.layer(2, ConvolutionLayer.builder(5, 5)
|
||||
//Note that nIn need not be specified in later layers
|
||||
.stride(1, 1)
|
||||
.nOut(50)
|
||||
.activation(Activation.IDENTITY)
|
||||
.dropOut(0.5) //**** Dropout on conv layer
|
||||
.build())
|
||||
.layer(3, new SubsamplingLayer.Builder(PoolingType.MAX)
|
||||
.layer(3, SubsamplingLayer.builder(PoolingType.MAX)
|
||||
.kernelSize(2, 2)
|
||||
.stride(2, 2)
|
||||
.build())
|
||||
.layer(4, new DenseLayer.Builder().activation(Activation.RELU)
|
||||
.layer(4, DenseLayer.builder().activation(Activation.RELU)
|
||||
.dropOut(0.5) //**** Dropout on dense layer
|
||||
.nOut(500).build())
|
||||
.layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.layer(5, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.nOut(10)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
|
|
@ -82,18 +82,18 @@ public class CNN3DTestCases {
|
|||
.updater(new Nesterovs(0.01, 0.9))
|
||||
.convolutionMode(ConvolutionMode.Same)
|
||||
|
||||
.layer(new Convolution3D.Builder(3,3,3)
|
||||
.layer(Convolution3D.builder(3,3,3)
|
||||
.dataFormat(Convolution3D.DataFormat.NCDHW)
|
||||
.nIn(nChannels)
|
||||
.stride(2, 2, 2)
|
||||
.nOut(8)
|
||||
.activation(Activation.IDENTITY)
|
||||
.build())
|
||||
.layer(new Subsampling3DLayer.Builder(PoolingType.MAX)
|
||||
.layer(Subsampling3DLayer.builder(PoolingType.MAX)
|
||||
.kernelSize(2, 2, 2)
|
||||
.stride(2, 2, 2)
|
||||
.build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.layer(OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.nOut(outputNum)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
|
|
@ -104,8 +104,8 @@ public class MLPTestCases {
|
|||
.build()))
|
||||
.l1(1e-3).l2(1e-3)
|
||||
|
||||
.layer(new DenseLayer.Builder().activation(Activation.TANH).nOut(64).build())
|
||||
.layer(new OutputLayer.Builder().nOut(10)
|
||||
.layer(DenseLayer.builder().activation(Activation.TANH).nOut(64).build())
|
||||
.layer(OutputLayer.builder().nOut(10)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
@ -202,11 +202,11 @@ public class MLPTestCases {
|
|||
.seed(seed)
|
||||
.updater(new Nesterovs(learningRate, 0.9))
|
||||
|
||||
.layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
|
||||
.layer(0, DenseLayer.builder().nIn(numInputs).nOut(numHiddenNodes)
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.layer(1, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.nIn(numHiddenNodes).nOut(numOutputs).build())
|
||||
|
|
|
@ -119,11 +119,11 @@ public class RNNTestCases {
|
|||
.weightInit(WeightInit.XAVIER)
|
||||
.updater(new Adam(1e-3))
|
||||
|
||||
.layer(0, new LSTM.Builder().nIn(iter.inputColumns()).nOut(lstmLayerSize)
|
||||
.layer(0, LSTM.builder().nIn(iter.inputColumns()).nOut(lstmLayerSize)
|
||||
.activation(Activation.TANH).build())
|
||||
.layer(1, new LSTM.Builder().nIn(lstmLayerSize).nOut(lstmLayerSize)
|
||||
.layer(1, LSTM.builder().nIn(lstmLayerSize).nOut(lstmLayerSize)
|
||||
.activation(Activation.TANH).build())
|
||||
.layer(2, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX) //MCXENT + softmax for classification
|
||||
.layer(2, RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX) //MCXENT + softmax for classification
|
||||
.nIn(lstmLayerSize).nOut(nOut).build())
|
||||
.backpropType(BackpropType.TruncatedBPTT).tbpttFwdLength(tbpttLength).tbpttBackLength(tbpttLength)
|
||||
|
||||
|
@ -201,9 +201,9 @@ public class RNNTestCases {
|
|||
.updater(new Adam(5e-2))
|
||||
.l1(1e-3).l2(1e-3)
|
||||
|
||||
.layer(0, new LSTM.Builder().activation(Activation.TANH).nOut(10).build())
|
||||
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(new OutputLayer.Builder().nOut(6)
|
||||
.layer(0, LSTM.builder().activation(Activation.TANH).nOut(10).build())
|
||||
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(OutputLayer.builder().nOut(6)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
@ -322,9 +322,9 @@ public class RNNTestCases {
|
|||
.updater(new Adam(5e-2))
|
||||
.l1(1e-3).l2(1e-3)
|
||||
|
||||
.layer(0, new Bidirectional(new LSTM.Builder().activation(Activation.TANH).nOut(10).build()))
|
||||
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(new OutputLayer.Builder().nOut(6)
|
||||
.layer(0, Bidirectional.builder(LSTM.builder().activation(Activation.TANH).nOut(10).build()).build())
|
||||
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(OutputLayer.builder().nOut(6)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
|
|
@ -79,7 +79,7 @@ public class UnsupervisedTestCases {
|
|||
.weightInit(WeightInit.XAVIER)
|
||||
.l2(1e-4)
|
||||
|
||||
.layer(0, new VariationalAutoencoder.Builder()
|
||||
.layer(0, VariationalAutoencoder.builder()
|
||||
.activation(Activation.TANH)
|
||||
.encoderLayerSizes(256, 256) //2 encoder layers, each of size 256
|
||||
.decoderLayerSizes(256, 256) //2 decoder layers, each of size 256
|
||||
|
|
|
@@ -71,7 +71,7 @@ dependencies {
  // api "com.fasterxml.jackson.module:jackson-module-scala_${scalaVersion}"

- api "org.projectlombok:lombok:1.18.24"
+ api "org.projectlombok:lombok:1.18.26"

  /*Logging*/
  api 'org.slf4j:slf4j-api:2.0.3'
@ -90,7 +90,7 @@ public abstract class NumericalColumnAnalysis implements ColumnAnalysis {
|
|||
public abstract double getMaxDouble();
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public abstract static class Builder<T extends Builder<T>> {
|
||||
public static abstract class Builder<T extends Builder<T>> {
|
||||
protected double mean;
|
||||
protected double sampleStdev;
|
||||
protected double sampleVariance;
|
||||
|
|
|
@ -970,7 +970,7 @@ public class InferenceSession extends AbstractSession<INDArray, Pair<SameDiffOp,
|
|||
}
|
||||
|
||||
@Data
|
||||
public abstract static class Dep {
|
||||
public static abstract class Dep {
|
||||
protected String frame;
|
||||
protected FrameIter parentFrame;
|
||||
}
|
||||
|
|
|
@ -22,9 +22,11 @@ package org.nd4j.linalg.activations;
|
|||
|
||||
import org.nd4j.autodiff.samediff.SDVariable;
|
||||
import org.nd4j.autodiff.samediff.SameDiff;
|
||||
import org.nd4j.common.primitives.Pair;
|
||||
import org.nd4j.linalg.activations.impl.*;
|
||||
import org.nd4j.linalg.api.ndarray.INDArray;
|
||||
|
||||
public enum Activation {
|
||||
public enum Activation implements IActivation {
|
||||
CUBE, ELU, HARDSIGMOID, HARDTANH, IDENTITY, LEAKYRELU, RATIONALTANH, RELU, RELU6,
|
||||
RRELU, SIGMOID, SOFTMAX, SOFTPLUS, SOFTSIGN, TANH, RECTIFIEDTANH, SELU, SWISH,
|
||||
THRESHOLDEDRELU, GELU, MISH;
|
||||
|
@ -149,4 +151,44 @@ public enum Activation {
|
|||
throw new UnsupportedOperationException("Activation function not yet supported: " + this);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Carry out activation function on the input array (usually known as 'preOut' or 'z')
|
||||
* Implementations must overwrite "in", transform in place and return "in"
|
||||
* Can support separate behaviour during test
|
||||
*
|
||||
* @param in input array.
|
||||
* @param training true when training.
|
||||
* @return transformed activation
|
||||
*/
|
||||
@Override
|
||||
public INDArray getActivation(INDArray in, boolean training) {
|
||||
return getActivationFunction().getActivation(in, training);
|
||||
}
|
||||
|
||||
/**
|
||||
* Backpropagate the errors through the activation function, given input z and epsilon dL/da.<br>
|
||||
* Returns 2 INDArrays:<br>
|
||||
* (a) The gradient dL/dz, calculated from dL/da, and<br>
|
||||
* (b) The parameter gradients dL/dW, where w is the weights in the activation function. For activation functions
|
||||
* with no gradients, this will be null.
|
||||
*
|
||||
* @param in Input, before applying the activation function (z, or 'preOut')
|
||||
* @param epsilon Gradient to be backpropagated: dL/da, where L is the loss function
|
||||
* @return dL/dz and dL/dW, for weights w (null if activation function has no weights)
|
||||
*/
|
||||
@Override
|
||||
public Pair<INDArray, INDArray> backprop(INDArray in, INDArray epsilon) {
|
||||
return getActivationFunction().backprop(in, epsilon);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param inputSize
|
||||
* @return
|
||||
*/
|
||||
@Override
|
||||
public int numParams(int inputSize) {
|
||||
return getActivationFunction().numParams(inputSize);
|
||||
}
|
||||
}
|
||||
|
|
|
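Alongside the builder change, the Activation enum now implements IActivation and simply delegates getActivation, backprop and numParams to getActivationFunction(), so an enum constant can be passed wherever an IActivation instance is expected. A minimal sketch of calling the delegating method directly, assuming the branch above:

    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.api.ndarray.INDArray;
    import org.nd4j.linalg.factory.Nd4j;

    class ActivationEnumSketch {
        static INDArray reluInPlace() {
            INDArray z = Nd4j.rand(2, 3);                  // pre-activations ('z')
            return Activation.RELU.getActivation(z, true); // transforms z in place and returns it
        }
    }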
@ -42,9 +42,9 @@ public class RandomTests extends BaseDL4JTest {
|
|||
|
||||
final NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new RmsProp())
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(10)
|
||||
.layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(28 * 28).nOut(10)
|
||||
.activation(Activation.TANH).build())
|
||||
.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
|
||||
.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder(
|
||||
LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10)
|
||||
.activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
|
|
|
@ -73,7 +73,7 @@ public class TestUtils {
|
|||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
//Also check the NeuralNetConfiguration is serializable (required by Spark etc)
|
||||
//Also check the NeuralNetConfiguration is serializable (required by Spark etc.)
|
||||
NeuralNetConfiguration conf = net.getNetConfiguration();
|
||||
serializeDeserializeJava(conf);
|
||||
|
||||
|
@ -317,14 +317,14 @@ public class TestUtils {
|
|||
for(Layer l : layers){
|
||||
//Don't use instanceof here - there are sub conv subclasses
|
||||
if(l.getClass() == ConvolutionLayer.class || l instanceof SubsamplingLayer || l instanceof BatchNormalization || l instanceof LSTM){
|
||||
Preconditions.checkNotNull(l.getHelper(), l.getLayerConfiguration().getLayerName());
|
||||
Preconditions.checkNotNull(l.getHelper(), l.getLayerConfiguration().getName());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static void assertHelpersAbsent(Layer[] layers) throws Exception {
|
||||
for(Layer l : layers){
|
||||
Preconditions.checkState(l.getHelper() == null, l.getLayerConfiguration().getLayerName());
|
||||
Preconditions.checkState(l.getHelper() == null, l.getLayerConfiguration().getName());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -473,9 +473,7 @@ public class RecordReaderDataSetiteratorTest extends BaseDL4JTest {
|
|||
|
||||
|
||||
public Pair<double[][],File> makeRandomCSV(String tempFile, int nLines, int nFeatures) throws IOException {
|
||||
File temp = temporaryFolder;
|
||||
temp.mkdirs();
|
||||
temp.deleteOnExit();
|
||||
File temp = new File(temporaryFolder, "makeRandomCSV.csv");
|
||||
Random rand = new Random(12345);
|
||||
|
||||
double[][] dArr = new double[nLines][nFeatures + 1];
|
||||
|
|
|
@ -774,7 +774,7 @@ public class RecordReaderMultiDataSetIteratorTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testExcludeStringColCSV() throws Exception {
|
||||
File csvFile = temporaryFolder;
|
||||
File csvFile = new File(temporaryFolder, "test.csv");
|
||||
|
||||
StringBuilder sb = new StringBuilder();
|
||||
for(int i=1; i<=10; i++ ){
|
||||
|
|
|
@ -41,7 +41,9 @@ import org.deeplearning4j.nn.weights.WeightInit;
|
|||
import org.deeplearning4j.optimize.listeners.CollectScoresIterationListener;
|
||||
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
|
||||
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.Timeout;
|
||||
import org.nd4j.linalg.activations.Activation;
|
||||
import org.nd4j.linalg.api.ndarray.INDArray;
|
||||
import org.nd4j.linalg.dataset.DataSet;
|
||||
|
@ -170,11 +172,11 @@ public class DataSetIteratorTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration.NeuralNetConfigurationBuilder builder = NeuralNetConfiguration.builder().seed(seed)
|
||||
.gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.layer(0, new ConvolutionLayer.Builder(5, 5).nIn(numChannels).nOut(6)
|
||||
.layer(0, ConvolutionLayer.builder(5, 5).nIn(numChannels).nOut(6)
|
||||
.weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
|
||||
.layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
|
||||
.layer(1, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
|
||||
.stride(1, 1).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.layer(2, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
.inputType(InputType.convolutionalFlat(numRows, numColumns, numChannels));
|
||||
|
@ -207,7 +209,8 @@ public class DataSetIteratorTest extends BaseDL4JTest {
|
|||
}
|
||||
|
||||
|
||||
@Test //@Ignore //Ignored for now - CIFAR iterator needs work - https://github.com/eclipse/deeplearning4j/issues/4673
|
||||
@Test @Timeout(1200) @Disabled("Runs quite some time.")
|
||||
//Ignored for now - CIFAR iterator needs work - https://github.com/eclipse/deeplearning4j/issues/4673
|
||||
public void testCifarModel() throws Exception {
|
||||
// Streaming
|
||||
runCifar(false);
|
||||
|
@ -230,11 +233,11 @@ public class DataSetIteratorTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration.NeuralNetConfigurationBuilder builder = NeuralNetConfiguration.builder().seed(seed)
|
||||
.gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.layer(0, new ConvolutionLayer.Builder(5, 5).nIn(channels).nOut(6).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, ConvolutionLayer.builder(5, 5).nIn(channels).nOut(6).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU).build())
|
||||
.layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
|
||||
.layer(1, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
|
||||
.build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.layer(2, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
||||
|
|
|
@ -76,10 +76,12 @@ public class TestFileIterators extends BaseDL4JTest {
|
|||
assertEquals(exp, act);
|
||||
|
||||
//Test multiple directories
|
||||
|
||||
File f2a = new File(folder2, "f2a");
|
||||
f2a.mkdirs();
|
||||
File f2b = new File(folder2, "f2b");
|
||||
f2b.mkdirs();
|
||||
File f2c = new File(folder2, "f2c");
|
||||
f2c.mkdirs();
|
||||
d1.save(new File(f2a, "d1.bin"));
|
||||
d2.save(new File(f2a, "d2.bin"));
|
||||
d3.save(new File(f2b, "d3.bin"));
|
||||
|
@ -188,8 +190,11 @@ public class TestFileIterators extends BaseDL4JTest {
|
|||
|
||||
//Test multiple directories
|
||||
File f2a = new File(folder2, "2-f2a");
|
||||
f2a.mkdirs();
|
||||
File f2b = new File(folder2, "2-f2b");
|
||||
f2b.mkdirs();
|
||||
File f2c = new File(folder2, "2-f2C");
|
||||
f2c.mkdirs();
|
||||
d1.save(new File(f2a, "d1.bin"));
|
||||
d2.save(new File(f2a, "d2.bin"));
|
||||
d3.save(new File(f2b, "d3.bin"));
|
||||
|
|
|
@ -135,8 +135,8 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.seed(12345)
|
||||
.updater(new Sgd(0.5)).weightInit(WeightInit.XAVIER)
|
||||
.layer(new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build())
|
||||
.layer(new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.layer(DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build())
|
||||
.layer(OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
|
@ -221,7 +221,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(0.01)).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.layer(0, OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
|
@ -250,7 +250,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.layer(0, OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
|
@ -300,7 +300,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(5.0)) //Intentionally huge LR
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.layer(0, new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
|
||||
.layer(0, OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@ -338,7 +338,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(1e-6)).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.layer(0, OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
|
@ -381,7 +381,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(0.0)).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
|
||||
.layer(0, OutputLayer.builder().nIn(4).nOut(3)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
|
@ -421,11 +421,11 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Nesterovs(0.0,0.9))
|
||||
.layer(0, new DenseLayer.Builder().nIn(1).nOut(20)
|
||||
.layer(0, DenseLayer.builder().nIn(1).nOut(20)
|
||||
.weightInit(WeightInit.XAVIER).activation(
|
||||
Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MSE).weightInit(WeightInit.XAVIER)
|
||||
.layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.IDENTITY).weightInit(WeightInit.XAVIER).nIn(20).nOut(1)
|
||||
.build())
|
||||
.build();
|
||||
|
@ -468,7 +468,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
.layer(0, OutputLayer.builder().nIn(4).nOut(3)
.activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
.build();
@ -506,7 +506,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
.layer(0, OutputLayer.builder().nIn(4).nOut(3)
.activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
.build();
@ -570,8 +570,8 @@ public class TestEarlyStopping extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.layer(new DenseLayer.Builder().nIn(784).nOut(32).build())
.layer(new OutputLayer.Builder().nIn(32).nOut(784).activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.MSE).build())
.layer(DenseLayer.builder().nIn(784).nOut(32).build())
.layer(OutputLayer.builder().nIn(32).nOut(784).activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.MSE).build())
.build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -613,7 +613,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.layer(new AutoEncoder.Builder().nIn(784).nOut(32).build())
.layer(AutoEncoder.builder().nIn(784).nOut(32).build())
.build();
@ -656,7 +656,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.layer(new VariationalAutoencoder.Builder()
.layer(VariationalAutoencoder.builder()
.nIn(784).nOut(32)
.encoderLayerSizes(64)
.decoderLayerSizes(64)
@ -701,7 +701,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.layer(new VariationalAutoencoder.Builder()
.layer(VariationalAutoencoder.builder()
.nIn(784).nOut(32)
.encoderLayerSizes(64)
.decoderLayerSizes(64)
@ -748,8 +748,8 @@ public class TestEarlyStopping extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.layer(new DenseLayer.Builder().nIn(784).nOut(32).build())
.layer(new OutputLayer.Builder().nIn(32).nOut(10).activation(Activation.SOFTMAX).build())
.layer(DenseLayer.builder().nIn(784).nOut(32).build())
.layer(OutputLayer.builder().nIn(32).nOut(10).activation(Activation.SOFTMAX).build())
.build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -785,7 +785,7 @@ public class TestEarlyStopping extends BaseDL4JTest {
public void testEarlyStoppingListeners() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
.layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
.layer(0, OutputLayer.builder().nIn(4).nOut(3)
.activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
.build();
@ -868,14 +868,14 @@ public class TestEarlyStopping extends BaseDL4JTest {
.ClipElementWiseAbsoluteValue)
.gradientNormalizationThreshold(1.0)
.layer(0, new LSTM.Builder()
.layer(0, LSTM.builder()
.nIn(10)
.nOut(10)
.activation(Activation.TANH)
.gateActivationFunction(Activation.SIGMOID)
.gateActivationFunction(Activation.SIGMOID.getActivationFunction())
.dropOut(0.5)
.build())
.layer(1, new RnnOutputLayer.Builder()
.layer(1, RnnOutputLayer.builder()
.nIn(10)
.nOut(outputs)
.activation(Activation.SOFTMAX)
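The hunks above establish the pattern that repeats through the rest of this commit: every inner new <Layer>.Builder() constructor becomes the static <Layer>.builder() factory generated by @SuperBuilder, while the fluent setters (nIn, nOut, activation, lossFunction, and so on) keep their names. A minimal sketch of the migrated style, assembled only from calls that appear in these hunks; the surrounding scaffolding and the standard DL4J imports (NeuralNetConfiguration, OutputLayer, MultiLayerNetwork, Activation, LossFunctions, Sgd, WeightInit) are assumptions for illustration, not part of the commit:

    // Sketch only — migrated builder style as shown in the hunks above.
    NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
            .updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
            .list()
            .layer(0, OutputLayer.builder().nIn(4).nOut(3)   // was: new OutputLayer.Builder()
                    .activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT)
                    .build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

One signature difference is visible in the LSTM hunk: gateActivationFunction now receives Activation.SIGMOID.getActivationFunction() (an activation instance) rather than the bare Activation enum.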
@ -79,7 +79,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
.addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
.addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
.activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
.setOutputs("0").build();
@ -124,7 +124,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.updater(new Sgd(5.0)) //Intentionally huge LR
.weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
.addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
.addLayer("0", OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
.setOutputs("0").build();
ComputationGraph net = new ComputationGraph(conf);
@ -160,7 +160,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.updater(new Sgd(1e-6)).weightInit(WeightInit.XAVIER).graphBuilder()
.addInputs("in")
.addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
.addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
.activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
.setOutputs("0").build();
@ -202,7 +202,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.updater(new Sgd(0.0)).weightInit(WeightInit.XAVIER).graphBuilder()
.addInputs("in")
.addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
.addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
.activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
.setOutputs("0").build();
@ -236,7 +236,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
.addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
.addLayer("0", OutputLayer.builder().nIn(4).nOut(3)
.activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
.setOutputs("0").build();
@ -300,8 +300,8 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("in")
.layer("0", new DenseLayer.Builder().nIn(784).nOut(32).build(), "in")
.layer("1", new OutputLayer.Builder().nIn(32).nOut(784).activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.MSE).build(), "0")
.layer("0", DenseLayer.builder().nIn(784).nOut(32).build(), "in")
.layer("1", OutputLayer.builder().nIn(32).nOut(784).activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.MSE).build(), "0")
.setOutputs("1")
.build();
@ -346,7 +346,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("in")
.layer("0", new AutoEncoder.Builder().nIn(784).nOut(32).build(), "in")
.layer("0", AutoEncoder.builder().nIn(784).nOut(32).build(), "in")
.setOutputs("0")
.build();
@ -391,7 +391,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("in")
.layer("0", new VariationalAutoencoder.Builder()
.layer("0", VariationalAutoencoder.builder()
.nIn(784).nOut(32)
.encoderLayerSizes(64)
.decoderLayerSizes(64)
@ -439,7 +439,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
.updater(new Adam(1e-5))
.graphBuilder()
.addInputs("in")
.layer("0", new VariationalAutoencoder.Builder()
.layer("0", VariationalAutoencoder.builder()
.nIn(784).nOut(32)
.encoderLayerSizes(64)
.decoderLayerSizes(64)
@ -489,8 +489,8 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("in")
.layer("0", new DenseLayer.Builder().nIn(784).nOut(32).build(), "in")
.layer("1", new OutputLayer.Builder().nIn(32).nOut(10).activation(Activation.SOFTMAX).build(), "0")
.layer("0", DenseLayer.builder().nIn(784).nOut(32).build(), "in")
.layer("1", OutputLayer.builder().nIn(32).nOut(10).activation(Activation.SOFTMAX).build(), "0")
.setOutputs("1")
.build();
@ -530,7 +530,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest {
.updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
.graphBuilder()
.addInputs("in")
.layer("0", new OutputLayer.Builder().nIn(4).nOut(3)
.layer("0", OutputLayer.builder().nIn(4).nOut(3)
.activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
.setOutputs("0")
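The ComputationGraph variants change in exactly the same way: only the layer construction moves to the static builder, while graphBuilder(), addInputs, addLayer/layer and setOutputs are untouched. A hedged sketch of the migrated graph form, using only calls shown in the hunks above (the surrounding test scaffolding is an assumption for illustration):

    // Sketch only — graph-builder form after the migration.
    ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
            .updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
            .graphBuilder()
            .addInputs("in")
            .addLayer("0", OutputLayer.builder().nIn(4).nOut(3)  // was: new OutputLayer.Builder()
                    .activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
            .setOutputs("0")
            .build();
    ComputationGraph net = new ComputationGraph(conf);
    net.init();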
@ -73,9 +73,9 @@ public class EvalTest extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).seed(42)
.updater(new Sgd(1e-6)).list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(2).activation(Activation.TANH)
.layer(0, DenseLayer.builder().nIn(4).nOut(2).activation(Activation.TANH)
.weightInit(WeightInit.XAVIER).build())
.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder().lossFunction(
LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3).weightInit(WeightInit.XAVIER)
.activation(Activation.SOFTMAX).build())
@ -180,7 +180,7 @@ public class EvalTest extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(new Sgd(0.1))
.list()
.layer(0, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
.layer(0, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(4).nOut(3).build())
.build();
@ -300,8 +300,8 @@ public class EvalTest extends BaseDL4JTest {
.trainingWorkspaceMode(ws)
.inferenceWorkspaceMode(ws)
.list()
.layer(new LSTM.Builder().nIn(nIn).nOut(layerSize).build())
.layer(new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut)
.layer(LSTM.builder().nIn(nIn).nOut(layerSize).build())
.layer(RnnOutputLayer.builder().nIn(layerSize).nOut(nOut)
.activation(Activation.SOFTMAX)
.build())
.build();
@ -311,8 +311,8 @@ public class EvalTest extends BaseDL4JTest {
.trainingWorkspaceMode(ws)
.inferenceWorkspaceMode(ws)
.list()
.layer(new LSTM.Builder().nIn(nIn).nOut(layerSize).build())
.layer(new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut)
.layer(LSTM.builder().nIn(nIn).nOut(layerSize).build())
.layer(RnnOutputLayer.builder().nIn(layerSize).nOut(nOut)
.activation(Activation.SOFTMAX).build())
.tbpttFwdLength(10).tbpttBackLength(10)
.backpropType(BackpropType.TruncatedBPTT)
@ -377,8 +377,8 @@ public class EvalTest extends BaseDL4JTest {
.inferenceWorkspaceMode(ws)
.graphBuilder()
.addInputs("in")
.addLayer("0", new LSTM.Builder().nIn(nIn).nOut(layerSize).build(), "in")
.addLayer("1", new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut)
.addLayer("0", LSTM.builder().nIn(nIn).nOut(layerSize).build(), "in")
.addLayer("1", RnnOutputLayer.builder().nIn(layerSize).nOut(nOut)
.activation(Activation.SOFTMAX)
.build(), "0")
.setOutputs("1")
@ -390,8 +390,8 @@ public class EvalTest extends BaseDL4JTest {
.inferenceWorkspaceMode(ws)
.graphBuilder()
.addInputs("in")
.addLayer("0", new LSTM.Builder().nIn(nIn).nOut(layerSize).build(), "in")
.addLayer("1", new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut)
.addLayer("0", LSTM.builder().nIn(nIn).nOut(layerSize).build(), "in")
.addLayer("1", RnnOutputLayer.builder().nIn(layerSize).nOut(nOut)
.activation(Activation.SOFTMAX)
.build(), "0")
.setOutputs("1")
@ -457,8 +457,8 @@ public class EvalTest extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
.list()
.layer(0, new LSTM.Builder().activation(Activation.TANH).nIn(3).nOut(3).build())
.layer(1, new RnnOutputLayer.Builder().activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.XENT)
.layer(0, LSTM.builder().activation(Activation.TANH).nIn(3).nOut(3).build())
.layer(1, RnnOutputLayer.builder().activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.XENT)
.nIn(3).nOut(1).build())
.backpropType(BackpropType.TruncatedBPTT).tbpttFwdLength(10).tbpttBackLength(10)
.build();
@ -477,9 +477,9 @@ public class EvalTest extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).seed(42)
.updater(new Sgd(1e-6)).list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(2).activation(Activation.TANH)
.layer(0, DenseLayer.builder().nIn(4).nOut(2).activation(Activation.TANH)
.weightInit(WeightInit.XAVIER).build())
.layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
.layer(1, org.deeplearning4j.nn.conf.layers.OutputLayer.builder(
LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3).weightInit(WeightInit.XAVIER)
.activation(Activation.SOFTMAX).build())
.build();
@ -507,8 +507,8 @@ public class EvalTest extends BaseDL4JTest {
.seed(12345)
.graphBuilder()
.addInputs("in")
.addLayer("out1", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
.addLayer("out2", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
.addLayer("out1", OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
.addLayer("out2", OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build(), "in")
.setOutputs("out1", "out2")
.build();
@ -541,11 +541,11 @@ public class EvalTest extends BaseDL4JTest {
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder()
.graphBuilder()
.addInputs("in")
.layer("0", new EmbeddingSequenceLayer.Builder().nIn(10).nOut(10).build(), "in")
.layer("1", new LSTM.Builder().nIn(10).nOut(10).build(), "0")
.layer("2", new LSTM.Builder().nIn(10).nOut(10).build(), "0")
.layer("out1", new RnnOutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "1")
.layer("out2", new RnnOutputLayer.Builder().nIn(10).nOut(20).activation(Activation.SOFTMAX).build(), "2")
.layer("0", EmbeddingSequenceLayer.builder().nIn(10).nOut(10).build(), "in")
.layer("1", LSTM.builder().nIn(10).nOut(10).build(), "0")
.layer("2", LSTM.builder().nIn(10).nOut(10).build(), "0")
.layer("out1", RnnOutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "1")
.layer("out2", RnnOutputLayer.builder().nIn(10).nOut(20).activation(Activation.SOFTMAX).build(), "2")
.setOutputs("out1", "out2")
.build();
@ -569,8 +569,8 @@ public class EvalTest extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
.list()
.layer(new DenseLayer.Builder().nIn(4).nOut(10).build())
.layer(new OutputLayer.Builder().nIn(10).nOut(3).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.RELU).build())
.layer(DenseLayer.builder().nIn(4).nOut(10).build())
.layer(OutputLayer.builder().nIn(10).nOut(3).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.RELU).build())
.build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -48,8 +48,8 @@ public class EvaluationToolsTests extends BaseDL4JTest {
DataSetIterator iter = new IrisDataSetIterator(150, 150);
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
new OutputLayer.Builder().nIn(4).nOut(2).activation(Activation.SOFTMAX)
.layer(0, DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
OutputLayer.builder().nIn(4).nOut(2).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
.build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -90,8 +90,8 @@ public class EvaluationToolsTests extends BaseDL4JTest {
DataSetIterator iter = new IrisDataSetIterator(150, 150);
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
.layer(0, DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
.build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -84,8 +84,8 @@ public class ROCTest extends BaseDL4JTest {
Nd4j.getRandom().setSeed(12345);
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.XAVIER).seed(12345)
.list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
.layer(0, DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build()).layer(1,
OutputLayer.builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
.build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -49,7 +49,7 @@ public class RegressionEvalTest extends BaseDL4JTest {
//Basic sanity check
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().weightInit(WeightInit.ZERO).list()
.layer(0, new OutputLayer.Builder().activation(Activation.TANH)
.layer(0, OutputLayer.builder().activation(Activation.TANH)
.lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(5).build())
.build();
@ -71,7 +71,7 @@ public class RegressionEvalTest extends BaseDL4JTest {
ComputationGraphConfiguration graphConf =
NeuralNetConfiguration.builder().weightInit(WeightInit.ZERO).graphBuilder()
.addInputs("in").addLayer("0", new OutputLayer.Builder()
.addInputs("in").addLayer("0", OutputLayer.builder()
.lossFunction(LossFunctions.LossFunction.MSE)
.activation(Activation.TANH).nIn(10).nOut(5).build(), "in")
.setOutputs("0").build();
@ -41,8 +41,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
public static MultiLayerNetwork getDensePlusOutput(int nIn, int nOut) {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new DenseLayer.Builder().nIn(nIn).nOut(10).build())
.layer(1, new OutputLayer.Builder().nIn(10).nOut(nOut).build()).build();
.layer(0, DenseLayer.builder().nIn(nIn).nOut(10).build())
.layer(1, OutputLayer.builder().nIn(10).nOut(nOut).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
@ -52,8 +52,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
public static MultiLayerNetwork getLSTMPlusRnnOutput(int nIn, int nOut) {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new GravesLSTM.Builder().nIn(nIn).nOut(10).build())
.layer(1, new RnnOutputLayer.Builder().nIn(10).nOut(nOut).build()).build();
.layer(0, GravesLSTM.builder().nIn(nIn).nOut(10).build())
.layer(1, RnnOutputLayer.builder().nIn(10).nOut(nOut).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
@ -63,8 +63,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
public static MultiLayerNetwork getCnnPlusOutputLayer(int depthIn, int inH, int inW, int nOut) {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new ConvolutionLayer.Builder().nIn(depthIn).nOut(5).build())
.layer(1, new OutputLayer.Builder().nOut(nOut).build())
.layer(0, ConvolutionLayer.builder().nIn(depthIn).nOut(5).build())
.layer(1, OutputLayer.builder().nOut(nOut).build())
.inputType(InputType.convolutional(inH, inW, depthIn)).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -90,8 +90,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
public void testDenseNout0() {
try {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new DenseLayer.Builder().nIn(10).nOut(0).build())
.layer(1, new OutputLayer.Builder().nIn(10).nOut(10).build()).build();
.layer(0, DenseLayer.builder().nIn(10).nOut(0).build())
.layer(1, OutputLayer.builder().nIn(10).nOut(10).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
@ -147,8 +147,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
public void testLSTMNOut0() {
try {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new GravesLSTM.Builder().nIn(10).nOut(0).build())
.layer(1, new RnnOutputLayer.Builder().nIn(10).nOut(10).build()).build();
.layer(0, GravesLSTM.builder().nIn(10).nOut(0).build())
.layer(1, RnnOutputLayer.builder().nIn(10).nOut(10).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
@ -178,8 +178,8 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
public void testConvolutionalNOut0() {
try {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new ConvolutionLayer.Builder().nIn(5).nOut(0).build())
.layer(1, new OutputLayer.Builder().nOut(10).build())
.layer(0, ConvolutionLayer.builder().nIn(5).nOut(0).build())
.layer(1, OutputLayer.builder().nOut(10).build())
.inputType(InputType.convolutional(10, 10, 5)).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -208,9 +208,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
try {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().convolutionMode(ConvolutionMode.Strict)
.list()
.layer(0, new ConvolutionLayer.Builder().kernelSize(3, 2).stride(2, 2).padding(0, 0).nOut(5)
.layer(0, ConvolutionLayer.builder().kernelSize(3, 2).stride(2, 2).padding(0, 0).nOut(5)
.build())
.layer(1, new OutputLayer.Builder().nOut(10).build())
.layer(1, OutputLayer.builder().nOut(10).build())
.inputType(InputType.convolutional(hIn, wIn, depthIn)).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -234,9 +234,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
int wIn = 10;
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new ConvolutionLayer.Builder().kernelSize(7, 7).stride(1, 1).padding(0, 0).nOut(5)
.layer(0, ConvolutionLayer.builder().kernelSize(7, 7).stride(1, 1).padding(0, 0).nOut(5)
.build())
.layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build())
.layer(1, OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build())
.inputType(InputType.convolutional(hIn, wIn, depthIn)).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -266,9 +266,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
NeuralNetConfiguration conf =
NeuralNetConfiguration.builder().convolutionMode(ConvolutionMode.Strict).list()
.layer(0, new ConvolutionLayer.Builder().kernelSize(3, 3).stride(2, 2)
.layer(0, ConvolutionLayer.builder().kernelSize(3, 3).stride(2, 2)
.padding(0, 0).nIn(depthIn).nOut(5).build())
.layer(1, new OutputLayer.Builder().nIn(5 * 4 * 4).nOut(10).activation(Activation.SOFTMAX).build())
.layer(1, OutputLayer.builder().nIn(5 * 4 * 4).nOut(10).activation(Activation.SOFTMAX).build())
.inputPreProcessor(1, new CnnToFeedForwardPreProcessor()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -299,9 +299,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
try {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 3).stride(2, 2).padding(0, 0).nOut(5)
.layer(0, ConvolutionLayer.builder().kernelSize(2, 3).stride(2, 2).padding(0, 0).nOut(5)
.build())
.layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build())
.layer(1, OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build())
.inputType(InputType.convolutional(hIn, wIn, depthIn)).build();
} catch (Exception e) {
fail("Did not expect exception with default (truncate)");
@ -310,9 +310,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
try {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().convolutionMode(ConvolutionMode.Strict)
.list()
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 3).stride(2, 2).padding(0, 0).nOut(5)
.layer(0, ConvolutionLayer.builder().kernelSize(2, 3).stride(2, 2).padding(0, 0).nOut(5)
.build())
.layer(1, new OutputLayer.Builder().nOut(10).build())
.layer(1, OutputLayer.builder().nOut(10).build())
.inputType(InputType.convolutional(hIn, wIn, depthIn)).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -339,9 +339,9 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
try {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().convolutionMode(ConvolutionMode.Strict)
.list()
.layer(0, new SubsamplingLayer.Builder().kernelSize(2, 3).stride(2, 2).padding(0, 0)
.layer(0, SubsamplingLayer.builder().kernelSize(2, 3).stride(2, 2).padding(0, 0)
.build())
.layer(1, new OutputLayer.Builder().nOut(10).build())
.layer(1, OutputLayer.builder().nOut(10).build())
.inputType(InputType.convolutional(hIn, wIn, depthIn)).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -358,84 +358,84 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
@Test
public void testCnnInvalidKernel() {
assertThrows(IllegalStateException.class, () -> {
new ConvolutionLayer.Builder().kernelSize(3, 0).build();
ConvolutionLayer.builder().kernelSize(3, 0).build();
});
}
@Test
public void testCnnInvalidKernel2() {
assertThrows(IllegalStateException.class, () -> {
new ConvolutionLayer.Builder().kernelSize(2, 2, 2).build();
ConvolutionLayer.builder().kernelSize(2, 2, 2).build();
});
}
@Test
public void testCnnInvalidStride() {
assertThrows(IllegalStateException.class, () -> {
new ConvolutionLayer.Builder().kernelSize(3, 3).stride(0, 1).build();
ConvolutionLayer.builder().kernelSize(3, 3).stride(0, 1).build();
});
}
@Test
public void testCnnInvalidStride2() {
assertThrows(IllegalArgumentException.class, () -> {
new ConvolutionLayer.Builder().kernelSize(3, 3).stride(1).build();
ConvolutionLayer.builder().kernelSize(3, 3).stride(1).build();
});
}
@Test
public void testCnnInvalidPadding() {
assertThrows(IllegalArgumentException.class, () -> {
new ConvolutionLayer.Builder().kernelSize(3, 3).stride(1, 1).padding(-1, 0).build();
ConvolutionLayer.builder().kernelSize(3, 3).stride(1, 1).padding(-1, 0).build();
});
}
@Test
public void testCnnInvalidPadding2() {
assertThrows(IllegalArgumentException.class, () -> {
new ConvolutionLayer.Builder().kernelSize(3, 3).stride(1, 1).padding(0, 0, 0).build();
ConvolutionLayer.builder().kernelSize(3, 3).stride(1, 1).padding(0, 0, 0).build();
});
}
@Test
public void testSubsamplingInvalidKernel() {
assertThrows(IllegalStateException.class, () -> {
new SubsamplingLayer.Builder().kernelSize(3, 0).build();
SubsamplingLayer.builder().kernelSize(3, 0).build();
});
}
@Test
public void testSubsamplingInvalidKernel2() {
assertThrows(IllegalArgumentException.class, () -> {
new SubsamplingLayer.Builder().kernelSize(2).build();
SubsamplingLayer.builder().kernelSize(2).build();
});
}
@Test
public void testSubsamplingInvalidStride() {
assertThrows(IllegalStateException.class, () -> {
new SubsamplingLayer.Builder().kernelSize(3, 3).stride(0, 1).build();
SubsamplingLayer.builder().kernelSize(3, 3).stride(0, 1).build();
});
}
@Test
public void testSubsamplingInvalidStride2() {
assertThrows(RuntimeException.class, () -> {
new SubsamplingLayer.Builder().kernelSize(3, 3).stride(1, 1, 1).build();
SubsamplingLayer.builder().kernelSize(3, 3).stride(1, 1, 1).build();
});
}
@Test
public void testSubsamplingInvalidPadding() {
assertThrows(IllegalArgumentException.class, () -> {
new SubsamplingLayer.Builder().kernelSize(3, 3).stride(1, 1).padding(-1, 0).build();
SubsamplingLayer.builder().kernelSize(3, 3).stride(1, 1).padding(-1, 0).build();
});
}
@Test
public void testSubsamplingInvalidPadding2() {
assertThrows(RuntimeException.class, () -> {
new SubsamplingLayer.Builder().kernelSize(3, 3).stride(1, 1).padding(0).build();
SubsamplingLayer.builder().kernelSize(3, 3).stride(1, 1).padding(0).build();
});
}
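These invalid-configuration tests needed only the constructor swap because the generated builders still validate kernel, stride and padding shapes at build() time. For example (shape arguments copied from the hunks above; the JUnit 5 assertThrows import is an assumption of the surrounding test class):

    assertThrows(IllegalStateException.class,
            () -> ConvolutionLayer.builder().kernelSize(3, 0).build());
    assertThrows(IllegalArgumentException.class,
            () -> SubsamplingLayer.builder().kernelSize(2).build());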
@ -43,8 +43,8 @@ public class TestInvalidInput extends BaseDL4JTest {
@Test
public void testInputNinMismatchDense() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
.layer(1, new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
.layer(0, DenseLayer.builder().nIn(10).nOut(10).build())
.layer(1, OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
@ -64,8 +64,8 @@ public class TestInvalidInput extends BaseDL4JTest {
@Test
public void testLabelsNOutMismatchOutputLayer() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
.layer(1, new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
.layer(0, DenseLayer.builder().nIn(10).nOut(10).build())
.layer(1, OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
@ -85,8 +85,8 @@ public class TestInvalidInput extends BaseDL4JTest {
@Test
public void testLabelsNOutMismatchRnnOutputLayer() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new LSTM.Builder().nIn(5).nOut(5).build())
.layer(1, new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
.layer(0, LSTM.builder().nIn(5).nOut(5).build())
.layer(1, RnnOutputLayer.builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
@ -112,8 +112,8 @@ public class TestInvalidInput extends BaseDL4JTest {
int d = 3;
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new ConvolutionLayer.Builder().nIn(d).nOut(5).build())
.layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build())
.layer(0, ConvolutionLayer.builder().nIn(d).nOut(5).build())
.layer(1, OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build())
.inputType(InputType.convolutional(h, w, d)).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -139,8 +139,8 @@ public class TestInvalidInput extends BaseDL4JTest {
int d = 3;
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new ConvolutionLayer.Builder().nIn(d).nOut(5).build())
.layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build())
.layer(0, ConvolutionLayer.builder().nIn(d).nOut(5).build())
.layer(1, OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build())
.inputType(InputType.convolutional(h, w, d)).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -165,8 +165,8 @@ public class TestInvalidInput extends BaseDL4JTest {
int d = 3;
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new SubsamplingLayer.Builder().kernelSize(2, 2).build())
.layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build())
.layer(0, SubsamplingLayer.builder().kernelSize(2, 2).build())
.layer(1, OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).build())
.inputType(InputType.convolutional(h, w, d)).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
@ -188,8 +188,8 @@ public class TestInvalidInput extends BaseDL4JTest {
public void testInputNinMismatchLSTM() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new GravesLSTM.Builder().nIn(5).nOut(5).build())
.layer(1, new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
.layer(0, GravesLSTM.builder().nIn(5).nOut(5).build())
.layer(1, RnnOutputLayer.builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
@ -209,8 +209,8 @@ public class TestInvalidInput extends BaseDL4JTest {
public void testInputNinMismatchBidirectionalLSTM() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new GravesBidirectionalLSTM.Builder().nIn(5).nOut(5).build())
.layer(1, new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
.layer(0, GravesBidirectionalLSTM.builder().nIn(5).nOut(5).build())
.layer(1, RnnOutputLayer.builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
@ -231,8 +231,8 @@ public class TestInvalidInput extends BaseDL4JTest {
public void testInputNinMismatchEmbeddingLayer() {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(0, new EmbeddingLayer.Builder().nIn(10).nOut(10).build())
.layer(1, new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
.layer(0, EmbeddingLayer.builder().nIn(10).nOut(10).build())
.layer(1, OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
@ -259,13 +259,13 @@ public class TestInvalidInput extends BaseDL4JTest {
LayerConfiguration l;
switch (layerType){
case "simple":
l = new SimpleRnn.Builder().nIn(5).nOut(5).build();
l = SimpleRnn.builder().nIn(5).nOut(5).build();
break;
case "lstm":
l = new LSTM.Builder().nIn(5).nOut(5).build();
l = LSTM.builder().nIn(5).nOut(5).build();
break;
case "graves":
l = new GravesLSTM.Builder().nIn(5).nOut(5).build();
l = GravesLSTM.builder().nIn(5).nOut(5).build();
break;
default:
throw new RuntimeException();
@ -273,7 +273,7 @@ public class TestInvalidInput extends BaseDL4JTest {
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().list()
.layer(l)
.layer(new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
.layer(RnnOutputLayer.builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
@ -88,14 +88,13 @@ public class AttentionLayerTest extends BaseDL4JTest {
.activation(Activation.TANH)
.updater(new NoOp())
.weightInit(WeightInit.XAVIER)
.list()
.layer(new LSTM.Builder().nOut(layerSize).build())
.layer(LSTM.builder().nOut(layerSize).build())
.layer( projectInput ?
new SelfAttentionLayer.Builder().nOut(4).nHeads(2).projectInput(true).build()
: new SelfAttentionLayer.Builder().nHeads(1).projectInput(false).build()
SelfAttentionLayer.builder().nOut(4).nHeads(2).projectInput(true).build()
: SelfAttentionLayer.builder().nHeads(1).projectInput(false).build()
)
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build())
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build())
.layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
.inputType(InputType.recurrent(nIn))
.build();
@ -150,13 +149,13 @@ public class AttentionLayerTest extends BaseDL4JTest {
.updater(new NoOp())
.weightInit(WeightInit.XAVIER)
.list()
.layer(new LSTM.Builder().nOut(layerSize).build())
.layer(LSTM.builder().nOut(layerSize).build())
.layer( projectInput ?
new LearnedSelfAttentionLayer.Builder().nOut(4).nHeads(2).nQueries(numQueries).projectInput(true).build()
: new LearnedSelfAttentionLayer.Builder().nHeads(1).nQueries(numQueries).projectInput(false).build()
LearnedSelfAttentionLayer.builder().nOut(4).nHeads(2).nQueries(numQueries).projectInput(true).build()
: LearnedSelfAttentionLayer.builder().nHeads(1).nQueries(numQueries).projectInput(false).build()
)
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build())
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build())
.layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
.inputType(InputType.recurrent(nIn))
.build();
@ -190,13 +189,13 @@ public class AttentionLayerTest extends BaseDL4JTest {
.updater(new NoOp())
.weightInit(WeightInit.XAVIER)
.list()
.layer(new LSTM.Builder().nOut(layerSize).build())
.layer(LSTM.builder().nOut(layerSize).build())
.layer( projectInput ?
new LearnedSelfAttentionLayer.Builder().nOut(4).nHeads(2).nQueries(numQueries).projectInput(true).build()
: new LearnedSelfAttentionLayer.Builder().nHeads(1).nQueries(numQueries).projectInput(false).build()
LearnedSelfAttentionLayer.builder().nOut(4).nHeads(2).nQueries(numQueries).projectInput(true).build()
: LearnedSelfAttentionLayer.builder().nHeads(1).nQueries(numQueries).projectInput(false).build()
)
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build())
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build())
.layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
.inputType(InputType.recurrent(nIn))
.build();
@ -245,10 +244,10 @@ public class AttentionLayerTest extends BaseDL4JTest {
.updater(new NoOp())
.weightInit(WeightInit.XAVIER)
.list()
.layer(new LSTM.Builder().nOut(layerSize).build())
.layer(new RecurrentAttentionLayer.Builder().nIn(layerSize).nOut(layerSize).nHeads(1).projectInput(false).hasBias(false).build())
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build())
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
.layer(LSTM.builder().nOut(layerSize).build())
.layer(RecurrentAttentionLayer.builder().nIn(layerSize).nOut(layerSize).nHeads(1).projectInput(false).hasBias(false).build())
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
.layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
.inputType(InputType.recurrent(nIn))
.build();
@ -308,10 +307,10 @@ public class AttentionLayerTest extends BaseDL4JTest {
.updater(new NoOp())
.weightInit(WeightInit.XAVIER)
.list()
.layer(new LSTM.Builder().nOut(layerSize).build())
.layer(new RecurrentAttentionLayer.Builder().nIn(layerSize).nOut(layerSize).nHeads(1).projectInput(false).hasBias(false).build())
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build())
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
.layer(LSTM.builder().nOut(layerSize).build())
.layer(RecurrentAttentionLayer.builder().nIn(layerSize).nOut(layerSize).nHeads(1).projectInput(false).hasBias(false).build())
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
.layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
.inputType(InputType.recurrent(nIn))
.build();
@ -367,15 +366,15 @@ public class AttentionLayerTest extends BaseDL4JTest {
.weightInit(WeightInit.XAVIER)
.graphBuilder()
.addInputs("input")
.addLayer("rnnKeys", new SimpleRnn.Builder().nOut(layerSize).build(), "input")
.addLayer("rnnQueries", new SimpleRnn.Builder().nOut(layerSize).build(), "input")
.addLayer("rnnValues", new SimpleRnn.Builder().nOut(layerSize).build(), "input")
.addLayer("rnnKeys", SimpleRnn.builder().nOut(layerSize).build(), "input")
.addLayer("rnnQueries", SimpleRnn.builder().nOut(layerSize).build(), "input")
.addLayer("rnnValues", SimpleRnn.builder().nOut(layerSize).build(), "input")
.addVertex("attention",
projectInput ?
new AttentionVertex.Builder().nOut(4).nHeads(2).projectInput(true).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build()
: new AttentionVertex.Builder().nOut(3).nHeads(1).projectInput(false).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build(), "rnnQueries", "rnnKeys", "rnnValues")
.addLayer("pooling", new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build(), "attention")
.addLayer("output", new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
.addLayer("pooling", GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build(), "attention")
.addLayer("output", OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
.setOutputs("output")
.setInputTypes(InputType.recurrent(nIn))
.build();
@ -431,13 +430,13 @@ public class AttentionLayerTest extends BaseDL4JTest {
.weightInit(WeightInit.XAVIER)
.graphBuilder()
.addInputs("input")
.addLayer("rnn", new SimpleRnn.Builder().activation(Activation.TANH).nOut(layerSize).build(), "input")
.addLayer("rnn", SimpleRnn.builder().activation(Activation.TANH).nOut(layerSize).build(), "input")
.addVertex("attention",
projectInput ?
new AttentionVertex.Builder().nOut(4).nHeads(2).projectInput(true).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build()
: new AttentionVertex.Builder().nOut(4).nHeads(1).projectInput(false).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build(), "rnn", "rnn", "rnn")
.addLayer("pooling", new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build(), "attention")
.addLayer("output", new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
.addLayer("pooling", GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build(), "attention")
.addLayer("output", OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
.setOutputs("output")
.setInputTypes(InputType.recurrent(nIn))
.build();
@ -78,11 +78,11 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.dataType(DataType.DOUBLE)
.seed(12345L)
.dist(new NormalDistribution(0, 1)).list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(3)
.layer(0, DenseLayer.builder().nIn(4).nOut(3)
.activation(Activation.IDENTITY).build())
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).nOut(3).build())
.layer(2, new ActivationLayer.Builder().activation(Activation.TANH).build())
.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
.layer(1,BatchNormalization.builder().useLogStd(useLogStd).nOut(3).build())
.layer(2, ActivationLayer.builder().activation(Activation.TANH).build())
.layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(3).nOut(3).build());
MultiLayerNetwork mln = new MultiLayerNetwork(builder.build());
@ -122,11 +122,11 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.dataType(DataType.DOUBLE)
.updater(new NoOp()).seed(12345L)
.dist(new NormalDistribution(0, 2)).list()
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nIn(depth).nOut(2)
.layer(0, ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nIn(depth).nOut(2)
.activation(Activation.IDENTITY).build())
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).build())
.layer(2, new ActivationLayer.Builder().activation(Activation.TANH).build())
.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
.layer(1,BatchNormalization.builder().useLogStd(useLogStd).build())
.layer(2, ActivationLayer.builder().activation(Activation.TANH).build())
.layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(nOut).build())
.inputType(InputType.convolutional(hw, hw, depth));
@ -193,14 +193,14 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT)
.updater(new NoOp())
.dist(new UniformDistribution(-2, 2)).seed(12345L).list()
.layer(0, new ConvolutionLayer.Builder(2, 2).stride(1, 1).nOut(3)
.layer(0, ConvolutionLayer.builder(2, 2).stride(1, 1).nOut(3)
.activation(afn).build())
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).build())
.layer(2, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
.layer(1,BatchNormalization.builder().useLogStd(useLogStd).build())
.layer(2, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX)
.kernelSize(2, 2).stride(1, 1).build())
.layer(3, new BatchNormalization())
.layer(4, new ActivationLayer.Builder().activation(afn).build())
.layer(5, new OutputLayer.Builder(lf).activation(outputActivation).nOut(nOut)
.layer(3, BatchNormalization.builder().build())
.layer(4, ActivationLayer.builder().activation(afn).build())
.layer(5, OutputLayer.builder(lf).activation(outputActivation).nOut(nOut)
.build())
.inputType(InputType.convolutional(hw, hw, depth));
@ -300,12 +300,12 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
.updater(new NoOp())
.dist(new UniformDistribution(-2, 2)).seed(12345L).list()
.layer(0, new DenseLayer.Builder().nIn(nIn).nOut(4)
.layer(0, DenseLayer.builder().nIn(nIn).nOut(4)
.activation(afn).build())
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).build())
.layer(2, new DenseLayer.Builder().nIn(4).nOut(4).build())
.layer(3, new BatchNormalization.Builder().useLogStd(useLogStd).build())
.layer(4, new OutputLayer.Builder(lf)
.layer(1,BatchNormalization.builder().useLogStd(useLogStd).build())
.layer(2, DenseLayer.builder().nIn(4).nOut(4).build())
.layer(3,BatchNormalization.builder().useLogStd(useLogStd).build())
.layer(4, OutputLayer.builder(lf)
.activation(outputActivation).nOut(nOut)
.build());
@ -373,11 +373,11 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.dataType(DataType.DOUBLE)
.seed(12345L)
.dist(new NormalDistribution(0, 1)).list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY).build())
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).lockGammaBeta(true).gamma(2.0).beta(0.5).nOut(3)
.layer(0, DenseLayer.builder().nIn(4).nOut(3).activation(Activation.IDENTITY).build())
.layer(1,BatchNormalization.builder().useLogStd(useLogStd).lockGammaBeta(true).gamma(2.0).beta(0.5).nOut(3)
.build())
.layer(2, new ActivationLayer.Builder().activation(Activation.TANH).build())
.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
.layer(2, ActivationLayer.builder().activation(Activation.TANH).build())
.layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nIn(3).nOut(3).build());
MultiLayerNetwork mln = new MultiLayerNetwork(builder.build());
@ -417,11 +417,11 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.dataType(DataType.DOUBLE)
.seed(12345L)
.dist(new NormalDistribution(0, 2)).list()
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nIn(depth).nOut(2)
.layer(0, ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nIn(depth).nOut(2)
.activation(Activation.IDENTITY).build())
.layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).lockGammaBeta(true).gamma(2.0).beta(0.5).build())
.layer(2, new ActivationLayer.Builder().activation(Activation.TANH).build())
.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
.layer(1,BatchNormalization.builder().useLogStd(useLogStd).lockGammaBeta(true).gamma(2.0).beta(0.5).build())
.layer(2, ActivationLayer.builder().activation(Activation.TANH).build())
.layer(3, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(nOut).build())
.inputType(InputType.convolutional(hw, hw, depth));
@ -460,8 +460,8 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.dataType(DataType.DOUBLE)
.weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
.setInputTypes(InputType.convolutional(height, width, channels))
.addLayer("bn", new BatchNormalization.Builder().useLogStd(useLogStd).build(), "in")
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.addLayer("bn",BatchNormalization.builder().useLogStd(useLogStd).build(), "in")
.addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(numClasses).build(), "bn")
.setOutputs("out").build();
@ -531,14 +531,14 @@ public class BNGradientCheckTest extends BaseDL4JTest {
.updater(new NoOp())
.dist(new UniformDistribution(-2, 2)).seed(12345L).graphBuilder()
.addInputs("in")
.addLayer("0", new ConvolutionLayer.Builder(2, 2).stride(1, 1).nOut(3)
.addLayer("0", ConvolutionLayer.builder(2, 2).stride(1, 1).nOut(3)
.activation(afn).build(), "in")
.addLayer("1", new BatchNormalization.Builder().useLogStd(useLogStd).build(), "0")
.addLayer("2", new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
.addLayer("1",BatchNormalization.builder().useLogStd(useLogStd).build(), "0")
.addLayer("2", SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX)
.kernelSize(2, 2).stride(1, 1).build(), "1")
.addLayer("3", new BatchNormalization.Builder().useLogStd(useLogStd).build(), "2")
.addLayer("4", new ActivationLayer.Builder().activation(afn).build(), "3")
.addLayer("5", new OutputLayer.Builder(lf).activation(outputActivation)
.addLayer("3",BatchNormalization.builder().useLogStd(useLogStd).build(), "2")
.addLayer("4", ActivationLayer.builder().activation(afn).build(), "3")
.addLayer("5", OutputLayer.builder(lf).activation(outputActivation)
.nOut(nOut).build(), "4")
.setOutputs("5").setInputTypes(InputType.convolutional(hw, hw, depth))
.build();
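Two equivalent migrated forms of setting the loss function appear in these gradient-check hunks: some sites keep it as a builder argument, OutputLayer.builder(lf), while others switch to the fluent setter, OutputLayer.builder().lossFunction(lf); likewise the bare new BatchNormalization() becomes BatchNormalization.builder().build(). A small sketch of the two OutputLayer forms, assumed interchangeable here (variable names are illustrative only):

    LayerConfiguration a = OutputLayer.builder(LossFunctions.LossFunction.MCXENT)
            .activation(Activation.SOFTMAX).nIn(3).nOut(3).build();
    LayerConfiguration b = OutputLayer.builder()
            .lossFunction(LossFunctions.LossFunction.MCXENT)
            .activation(Activation.SOFTMAX).nIn(3).nOut(3).build();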
File diff suppressed because it is too large
@ -115,16 +115,16 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
|
|||
.updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.list()
|
||||
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(kernel)
|
||||
.layer(0, Convolution3D.builder().activation(afn).kernelSize(kernel)
|
||||
.stride(stride).nIn(convNIn).nOut(convNOut1).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(df)
|
||||
.build())
|
||||
.layer(1, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.layer(1, Convolution3D.builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.nIn(convNOut1).nOut(convNOut2).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(df)
|
||||
.build())
|
||||
.layer(2, new DenseLayer.Builder().nOut(denseNOut).build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(2, DenseLayer.builder().nOut(denseNOut).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputPreProcessor(2,
|
||||
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
|
||||
|
@ -218,17 +218,17 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
|
|||
.updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.list()
|
||||
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(kernel)
|
||||
.layer(0, Convolution3D.builder().activation(afn).kernelSize(kernel)
|
||||
.nIn(convNIn).nOut(convNOut1).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
|
||||
.build())
|
||||
.layer(1, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.layer(1, Convolution3D.builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.nIn(convNOut1).nOut(convNOut2).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
|
||||
.build())
|
||||
.layer(2, new ZeroPadding3DLayer.Builder(zeroPadding).build())
|
||||
.layer(3, new DenseLayer.Builder().nOut(denseNOut).build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(2, ZeroPadding3DLayer.builder(zeroPadding).build())
|
||||
.layer(3, DenseLayer.builder().nOut(denseNOut).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputPreProcessor(3,
|
||||
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
|
||||
|
@ -314,14 +314,14 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
|
|||
.weightInit(WeightInit.XAVIER)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.list()
|
||||
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.layer(0, Convolution3D.builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.nIn(convNIn).nOut(convNOut).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(df)
|
||||
.build())
|
||||
.layer(1, new Subsampling3DLayer.Builder(kernel)
|
||||
.poolingType(pool).convolutionMode(mode).dataFormat(df).build())
|
||||
.layer(2, new DenseLayer.Builder().nOut(denseNOut).build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1,Subsampling3DLayer.builder(kernel)
|
||||
.poolingType(pool.toPoolingType()).convolutionMode(mode).dataFormat(df).build())
|
||||
.layer(2, DenseLayer.builder().nOut(denseNOut).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputPreProcessor(2,
|
||||
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,convNOut, df))
|
||||
|
@ -401,13 +401,13 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 1))
|
||||
.seed(12345)
|
||||
.list()
|
||||
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.layer(0, Convolution3D.builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.nIn(convNIn).nOut(convNOut).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(df)
|
||||
.build())
|
||||
.layer(1, new Upsampling3D.Builder(upsamplingSize[0]).dataFormat(df).build())
|
||||
.layer(2, new DenseLayer.Builder().nOut(denseNOut).build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1, Upsampling3D.builder(upsamplingSize[0]).dataFormat(df).build())
|
||||
.layer(2, DenseLayer.builder().nOut(denseNOut).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputPreProcessor(2,
|
||||
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
|
||||
|
@@ -496,17 +496,17 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
|
|||
.updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.list()
|
||||
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(kernel)
|
||||
.layer(0, Convolution3D.builder().activation(afn).kernelSize(kernel)
|
||||
.nIn(convNIn).nOut(convNOut1).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
|
||||
.build())
|
||||
.layer(1, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.layer(1, Convolution3D.builder().activation(afn).kernelSize(1, 1, 1)
|
||||
.nIn(convNOut1).nOut(convNOut2).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
|
||||
.build())
|
||||
.layer(2, new Cropping3D.Builder(cropping).build())
|
||||
.layer(3, new DenseLayer.Builder().nOut(denseNOut).build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(2, Cropping3D.builder(cropping).build())
|
||||
.layer(3, DenseLayer.builder().nOut(denseNOut).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputPreProcessor(3,
|
||||
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
|
||||
|
@@ -595,15 +595,15 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.weightInit(new NormalDistribution(0, 0.1))
|
||||
.list()
|
||||
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(kernel)
|
||||
.layer(0, Convolution3D.builder().activation(afn).kernelSize(kernel)
|
||||
.stride(stride).nIn(convNIn).nOut(dOut).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(df)
|
||||
.build())
|
||||
.layer(1, new Deconvolution3D.Builder().activation(afn).kernelSize(kernel)
|
||||
.layer(1, Deconvolution3D.builder().activation(afn).kernelSize(kernel)
|
||||
.stride(stride).nOut(dOut).hasBias(false)
|
||||
.convolutionMode(mode).dataFormat(df)
|
||||
.build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
|
||||
.inputType(InputType.convolutional3D(df, depth, height, width, convNIn)).build();
|
||||
|
||||
|
|
File diff suppressed because it is too large
|
@@ -82,14 +82,14 @@ public class CapsnetGradientCheckTest extends BaseDL4JTest {
|
|||
.seed(123)
|
||||
.updater(new NoOp())
|
||||
.dist(new UniformDistribution(-6, 6))
|
||||
.layer(new PrimaryCapsules.Builder(primaryCapsDim, primarpCapsChannel)
|
||||
.layer(PrimaryCapsules.builder(primaryCapsDim, primarpCapsChannel)
|
||||
.kernelSize(3, 3)
|
||||
.stride(2, 2)
|
||||
.build())
|
||||
.layer(new CapsuleLayer.Builder(capsule, capsuleDim, routing).build())
|
||||
.layer(new CapsuleStrengthLayer.Builder().build())
|
||||
.layer(new ActivationLayer.Builder(new ActivationSoftmax()).build())
|
||||
.layer(new LossLayer.Builder(new LossNegativeLogLikelihood()).build())
|
||||
.layer(CapsuleLayer.builder(capsule, capsuleDim, routing).build())
|
||||
.layer(CapsuleStrengthLayer.builder().build())
|
||||
.layer(ActivationLayer.builder(new ActivationSoftmax()).build())
|
||||
.layer(LossLayer.builder().lossFunction(new LossNegativeLogLikelihood()).build())
|
||||
.inputType(InputType.convolutional(height, width, inputDepth))
|
||||
.build();
|
||||
|
||||
|
|
|
@@ -97,24 +97,23 @@ public class DropoutGradientCheck extends BaseDL4JTest {
|
|||
.convolutionMode(ConvolutionMode.Same)
|
||||
.dropOut(dropout)
|
||||
.activation(Activation.TANH)
|
||||
.updater(new NoOp())
|
||||
.list();
|
||||
.updater(new NoOp());
|
||||
|
||||
if(cnn){
|
||||
builder.layer(new ConvolutionLayer.Builder().kernelSize(3,3).stride(2,2).nOut(2).build());
|
||||
builder.layer(new ConvolutionLayer.Builder().kernelSize(3,3).stride(2,2).nOut(2).build());
|
||||
builder.layer(ConvolutionLayer.builder().kernelSize(3,3).stride(2,2).nOut(2).build());
|
||||
builder.layer(ConvolutionLayer.builder().kernelSize(3,3).stride(2,2).nOut(2).build());
|
||||
builder.inputType(InputType.convolutional(6,6,2));
|
||||
} else {
|
||||
builder.layer(new DenseLayer.Builder().nOut(3).build());
|
||||
builder.layer(new DenseLayer.Builder().nOut(3).build());
|
||||
builder.layer(DenseLayer.builder().nOut(3).build());
|
||||
builder.layer(DenseLayer.builder().nOut(3).build());
|
||||
builder.inputType(InputType.feedForward(6));
|
||||
}
|
||||
builder.layer(new OutputLayer.Builder().nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunction.MCXENT).build());
|
||||
builder.layer(OutputLayer.builder().nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunction.MCXENT).build());
|
||||
|
||||
NeuralNetConfiguration conf = builder.build();
|
||||
//Remove spatial dropout from output layer - can't be used for 2d input
|
||||
if(i == 4){
|
||||
conf.getFlattenedLayerConfigurations().get(2).setIDropout(null);
|
||||
conf.getFlattenedLayerConfigurations().get(2).setDropOut(null);
|
||||
}
|
||||
|
||||
MultiLayerNetwork mln = new MultiLayerNetwork(conf);
|
||||
|
@@ -157,11 +156,11 @@ public class DropoutGradientCheck extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).build(), "in")
|
||||
.addLayer("1", new DenseLayer.Builder().nIn(5).nOut(5).build(), "0")
|
||||
.addLayer("2", new DenseLayer.Builder().nIn(5).nOut(5).build(), "0")
|
||||
.addLayer("3", new DenseLayer.Builder().nIn(5).nOut(5).build(), "0")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(15).nOut(5).activation(Activation.SOFTMAX)
|
||||
.addLayer("0", DenseLayer.builder().nIn(5).nOut(5).build(), "in")
|
||||
.addLayer("1", DenseLayer.builder().nIn(5).nOut(5).build(), "0")
|
||||
.addLayer("2", DenseLayer.builder().nIn(5).nOut(5).build(), "0")
|
||||
.addLayer("3", DenseLayer.builder().nIn(5).nOut(5).build(), "0")
|
||||
.addLayer("out", OutputLayer.builder().nIn(15).nOut(5).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunction.MCXENT).build(), "1", "2", "3")
|
||||
.setOutputs("out")
|
||||
.build();
|
||||
|
|
|
@@ -75,10 +75,10 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
|
||||
.layer(0, new SimpleRnn.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.layer(0, SimpleRnn.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1, GlobalPoolingLayer.builder().poolingType(pt).build())
|
||||
.layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build())
|
||||
.build();
|
||||
|
||||
|
@@ -130,12 +130,12 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
|
||||
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1)
|
||||
.dataFormat(nchw ? CNN2DFormat.NCHW : CNN2DFormat.NHWC)
|
||||
.layer(0, ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1)
|
||||
.convFormat(nchw ? CNN2DFormat.NCHW : CNN2DFormat.NHWC)
|
||||
.nOut(layerDepth)
|
||||
.build())
|
||||
.layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1, GlobalPoolingLayer.builder().poolingType(pt).build())
|
||||
.layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(nOut).build())
|
||||
.inputType(InputType.convolutional(inputH, inputW, inputDepth, nchw ? CNN2DFormat.NCHW : CNN2DFormat.NHWC)).build();
|
||||
|
||||
|
@@ -188,10 +188,10 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
|
||||
.layer(0, new LSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.layer(0, LSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1, GlobalPoolingLayer.builder().poolingType(pt).build())
|
||||
.layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build())
|
||||
.build();
|
||||
|
||||
|
@@ -263,10 +263,10 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.dist(new NormalDistribution(0, 1.0)).convolutionMode(ConvolutionMode.Same)
|
||||
.seed(12345L).list()
|
||||
.layer(0, new ConvolutionLayer.Builder().kernelSize(kernel).stride(stride)
|
||||
.layer(0, ConvolutionLayer.builder().kernelSize(kernel).stride(stride)
|
||||
.nOut(layerDepth).build())
|
||||
.layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1, GlobalPoolingLayer.builder().poolingType(pt).build())
|
||||
.layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(nOut).build())
|
||||
|
||||
.inputType(InputType.convolutional(inputH, inputW, inputDepth)).build();
|
||||
|
|
|
@@ -78,11 +78,11 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(new NoOp())
|
||||
.list()
|
||||
.layer(0,
|
||||
new DenseLayer.Builder().nIn(4).nOut(3)
|
||||
DenseLayer.builder().nIn(4).nOut(3)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(3).nOut(3).build())
|
||||
.build();
|
||||
|
||||
|
@@ -168,11 +168,11 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).updater(new NoOp())
|
||||
.seed(12345L)
|
||||
.list().layer(0,
|
||||
new DenseLayer.Builder().nIn(4).nOut(3)
|
||||
DenseLayer.builder().nIn(4).nOut(3)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(afn)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder(lf).activation(outputActivation).nIn(3).nOut(3)
|
||||
.layer(1, OutputLayer.builder(lf).activation(outputActivation).nIn(3).nOut(3)
|
||||
.dist(new NormalDistribution(0, 1)).build())
|
||||
.build();
|
||||
|
||||
|
@@ -259,12 +259,12 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
|
||||
.seed(12345L)
|
||||
.list().layer(0,
|
||||
new DenseLayer.Builder().nIn(4).nOut(3)
|
||||
DenseLayer.builder().nIn(4).nOut(3)
|
||||
.dist(new NormalDistribution(0,
|
||||
1))
|
||||
.updater(new NoOp())
|
||||
.activation(afn).build())
|
||||
.layer(1, new OutputLayer.Builder(lf).nIn(3).nOut(3)
|
||||
.layer(1, OutputLayer.builder(lf).nIn(3).nOut(3)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp())
|
||||
.activation(outputActivation).build())
|
||||
|
@@ -327,10 +327,10 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().l2(0.2).l1(0.1)
|
||||
.dataType(DataType.DOUBLE)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345L)
|
||||
.list().layer(new EmbeddingLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
|
||||
.list().layer(EmbeddingLayer.builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
|
||||
.updater(new NoOp()).build())
|
||||
.layer(new PReLULayer.Builder().inputShape(3).sharedAxes(1).updater(new NoOp()).build())
|
||||
.layer(new OutputLayer.Builder(LossFunction.MCXENT).nIn(3).nOut(3)
|
||||
.layer(PReLULayer.builder().inputShape(3).sharedAxes(1).updater(new NoOp()).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunction.MCXENT).nIn(3).nOut(3)
|
||||
.weightInit(WeightInit.XAVIER).dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
|
@@ -365,12 +365,12 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().l2(0.2).l1(0.1)
|
||||
.dataType(DataType.DOUBLE)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345L)
|
||||
.list().layer(0,
|
||||
new EmbeddingLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
|
||||
.layer(0,
|
||||
EmbeddingLayer.builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
|
||||
.updater(new NoOp()).activation(
|
||||
Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder(LossFunction.MCXENT).nIn(3).nOut(3)
|
||||
.layer(1, OutputLayer.builder().lossFunction(LossFunction.MCXENT).nIn(3).nOut(3)
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.updater(new NoOp()).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
|
@@ -437,9 +437,9 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
.seed(12345L)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.list().layer(0,
|
||||
new AutoEncoder.Builder().nIn(4).nOut(3)
|
||||
AutoEncoder.builder().nIn(4).nOut(3)
|
||||
.activation(afn).build())
|
||||
.layer(1, new OutputLayer.Builder(lf).nIn(3).nOut(3)
|
||||
.layer(1, OutputLayer.builder(lf).nIn(3).nOut(3)
|
||||
.activation(outputActivation).build())
|
||||
.build();
|
||||
|
||||
|
@@ -497,13 +497,13 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
.weightInit(new UniformDistribution(0, 1))
|
||||
.graphBuilder()
|
||||
.addInputs("features")
|
||||
.addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(4)
|
||||
.addLayer("dense", DenseLayer.builder().nIn(4).nOut(4)
|
||||
.activation(Activation.TANH)
|
||||
.build(), "features")
|
||||
.addLayer("elementWiseMul", new ElementWiseMultiplicationLayer.Builder().nIn(4).nOut(4)
|
||||
.addLayer("elementWiseMul", ElementWiseMultiplicationLayer.builder().nIn(4).nOut(4)
|
||||
.activation(a)
|
||||
.build(), "dense")
|
||||
.addLayer("loss", new LossLayer.Builder(LossFunctions.LossFunction.COSINE_PROXIMITY)
|
||||
.addLayer("loss", LossLayer.builder().lossFunction(LossFunctions.LossFunction.COSINE_PROXIMITY.getILossFunction())
|
||||
.activation(Activation.IDENTITY).build(), "elementWiseMul")
|
||||
.setOutputs("loss")
|
||||
.build();
|
||||
|
@@ -566,12 +566,12 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.weightInit(new NormalDistribution(0, 1))
|
||||
.list()
|
||||
.layer(new EmbeddingSequenceLayer.Builder()
|
||||
.layer(EmbeddingSequenceLayer.builder()
|
||||
.nIn(8)
|
||||
.nOut(4)
|
||||
.outputDataFormat(seqOutputFormat)
|
||||
.build())
|
||||
.layer(new RnnOutputLayer.Builder().nIn(4).nOut(3).activation(Activation.TANH)
|
||||
.layer(RnnOutputLayer.builder().nIn(4).nOut(3).activation(Activation.TANH)
|
||||
.dataFormat(seqOutputFormat)
|
||||
.lossFunction(LossFunction.MSE).build())
|
||||
.build();
|
||||
|
@@ -679,12 +679,12 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
|
||||
.seed(12345L)
|
||||
.list().layer(0,
|
||||
new DenseLayer.Builder().nIn(4).nOut(3)
|
||||
DenseLayer.builder().nIn(4).nOut(3)
|
||||
.dist(new NormalDistribution(0,
|
||||
1))
|
||||
.updater(new NoOp())
|
||||
.activation(afn).build())
|
||||
.layer(1, new OutputLayer.Builder(lf).nIn(3).nOut(3)
|
||||
.layer(1, OutputLayer.builder(lf).nIn(3).nOut(3)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp())
|
||||
.activation(outputActivation).build())
|
||||
|
@@ -740,12 +740,12 @@ public class GradientCheckTests extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).updater(new NoOp())
|
||||
.seed(12345L)
|
||||
.list().layer(0,
|
||||
new DenseLayer.Builder().nIn(4).nOut(3)
|
||||
DenseLayer.builder().nIn(4).nOut(3)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.hasLayerNorm(layerNorm)
|
||||
.activation(afn)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder(lf).activation(outputActivation).nIn(3).nOut(3)
|
||||
.layer(1, OutputLayer.builder(lf).activation(outputActivation).nIn(3).nOut(3)
|
||||
.dist(new NormalDistribution(0, 1)).build())
|
||||
.build();
|
||||
|
||||
|
|
|
@@ -76,10 +76,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 1)).updater(new NoOp())
|
||||
.graphBuilder().addInputs("input")
|
||||
.addLayer("firstLayer",
|
||||
new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addLayer("outputLayer",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
|
||||
"firstLayer")
|
||||
.setOutputs("outputLayer").build();
|
||||
|
@@ -121,13 +121,13 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1)).updater(new NoOp())
|
||||
.graphBuilder().addInputs("input")
|
||||
.addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
.addLayer("l1", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addLayer("l2", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
.addLayer("l2", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addVertex("merge", new MergeVertex(), "l1", "l2")
|
||||
.addLayer("outputLayer",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(5 + 5).nOut(3).build(),
|
||||
"merge")
|
||||
.setOutputs("outputLayer").build();
|
||||
|
@@ -178,13 +178,13 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).graphBuilder().addInputs("input")
|
||||
.addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
.addLayer("l1", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addLayer("l2", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.SIGMOID)
|
||||
.addLayer("l2", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.SIGMOID)
|
||||
.build(), "input")
|
||||
.addVertex("elementwise", new ElementWiseVertex(op), "l1", "l2")
|
||||
.addLayer("outputLayer",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
|
||||
"elementwise")
|
||||
.setOutputs("outputLayer").build();
|
||||
|
@@ -236,15 +236,15 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).graphBuilder().addInputs("input")
|
||||
.addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
.addLayer("l1", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addLayer("l2", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.SIGMOID)
|
||||
.addLayer("l2", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.SIGMOID)
|
||||
.build(), "input")
|
||||
.addLayer("l3", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.RELU).build(),
|
||||
.addLayer("l3", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.RELU).build(),
|
||||
"input")
|
||||
.addVertex("elementwise", new ElementWiseVertex(op), "l1", "l2", "l3")
|
||||
.addLayer("outputLayer",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
|
||||
"elementwise")
|
||||
.setOutputs("outputLayer").build();
|
||||
|
@@ -299,10 +299,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.setOutputs("out")
|
||||
.layer("l1", new DenseLayer.Builder().nIn(3).nOut(firstSmaller ? 1 : 3).build(), "in") //[mb,3]
|
||||
.layer("l2", new DenseLayer.Builder().nIn(3).nOut(firstSmaller ? 3 : 1).build(), "in") //[mb,1]
|
||||
.layer("l1", DenseLayer.builder().nIn(3).nOut(firstSmaller ? 1 : 3).build(), "in") //[mb,3]
|
||||
.layer("l2", DenseLayer.builder().nIn(3).nOut(firstSmaller ? 3 : 1).build(), "in") //[mb,1]
|
||||
.addVertex("ew", new ElementWiseVertex(op), "l1", "l2")
|
||||
.layer("out", new OutputLayer.Builder().nIn(3).nOut(2).lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).build(), "ew")
|
||||
.layer("out", OutputLayer.builder().nIn(3).nOut(2).lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).build(), "ew")
|
||||
.build();
|
||||
|
||||
ComputationGraph graph = new ComputationGraph(conf);
|
||||
|
@@ -344,15 +344,15 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 0.1))
|
||||
.updater(new NoOp()).graphBuilder().addInputs("input")
|
||||
.addLayer("l1", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.dataFormat(format)
|
||||
.addLayer("l1", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.convFormat(format)
|
||||
.nIn(2).nOut(2).activation(Activation.TANH).build(), "input")
|
||||
.addLayer("l2", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1)
|
||||
.padding(0, 0).dataFormat(format)
|
||||
.addLayer("l2", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1)
|
||||
.padding(0, 0).convFormat(format)
|
||||
.nIn(2).nOut(2).activation(Activation.TANH).build(), "input")
|
||||
.addVertex("merge", new MergeVertex(), "l1", "l2")
|
||||
.addLayer("outputLayer",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(5 * 5 * (2 + 2)).nOut(3)
|
||||
.build(),
|
||||
"merge")
|
||||
|
@@ -401,23 +401,23 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.updater(new NoOp()).graphBuilder().addInputs("input")
|
||||
.setOutputs("out")
|
||||
.addLayer("rnn1",
|
||||
new SimpleRnn.Builder().nOut(3)
|
||||
SimpleRnn.builder().nOut(3)
|
||||
.activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addLayer("rnn2",
|
||||
new SimpleRnn.Builder().nOut(3)
|
||||
SimpleRnn.builder().nOut(3)
|
||||
.activation(Activation.TANH).build(),
|
||||
"rnn1")
|
||||
.addLayer("dense1",
|
||||
new DenseLayer.Builder().nOut(3)
|
||||
DenseLayer.builder().nOut(3)
|
||||
.activation(Activation.SIGMOID).build(),
|
||||
"rnn1")
|
||||
.addLayer("rnn3",
|
||||
new SimpleRnn.Builder().nOut(3)
|
||||
SimpleRnn.builder().nOut(3)
|
||||
.activation(Activation.TANH).build(),
|
||||
"dense1")
|
||||
.addVertex("merge", new MergeVertex(), "rnn2", "rnn3")
|
||||
.addLayer("out", new RnnOutputLayer.Builder().nOut(outSize)
|
||||
.addLayer("out", RnnOutputLayer.builder().nOut(outSize)
|
||||
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build(),
|
||||
|
@@ -457,10 +457,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.weightInit(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).graphBuilder().addInputs("input").setOutputs("out")
|
||||
.addLayer("lstm1", new LSTM.Builder().nOut(6).activation(Activation.TANH).build(),
|
||||
.addLayer("lstm1", LSTM.builder().nOut(6).activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addVertex("subset", new SubsetVertex(0, 2), "lstm1")
|
||||
.addLayer("out", new RnnOutputLayer.Builder().nOut(2).activation(Activation.SOFTMAX)
|
||||
.addLayer("out", RnnOutputLayer.builder().nOut(2).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "subset")
|
||||
.setInputTypes(InputType.recurrent(inLength,timeSeriesLength,RNNFormat.NCW))
|
||||
.build();
|
||||
|
@@ -494,10 +494,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).graphBuilder().addInputs("input").setOutputs("out")
|
||||
.addLayer("lstm1", new LSTM.Builder().nIn(3).nOut(4).activation(Activation.TANH).build(),
|
||||
.addLayer("lstm1", LSTM.builder().nIn(3).nOut(4).activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addVertex("lastTS", new LastTimeStepVertex("input"), "lstm1")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(4).nOut(2).activation(Activation.SOFTMAX)
|
||||
.addLayer("out", OutputLayer.builder().nIn(4).nOut(2).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build(), "lastTS")
|
||||
.build();
|
||||
|
||||
|
@@ -548,16 +548,16 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.updater(new NoOp()).graphBuilder()
|
||||
.addInputs("input1", "input2").setOutputs("out")
|
||||
.addLayer("lstm1",
|
||||
new LSTM.Builder().nIn(3).nOut(3)
|
||||
LSTM.builder().nIn(3).nOut(3)
|
||||
.activation(Activation.TANH).build(),
|
||||
"input1")
|
||||
.addLayer("lstm2",
|
||||
new LSTM.Builder().nIn(2).nOut(4)
|
||||
LSTM.builder().nIn(2).nOut(4)
|
||||
.activation(Activation.SOFTSIGN).build(),
|
||||
"input2")
|
||||
.addVertex("lastTS", new LastTimeStepVertex("input2"), "lstm2")
|
||||
.addVertex("duplicate", new DuplicateToTimeSeriesVertex("input2"), "lastTS")
|
||||
.addLayer("out", new RnnOutputLayer.Builder().nIn(3+4).nOut(2)
|
||||
.addLayer("out", RnnOutputLayer.builder().nIn(3+4).nOut(2)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build(),
|
||||
"lstm1", "duplicate")
|
||||
|
@@ -598,16 +598,16 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.updater(new NoOp()).graphBuilder()
|
||||
.addInputs("input").setOutputs("out")
|
||||
.addLayer("lstm_a",
|
||||
new LSTM.Builder().nIn(2).nOut(3)
|
||||
LSTM.builder().nIn(2).nOut(3)
|
||||
.activation(Activation.TANH).build(),
|
||||
"input")
|
||||
.addVertex("input_rev", new ReverseTimeSeriesVertex("input"), "input")
|
||||
.addLayer("lstm_b",
|
||||
new LSTM.Builder().nIn(2).nOut(3)
|
||||
LSTM.builder().nIn(2).nOut(3)
|
||||
.activation(Activation.TANH).build(),
|
||||
"input_rev")
|
||||
.addVertex("lstm_b_rev", new ReverseTimeSeriesVertex("input"), "lstm_b")
|
||||
.addLayer("out", new RnnOutputLayer.Builder().nIn(3 + 3).nOut(2)
|
||||
.addLayer("out", RnnOutputLayer.builder().nIn(3 + 3).nOut(2)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build(),
|
||||
"lstm_a", "lstm_b_rev")
|
||||
|
@@ -655,11 +655,11 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).activation(Activation.TANH).graphBuilder().addInputs("i0", "i1", "i2")
|
||||
.addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i0")
|
||||
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i1")
|
||||
.addLayer("d2", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i2")
|
||||
.addLayer("d3", new DenseLayer.Builder().nIn(6).nOut(2).build(), "d0", "d1", "d2")
|
||||
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(2)
|
||||
.addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "i0")
|
||||
.addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "i1")
|
||||
.addLayer("d2", DenseLayer.builder().nIn(2).nOut(2).build(), "i2")
|
||||
.addLayer("d3", DenseLayer.builder().nIn(6).nOut(2).build(), "d0", "d1", "d2")
|
||||
.addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(2)
|
||||
.nOut(2).build(), "d3")
|
||||
.setOutputs("out").build();
|
||||
|
||||
|
@@ -698,11 +698,11 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).activation(Activation.TANH).graphBuilder().addInputs("i0")
|
||||
.addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i0")
|
||||
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0")
|
||||
.addLayer("d2", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0")
|
||||
.addLayer("d3", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0")
|
||||
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(6)
|
||||
.addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "i0")
|
||||
.addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "d0")
|
||||
.addLayer("d2", DenseLayer.builder().nIn(2).nOut(2).build(), "d0")
|
||||
.addLayer("d3", DenseLayer.builder().nIn(2).nOut(2).build(), "d0")
|
||||
.addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(6)
|
||||
.nOut(2).build(), "d1", "d2", "d3")
|
||||
.setOutputs("out").build();
|
||||
|
||||
|
@@ -738,14 +738,14 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).activation(Activation.TANH).graphBuilder().addInputs("i0", "i1", "i2")
|
||||
.addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i0")
|
||||
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i1")
|
||||
.addLayer("d2", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i2")
|
||||
.addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "i0")
|
||||
.addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "i1")
|
||||
.addLayer("d2", DenseLayer.builder().nIn(2).nOut(2).build(), "i2")
|
||||
.addVertex("m", new MergeVertex(), "d0", "d1", "d2")
|
||||
.addLayer("D0", new DenseLayer.Builder().nIn(6).nOut(2).build(), "m")
|
||||
.addLayer("D1", new DenseLayer.Builder().nIn(6).nOut(2).build(), "m")
|
||||
.addLayer("D2", new DenseLayer.Builder().nIn(6).nOut(2).build(), "m")
|
||||
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(6)
|
||||
.addLayer("D0", DenseLayer.builder().nIn(6).nOut(2).build(), "m")
|
||||
.addLayer("D1", DenseLayer.builder().nIn(6).nOut(2).build(), "m")
|
||||
.addLayer("D2", DenseLayer.builder().nIn(6).nOut(2).build(), "m")
|
||||
.addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(6)
|
||||
.nOut(2).build(), "D0", "D1", "D2")
|
||||
.setOutputs("out").build();
|
||||
|
||||
|
@@ -787,18 +787,18 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).activation(Activation.TANH).graphBuilder().addInputs("input")
|
||||
.addLayer("l0", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.addLayer("l0", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.nIn(2).nOut(2).activation(Activation.TANH).build(), "input")
|
||||
.addLayer("l1", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.addLayer("l1", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.nIn(2).nOut(2).activation(Activation.TANH).build(), "l0")
|
||||
.addLayer("l2", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.addLayer("l2", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.nIn(2).nOut(2).activation(Activation.TANH).build(), "l0")
|
||||
.addVertex("m", new MergeVertex(), "l1", "l2")
|
||||
.addLayer("l3", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.addLayer("l3", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.nIn(4).nOut(2).activation(Activation.TANH).build(), "m")
|
||||
.addLayer("l4", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.addLayer("l4", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).padding(0, 0)
|
||||
.nIn(4).nOut(2).activation(Activation.TANH).build(), "m")
|
||||
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE)
|
||||
.addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
|
||||
.activation(Activation.IDENTITY).nOut(2)
|
||||
.build(), "l3", "l4")
|
||||
.setOutputs("out").setInputTypes(InputType.convolutional(inH, inW, 2))
|
||||
|
@@ -839,7 +839,7 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.updater(new NoOp()).graphBuilder()
|
||||
.addInputs("input1", "input2", "input3")
|
||||
.addVertex("stack1", new StackVertex(), "input1", "input2", "input3")
|
||||
.addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5)
|
||||
.addLayer("l1", DenseLayer.builder().nIn(4).nOut(5)
|
||||
.activation(Activation.TANH).build(), "stack1")
|
||||
.addVertex("unstack0", new UnstackVertex(0, 3), "l1")
|
||||
.addVertex("unstack1", new UnstackVertex(1, 3), "l1")
|
||||
|
@@ -847,8 +847,8 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.addVertex("l2-1", new L2Vertex(), "unstack1", "unstack0") // x - x-
|
||||
.addVertex("l2-2", new L2Vertex(), "unstack1", "unstack2") // x - x+
|
||||
.addLayer("lossLayer",
|
||||
new LossLayer.Builder()
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
LossLayer.builder()
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction())
|
||||
.activation(Activation.SOFTMAX).build(),
|
||||
"l2-1", "l2-2")
|
||||
.setOutputs("lossLayer").build();
|
||||
|
@@ -911,9 +911,9 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new GaussianDistribution(0, 1))
|
||||
.updater(new NoOp()).graphBuilder().addInputs("input1")
|
||||
.addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH)
|
||||
.addLayer("l1", DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH)
|
||||
.build(), "input1")
|
||||
.addLayer("cl", new CenterLossOutputLayer.Builder()
|
||||
.addLayer("cl",CenterLossOutputLayer.builder()
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).nIn(5).nOut(numLabels)
|
||||
.alpha(1.0).lambda(lambda).gradientCheck(true)
|
||||
.activation(Activation.SOFTMAX).build(), "l1")
|
||||
|
@@ -975,9 +975,9 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
|
||||
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(3).build())
|
||||
.layer(1, new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(2, new CenterLossOutputLayer.Builder()
|
||||
.layer(0, ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(3).build())
|
||||
.layer(1, GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(2, CenterLossOutputLayer.builder()
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).nOut(numLabels)
|
||||
.alpha(1.0).lambda(lambda).gradientCheck(true)
|
||||
.activation(Activation.SOFTMAX).build())
|
||||
|
@@ -1030,10 +1030,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH).updater(new NoOp()).graphBuilder()
|
||||
.addInputs("in1", "in2").addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1")
|
||||
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2")
|
||||
.addInputs("in1", "in2").addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "in1")
|
||||
.addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "in2")
|
||||
.addVertex("l2", new L2Vertex(), "d0", "d1")
|
||||
.addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(1)
|
||||
.addLayer("out", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(1)
|
||||
.nOut(1).activation(Activation.IDENTITY).build(), "l2")
|
||||
.setOutputs("out").build();
|
||||
|
||||
|
@@ -1083,14 +1083,14 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH).updater(new NoOp()).graphBuilder()
|
||||
.addInputs("in1", "in2")
|
||||
.addLayer("d0", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in1")
|
||||
.addLayer("d1", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in2")
|
||||
.addLayer("d0", DenseLayer.builder().nIn(layerSizes).nOut(layerSizes).build(), "in1")
|
||||
.addLayer("d1", DenseLayer.builder().nIn(layerSizes).nOut(layerSizes).build(), "in2")
|
||||
.addVertex("stack", new StackVertex(), "d0", "d1")
|
||||
.addLayer("d2", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "stack")
|
||||
.addLayer("d2", DenseLayer.builder().nIn(layerSizes).nOut(layerSizes).build(), "stack")
|
||||
.addVertex("u1", new UnstackVertex(0, 2), "d2").addVertex("u2", new UnstackVertex(1, 2), "d2")
|
||||
.addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
|
||||
.addLayer("out1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2)
|
||||
.nIn(layerSizes).nOut(layerSizes).activation(Activation.IDENTITY).build(), "u1")
|
||||
.addLayer("out2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
|
||||
.addLayer("out2", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2)
|
||||
.nIn(layerSizes).nOut(2).activation(Activation.IDENTITY).build(), "u2")
|
||||
.setOutputs("out1", "out2").build();
|
||||
|
||||
|
@@ -1137,17 +1137,17 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH).updater(new NoOp()).graphBuilder()
|
||||
.addInputs("in1", "in2").addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1")
|
||||
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2")
|
||||
.addInputs("in1", "in2").addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "in1")
|
||||
.addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "in2")
|
||||
.addVertex("stack", new StackVertex(), "d0", "d1")
|
||||
.addVertex("u0", new UnstackVertex(0, 2), "stack")
|
||||
.addVertex("u1", new UnstackVertex(1, 2), "stack")
|
||||
.addLayer("out1",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
|
||||
.nOut(2).activation(Activation.IDENTITY).build(),
|
||||
"u0")
|
||||
.addLayer("out2",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
|
||||
.nOut(2).activation(Activation.IDENTITY).build(),
|
||||
"u1")
|
||||
.setOutputs("out1", "out2").build();
|
||||
|
@@ -1198,16 +1198,16 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH).updater(new NoOp()).graphBuilder()
|
||||
.addInputs("in1", "in2")
|
||||
.addLayer("d0", new SimpleRnn.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in1")
|
||||
.addLayer("d1", new SimpleRnn.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in2")
|
||||
.addLayer("d0", SimpleRnn.builder().nIn(layerSizes).nOut(layerSizes).build(), "in1")
|
||||
.addLayer("d1", SimpleRnn.builder().nIn(layerSizes).nOut(layerSizes).build(), "in2")
|
||||
.addVertex("stack", new StackVertex(), "d0", "d1")
|
||||
.addLayer("d2", new SimpleRnn.Builder().nIn(layerSizes).nOut(layerSizes).build(), "stack")
|
||||
.addLayer("d2", SimpleRnn.builder().nIn(layerSizes).nOut(layerSizes).build(), "stack")
|
||||
.addVertex("u1", new UnstackVertex(0, 2), "d2").addVertex("u2", new UnstackVertex(1, 2), "d2")
|
||||
.addLayer("p1", new GlobalPoolingLayer.Builder(PoolingType.AVG).build(), "u1")
|
||||
.addLayer("p2", new GlobalPoolingLayer.Builder(PoolingType.AVG).build(), "u2")
|
||||
.addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
|
||||
.addLayer("p1", GlobalPoolingLayer.builder(PoolingType.AVG).build(), "u1")
|
||||
.addLayer("p2", GlobalPoolingLayer.builder(PoolingType.AVG).build(), "u2")
|
||||
.addLayer("out1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2)
|
||||
.nIn(layerSizes).nOut(layerSizes).activation(Activation.IDENTITY).build(), "p1")
|
||||
.addLayer("out2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
|
||||
.addLayer("out2", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2)
|
||||
.nIn(layerSizes).nOut(2).activation(Activation.IDENTITY).build(), "p2")
|
||||
.setOutputs("out1", "out2").build();
|
||||
|
||||
|
@@ -1260,14 +1260,14 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH).updater(new NoOp()).graphBuilder()
|
||||
.addInputs("in1", "in2").addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1")
|
||||
.addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2")
|
||||
.addInputs("in1", "in2").addLayer("d0", DenseLayer.builder().nIn(2).nOut(2).build(), "in1")
|
||||
.addLayer("d1", DenseLayer.builder().nIn(2).nOut(2).build(), "in2")
|
||||
.addLayer("out1",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
|
||||
.nOut(2).activation(Activation.IDENTITY).build(),
|
||||
"d0")
|
||||
.addLayer("out2",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
|
||||
.nOut(2).activation(Activation.IDENTITY).build(),
|
||||
"d1")
|
||||
.setOutputs("out1", "out2").build();
|
||||
|
@@ -1320,10 +1320,10 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.activation(Activation.TANH).updater(new NoOp()).graphBuilder()
|
||||
.addInputs("in1").addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(3).build(), "in1")
|
||||
.addInputs("in1").addLayer("d1", DenseLayer.builder().nIn(2).nOut(3).build(), "in1")
|
||||
.addVertex("norm", new L2NormalizeVertex(definition,L2NormalizeVertex.DEFAULT_EPS), "d1")
|
||||
.addLayer("out1",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(3)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nIn(3)
|
||||
.nOut(2).activation(Activation.IDENTITY).build(),
|
||||
"norm")
|
||||
.setOutputs("out1").build();
|
||||
|
@@ -1370,11 +1370,11 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH).updater(new NoOp()).graphBuilder()
|
||||
.addInputs("in1")
|
||||
.addLayer("d1", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(2).build(),
|
||||
.addLayer("d1", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(2).build(),
|
||||
"in1")
|
||||
.addVertex("norm", new L2NormalizeVertex(), "d1")
|
||||
.addLayer("out1",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nOut(2)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.L2).nOut(2)
|
||||
.activation(Activation.IDENTITY).build(),
|
||||
"norm")
|
||||
.setOutputs("out1").setInputTypes(InputType.convolutional(h, w, dIn)).build();
|
||||
|
@@ -1420,9 +1420,9 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345L)
|
||||
.updater(new NoOp()).graphBuilder().addInputs("in")
|
||||
.addLayer("0", new EmbeddingLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
|
||||
.addLayer("0", EmbeddingLayer.builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.TANH).build(), "in")
|
||||
.addLayer("1", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
|
||||
.addLayer("1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(3).nOut(3)
|
||||
.activation(Activation.SOFTMAX).build(), "0")
|
||||
.setOutputs("1").build();
|
||||
|
||||
|
|
|
@@ -119,10 +119,10 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345L)
|
||||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.list()
|
||||
.layer(0, new SimpleRnn.Builder().nIn(nIn).nOut(layerSize)
|
||||
|
||||
.layer(0, SimpleRnn.builder().nIn(nIn).nOut(layerSize)
|
||||
.weightInit(new NormalDistribution(0, 1)).build())
|
||||
.layer(1, new RnnOutputLayer.Builder(s.lf).activation(s.act).nIn(layerSize).nOut(s.nOut)
|
||||
.layer(1, RnnOutputLayer.builder().lossFunction(s.lf).activation(s.act).nIn(layerSize).nOut(s.nOut)
|
||||
.weightInit(new NormalDistribution(0, 1)).build())
|
||||
.build();
|
||||
MultiLayerNetwork mln = new MultiLayerNetwork(conf);
|
||||
|
@@ -161,10 +161,10 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.dataType(DataType.DOUBLE)
|
||||
.dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
|
||||
.layer(0, new SimpleRnn.Builder().nIn(nIn).nOut(2).activation(Activation.TANH).build())
|
||||
.layer(1, new GravesBidirectionalLSTM.Builder().nIn(2).nOut(layerSize)
|
||||
.layer(0, SimpleRnn.builder().nIn(nIn).nOut(2).activation(Activation.TANH).build())
|
||||
.layer(1, GravesBidirectionalLSTM.builder().nIn(2).nOut(layerSize)
|
||||
.activation(Activation.TANH).build())
|
||||
.layer(2, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(2, RnnOutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build())
|
||||
.build();
|
||||
|
||||
|
@@ -241,9 +241,9 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.dist(new NormalDistribution(0, 1)).seed(12345)
|
||||
.list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.layer(0, DenseLayer.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
|
||||
.layer(1, OutputLayer.builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
|
||||
.activation(a).build())
|
||||
.validateOutputLayerConfig(false)
|
||||
.build();
|
||||
|
@@ -335,9 +335,9 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.dist(new NormalDistribution(0, 1)).seed(12345)
|
||||
.list()
|
||||
.layer(0, new SimpleRnn.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.layer(0, SimpleRnn.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
|
||||
.layer(1, RnnOutputLayer.builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
|
||||
.activation(a).build())
|
||||
.validateOutputLayerConfig(false)
|
||||
.inputType(InputType.recurrent(nIn,tsLength, RNNFormat.NCW))
|
||||
|
@@ -368,9 +368,9 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.dist(new NormalDistribution(0, 2)).seed(12345)
|
||||
.graphBuilder().addInputs("in")
|
||||
.addLayer("0", new SimpleRnn.Builder().nOut(layerSize)
|
||||
.addLayer("0", SimpleRnn.builder().nOut(layerSize)
|
||||
.activation(Activation.TANH).build(), "in")
|
||||
.addLayer("1", new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
|
||||
.addLayer("1", RnnOutputLayer.builder().nIn(layerSize).nOut(nOut).lossFunction(lf)
|
||||
.activation(a).build(), "0")
|
||||
.setOutputs("1").validateOutputLayerConfig(false)
|
||||
.setInputTypes(InputType.recurrent(nIn,tsLength,RNNFormat.NCW))
|
||||
|
@@ -401,9 +401,9 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
|
|||
.weightInit(new NormalDistribution(0,2))
|
||||
.updater(new NoOp())
|
||||
.list()
|
||||
.layer(new LSTM.Builder().nIn(3).nOut(3).build())
|
||||
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(new OutputLayer.Builder().nIn(3).nOut(3).activation(Activation.SOFTMAX).build())
|
||||
.layer(LSTM.builder().nIn(3).nOut(3).build())
|
||||
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build())
|
||||
.layer(OutputLayer.builder().nIn(3).nOut(3).activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.recurrent(3))
|
||||
.build();
|
||||
|
||||
|
@@ -457,9 +457,9 @@ public class GradientCheckTestsMasking extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.layer("0", new LSTM.Builder().nIn(3).nOut(3).build(), "in")
|
||||
.layer("1", new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build(), "0")
|
||||
.layer("out", new OutputLayer.Builder().nIn(3).nOut(3).activation(Activation.SOFTMAX).build(), "1")
|
||||
.layer("0", LSTM.builder().nIn(3).nOut(3).build(), "in")
|
||||
.layer("1", GlobalPoolingLayer.builder().poolingType(PoolingType.AVG).build(), "0")
|
||||
.layer("out", OutputLayer.builder().nIn(3).nOut(3).activation(Activation.SOFTMAX).build(), "1")
|
||||
.setOutputs("out")
|
||||
.setInputTypes(InputType.recurrent(3))
|
||||
.build();
|
||||
|
|
|
@@ -72,10 +72,10 @@ public class LRNGradientCheckTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.seed(12345L)
|
||||
.dist(new NormalDistribution(0, 2)).list()
|
||||
.layer(0, new ConvolutionLayer.Builder().nOut(6).kernelSize(2, 2).stride(1, 1)
|
||||
.layer(0, ConvolutionLayer.builder().nOut(6).kernelSize(2, 2).stride(1, 1)
|
||||
.activation(Activation.TANH).build())
|
||||
.layer(1, new LocalResponseNormalization.Builder().build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(1, LocalResponseNormalization.builder().build())
|
||||
.layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(nOut).build())
|
||||
.inputType(InputType.convolutional(hw, hw, depth));
|
||||
|
||||
|
|
|
@@ -73,17 +73,17 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
|
|||
LayerConfiguration l0;
|
||||
LayerConfiguration l1;
|
||||
if (graves) {
|
||||
l0 = new GravesLSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.SIGMOID)
|
||||
l0 = GravesLSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.SIGMOID)
|
||||
.dist(new NormalDistribution(0, 1.0))
|
||||
.updater(new NoOp()).build();
|
||||
l1 = new GravesLSTM.Builder().nIn(layerSize).nOut(layerSize).activation(Activation.SIGMOID)
|
||||
l1 = GravesLSTM.builder().nIn(layerSize).nOut(layerSize).activation(Activation.SIGMOID)
|
||||
.dist(new NormalDistribution(0, 1.0))
|
||||
.updater(new NoOp()).build();
|
||||
} else {
|
||||
l0 = new LSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.SIGMOID)
|
||||
l0 = LSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.SIGMOID)
|
||||
.dist(new NormalDistribution(0, 1.0))
|
||||
.updater(new NoOp()).build();
|
||||
l1 = new LSTM.Builder().nIn(layerSize).nOut(layerSize).activation(Activation.SIGMOID)
|
||||
l1 = LSTM.builder().nIn(layerSize).nOut(layerSize).activation(Activation.SIGMOID)
|
||||
.dist(new NormalDistribution(0, 1.0))
|
||||
.updater(new NoOp()).build();
|
||||
}
|
||||
|
@@ -94,7 +94,7 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
|
|||
.list()
|
||||
.layer(0, l0).layer(1,
|
||||
l1)
|
||||
.layer(2, new RnnOutputLayer.Builder(LossFunction.MCXENT)
|
||||
.layer(2, RnnOutputLayer.builder().lossFunction(LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut)
|
||||
|
||||
.dist(new NormalDistribution(0, 1.0)).updater(new NoOp())
|
||||
|
@@ -196,14 +196,14 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
|
|||
|
||||
LayerConfiguration layer;
|
||||
if (graves) {
|
||||
layer = new GravesLSTM.Builder().nIn(nIn).nOut(layerSize).activation(afn).build();
|
||||
layer = GravesLSTM.builder().nIn(nIn).nOut(layerSize).activation(afn).build();
|
||||
} else {
|
||||
layer = new LSTM.Builder().nIn(nIn).nOut(layerSize).activation(afn).build();
|
||||
layer = LSTM.builder().nIn(nIn).nOut(layerSize).activation(afn).build();
|
||||
}
|
||||
|
||||
NeuralNetConfiguration.NeuralNetConfigurationBuilder conf2 = (NeuralNetConfigurationBuilder) conf
|
||||
.layer(0, layer)
|
||||
.layer(1, new RnnOutputLayer.Builder(lf).activation(outputActivation)
|
||||
.layer(1, RnnOutputLayer.builder(lf).activation(outputActivation)
|
||||
.nIn(layerSize).nOut(nOut).build());
|
||||
|
||||
MultiLayerNetwork mln = new MultiLayerNetwork(conf2.build());
|
||||
|
@@ -251,16 +251,16 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
|
|||
|
||||
LayerConfiguration layer;
|
||||
if (graves) {
|
||||
layer = new GravesLSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH).build();
|
||||
layer = GravesLSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH).build();
|
||||
} else {
|
||||
layer = new LSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH).build();
|
||||
layer = LSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH).build();
|
||||
}
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345L)
|
||||
.dataType(DataType.DOUBLE)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.updater(new NoOp()).list().layer(0, layer)
|
||||
.layer(1, new RnnOutputLayer.Builder(LossFunction.MCXENT).activation(Activation.SOFTMAX)
|
||||
.layer(1, RnnOutputLayer.builder().lossFunction(LossFunction.MCXENT).activation(Activation.SOFTMAX)
|
||||
.nIn(layerSize).nOut(nOut).build())
|
||||
.build();
|
||||
MultiLayerNetwork mln = new MultiLayerNetwork(conf);
|
||||
|
@@ -324,11 +324,11 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.layer(0,
|
||||
new GravesBidirectionalLSTM.Builder().nIn(nIn).nOut(layerSize)
|
||||
GravesBidirectionalLSTM.builder().nIn(nIn).nOut(layerSize)
|
||||
.weightInit(new NormalDistribution(0, 1))
|
||||
.activation(afn)
|
||||
.build())
|
||||
.layer(1, new RnnOutputLayer.Builder(lf).activation(outputActivation).nIn(layerSize)
|
||||
.layer(1, RnnOutputLayer.builder(lf).activation(outputActivation).nIn(layerSize)
|
||||
.nOut(nOut)
|
||||
.dist(new NormalDistribution(0, 1)).updater(new NoOp()).build())
|
||||
.build();
|
||||
|
@@ -383,12 +383,12 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345L)
|
||||
.dataType(DataType.DOUBLE)
|
||||
.list()
|
||||
.layer(0, new GravesBidirectionalLSTM.Builder().nIn(nIn).nOut(layerSize)
|
||||
.layer(0, GravesBidirectionalLSTM.builder().nIn(nIn).nOut(layerSize)
|
||||
|
||||
.dist(new NormalDistribution(0, 1)).updater(
|
||||
Updater.NONE)
|
||||
.build())
|
||||
.layer(1, new RnnOutputLayer.Builder(LossFunction.MCXENT).activation(Activation.SOFTMAX)
|
||||
.layer(1, RnnOutputLayer.builder().lossFunction(LossFunction.MCXENT).activation(Activation.SOFTMAX)
|
||||
.nIn(layerSize).nOut(nOut)
|
||||
.dist(new NormalDistribution(0, 1)).updater(new NoOp()).build())
|
||||
.build();
|
||||
|
@@ -432,13 +432,13 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new NoOp()).seed(12345)
|
||||
.dataType(DataType.DOUBLE)
|
||||
.dist(new UniformDistribution(-2, 2)).list()
|
||||
.layer(0, new ConvolutionLayer.Builder(3, 3).nIn(2).nOut(3).stride(1, 1)
|
||||
.layer(0, ConvolutionLayer.builder(3, 3).nIn(2).nOut(3).stride(1, 1)
|
||||
.activation(Activation.TANH).build()) //Out: (10-5)/1+1 = 6 -> 6x6x5
|
||||
.layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
|
||||
.layer(1, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
|
||||
.stride(1, 1).build()) //Out: (6-2)/1+1 = 5 -> 5x5x5
|
||||
.layer(2, new DenseLayer.Builder().nIn(27).nOut(4).activation(Activation.TANH).build())
|
||||
.layer(3, new GravesLSTM.Builder().nIn(4).nOut(3).activation(Activation.TANH).build())
|
||||
.layer(4, new RnnOutputLayer.Builder().lossFunction(LossFunction.MCXENT).nIn(3).nOut(nClasses)
|
||||
.layer(2, DenseLayer.builder().nIn(27).nOut(4).activation(Activation.TANH).build())
|
||||
.layer(3, GravesLSTM.builder().nIn(4).nOut(3).activation(Activation.TANH).build())
|
||||
.layer(4, RnnOutputLayer.builder().lossFunction(LossFunction.MCXENT).nIn(3).nOut(nClasses)
|
||||
.activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutional(6, 6, 2)).build();
|
||||
|
||||
|
|
|
@@ -187,8 +187,8 @@ public class LossFunctionGradientCheck extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345)
|
||||
.updater(new NoOp())
|
||||
.dist(new UniformDistribution(-2, 2)).list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build())
|
||||
.layer(1, new OutputLayer.Builder().lossFunction(lossFunctions[i])
|
||||
.layer(0, DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH).build())
|
||||
.layer(1, OutputLayer.builder().lossFunction(lossFunctions[i])
|
||||
.activation(outputActivationFn[i]).nIn(4).nOut(nOut[i]).build())
|
||||
.validateOutputLayerConfig(false)
|
||||
.build();
|
||||
|
@@ -351,9 +351,9 @@ public class LossFunctionGradientCheck extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345)
|
||||
.updater(new NoOp())
|
||||
.dist(new UniformDistribution(-2, 2)).list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(4).nOut(nOut[i]).activation(Activation.TANH)
|
||||
.layer(0, DenseLayer.builder().nIn(4).nOut(nOut[i]).activation(Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new LossLayer.Builder().lossFunction(lossFunctions[i])
|
||||
.layer(1, LossLayer.builder().lossFunction(lossFunctions[i])
|
||||
.activation(outputActivationFn[i]).build())
|
||||
.validateOutputLayerConfig(false)
|
||||
.build();
|
||||
|
@@ -361,7 +361,7 @@ public class LossFunctionGradientCheck extends BaseDL4JTest {
|
|||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
assertSame(((LossLayer) net.getLayer(1).getLayerConfiguration()).getLossFn().getClass(), lossFunctions[i]
|
||||
assertSame(((LossLayer) net.getLayer(1).getLayerConfiguration()).getLossFunction().getClass(), lossFunctions[i]
|
||||
.getClass());
|
||||
|
||||
INDArray[] inOut = getFeaturesAndLabels(lossFunctions[i], minibatchSizes[j], 4, nOut[i], 12345);
|
||||
|
@@ -655,9 +655,9 @@ public class LossFunctionGradientCheck extends BaseDL4JTest {
|
|||
// .dist(new UniformDistribution(-3, 3))
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH)
|
||||
.layer(0, DenseLayer.builder().nIn(4).nOut(4).activation(Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder().lossFunction(lossFunctions[i])
|
||||
.layer(1, OutputLayer.builder().lossFunction(lossFunctions[i])
|
||||
.activation(outputActivationFn[i]).nIn(4).nOut(3).build())
|
||||
.validateOutputLayerConfig(false)
|
||||
.build();
|
||||
|
|
|
@@ -73,19 +73,19 @@ public class NoBiasGradientCheckTests extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.seed(12345L)
|
||||
.list()
|
||||
.layer(0, new DenseLayer.Builder().nIn(nIn).nOut(layerSize)
|
||||
.layer(0, DenseLayer.builder().nIn(nIn).nOut(layerSize)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH)
|
||||
.hasBias(true) //ILayer 0: Always have a bias
|
||||
.build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(layerSize).nOut(layerSize)
|
||||
.layer(1, DenseLayer.builder().nIn(layerSize).nOut(layerSize)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH)
|
||||
.hasBias(denseHasBias)
|
||||
.build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunction.MCXENT)
|
||||
.layer(2, OutputLayer.builder().lossFunction(LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
|
@@ -144,12 +144,12 @@ public class NoBiasGradientCheckTests extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.seed(12345L)
|
||||
.list()
|
||||
.layer(0, new LSTM.Builder().nIn(nIn).nOut(layerSize)
|
||||
.layer(0, LSTM.builder().nIn(nIn).nOut(layerSize)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH)
|
||||
.build())
|
||||
.layer(1, new RnnOutputLayer.Builder(LossFunction.MCXENT)
|
||||
.layer(1, RnnOutputLayer.builder().lossFunction(LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
|
@@ -205,13 +205,13 @@ public class NoBiasGradientCheckTests extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.seed(12345L)
|
||||
.list()
|
||||
.layer(0, new EmbeddingLayer.Builder().nIn(nIn).nOut(layerSize)
|
||||
.layer(0, EmbeddingLayer.builder().nIn(nIn).nOut(layerSize)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(Activation.TANH)
|
||||
.hasBias(embeddingHasBias)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder(LossFunction.MCXENT)
|
||||
.layer(1, OutputLayer.builder().lossFunction(LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
|
@@ -271,17 +271,17 @@ public class NoBiasGradientCheckTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.list()
|
||||
.layer(new ConvolutionLayer.Builder(kernel,
|
||||
.layer(ConvolutionLayer.builder(kernel,
|
||||
stride, padding).nIn(inputDepth)
|
||||
.hasBias(false)
|
||||
.nOut(3).build())//output: (5-2+0)/1+1 = 4
|
||||
.layer(new SubsamplingLayer.Builder(PoolingType.MAX)
|
||||
.layer(SubsamplingLayer.builder(PoolingType.MAX)
|
||||
.kernelSize(kernel).stride(stride).padding(padding)
|
||||
.pnorm(pNorm).build()) //output: (4-2+0)/1+1 =3 -> 3x3x3
|
||||
.layer(new ConvolutionLayer.Builder(kernel, stride, padding)
|
||||
.layer(ConvolutionLayer.builder(kernel, stride, padding)
|
||||
.hasBias(cnnHasBias)
|
||||
.nOut(2).build()) //Output: (3-2+0)/1+1 = 2
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.nOut(4).build())
|
||||
.inputType(InputType.convolutionalFlat(height, width, inputDepth))
|
||||
|
|
|
@@ -121,10 +121,10 @@ public class OutputLayerGradientChecks extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.list()
|
||||
.layer(new LSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.layer(LSTM.builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 1.0))
|
||||
.updater(new NoOp()).build())
|
||||
.layer(new RnnLossLayer.Builder(lf)
|
||||
.layer(RnnLossLayer.builder().lossFunction(lf)
|
||||
.activation(oa)
|
||||
.build())
|
||||
.validateOutputLayerConfig(false).build();
|
||||
|
@@ -228,10 +228,10 @@ public class OutputLayerGradientChecks extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.convolutionMode(ConvolutionMode.Same)
|
||||
.list()
|
||||
.layer(new ConvolutionLayer.Builder().nIn(dIn).nOut(dOut).activation(Activation.TANH)
|
||||
.layer(ConvolutionLayer.builder().nIn(dIn).nOut(dOut).activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 1.0))
|
||||
.updater(new NoOp()).build())
|
||||
.layer(new CnnLossLayer.Builder(lf)
|
||||
.layer(CnnLossLayer.builder().lossFunction(lf)
|
||||
.activation(oa)
|
||||
.build())
|
||||
.validateOutputLayerConfig(false).build();
|
||||
|
@@ -375,11 +375,11 @@ public class OutputLayerGradientChecks extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.convolutionMode(ConvolutionMode.Same)
|
||||
.list()
|
||||
.layer(new Convolution3D.Builder().nIn(chIn).nOut(chOut).activation(Activation.TANH)
|
||||
.layer(Convolution3D.builder().nIn(chIn).nOut(chOut).activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 1.0))
|
||||
.dataFormat(dataFormat)
|
||||
.updater(new NoOp()).build())
|
||||
.layer(new Cnn3DLossLayer.Builder(dataFormat)
|
||||
.layer(Cnn3DLossLayer.builder().dataFormat(dataFormat)
|
||||
.lossFunction(lf)
|
||||
.activation(oa)
|
||||
.build())
|
||||
|
|
|
@@ -112,12 +112,12 @@ public class RnnGradientChecks extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.list()
|
||||
.layer(new LSTM.Builder().nIn(nIn).nOut(3).build())
|
||||
.layer(new Bidirectional(m,
|
||||
.layer(LSTM.builder().nIn(nIn).nOut(3).build())
|
||||
.layer(Bidirectional.builder(m,
|
||||
(simple ?
|
||||
new SimpleRnn.Builder().nIn(3).nOut(3).hasLayerNorm(hasLayerNorm).build() :
|
||||
new LSTM.Builder().nIn(3).nOut(3).build())))
|
||||
.layer(new RnnOutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX).build())
|
||||
SimpleRnn.builder().nIn(3).nOut(3).hasLayerNorm(hasLayerNorm).build() :
|
||||
LSTM.builder().nIn(3).nOut(3).build())).build())
|
||||
.layer(RnnOutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
|
||||
|
||||
|
@@ -194,9 +194,9 @@ public class RnnGradientChecks extends BaseDL4JTest {
|
|||
.l1(l1s[l])
|
||||
.l2(l2s[l])
|
||||
.list()
|
||||
.layer(new SimpleRnn.Builder().nIn(nIn).nOut(layerSize).hasLayerNorm(hasLayerNorm).build())
|
||||
.layer(new SimpleRnn.Builder().nIn(layerSize).nOut(layerSize).hasLayerNorm(hasLayerNorm).build())
|
||||
.layer(new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut)
|
||||
.layer(SimpleRnn.builder().nIn(nIn).nOut(layerSize).hasLayerNorm(hasLayerNorm).build())
|
||||
.layer(SimpleRnn.builder().nIn(layerSize).nOut(layerSize).hasLayerNorm(hasLayerNorm).build())
|
||||
.layer(RnnOutputLayer.builder().nIn(layerSize).nOut(nOut)
|
||||
.activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.build())
|
||||
.build();
|
||||
|
@@ -267,12 +267,12 @@ public class RnnGradientChecks extends BaseDL4JTest {
|
|||
.activation(Activation.TANH)
|
||||
.updater(new NoOp())
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.list()
|
||||
.layer(simple ? new SimpleRnn.Builder().nOut(layerSize).hasLayerNorm(hasLayerNorm).build() :
|
||||
new LSTM.Builder().nOut(layerSize).build())
|
||||
.layer(new LastTimeStep(simple ? new SimpleRnn.Builder().nOut(layerSize).hasLayerNorm(hasLayerNorm).build() :
|
||||
new LSTM.Builder().nOut(layerSize).build()))
|
||||
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
|
||||
.layer(simple ? SimpleRnn.builder().nOut(layerSize).hasLayerNorm(hasLayerNorm).build() :
|
||||
LSTM.builder().nOut(layerSize).build())
|
||||
.layer(LastTimeStep.builder().underlying(simple ? SimpleRnn.builder().nOut(layerSize).hasLayerNorm(hasLayerNorm).build() :
|
||||
LSTM.builder().nOut(layerSize).build()).build())
|
||||
.layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.inputType(InputType.recurrent(nIn))
|
||||
.build();
|
||||
|
@@ -334,9 +334,9 @@ public class RnnGradientChecks extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.list()
|
||||
.layer(new LSTM.Builder().nOut(layerSize).build())
|
||||
.layer(new TimeDistributed(new DenseLayer.Builder().nOut(layerSize).activation(Activation.SOFTMAX).build()))
|
||||
.layer(new RnnOutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.layer(LSTM.builder().nOut(layerSize).build())
|
||||
.layer(TimeDistributed.builder().underlying(DenseLayer.builder().nOut(layerSize).activation(Activation.SOFTMAX).build()).build())
|
||||
.layer(RnnOutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.inputType(InputType.recurrent(nIn))
|
||||
.build();
|
||||
|
|
|
@@ -132,27 +132,27 @@ public class UtilLayerGradientChecks extends BaseDL4JTest {
|
|||
InputType it;
|
||||
switch (inputRank){
|
||||
case 2:
|
||||
l1 = new DenseLayer.Builder().nOut(3).build();
|
||||
l2 = new DenseLayer.Builder().nOut(3).build();
|
||||
l3 = new OutputLayer.Builder().nOut(3).lossFunction(LossFunctions.LossFunction.MSE)
|
||||
l1 = DenseLayer.builder().nOut(3).build();
|
||||
l2 = DenseLayer.builder().nOut(3).build();
|
||||
l3 = OutputLayer.builder().nOut(3).lossFunction(LossFunctions.LossFunction.MSE)
|
||||
.activation(Activation.TANH).build();
|
||||
it = InputType.feedForward(3);
|
||||
break;
|
||||
case 3:
|
||||
l1 = new SimpleRnn.Builder().nIn(3).nOut(3).activation(Activation.TANH).build();
|
||||
l2 = new SimpleRnn.Builder().nIn(3).nOut(3).activation(Activation.TANH).build();
|
||||
l3 = new RnnOutputLayer.Builder().nIn(3).nOut(3).lossFunction(LossFunctions.LossFunction.SQUARED_LOSS)
|
||||
l1 = SimpleRnn.builder().nIn(3).nOut(3).activation(Activation.TANH).build();
|
||||
l2 = SimpleRnn.builder().nIn(3).nOut(3).activation(Activation.TANH).build();
|
||||
l3 = RnnOutputLayer.builder().nIn(3).nOut(3).lossFunction(LossFunctions.LossFunction.SQUARED_LOSS)
|
||||
.activation(Activation.IDENTITY).build();
|
||||
it = InputType.recurrent(3);
|
||||
break;
|
||||
case 4:
|
||||
l1 = new ConvolutionLayer.Builder().nOut(5).convolutionMode(ConvolutionMode.Truncate)
|
||||
l1 = ConvolutionLayer.builder().nOut(5).convolutionMode(ConvolutionMode.Truncate)
|
||||
.stride(1,1).kernelSize(2,2).padding(0,0)
|
||||
.build();
|
||||
l2 = new ConvolutionLayer.Builder().nOut(5).convolutionMode(ConvolutionMode.Truncate)
|
||||
l2 = ConvolutionLayer.builder().nOut(5).convolutionMode(ConvolutionMode.Truncate)
|
||||
.stride(1,1).kernelSize(2,2).padding(0,0)
|
||||
.build();
|
||||
l3 = new OutputLayer.Builder().nOut(5).lossFunction(LossFunctions.LossFunction.SQUARED_LOSS)
|
||||
l3 = OutputLayer.builder().nOut(5).lossFunction(LossFunctions.LossFunction.SQUARED_LOSS)
|
||||
.activation(Activation.IDENTITY)
|
||||
.build();
|
||||
it = InputType.convolutional(5,5,1);
|
||||
|
@@ -162,19 +162,19 @@ public class UtilLayerGradientChecks extends BaseDL4JTest {
|
|||
|
||||
}
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.updater(new NoOp())
|
||||
.activation(Activation.TANH)
|
||||
.dataType(DataType.DOUBLE)
|
||||
.dist(new NormalDistribution(0,2))
|
||||
.list()
|
||||
.layer(l1)
|
||||
.layer(new MaskLayer())
|
||||
.layer(l2)
|
||||
.layer(l3)
|
||||
.inputType(it)
|
||||
.build();
|
||||
|
||||
NeuralNetConfiguration conf =
|
||||
NeuralNetConfiguration.builder()
|
||||
.updater(new NoOp())
|
||||
.activation(Activation.TANH)
|
||||
.dataType(DataType.DOUBLE)
|
||||
.dist(new NormalDistribution(0, 2))
|
||||
.list()
|
||||
.layer(l1)
|
||||
.layer(MaskLayer.builder().build())
|
||||
.layer(l2)
|
||||
.layer(l3)
|
||||
.inputType(it)
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@@ -201,14 +201,14 @@ public class UtilLayerGradientChecks extends BaseDL4JTest {
|
|||
.seed(12345)
|
||||
.updater(Updater.NONE.getIUpdaterWithDefaultConfig())
|
||||
.list()
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10)
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10)
|
||||
.activation(Activation.TANH).weightInit(WeightInit.XAVIER).build())
|
||||
.layer(new FrozenLayerWithBackprop(new DenseLayer.Builder().nIn(10).nOut(10)
|
||||
.activation(Activation.TANH).weightInit(WeightInit.XAVIER).build()))
|
||||
.layer(new FrozenLayerWithBackprop(
|
||||
new DenseLayer.Builder().nIn(10).nOut(10).activation(Activation.TANH)
|
||||
.weightInit(WeightInit.XAVIER).build()))
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(FrozenLayerWithBackprop.builder().underlying(DenseLayer.builder().nIn(10).nOut(10)
|
||||
.activation(Activation.TANH).weightInit(WeightInit.XAVIER).build()).build())
|
||||
.layer(FrozenLayerWithBackprop.builder().underlying(
|
||||
DenseLayer.builder().nIn(10).nOut(10).activation(Activation.TANH)
|
||||
.weightInit(WeightInit.XAVIER).build()).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf2);
|
||||
|
|
|
@@ -99,14 +99,14 @@ public class VaeGradientCheckTests extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.l2Bias(biasL2[i]).l1Bias(biasL1[i])
|
||||
.updater(new NoOp()).seed(12345L).list()
|
||||
.layer(0, new VariationalAutoencoder.Builder().nIn(4)
|
||||
.layer(0, VariationalAutoencoder.builder().nIn(4)
|
||||
.nOut(3).encoderLayerSizes(encoderSizes)
|
||||
.decoderLayerSizes(decoderSizes)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
.activation(afn)
|
||||
.build())
|
||||
.layer(1, new OutputLayer.Builder(lf)
|
||||
.layer(1, OutputLayer.builder(lf)
|
||||
.activation(outputActivation).nIn(3).nOut(3)
|
||||
|
||||
.dist(new NormalDistribution(0, 1))
|
||||
|
@@ -173,7 +173,7 @@ public class VaeGradientCheckTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.l1(l1).l2Bias(biasL2[i]).l1Bias(biasL1[i]).updater(new NoOp())
|
||||
.seed(12345L).weightInit(WeightInit.XAVIER).list()
|
||||
.layer(0, new VariationalAutoencoder.Builder().nIn(4).nOut(3)
|
||||
.layer(0, VariationalAutoencoder.builder().nIn(4).nOut(3)
|
||||
.encoderLayerSizes(encoderSizes).decoderLayerSizes(decoderSizes)
|
||||
.pzxActivationFunction(pzxAfn)
|
||||
.reconstructionDistribution(
|
||||
|
@@ -263,7 +263,7 @@ public class VaeGradientCheckTests extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.seed(12345L).dist(new NormalDistribution(0, 1))
|
||||
.list().layer(0,
|
||||
new VariationalAutoencoder.Builder().nIn(inOutSize).nOut(3)
|
||||
VariationalAutoencoder.builder().nIn(inOutSize).nOut(3)
|
||||
.encoderLayerSizes(4).decoderLayerSizes(3)
|
||||
.pzxActivationFunction(Activation.TANH)
|
||||
.reconstructionDistribution(
|
||||
|
@@ -306,7 +306,7 @@ public class VaeGradientCheckTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.updater(new NoOp())
|
||||
.seed(12345L).weightInit(WeightInit.XAVIER).list()
|
||||
.layer(0, new VariationalAutoencoder.Builder().nIn(4).nOut(3).encoderLayerSizes(2, 3)
|
||||
.layer(0, VariationalAutoencoder.builder().nIn(4).nOut(3).encoderLayerSizes(2, 3)
|
||||
.decoderLayerSizes(4, 3).pzxActivationFunction(Activation.TANH)
|
||||
.reconstructionDistribution(
|
||||
new GaussianReconstructionDistribution(Activation.TANH))
|
||||
|
|
|
@@ -115,12 +115,11 @@ public class YoloGradientCheckTests extends BaseDL4JTest {
|
|||
.activation(a)
|
||||
.l1(l1[i]).l2(l2[i])
|
||||
.convolutionMode(ConvolutionMode.Same)
|
||||
.list()
|
||||
.layer(new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1)
|
||||
.dataFormat(format)
|
||||
.layer(ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1)
|
||||
.convFormat(format)
|
||||
.nIn(depthIn).nOut(yoloDepth).build())//output: (5-2+0)/1+1 = 4
|
||||
.layer(new Yolo2OutputLayer.Builder()
|
||||
.boundingBoxPriors(bbPrior)
|
||||
.layer(Yolo2OutputLayer.builder()
|
||||
.boundingBoxes(bbPrior)
|
||||
.build())
|
||||
.inputType(InputType.convolutional(h, w, depthIn, format))
|
||||
.build();
|
||||
|
@@ -234,11 +233,11 @@ public class YoloGradientCheckTests extends BaseDL4JTest {
|
|||
.dist(new GaussianDistribution(0,0.1))
|
||||
.seed(12345)
|
||||
.list()
|
||||
.layer(new ConvolutionLayer.Builder().kernelSize(3,3).stride(1,1).nOut(4).build())
|
||||
.layer(new SubsamplingLayer.Builder().kernelSize(2,2).stride(2,2).build())
|
||||
.layer(new ConvolutionLayer.Builder().activation(Activation.IDENTITY).kernelSize(3,3).stride(1,1).nOut(depthOut).build())
|
||||
.layer(new Yolo2OutputLayer.Builder()
|
||||
.boundingBoxPriors(bbPriors)
|
||||
.layer(ConvolutionLayer.builder().kernelSize(3,3).stride(1,1).nOut(4).build())
|
||||
.layer(SubsamplingLayer.builder().kernelSize(2,2).stride(2,2).build())
|
||||
.layer(ConvolutionLayer.builder().activation(Activation.IDENTITY).kernelSize(3,3).stride(1,1).nOut(depthOut).build())
|
||||
.layer(Yolo2OutputLayer.builder()
|
||||
.boundingBoxes(bbPriors)
|
||||
.build())
|
||||
.inputType(InputType.convolutional(h,w,c))
|
||||
.build();
|
||||
|
|
|
@@ -62,9 +62,9 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 1)).updater(new NoOp())
|
||||
.graphBuilder().addInputs("input")
|
||||
.appendLayer("firstLayer",
|
||||
new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build())
|
||||
DenseLayer.builder().nIn(4).nOut(5).activation(Activation.TANH).build())
|
||||
.addLayer("outputLayer",
|
||||
new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
|
||||
"firstLayer")
|
||||
.setOutputs("outputLayer").build();
|
||||
|
@@ -83,20 +83,20 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.graphBuilder().addInputs("input")
|
||||
.addLayer("cnn1",
|
||||
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
.build(),
|
||||
"input")
|
||||
.addLayer("cnn2",
|
||||
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
.build(),
|
||||
"input")
|
||||
.addLayer("max1",
|
||||
new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
|
||||
SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX)
|
||||
.kernelSize(2, 2).build(),
|
||||
"cnn1", "cnn2")
|
||||
.addLayer("dnn1", new DenseLayer.Builder().nOut(7).build(), "max1")
|
||||
.addLayer("max2", new SubsamplingLayer.Builder().build(), "max1")
|
||||
.addLayer("output", new OutputLayer.Builder().nIn(7).nOut(10).activation(Activation.SOFTMAX).build(), "dnn1",
|
||||
.addLayer("dnn1", DenseLayer.builder().nOut(7).build(), "max1")
|
||||
.addLayer("max2", SubsamplingLayer.builder().build(), "max1")
|
||||
.addLayer("output", OutputLayer.builder().nIn(7).nOut(10).activation(Activation.SOFTMAX).build(), "dnn1",
|
||||
"max2")
|
||||
.setOutputs("output")
|
||||
.inputPreProcessor("cnn1", new FeedForwardToCnnPreProcessor(32, 32, 3))
|
||||
|
@@ -119,20 +119,20 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
.graphBuilder().addInputs("input1", "input2")
|
||||
.addLayer("cnn1",
|
||||
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
.build(),
|
||||
"input1")
|
||||
.addLayer("cnn2",
|
||||
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
.build(),
|
||||
"input2")
|
||||
.addVertex("merge1", new MergeVertex(), "cnn1", "cnn2")
|
||||
.addVertex("subset1", new SubsetVertex(0, 1), "merge1")
|
||||
.addLayer("dense1", new DenseLayer.Builder().nIn(20).nOut(5).build(), "subset1")
|
||||
.addLayer("dense2", new DenseLayer.Builder().nIn(20).nOut(5).build(), "subset1")
|
||||
.addLayer("dense1", DenseLayer.builder().nIn(20).nOut(5).build(), "subset1")
|
||||
.addLayer("dense2", DenseLayer.builder().nIn(20).nOut(5).build(), "subset1")
|
||||
.addVertex("add", new ElementWiseVertex(ElementWiseVertex.Op.Add), "dense1",
|
||||
"dense2")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(1).nOut(1).activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "add")
|
||||
.addLayer("out", OutputLayer.builder().nIn(1).nOut(1).activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "add")
|
||||
.setOutputs("out").build();
|
||||
|
||||
String json = conf.toJson();
|
||||
|
@@ -150,8 +150,8 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
//Test no inputs for a layer:
|
||||
try {
|
||||
NeuralNetConfiguration.builder().graphBuilder().addInputs("input1")
|
||||
.addLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "input1")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).build()).setOutputs("out")
|
||||
.addLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build(), "input1")
|
||||
.addLayer("out", OutputLayer.builder().nIn(2).nOut(2).build()).setOutputs("out")
|
||||
.build();
|
||||
fail("No exception thrown for invalid configuration");
|
||||
} catch (IllegalStateException e) {
|
||||
|
@@ -162,8 +162,8 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
// Use appendLayer on first layer
|
||||
try {
|
||||
NeuralNetConfiguration.builder().graphBuilder()
|
||||
.appendLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).build()).setOutputs("out")
|
||||
.appendLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.addLayer("out", OutputLayer.builder().nIn(2).nOut(2).build()).setOutputs("out")
|
||||
.build();
|
||||
fail("No exception thrown for invalid configuration");
|
||||
} catch (IllegalStateException e) {
|
||||
|
@@ -174,8 +174,8 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
//Test no network inputs
|
||||
try {
|
||||
NeuralNetConfiguration.builder().graphBuilder()
|
||||
.addLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "input1")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).build(), "dense1")
|
||||
.addLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build(), "input1")
|
||||
.addLayer("out", OutputLayer.builder().nIn(2).nOut(2).build(), "dense1")
|
||||
.setOutputs("out").build();
|
||||
fail("No exception thrown for invalid configuration");
|
||||
} catch (IllegalStateException e) {
|
||||
|
@@ -186,8 +186,8 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
//Test no network outputs
|
||||
try {
|
||||
NeuralNetConfiguration.builder().graphBuilder().addInputs("input1")
|
||||
.addLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "input1")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).build(), "dense1").build();
|
||||
.addLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build(), "input1")
|
||||
.addLayer("out", OutputLayer.builder().nIn(2).nOut(2).build(), "dense1").build();
|
||||
fail("No exception thrown for invalid configuration");
|
||||
} catch (IllegalStateException e) {
|
||||
//OK - exception is good
|
||||
|
@@ -197,8 +197,8 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
//Test: invalid input
|
||||
try {
|
||||
NeuralNetConfiguration.builder().graphBuilder().addInputs("input1")
|
||||
.addLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "input1")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).build(), "thisDoesntExist")
|
||||
.addLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build(), "input1")
|
||||
.addLayer("out", OutputLayer.builder().nIn(2).nOut(2).build(), "thisDoesntExist")
|
||||
.setOutputs("out").build();
|
||||
fail("No exception thrown for invalid configuration");
|
||||
} catch (IllegalStateException e) {
|
||||
|
@@ -209,10 +209,10 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
//Test: graph with cycles
|
||||
try {
|
||||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("input1")
|
||||
.addLayer("dense1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "input1", "dense3")
|
||||
.addLayer("dense2", new DenseLayer.Builder().nIn(2).nOut(2).build(), "dense1")
|
||||
.addLayer("dense3", new DenseLayer.Builder().nIn(2).nOut(2).build(), "dense2")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(2).nOut(2).lossFunction(LossFunctions.LossFunction.MSE).build(), "dense1")
|
||||
.addLayer("dense1", DenseLayer.builder().nIn(2).nOut(2).build(), "input1", "dense3")
|
||||
.addLayer("dense2", DenseLayer.builder().nIn(2).nOut(2).build(), "dense1")
|
||||
.addLayer("dense3", DenseLayer.builder().nIn(2).nOut(2).build(), "dense2")
|
||||
.addLayer("out", OutputLayer.builder().nIn(2).nOut(2).lossFunction(LossFunctions.LossFunction.MSE).build(), "dense1")
|
||||
.setOutputs("out").build();
|
||||
//Cycle detection happens in ComputationGraph.init()
|
||||
ComputationGraph graph = new ComputationGraph(conf);
|
||||
|
@@ -229,20 +229,20 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration.builder().graphBuilder().addInputs("input1", "input2")
|
||||
.setInputTypes(new InputType.InputTypeRecurrent(10, 12))
|
||||
.addLayer("cnn1",
|
||||
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
.build(),
|
||||
"input1")
|
||||
.addLayer("cnn2",
|
||||
new ConvolutionLayer.Builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
ConvolutionLayer.builder(2, 2).stride(2, 2).nIn(1).nOut(5)
|
||||
.build(),
|
||||
"input2")
|
||||
.addVertex("merge1", new MergeVertex(), "cnn1", "cnn2")
|
||||
.addVertex("subset1", new SubsetVertex(0, 1), "merge1")
|
||||
.addLayer("dense1", new DenseLayer.Builder().nIn(20).nOut(5).build(), "subset1")
|
||||
.addLayer("dense2", new DenseLayer.Builder().nIn(20).nOut(5).build(), "subset1")
|
||||
.addLayer("dense1", DenseLayer.builder().nIn(20).nOut(5).build(), "subset1")
|
||||
.addLayer("dense2", DenseLayer.builder().nIn(20).nOut(5).build(), "subset1")
|
||||
.addVertex("add", new ElementWiseVertex(ElementWiseVertex.Op.Add), "dense1",
|
||||
"dense2")
|
||||
.addLayer("out", new OutputLayer.Builder().nIn(1).nOut(1).activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "add")
|
||||
.addLayer("out", OutputLayer.builder().nIn(1).nOut(1).activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "add")
|
||||
.setOutputs("out").build();
|
||||
fail("No exception thrown for invalid configuration");
|
||||
} catch (IllegalArgumentException e) {
|
||||
|
@@ -283,9 +283,9 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testOutputOrderDoesntChangeWhenCloning() {
|
||||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
|
||||
.addLayer("out1", new OutputLayer.Builder().nIn(1).nOut(1).build(), "in")
|
||||
.addLayer("out2", new OutputLayer.Builder().nIn(1).nOut(1).build(), "in")
|
||||
.addLayer("out3", new OutputLayer.Builder().nIn(1).nOut(1).build(), "in")
|
||||
.addLayer("out1", OutputLayer.builder().nIn(1).nOut(1).build(), "in")
|
||||
.addLayer("out2", OutputLayer.builder().nIn(1).nOut(1).build(), "in")
|
||||
.addLayer("out3", OutputLayer.builder().nIn(1).nOut(1).build(), "in")
|
||||
.validateOutputLayerConfig(false)
|
||||
.setOutputs("out1", "out2", "out3").build();
|
||||
|
||||
|
@@ -301,14 +301,14 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
public void testAllowDisconnectedLayers() {
|
||||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
|
||||
.addLayer("bidirectional",
|
||||
new Bidirectional(new LSTM.Builder().activation(Activation.TANH).nOut(10).build()),
|
||||
Bidirectional.builder(LSTM.builder().activation(Activation.TANH).nOut(10).build()).build(),
|
||||
"in")
|
||||
.addLayer("out", new RnnOutputLayer.Builder().nOut(6)
|
||||
.addLayer("out", RnnOutputLayer.builder().nOut(6)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build(), "bidirectional")
|
||||
.addLayer("disconnected_layer",
|
||||
new Bidirectional(new LSTM.Builder().activation(Activation.TANH).nOut(10).build()),
|
||||
Bidirectional.builder(LSTM.builder().activation(Activation.TANH).nOut(10).build()).build(),
|
||||
"in")
|
||||
.setOutputs("out")
|
||||
.setInputTypes(new InputType.InputTypeRecurrent(10, 12))
|
||||
|
@@ -323,9 +323,9 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
public void testBidirectionalGraphSummary() {
|
||||
ComputationGraphConfiguration conf = NeuralNetConfiguration.builder().graphBuilder().addInputs("in")
|
||||
.addLayer("bidirectional",
|
||||
new Bidirectional(new LSTM.Builder().activation(Activation.TANH).nOut(10).build()),
|
||||
Bidirectional.builder(LSTM.builder().activation(Activation.TANH).nOut(10).build()).build(),
|
||||
"in")
|
||||
.addLayer("out", new RnnOutputLayer.Builder().nOut(6)
|
||||
.addLayer("out", RnnOutputLayer.builder().nOut(6)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX)
|
||||
.build(), "bidirectional")
|
||||
|
@@ -411,10 +411,10 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration.builder()
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.layer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
|
||||
.layer("0", DenseLayer.builder().nIn(10).nOut(10).build(), "in")
|
||||
.layer("1",
|
||||
!lossLayer ? new OutputLayer.Builder().nIn(10).nOut(nOut[i]).activation(activations[i]).lossFunction(lf[i]).build()
|
||||
: new LossLayer.Builder().activation(activations[i]).lossFunction(lf[i]).build(), "0")
|
||||
!lossLayer ? OutputLayer.builder().nIn(10).nOut(nOut[i]).activation(activations[i]).lossFunction(lf[i]).build()
|
||||
: LossLayer.builder().activation(activations[i]).lossFunction(lf[i].getILossFunction()).build(), "0")
|
||||
.setOutputs("1")
|
||||
.validateOutputLayerConfig(validate)
|
||||
.build();
|
||||
|
|
|
@@ -99,8 +99,8 @@ public class JsonTest extends BaseDL4JTest {
|
|||
for (int i = 0; i < lossFunctions.length; i++) {
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345).updater(Updater.ADAM.getIUpdaterWithDefaultConfig())
|
||||
.layer(0, new DenseLayer.Builder().nIn(4).nOut(nOut[i]).activation(Activation.TANH).build())
|
||||
.layer(1, new LossLayer.Builder().lossFunction(lossFunctions[i])
|
||||
.layer(0, DenseLayer.builder().nIn(4).nOut(nOut[i]).activation(Activation.TANH).build())
|
||||
.layer(1, LossLayer.builder().lossFunction(lossFunctions[i])
|
||||
.activation(outputActivationFn[i]).build())
|
||||
.validateOutputLayerConfig(false).build();
|
||||
|
||||
|
|
|
@@ -69,9 +69,9 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
|
||||
private static NeuralNetConfiguration getConf() {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345L)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2)
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2)
|
||||
.dist(new NormalDistribution(0, 1)).build())
|
||||
.layer(1, new OutputLayer.Builder().nIn(2).nOut(1)
|
||||
.layer(1, OutputLayer.builder().nIn(2).nOut(1)
|
||||
.activation(Activation.TANH)
|
||||
.dist(new NormalDistribution(0, 1)).lossFunction(LossFunctions.LossFunction.MSE)
|
||||
.build())
|
||||
|
@@ -82,7 +82,7 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testJson() throws Exception {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.layer(0, new DenseLayer.Builder().dist(new NormalDistribution(1, 1e-1)).build())
|
||||
.layer(0, DenseLayer.builder().dist(new NormalDistribution(1, 1e-1)).build())
|
||||
.inputPreProcessor(0, new CnnToFeedForwardPreProcessor()).build();
|
||||
|
||||
String json = conf.toJson();
|
||||
|
@@ -123,17 +123,17 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
.l1(1e-1).l2(2e-4).weightNoise(new DropConnect(0.5)).miniBatch(true)
|
||||
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
|
||||
.layer(0,
|
||||
new ConvolutionLayer.Builder(5, 5).nOut(5).dropOut(0.5).weightInit(WeightInit.XAVIER)
|
||||
ConvolutionLayer.builder(5, 5).nOut(5).dropOut(0.5).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU).build())
|
||||
.layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[]{2, 2})
|
||||
.layer(1, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX, new int[]{2, 2})
|
||||
.build())
|
||||
.layer(2,
|
||||
new ConvolutionLayer.Builder(3, 3).nOut(10).dropOut(0.5).weightInit(WeightInit.XAVIER)
|
||||
ConvolutionLayer.builder(3, 3).nOut(10).dropOut(0.5).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU).build())
|
||||
.layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[]{2, 2})
|
||||
.layer(3, SubsamplingLayer.builder(SubsamplingLayer.PoolingType.MAX, new int[]{2, 2})
|
||||
.build())
|
||||
.layer(4, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build())
|
||||
.layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.layer(4, DenseLayer.builder().nOut(100).activation(Activation.RELU).build())
|
||||
.layer(5, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
||||
|
@@ -157,15 +157,15 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration.NeuralNetConfigurationBuilder builder = NeuralNetConfiguration.builder().seed(seed)
|
||||
.l1(1e-1).l2(2e-4).dropOut(0.5).miniBatch(true)
|
||||
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
|
||||
.layer(new ConvolutionLayer.Builder(5, 5).nOut(5).dropOut(0.5).weightInit(WeightInit.XAVIER)
|
||||
.layer(ConvolutionLayer.builder(5, 5).nOut(5).dropOut(0.5).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU).build())
|
||||
.layer(new Upsampling2D.Builder().size(2).build())
|
||||
.layer(Upsampling2D.builder().size(2).build())
|
||||
.layer(2,
|
||||
new ConvolutionLayer.Builder(3, 3).nOut(10).dropOut(0.5).weightInit(WeightInit.XAVIER)
|
||||
ConvolutionLayer.builder(3, 3).nOut(10).dropOut(0.5).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU).build())
|
||||
.layer(new Upsampling2D.Builder().size(2).build())
|
||||
.layer(4, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build())
|
||||
.layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.layer(Upsampling2D.builder().size(2).build())
|
||||
.layer(4, DenseLayer.builder().nOut(100).activation(Activation.RELU).build())
|
||||
.layer(5, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
|
||||
.nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
|
||||
.build())
|
||||
|
||||
|
@@ -181,9 +181,9 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
public void testGlobalPoolingJson() {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new NoOp())
|
||||
.dist(new NormalDistribution(0, 1.0)).seed(12345L)
|
||||
.layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(5).build())
|
||||
.layer(1, new GlobalPoolingLayer.Builder().poolingType(PoolingType.PNORM).pnorm(3).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
|
||||
.layer(0, ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(5).build())
|
||||
.layer(1, GlobalPoolingLayer.builder().poolingType(PoolingType.PNORM).pnorm(3).build())
|
||||
.layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT)
|
||||
.activation(Activation.SOFTMAX).nOut(3).build())
|
||||
.inputType(InputType.convolutional(32, 32, 1)).build();
|
||||
|
||||
|
@@ -196,7 +196,7 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testYaml() throws Exception {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.layer(0, new DenseLayer.Builder().dist(new NormalDistribution(1, 1e-1)).build())
|
||||
.layer(0, DenseLayer.builder().dist(new NormalDistribution(1, 1e-1)).build())
|
||||
.inputPreProcessor(0, new CnnToFeedForwardPreProcessor()).build();
|
||||
String json = conf.toYaml();
|
||||
NeuralNetConfiguration from = NeuralNetConfiguration.fromYaml(json);
|
||||
|
@@ -226,8 +226,8 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testClone() {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.layer(0, new DenseLayer.Builder().build())
|
||||
.layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).build())
|
||||
.layer(0, DenseLayer.builder().build())
|
||||
.layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).build())
|
||||
.inputPreProcessor(1, new CnnToFeedForwardPreProcessor()).build();
|
||||
|
||||
NeuralNetConfiguration conf2 = conf.clone();
|
||||
|
@@ -301,8 +301,8 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
|
||||
try {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
|
||||
.layer(1, new DenseLayer.Builder().nIn(3).nOut(4).build())
|
||||
.layer(2, new OutputLayer.Builder().nIn(4).nOut(5).build())
|
||||
.layer(1, DenseLayer.builder().nIn(3).nOut(4).build())
|
||||
.layer(2, OutputLayer.builder().nIn(4).nOut(5).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@@ -317,8 +317,8 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
|
||||
try {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
|
||||
.layer(0, new DenseLayer.Builder().nIn(3).nOut(4).build())
|
||||
.layer(2, new OutputLayer.Builder().nIn(4).nOut(5).build())
|
||||
.layer(0, DenseLayer.builder().nIn(3).nOut(4).build())
|
||||
.layer(2, OutputLayer.builder().nIn(4).nOut(5).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@@ -336,8 +336,8 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
public void testListOverloads() {
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
|
||||
.layer(0, new DenseLayer.Builder().nIn(3).nOut(4).build())
|
||||
.layer(1, new OutputLayer.Builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.layer(0, DenseLayer.builder().nIn(3).nOut(4).build())
|
||||
.layer(1, OutputLayer.builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@@ -350,16 +350,16 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
assertEquals(5, ol.getNOut());
|
||||
|
||||
NeuralNetConfiguration conf2 = NeuralNetConfiguration.builder().seed(12345)
|
||||
.layer(0, new DenseLayer.Builder().nIn(3).nOut(4).build())
|
||||
.layer(1, new OutputLayer.Builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.layer(0, DenseLayer.builder().nIn(3).nOut(4).build())
|
||||
.layer(1, OutputLayer.builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
|
||||
net2.init();
|
||||
|
||||
NeuralNetConfiguration conf3 = NeuralNetConfiguration.builder().seed(12345)
|
||||
.layer(new DenseLayer.Builder().nIn(3).nOut(4).build())
|
||||
.layer(DenseLayer.builder().nIn(3).nOut(4).build())
|
||||
.layer(
|
||||
new OutputLayer.Builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
OutputLayer.builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
MultiLayerNetwork net3 = new MultiLayerNetwork(conf3);
|
||||
net3.init();
|
||||
|
@@ -375,14 +375,16 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(12345)
|
||||
.updater(new Adam(1e-2))
|
||||
.biasUpdater(new Adam(0.5))
|
||||
.layer(0, new ConvolutionLayer.Builder(5, 5).nOut(5).weightInit(WeightInit.XAVIER)
|
||||
.layer(0, ConvolutionLayer.builder(5, 5).nOut(5).weightInit(WeightInit.XAVIER)
|
||||
.activation(Activation.RELU).build())
|
||||
.layer(1, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build())
|
||||
.layer(2, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build())
|
||||
.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(10)
|
||||
.layer(1, DenseLayer.builder().nOut(100).activation(Activation.RELU).build())
|
||||
.layer(2, DenseLayer.builder().nOut(100).activation(Activation.RELU).build())
|
||||
.layer(3, OutputLayer.builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(10)
|
||||
.weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutional(28, 28, 1)).build();
|
||||
|
||||
conf.init();
|
||||
|
||||
BaseLayerConfiguration l0 = (BaseLayerConfiguration) conf.getConf(0).getLayer();
|
||||
BaseLayerConfiguration l1 = (BaseLayerConfiguration) conf.getConf(1).getLayer();
|
||||
BaseLayerConfiguration l2 = (BaseLayerConfiguration) conf.getConf(2).getLayer();
|
||||
|
@@ -432,10 +434,10 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
try {
|
||||
NeuralNetConfiguration.builder()
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10).build())
|
||||
.layer(!lossLayer ? new OutputLayer.Builder().nIn(10).nOut(nOut[i])
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10).build())
|
||||
.layer(!lossLayer ? OutputLayer.builder().nIn(10).nOut(nOut[i])
|
||||
.activation(activations[i]).lossFunction(lf[i]).build()
|
||||
: new LossLayer.Builder().activation(activations[i]).lossFunction(lf[i])
|
||||
: LossLayer.builder().activation(activations[i]).lossFunction(lf[i].getILossFunction())
|
||||
.build())
|
||||
.validateOutputLayerConfig(validate)
|
||||
.build();
|
||||
|
|
|
@@ -67,9 +67,9 @@ public class MultiNeuralNetConfLayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration multiConf1 =
|
||||
NeuralNetConfiguration.builder()
|
||||
.layer(0, new DenseLayer.Builder().nIn(newNumIn).nOut(newNumOut).activation(act)
|
||||
.layer(0, DenseLayer.builder().nIn(newNumIn).nOut(newNumOut).activation(act)
|
||||
.build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(newNumIn + 1).nOut(newNumOut + 1)
|
||||
.layer(1, DenseLayer.builder().nIn(newNumIn + 1).nOut(newNumOut + 1)
|
||||
.activation(act).build())
|
||||
.build();
|
||||
NeuralNetConfiguration firstLayer = multiConf1.getConf(0).getNetConfiguration();
|
||||
|
|
|
@@ -113,7 +113,7 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testRNG() {
|
||||
DenseLayer layer = new DenseLayer.Builder().nIn(trainingSet.numInputs()).nOut(trainingSet.numOutcomes())
|
||||
DenseLayer layer = DenseLayer.builder().nIn(trainingSet.numInputs()).nOut(trainingSet.numOutcomes())
|
||||
.weightInit(WeightInit.UNIFORM).activation(Activation.TANH).build();
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
|
||||
|
@@ -125,7 +125,7 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
INDArray modelWeights = model.getParam(DefaultParamInitializer.WEIGHT_KEY);
|
||||
|
||||
|
||||
DenseLayer layer2 = new DenseLayer.Builder().nIn(trainingSet.numInputs()).nOut(trainingSet.numOutcomes())
|
||||
DenseLayer layer2 = DenseLayer.builder().nIn(trainingSet.numInputs()).nOut(trainingSet.numOutcomes())
|
||||
.weightInit(WeightInit.UNIFORM).activation(Activation.TANH).build();
|
||||
NeuralNetConfiguration conf2 = NeuralNetConfiguration.builder().seed(123)
|
||||
.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).layer(layer2).build();
|
||||
|
@@ -197,7 +197,7 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
|
||||
|
||||
private static NeuralNetConfiguration getConfig(int nIn, int nOut, IWeightInit weightInit, boolean pretrain) {
|
||||
DenseLayer layer = new DenseLayer.Builder().nIn(nIn).nOut(nOut).weightInit(weightInit)
|
||||
DenseLayer layer = DenseLayer.builder().nIn(nIn).nOut(nOut).weightInit(weightInit)
|
||||
.activation(Activation.TANH).build();
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
|
@@ -226,10 +226,10 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
INDArray gradientW = Nd4j.ones(nIns[0], nOuts[0]);
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.3))
|
||||
.layer(0, new DenseLayer.Builder().nIn(nIns[0]).nOut(nOuts[0])
|
||||
.layer(0, DenseLayer.builder().nIn(nIns[0]).nOut(nOuts[0])
|
||||
.updater(new Sgd(lr)).biasUpdater(new Sgd(biasLr)).build())
|
||||
.layer(1, new BatchNormalization.Builder().nIn(nIns[1]).nOut(nOuts[1]).updater(new Sgd(0.7)).build())
|
||||
.layer(2, new OutputLayer.Builder().nIn(nIns[2]).nOut(nOuts[2]).lossFunction(LossFunctions.LossFunction.MSE).build())
|
||||
.layer(1,BatchNormalization.builder().nIn(nIns[1]).nOut(nOuts[1]).updater(new Sgd(0.7)).build())
|
||||
.layer(2, OutputLayer.builder().nIn(nIns[2]).nOut(nOuts[2]).lossFunction(LossFunctions.LossFunction.MSE).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@@ -287,9 +287,9 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().l1(l1)
|
||||
.l2(l2)
|
||||
.layer(0, new DenseLayer.Builder().nIn(nIns[0]).nOut(nOuts[0]).build())
|
||||
.layer(1, new BatchNormalization.Builder().nIn(nIns[1]).nOut(nOuts[1]).l2(0.5).build())
|
||||
.layer(2, new OutputLayer.Builder().nIn(nIns[2]).nOut(nOuts[2]).lossFunction(LossFunctions.LossFunction.MSE).build())
|
||||
.layer(0, DenseLayer.builder().nIn(nIns[0]).nOut(nOuts[0]).build())
|
||||
.layer(1,BatchNormalization.builder().nIn(nIns[1]).nOut(nOuts[1]).l2(0.5).build())
|
||||
.layer(2, OutputLayer.builder().nIn(nIns[2]).nOut(nOuts[2]).lossFunction(LossFunctions.LossFunction.MSE).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@@ -318,7 +318,7 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
|
|||
public void testLayerPretrainConfig() {
|
||||
boolean pretrain = true;
|
||||
|
||||
VariationalAutoencoder layer = new VariationalAutoencoder.Builder()
|
||||
VariationalAutoencoder layer = VariationalAutoencoder.builder()
|
||||
.nIn(10).nOut(5).updater(new Sgd(1e-1))
|
||||
.lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE).build();
|
||||
|
||||
|
|
|
@@ -48,6 +48,7 @@ import org.nd4j.linalg.learning.config.RmsProp;
|
|||
import org.nd4j.linalg.learning.config.Sgd;
|
||||
import org.nd4j.linalg.lossfunctions.LossFunctions;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
@@ -71,9 +72,9 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.updater(new Sgd(0.0))
|
||||
.dist(new NormalDistribution(0, 5))
|
||||
|
||||
.layer(new LSTM.Builder().nIn(12).nOut(10)
|
||||
.layer(LSTM.builder().nIn(12).nOut(10)
|
||||
.constrainRecurrent(lc).build())
|
||||
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@@ -124,9 +125,9 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 5))
|
||||
.biasInit(10.0)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(12).nOut(10)
|
||||
.layer(DenseLayer.builder().nIn(12).nOut(10)
|
||||
.constrainBias(lc).build())
|
||||
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@@ -176,9 +177,9 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.updater(new Sgd(0.0))
|
||||
.dist(new NormalDistribution(0, 5))
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(12).nOut(10)
|
||||
.layer(DenseLayer.builder().nIn(12).nOut(10)
|
||||
.constrainWeights(lc).build())
|
||||
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@@ -229,9 +230,10 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 5))
|
||||
.biasInit(0.2)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(12).nOut(10)
|
||||
.constrainAllParameters(lc).build())
|
||||
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.layer(DenseLayer.builder().nIn(12).nOut(10)
|
||||
.allParamConstraints(List.of(lc))
|
||||
.build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@@ -290,9 +292,9 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0, 5))
|
||||
.biasInit(0.2)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(12).nOut(10)
|
||||
.layer(DenseLayer.builder().nIn(12).nOut(10)
|
||||
.constrainWeights(lc).constrainBias(lc).build())
|
||||
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@@ -351,8 +353,8 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.dist(new NormalDistribution(0,5))
|
||||
.biasInit(1)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(12).nOut(10).build())
|
||||
.layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.layer(DenseLayer.builder().nIn(12).nOut(10).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@@ -406,7 +408,7 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.graphBuilder()
|
||||
.addInputs("input_lstm", "input_cpc")
|
||||
.addLayer("first_lstm_layer",
|
||||
new LSTM.Builder()
|
||||
LSTM.builder()
|
||||
.nIn(nIn)
|
||||
.nOut(lstmLayerSize)
|
||||
.activation(Activation.RELU)
|
||||
|
@@ -417,7 +419,7 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.addVertex("merge", new MergeVertex(),
|
||||
"lastTimeStep", "input_cpc")
|
||||
.addLayer("dense",
|
||||
new DenseLayer.Builder()
|
||||
DenseLayer.builder()
|
||||
.constrainWeights(new NonNegativeConstraint())
|
||||
.nIn(lstmLayerSize + 1)
|
||||
.nOut(lstmLayerSize/2)
|
||||
|
@@ -425,7 +427,7 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.build(),
|
||||
"merge")
|
||||
.addLayer("second_dense",
|
||||
new DenseLayer.Builder()
|
||||
DenseLayer.builder()
|
||||
.constrainWeights(new NonNegativeConstraint())
|
||||
.nIn(lstmLayerSize/2)
|
||||
.nOut(lstmLayerSize/8)
|
||||
|
@@ -433,7 +435,7 @@ public class TestConstraints extends BaseDL4JTest {
|
|||
.build(),
|
||||
"dense")
|
||||
.addLayer("output_layer",
|
||||
new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
|
||||
OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
|
||||
.constrainWeights(new NonNegativeConstraint())
|
||||
.nIn(lstmLayerSize/8)
|
||||
.nOut(1)
|
||||
|
|
|
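The TestConstraints hunks above also contain one change that is not purely mechanical: constrainAllParameters(lc) becomes allParamConstraints(List.of(lc)), so the builder now takes an explicit constraint list. A hedged sketch of the new form, using the NonNegativeConstraint that the same test already uses; imports are as in TestConstraints plus java.util.List.

// Attach one constraint to every parameter of a layer (new list-based builder method).
DenseLayer constrainedAll = DenseLayer.builder()
        .nIn(12).nOut(10)
        .allParamConstraints(List.of(new NonNegativeConstraint()))
        .build();

// The weight- and bias-specific variants keep their dedicated builder methods.
DenseLayer constrainedWeightsAndBias = DenseLayer.builder()
        .nIn(12).nOut(10)
        .constrainWeights(new NonNegativeConstraint())
        .constrainBias(new NonNegativeConstraint())
        .build();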
@@ -62,29 +62,29 @@ public class TestDropout extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.dropOut(0.6)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10).build())
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10).dropOut(0.7).build())
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10).dropOut(new AlphaDropout(0.5)).build())
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10).build())
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10).dropOut(0.7).build())
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10).dropOut(new AlphaDropout(0.5)).build())
|
||||
.build();
|
||||
|
||||
assertEquals(new Dropout(0.6), conf.getFlattenedLayerConfigurations().get(0).getIDropout());
|
||||
assertEquals(new Dropout(0.7), conf.getFlattenedLayerConfigurations().get(1).getIDropout());
|
||||
assertEquals(new AlphaDropout(0.5), conf.getFlattenedLayerConfigurations().get(2).getIDropout());
|
||||
assertEquals(new Dropout(0.6), conf.getFlattenedLayerConfigurations().get(0).getDropOut());
|
||||
assertEquals(new Dropout(0.7), conf.getFlattenedLayerConfigurations().get(1).getDropOut());
|
||||
assertEquals(new AlphaDropout(0.5), conf.getFlattenedLayerConfigurations().get(2).getDropOut());
|
||||
|
||||
|
||||
ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder()
|
||||
.dropOut( new Dropout(0.6))
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
|
||||
.addLayer("1", new DenseLayer.Builder().nIn(10).nOut(10).dropOut(0.7).build(), "0")
|
||||
.addLayer("2", new DenseLayer.Builder().nIn(10).nOut(10).dropOut(new AlphaDropout(0.5)).build(), "1")
|
||||
.addLayer("0", DenseLayer.builder().nIn(10).nOut(10).build(), "in")
|
||||
.addLayer("1", DenseLayer.builder().nIn(10).nOut(10).dropOut(0.7).build(), "0")
|
||||
.addLayer("2", DenseLayer.builder().nIn(10).nOut(10).dropOut(new AlphaDropout(0.5)).build(), "1")
|
||||
.setOutputs("2")
|
||||
.build();
|
||||
|
||||
assertEquals(new Dropout(0.6), ((LayerVertex)conf2.getVertices().get("0")).getLayerConfiguration().getIDropout());
|
||||
assertEquals(new Dropout(0.7), ((LayerVertex)conf2.getVertices().get("1")).getLayerConfiguration().getIDropout());
|
||||
assertEquals(new AlphaDropout(0.5), ((LayerVertex)conf2.getVertices().get("2")).getLayerConfiguration().getIDropout());
|
||||
assertEquals(new Dropout(0.6), ((LayerVertex)conf2.getVertices().get("0")).getLayerConfiguration().getDropOut());
|
||||
assertEquals(new Dropout(0.7), ((LayerVertex)conf2.getVertices().get("1")).getLayerConfiguration().getDropOut());
|
||||
assertEquals(new AlphaDropout(0.5), ((LayerVertex)conf2.getVertices().get("2")).getLayerConfiguration().getDropOut());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@@ -95,8 +95,8 @@ public class TestDropout extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(4).nOut(3).dropOut(d1).build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MSE).dropOut(d2).nIn(3).nOut(3).build())
|
||||
.layer(DenseLayer.builder().nIn(4).nOut(3).dropOut(d1).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).dropOut(d2).nIn(3).nOut(3).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@@ -131,8 +131,8 @@ public class TestDropout extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder()
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new DenseLayer.Builder().nIn(4).nOut(3).dropOut(d1).build(), "in")
|
||||
.addLayer("1", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).dropOut(d2).nIn(3).nOut(3).build(), "0")
|
||||
.addLayer("0", DenseLayer.builder().nIn(4).nOut(3).dropOut(d1).build(), "in")
|
||||
.addLayer("1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).dropOut(d2).nIn(3).nOut(3).build(), "0")
|
||||
.setOutputs("1")
|
||||
.build();
|
||||
|
||||
|
@@ -188,8 +188,8 @@ public class TestDropout extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.dropOut(id)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(4).nOut(3).build())
|
||||
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(3).nOut(3).build())
|
||||
.layer(DenseLayer.builder().nIn(4).nOut(3).build())
|
||||
.layer(OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(3).nOut(3).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@@ -200,8 +200,8 @@ public class TestDropout extends BaseDL4JTest {
|
|||
.dropOut(id)
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.addLayer("0", new DenseLayer.Builder().nIn(4).nOut(3).build(), "in")
|
||||
.addLayer("1", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(3).nOut(3).build(), "0")
|
||||
.addLayer("0", DenseLayer.builder().nIn(4).nOut(3).build(), "in")
|
||||
.addLayer("1", OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(3).nOut(3).build(), "0")
|
||||
.setOutputs("1")
|
||||
.build();
|
||||
|
||||
|
@@ -602,7 +602,7 @@ public class TestDropout extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
|
||||
.layer(new DropoutLayer.Builder(new SpatialDropout(0.5)).build())
|
||||
.layer(DropoutLayer.builder(new SpatialDropout(0.5)).build())
|
||||
.build();
|
||||
|
||||
String asJson = conf.toJson();
|
||||
|
|
|
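Note that the TestDropout hunks above change more than the builders: the dropout accessor on a layer configuration is renamed from getIDropout() to getDropOut(). A short sketch of a layer-level override and the renamed read-back, written as it would appear inside a JUnit test with the same imports as TestDropout.

// Global dropout of 0.6, overridden to 0.7 on the second layer.
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
        .dropOut(0.6)
        .layer(DenseLayer.builder().nIn(10).nOut(10).build())
        .layer(DenseLayer.builder().nIn(10).nOut(10).dropOut(0.7).build())
        .build();

// getDropOut() replaces getIDropout() on the flattened layer configurations.
assertEquals(new Dropout(0.6), conf.getFlattenedLayerConfigurations().get(0).getDropOut());
assertEquals(new Dropout(0.7), conf.getFlattenedLayerConfigurations().get(1).getDropOut());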
@@ -73,7 +73,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder().graphBuilder()
|
||||
.addInputs("input1", "input2", "input3")
|
||||
.addLayer("denselayer",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
|
||||
DenseLayer.builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
|
||||
.build(),
|
||||
"input1")
|
||||
/* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get
|
||||
|
@@ -87,7 +87,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
*/
|
||||
.addVertex("elementwiseAdd", new ElementWiseVertex(ElementWiseVertex.Op.Add), "input1",
|
||||
"input2", "input3")
|
||||
.addLayer("Add", new ActivationLayer.Builder().activation(Activation.IDENTITY).build(),
|
||||
.addLayer("Add", ActivationLayer.builder().activation(Activation.IDENTITY).build(),
|
||||
"elementwiseAdd")
|
||||
.setOutputs("Add", "denselayer").build();
|
||||
|
||||
|
@@ -114,7 +114,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder().graphBuilder()
|
||||
.addInputs("input1", "input2", "input3")
|
||||
.addLayer("denselayer",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
|
||||
DenseLayer.builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
|
||||
.build(),
|
||||
"input1")
|
||||
/* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get
|
||||
|
@@ -128,7 +128,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
*/
|
||||
.addVertex("elementwiseProduct", new ElementWiseVertex(ElementWiseVertex.Op.Product), "input1",
|
||||
"input2", "input3")
|
||||
.addLayer("Product", new ActivationLayer.Builder().activation(Activation.IDENTITY).build(),
|
||||
.addLayer("Product", ActivationLayer.builder().activation(Activation.IDENTITY).build(),
|
||||
"elementwiseProduct")
|
||||
.setOutputs("Product", "denselayer").build();
|
||||
|
||||
|
@@ -155,7 +155,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder().graphBuilder()
|
||||
.addInputs("input1", "input2")
|
||||
.addLayer("denselayer",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
|
||||
DenseLayer.builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
|
||||
.build(),
|
||||
"input1")
|
||||
/* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get
|
||||
|
@@ -169,7 +169,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
*/
|
||||
.addVertex("elementwiseSubtract", new ElementWiseVertex(ElementWiseVertex.Op.Subtract),
|
||||
"input1", "input2")
|
||||
.addLayer("Subtract", new ActivationLayer.Builder().activation(Activation.IDENTITY).build(),
|
||||
.addLayer("Subtract", ActivationLayer.builder().activation(Activation.IDENTITY).build(),
|
||||
"elementwiseSubtract")
|
||||
.setOutputs("Subtract", "denselayer").build();
|
||||
|
||||
|
@@ -200,22 +200,22 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
|
||||
.addInputs("input1", "input2", "input3")
|
||||
.addLayer("dense1",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(new ActivationTanH()).build(),
|
||||
DenseLayer.builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(Activation.TANH).build(),
|
||||
"input1")
|
||||
.addLayer("dense2",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(new ActivationTanH()).build(),
|
||||
DenseLayer.builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(Activation.TANH).build(),
|
||||
"input2")
|
||||
.addLayer("dense3",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(new ActivationTanH()).build(),
|
||||
DenseLayer.builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(Activation.TANH).build(),
|
||||
"input3")
|
||||
.addVertex("elementwiseAdd", new ElementWiseVertex(ElementWiseVertex.Op.Add), "dense1",
|
||||
"dense2", "dense3")
|
||||
.addLayer("output",
|
||||
new OutputLayer.Builder().nIn(midsz).nOut(outputsz)
|
||||
.activation(new ActivationSigmoid())
|
||||
OutputLayer.builder().nIn(midsz).nOut(outputsz)
|
||||
.activation(Activation.SIGMOID)
|
||||
.lossFunction(LossFunction.MSE).build(),
|
||||
"elementwiseAdd")
|
||||
.setOutputs("output").build();
|
||||
|
@@ -376,22 +376,22 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
|
||||
.addInputs("input1", "input2", "input3")
|
||||
.addLayer("dense1",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(new ActivationTanH()).build(),
|
||||
DenseLayer.builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(Activation.TANH).build(),
|
||||
"input1")
|
||||
.addLayer("dense2",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(new ActivationTanH()).build(),
|
||||
DenseLayer.builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(Activation.TANH).build(),
|
||||
"input2")
|
||||
.addLayer("dense3",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(new ActivationTanH()).build(),
|
||||
DenseLayer.builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(Activation.TANH).build(),
|
||||
"input3")
|
||||
.addVertex("elementwiseProduct", new ElementWiseVertex(ElementWiseVertex.Op.Product), "dense1",
|
||||
"dense2", "dense3")
|
||||
.addLayer("output",
|
||||
new OutputLayer.Builder().nIn(midsz).nOut(outputsz)
|
||||
.activation(new ActivationSigmoid())
|
||||
OutputLayer.builder().nIn(midsz).nOut(outputsz)
|
||||
.activation(Activation.SIGMOID)
|
||||
.lossFunction(LossFunction.MSE).build(),
|
||||
"elementwiseProduct")
|
||||
.setOutputs("output").build();
|
||||
|
@@ -551,18 +551,18 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
|
||||
.addInputs("input1", "input2")
|
||||
.addLayer("dense1",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(new ActivationTanH()).build(),
|
||||
DenseLayer.builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(Activation.TANH).build(),
|
||||
"input1")
|
||||
.addLayer("dense2",
|
||||
new DenseLayer.Builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(new ActivationTanH()).build(),
|
||||
DenseLayer.builder().nIn(featuresz).nOut(midsz)
|
||||
.activation(Activation.TANH).build(),
|
||||
"input2")
|
||||
.addVertex("elementwiseSubtract", new ElementWiseVertex(ElementWiseVertex.Op.Subtract),
|
||||
"dense1", "dense2")
|
||||
.addLayer("output",
|
||||
new OutputLayer.Builder().nIn(midsz).nOut(outputsz)
|
||||
.activation(new ActivationSigmoid())
|
||||
OutputLayer.builder().nIn(midsz).nOut(outputsz)
|
||||
.activation(Activation.SIGMOID)
|
||||
.lossFunction(LossFunction.MSE).build(),
|
||||
"elementwiseSubtract")
|
||||
.setOutputs("output").build();
|
||||
|
|
|
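The ElementWiseVertexTest hunks also replace activation-function instances with the matching enum constants, for example new ActivationTanH() becomes Activation.TANH. A minimal graph sketch in the new style; the layer sizes are example values and the imports are as in the test file.

ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder()
        .graphBuilder()
        .addInputs("input1", "input2")
        // Enum constants replace activation instances such as new ActivationTanH().
        .addLayer("dense1", DenseLayer.builder().nIn(4).nOut(8).activation(Activation.TANH).build(), "input1")
        .addLayer("dense2", DenseLayer.builder().nIn(4).nOut(8).activation(Activation.TANH).build(), "input2")
        .addVertex("sum", new ElementWiseVertex(ElementWiseVertex.Op.Add), "dense1", "dense2")
        .addLayer("output", OutputLayer.builder().nIn(8).nOut(1)
                .activation(Activation.SIGMOID)
                .lossFunction(LossFunction.MSE).build(), "sum")
        .setOutputs("output")
        .build();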
@@ -86,7 +86,7 @@ public class ShiftVertexTest extends BaseDL4JTest {
|
|||
double sf = 4.1;
|
||||
ComputationGraphConfiguration cgc = NeuralNetConfiguration.builder().graphBuilder().addInputs("input")
|
||||
.addLayer("denselayer",
|
||||
new DenseLayer.Builder().nIn(input.columns()).nOut(1)
|
||||
DenseLayer.builder().nIn(input.columns()).nOut(1)
|
||||
.activation(Activation.IDENTITY).build(),
|
||||
"input")
|
||||
/* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get
|
||||
|
@@ -99,10 +99,10 @@ public class ShiftVertexTest extends BaseDL4JTest {
|
|||
* at org.deeplearning4j.nn.graph.ComputationGraph.init(ComputationGraph.java:341)
|
||||
*/
|
||||
.addLayer("identityinputactivation",
|
||||
new ActivationLayer.Builder().activation(Activation.IDENTITY).build(), "input")
|
||||
ActivationLayer.builder().activation(Activation.IDENTITY).build(), "input")
|
||||
.addVertex("shiftvertex", new ShiftVertex(sf), "identityinputactivation")
|
||||
.addLayer("identityshiftvertex",
|
||||
new ActivationLayer.Builder().activation(Activation.IDENTITY).build(),
|
||||
ActivationLayer.builder().activation(Activation.IDENTITY).build(),
|
||||
"shiftvertex")
|
||||
.setOutputs("identityshiftvertex", "denselayer").build();
|
||||
|
||||
|
@@ -144,12 +144,12 @@ public class ShiftVertexTest extends BaseDL4JTest {
|
|||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).graphBuilder()
|
||||
.addInputs("input")
|
||||
.addLayer("denselayer",
|
||||
new DenseLayer.Builder().nIn(input.columns()).nOut(input.columns())
|
||||
DenseLayer.builder().nIn(input.columns()).nOut(input.columns())
|
||||
.activation(a1).build(),
|
||||
"input")
|
||||
.addVertex("shiftvertex", new ShiftVertex(sf), "denselayer")
|
||||
.addLayer("output",
|
||||
new OutputLayer.Builder().nIn(input.columns()).nOut(target.columns())
|
||||
OutputLayer.builder().nIn(input.columns()).nOut(target.columns())
|
||||
.activation(a2).lossFunction(LossFunction.MSE).build(),
|
||||
"shiftvertex")
|
||||
.setOutputs("output").build();
|
||||
|
|
|
@@ -67,7 +67,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testLayer() throws Exception {
|
||||
DenseLayer layer = new DenseLayer.Builder().activation(act).weightInit(weight).dropOut(dropOut)
|
||||
DenseLayer layer = DenseLayer.builder().activation(act).weightInit(weight).dropOut(dropOut)
|
||||
.updater(updater).gradientNormalization(gradNorm)
|
||||
.gradientNormalizationThreshold(gradNormThreshold).build();
|
||||
|
||||
|
@@ -75,7 +75,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
assertEquals(act, layer.getActivationFn());
|
||||
assertEquals(weight.getWeightInitFunction(), layer.getWeightInit());
|
||||
assertEquals(new Dropout(dropOut), layer.getIDropout());
|
||||
assertEquals(new Dropout(dropOut), layer.getDropOut());
|
||||
assertEquals(updater, layer.getIUpdater());
|
||||
assertEquals(gradNorm, layer.getGradientNormalization());
|
||||
assertEquals(gradNormThreshold, layer.getGradientNormalizationThreshold(), 0.0);
|
||||
|
@@ -83,7 +83,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testFeedForwardLayer() throws Exception {
|
||||
DenseLayer ff = new DenseLayer.Builder().nIn(numIn).nOut(numOut).build();
|
||||
DenseLayer ff = DenseLayer.builder().nIn(numIn).nOut(numOut).build();
|
||||
|
||||
checkSerialization(ff);
|
||||
|
||||
|
@@ -93,7 +93,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testConvolutionLayer() throws Exception {
|
||||
ConvolutionLayer conv = new ConvolutionLayer.Builder(kernelSize, stride, padding).build();
|
||||
ConvolutionLayer conv = ConvolutionLayer.builder(kernelSize, stride, padding).build();
|
||||
|
||||
checkSerialization(conv);
|
||||
|
||||
|
@@ -106,7 +106,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testSubsamplingLayer() throws Exception {
|
||||
SubsamplingLayer sample =
|
||||
new SubsamplingLayer.Builder(poolType, stride).kernelSize(kernelSize).padding(padding).build();
|
||||
SubsamplingLayer.builder(poolType, stride).kernelSize(kernelSize).padding(padding).build();
|
||||
|
||||
checkSerialization(sample);
|
||||
|
||||
|
@@ -118,21 +118,21 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testOutputLayer() throws Exception {
|
||||
OutputLayer out = new OutputLayer.Builder(loss).build();
|
||||
OutputLayer out = OutputLayer.builder(loss).build();
|
||||
|
||||
checkSerialization(out);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRnnOutputLayer() throws Exception {
|
||||
RnnOutputLayer out = new RnnOutputLayer.Builder(loss).build();
|
||||
RnnOutputLayer out = RnnOutputLayer.builder(loss).build();
|
||||
|
||||
checkSerialization(out);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAutoEncoder() throws Exception {
|
||||
AutoEncoder enc = new AutoEncoder.Builder().corruptionLevel(corruptionLevel).sparsity(sparsity).build();
|
||||
AutoEncoder enc = AutoEncoder.builder().corruptionLevel(corruptionLevel).sparsity(sparsity).build();
|
||||
|
||||
checkSerialization(enc);
|
||||
|
||||
|
@@ -142,7 +142,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testGravesLSTM() throws Exception {
|
||||
GravesLSTM glstm = new GravesLSTM.Builder().forgetGateBiasInit(1.5).activation(Activation.TANH).nIn(numIn)
|
||||
GravesLSTM glstm = GravesLSTM.builder().forgetGateBiasInit(1.5).activation(Activation.TANH).nIn(numIn)
|
||||
.nOut(numOut).build();
|
||||
|
||||
checkSerialization(glstm);
|
||||
|
@@ -155,7 +155,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testGravesBidirectionalLSTM() throws Exception {
|
||||
final GravesBidirectionalLSTM glstm = new GravesBidirectionalLSTM.Builder().forgetGateBiasInit(1.5)
|
||||
final GravesBidirectionalLSTM glstm = GravesBidirectionalLSTM.builder().forgetGateBiasInit(1.5)
|
||||
.activation(Activation.TANH).nIn(numIn).nOut(numOut).build();
|
||||
|
||||
checkSerialization(glstm);
|
||||
|
@@ -168,7 +168,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testEmbeddingLayer() throws Exception {
|
||||
EmbeddingLayer el = new EmbeddingLayer.Builder().nIn(10).nOut(5).build();
|
||||
EmbeddingLayer el = EmbeddingLayer.builder().nIn(10).nOut(5).build();
|
||||
checkSerialization(el);
|
||||
|
||||
assertEquals(10, el.getNIn());
|
||||
|
@@ -177,7 +177,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testBatchNormLayer() throws Exception {
|
||||
BatchNormalization bN = new BatchNormalization.Builder().nIn(numIn).nOut(numOut).gamma(2).beta(1).decay(0.5)
|
||||
BatchNormalization bN = BatchNormalization.builder().nIn(numIn).nOut(numOut).gamma(2).beta(1).decay(0.5)
|
||||
.lockGammaBeta(true).build();
|
||||
|
||||
checkSerialization(bN);
|
||||
|
@@ -192,11 +192,11 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
|
||||
@Test
|
||||
public void testActivationLayer() throws Exception {
|
||||
ActivationLayer activationLayer = new ActivationLayer.Builder().activation(act).build();
|
||||
ActivationLayer activationLayer = ActivationLayer.builder().activation(act).build();
|
||||
|
||||
checkSerialization(activationLayer);
|
||||
|
||||
assertEquals(act, activationLayer.activationFn);
|
||||
assertEquals(act, activationLayer.getActivation());
|
||||
}
|
||||
|
||||
private void checkSerialization(LayerConfiguration layer) throws Exception {
|
||||
|
@@ -225,7 +225,7 @@ public class LayerBuilderTest extends BaseDL4JTest {
|
|||
assertEquals(confExpected.getFlattenedLayerConfigurations().get(0), confActual.getFlattenedLayerConfigurations().get(0), "unequal YAML serialization");
|
||||
|
||||
// check the layer's use of callSuper on equals method
|
||||
confActual.getFlattenedLayerConfigurations().get(0).setIDropout(new Dropout(new java.util.Random().nextDouble()));
|
||||
confActual.getFlattenedLayerConfigurations().get(0).setDropOut(new Dropout(new java.util.Random().nextDouble()));
|
||||
assertNotEquals( confExpected, confActual, "broken equals method (missing callSuper?)");
|
||||
}
|
||||
|
||||
|
|
|
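LayerBuilderTest keeps the single-argument factory OutputLayer.builder(loss), while most other hunks in this change use the no-argument factory together with .lossFunction(...). Both forms appear in the diff, so the sketch below assumes both overloads exist in this fork.

// Form kept in LayerBuilderTest: the loss function is passed to the factory method.
OutputLayer viaFactoryArgument = OutputLayer.builder(LossFunctions.LossFunction.MSE)
        .nIn(10).nOut(1).build();

// Form used in most other hunks: the loss function is set through the fluent setter.
OutputLayer viaSetter = OutputLayer.builder()
        .lossFunction(LossFunctions.LossFunction.MSE)
        .nIn(10).nOut(1)
        .build();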
@@ -53,13 +53,13 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
String name2 = "bill";
|
||||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).name(name1).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).name(name2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).name(name1).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).name(name2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
assertEquals(name1, conf.getConf(0).getLayer().getLayerName());
|
||||
assertEquals(name2, conf.getConf(1).getLayer().getLayerName());
|
||||
assertEquals(name1, conf.getConf(0).getLayer().getName());
|
||||
assertEquals(name2, conf.getConf(1).getLayer().getName());
|
||||
|
||||
}
|
||||
|
||||
|
@@ -67,8 +67,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
public void testActivationLayerwiseOverride() {
|
||||
//Without layerwise override:
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().activation(Activation.RELU)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
|
@@ -77,8 +77,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
|
||||
//With
|
||||
conf = NeuralNetConfiguration.builder().activation(Activation.RELU)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).activation(Activation.TANH).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).activation(Activation.TANH).build()).build();
|
||||
|
||||
net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@@ -94,8 +94,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
final Distribution defaultDistribution = new NormalDistribution(0, 1.0);
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.dist(defaultDistribution).biasInit(1)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
|
@@ -109,8 +109,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
final Distribution overriddenDistribution = new UniformDistribution(0, 1);
|
||||
conf = NeuralNetConfiguration.builder()
|
||||
.dist(defaultDistribution).biasInit(1)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()).layer(1,
|
||||
new DenseLayer.Builder().nIn(2).nOut(2)
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build()).layer(1,
|
||||
DenseLayer.builder().nIn(2).nOut(2)
|
||||
.dist(overriddenDistribution).biasInit(0).build())
|
||||
.build();
|
||||
|
||||
|
@@ -181,23 +181,23 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testDropoutLayerwiseOverride() {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().dropOut(1.0)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
assertEquals(new Dropout(1.0), conf.getConf(0).getLayer().getIDropout());
|
||||
assertEquals(new Dropout(1.0), conf.getConf(1).getLayer().getIDropout());
|
||||
assertEquals(new Dropout(1.0), conf.getConf(0).getLayer().getDropOut());
|
||||
assertEquals(new Dropout(1.0), conf.getConf(1).getLayer().getDropOut());
|
||||
|
||||
conf = NeuralNetConfiguration.builder().dropOut(1.0)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).dropOut(2.0).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).dropOut(2.0).build()).build();
|
||||
|
||||
net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
assertEquals(new Dropout(1.0), conf.getConf(0).getLayer().getIDropout());
|
||||
assertEquals(new Dropout(2.0), conf.getConf(1).getLayer().getIDropout());
|
||||
assertEquals(new Dropout(1.0), conf.getConf(0).getLayer().getDropOut());
|
||||
assertEquals(new Dropout(2.0), conf.getConf(1).getLayer().getDropOut());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@@ -208,8 +208,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.updater(new Nesterovs(1.0, new MapSchedule(ScheduleType.ITERATION, testMomentumAfter)))
|
||||
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
|
@@ -221,7 +221,7 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
|
||||
conf = NeuralNetConfiguration.builder().updater(new Nesterovs(1.0, new MapSchedule(ScheduleType.ITERATION, testMomentumAfter) ))
|
||||
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build()).layer(1, new DenseLayer.Builder()
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build()).layer(1, DenseLayer.builder()
|
||||
.nIn(2).nOut(2).updater(new Nesterovs(1.0, new MapSchedule(ScheduleType.ITERATION, testMomentumAfter2))).build())
|
||||
.build();
|
||||
|
||||
|
@@ -234,8 +234,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
@Test
|
||||
public void testUpdaterRhoRmsDecayLayerwiseOverride() {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new AdaDelta(0.5, 0.9))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).updater(new AdaDelta(0.01,0.9)).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).updater(new AdaDelta(0.01,0.9)).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
|
@@ -245,8 +245,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
assertEquals(0.01, ((AdaDelta)((BaseLayerConfiguration) conf.getConf(1).getLayer()).getIUpdater()).getRho(), 0.0);
|
||||
|
||||
conf = NeuralNetConfiguration.builder().updater(new RmsProp(1.0, 2.0, RmsProp.DEFAULT_RMSPROP_EPSILON))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).updater(new RmsProp(1.0, 1.0, RmsProp.DEFAULT_RMSPROP_EPSILON)).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).updater(new AdaDelta(0.5,AdaDelta.DEFAULT_ADADELTA_EPSILON)).build())
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).updater(new RmsProp(1.0, 1.0, RmsProp.DEFAULT_RMSPROP_EPSILON)).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).updater(new AdaDelta(0.5,AdaDelta.DEFAULT_ADADELTA_EPSILON)).build())
|
||||
.build();
|
||||
|
||||
net = new MultiLayerNetwork(conf);
|
||||
|
@@ -264,8 +264,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.updater(new Adam(1.0, 0.5, 0.5, 1e-8))
|
||||
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).updater(new Adam(1.0, 0.6, 0.7, 1e-8)).build())
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).updater(new Adam(1.0, 0.6, 0.7, 1e-8)).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
@@ -283,8 +283,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
|
||||
.gradientNormalizationThreshold(10)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
BaseLayerConfiguration bconf = (BaseLayerConfiguration) conf.getConf(0).getLayer();
|
||||
|
@@ -297,8 +297,8 @@ public class LayerConfigTest extends BaseDL4JTest {
|
|||
conf = NeuralNetConfiguration.builder()
|
||||
.gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
|
||||
.gradientNormalizationThreshold(10)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2)
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2)
|
||||
.gradientNormalization(GradientNormalization.None)
|
||||
.gradientNormalizationThreshold(2.5).build())
|
||||
.build();
|
||||
|
|
|
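The LayerConfigTest hunks above also rename the layer-name accessor from getLayerName() to getName(). A short sketch of naming layers and reading the names back; the layer names here are example values and the imports are as in LayerConfigTest.

NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
        .layer(0, DenseLayer.builder().nIn(2).nOut(2).name("firstLayer").build())
        .layer(1, DenseLayer.builder().nIn(2).nOut(2).name("secondLayer").build())
        .build();

// getName() replaces getLayerName() on the layer configuration.
assertEquals("firstLayer", conf.getConf(0).getLayer().getName());
assertEquals("secondLayer", conf.getConf(1).getLayer().getName());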
@@ -56,8 +56,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
public void testDropConnect() {
|
||||
// Warning thrown only since some layers may not have l1 or l2
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.1)).weightNoise(new DropConnect(0.5))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
}
|
||||
|
@@ -67,8 +67,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
public void testL1L2NotSet() {
|
||||
// Warning thrown only since some layers may not have l1 or l2
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.3))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
}
|
||||
|
@@ -78,8 +78,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
public void testRegNotSetL1Global() {
|
||||
assertThrows(IllegalStateException.class, () -> {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.3)).l1(0.5)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
});
|
||||
|
@@ -90,8 +90,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
public void testRegNotSetL2Local() {
|
||||
assertThrows(IllegalStateException.class, () -> {
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Sgd(0.3))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).l2(0.5).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).l2(0.5).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
});
|
||||
|
@@ -102,8 +102,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
// Warning thrown only since global dist can be set with a different weight init locally
|
||||
NeuralNetConfiguration conf =
|
||||
NeuralNetConfiguration.builder().updater(new Sgd(0.3)).dist(new GaussianDistribution(1e-3, 2))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
}
|
||||
|
@@ -116,8 +116,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf =
|
||||
NeuralNetConfiguration.builder().updater(new Nesterovs(1.0, new MapSchedule(ScheduleType.ITERATION, testMomentumAfter)))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
}
|
||||
|
@@ -130,12 +130,12 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
/* Graph Builder */
|
||||
.updater(Updater.RMSPROP.getIUpdaterWithDefaultConfig()).graphBuilder().addInputs("in")
|
||||
.addLayer("L" + 1,
|
||||
new GravesLSTM.Builder().nIn(20).updater(Updater.RMSPROP).nOut(10)
|
||||
GravesLSTM.builder().nIn(20).updater(Updater.RMSPROP).nOut(10)
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.dropOut(0.4).l1(0.3).activation(Activation.SIGMOID).build(),
|
||||
"in")
|
||||
.addLayer("output",
|
||||
new RnnOutputLayer.Builder().nIn(20).nOut(10).activation(Activation.SOFTMAX)
|
||||
RnnOutputLayer.builder().nIn(20).nOut(10).activation(Activation.SOFTMAX)
|
||||
.weightInit(WeightInit.RELU_UNIFORM).build(),
|
||||
"L" + 1)
|
||||
.setOutputs("output");
|
||||
|
@@ -157,8 +157,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
|
||||
// Nesterovs Updater
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().updater(new Nesterovs(0.9))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).l2(0.5).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).updater(new Nesterovs(0.3, 0.4)).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).l2(0.5).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).updater(new Nesterovs(0.3, 0.4)).build()).build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
|
@@ -173,8 +173,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
// Adam Updater
|
||||
conf = NeuralNetConfiguration.builder().updater(new Adam(0.3))
|
||||
.weightInit(expectedDist)
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).l2(0.5).l1(0.3).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).l2(0.5).l1(0.3).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).build()).build();
|
||||
net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
|
@@ -191,8 +191,8 @@ public class LayerConfigValidationTest extends BaseDL4JTest {
|
|||
|
||||
//RMSProp Updater
|
||||
conf = NeuralNetConfiguration.builder().updater(new RmsProp(0.3))
|
||||
.layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
|
||||
.layer(1, new DenseLayer.Builder().nIn(2).nOut(2).updater(new RmsProp(0.3, 0.4, RmsProp.DEFAULT_RMSPROP_EPSILON)).build()).build();
|
||||
.layer(0, DenseLayer.builder().nIn(2).nOut(2).build())
|
||||
.layer(1, DenseLayer.builder().nIn(2).nOut(2).updater(new RmsProp(0.3, 0.4, RmsProp.DEFAULT_RMSPROP_EPSILON)).build()).build();
|
||||
net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
||||
|
|
|
@@ -249,7 +249,7 @@ public class CNNProcessorTest extends BaseDL4JTest {
|
|||
.gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
|
||||
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
|
||||
// Building the DL4J network
|
||||
.layer(0, new ConvolutionLayer.Builder(kernelArray, strideArray, zeroPaddingArray)
|
||||
.layer(0, ConvolutionLayer.builder(kernelArray, strideArray, zeroPaddingArray)
|
||||
.name("cnn1")
|
||||
.convolutionMode(ConvolutionMode.Strict)
|
||||
.nIn(2) // 2 input channels
|
||||
|
@@ -258,7 +258,7 @@ public class CNNProcessorTest extends BaseDL4JTest {
|
|||
.activation(Activation.RELU)
|
||||
.biasInit(1e-2).build())
|
||||
|
||||
.layer(1, new ConvolutionLayer.Builder(kernelArray, strideArray, zeroPaddingArray)
|
||||
.layer(1, ConvolutionLayer.builder(kernelArray, strideArray, zeroPaddingArray)
|
||||
.name("cnn2")
|
||||
.convolutionMode(ConvolutionMode.Strict)
|
||||
.nOut(processWidth)
|
||||
|
@@ -267,21 +267,21 @@ public class CNNProcessorTest extends BaseDL4JTest {
|
|||
.biasInit(1e-2)
|
||||
.build())
|
||||
|
||||
.layer(2, new ConvolutionLayer.Builder(kernelArray, strideArray, zeroPaddingArray)
|
||||
.layer(2, ConvolutionLayer.builder(kernelArray, strideArray, zeroPaddingArray)
|
||||
.name("cnn3")
|
||||
.convolutionMode(ConvolutionMode.Strict)
|
||||
.nOut(processWidth)
|
||||
.weightInit(WeightInit.XAVIER_UNIFORM)
|
||||
.activation(Activation.RELU).build())
|
||||
|
||||
.layer(3, new ConvolutionLayer.Builder(kernelArray, strideArray, zeroPaddingArray)
|
||||
.layer(3, ConvolutionLayer.builder(kernelArray, strideArray, zeroPaddingArray)
|
||||
.name("cnn4")
|
||||
.convolutionMode(ConvolutionMode.Strict)
|
||||
.nOut(processWidth)
|
||||
.weightInit(WeightInit.XAVIER_UNIFORM)
|
||||
.activation(Activation.RELU).build())
|
||||
|
||||
.layer(4, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
|
||||
.layer(4, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MSE)
|
||||
.name("output")
|
||||
.nOut(1)
|
||||
.activation(Activation.TANH)
|
||||
|
|
|
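The CNNProcessorTest hunks keep the positional kernel, stride and padding arguments and only move them from the inner Builder constructor to the static factory: new ConvolutionLayer.Builder(k, s, p) becomes ConvolutionLayer.builder(k, s, p). A minimal sketch; the array values and nOut are example numbers and the imports are as in the test file.

int[] kernelArray = {5, 5};
int[] strideArray = {1, 1};
int[] zeroPaddingArray = {0, 0};

ConvolutionLayer cnn1 = ConvolutionLayer.builder(kernelArray, strideArray, zeroPaddingArray)
        .name("cnn1")
        .convolutionMode(ConvolutionMode.Strict)
        .nIn(2)                 // 2 input channels, as in the test
        .nOut(16)               // example channel count
        .weightInit(WeightInit.XAVIER_UNIFORM)
        .activation(Activation.RELU)
        .biasInit(1e-2)
        .build();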
@@ -39,8 +39,8 @@ public class CustomPreprocessorTest extends BaseDL4JTest {
|
|||
//Second: let's create a MultiLayerCofiguration with one, and check JSON and YAML config actually works...
|
||||
NeuralNetConfiguration conf =
|
||||
NeuralNetConfiguration.builder()
|
||||
.layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
|
||||
.layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10)
|
||||
.layer(0, DenseLayer.builder().nIn(10).nOut(10).build())
|
||||
.layer(1, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(10)
|
||||
.activation(Activation.SOFTMAX).nOut(10).build())
|
||||
.inputPreProcessor(0, new MyCustomPreprocessor())
|
||||
.build();
|
||||
|
|
|
@@ -58,7 +58,7 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
|
||||
RnnToFeedForwardPreProcessor proc = new RnnToFeedForwardPreProcessor();
|
||||
NeuralNetConfiguration nnc = NeuralNetConfiguration.builder()
|
||||
.layer(new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(layerSize)
|
||||
.layer(org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(layerSize)
|
||||
.nOut(layerSize).build())
|
||||
.build();
|
||||
|
||||
|
@@ -143,7 +143,7 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
FeedForwardToRnnPreProcessor proc = new FeedForwardToRnnPreProcessor();
|
||||
|
||||
NeuralNetConfiguration nnc = NeuralNetConfiguration.builder()
|
||||
.layer(new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(layerSize)
|
||||
.layer(org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(layerSize)
|
||||
.nOut(layerSize).build())
|
||||
.build();
|
||||
|
||||
|
@@ -227,7 +227,7 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration nnc =
|
||||
NeuralNetConfiguration.builder()
|
||||
.layer(new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
|
||||
.layer(org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder(
|
||||
inputWidth, inputHeight).nIn(cnnNChannelsIn)
|
||||
.nOut(nChannels).build())
|
||||
.build();
|
||||
|
@@ -309,7 +309,7 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration nnc =
|
||||
NeuralNetConfiguration.builder()
|
||||
.layer(new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
|
||||
.layer(org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder(
|
||||
inputWidth, inputHeight).nIn(cnnNChannelsIn)
|
||||
.nOut(nChannels).build())
|
||||
.build();
|
||||
|
@@ -397,12 +397,12 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
//FF->RNN and RNN->FF
|
||||
NeuralNetConfiguration conf1 =
|
||||
NeuralNetConfiguration.builder()
|
||||
.layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(5)
|
||||
.layer(0, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(5)
|
||||
.nOut(6).build())
|
||||
.layer(1, new GravesLSTM.Builder().nIn(6).nOut(7).build())
|
||||
.layer(2, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(7)
|
||||
.layer(1, GravesLSTM.builder().nIn(6).nOut(7).build())
|
||||
.layer(2, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nIn(7)
|
||||
.nOut(8).build())
|
||||
.layer(3, new RnnOutputLayer.Builder().nIn(8).nOut(9).activation(Activation.SOFTMAX).build()).build();
|
||||
.layer(3, RnnOutputLayer.builder().nIn(8).nOut(9).activation(Activation.SOFTMAX).build()).build();
|
||||
//Expect preprocessors: layer1: FF->RNN; 2: RNN->FF; 3: FF->RNN
|
||||
assertEquals(3, conf1.getInputPreProcessors().size());
|
||||
assertTrue(conf1.getInputPreProcess(1) instanceof FeedForwardToRnnPreProcessor);
|
||||
|
@@ -412,10 +412,10 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
|
||||
//FF-> CNN, CNN-> FF, FF->RNN
|
||||
NeuralNetConfiguration conf2 = NeuralNetConfiguration.builder()
|
||||
.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder().nOut(10)
|
||||
.layer(0, org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder().nOut(10)
|
||||
.kernelSize(5, 5).stride(1, 1).build())
|
||||
.layer(1, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nOut(6).build())
|
||||
.layer(2, new RnnOutputLayer.Builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.layer(1, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nOut(6).build())
|
||||
.layer(2, RnnOutputLayer.builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutionalFlat(28, 28, 1)).build();
|
||||
//Expect preprocessors: 0: FF->CNN; 1: CNN->FF; 2: FF->RNN
|
||||
assertEquals(3, conf2.getInputPreProcessors().size());
|
||||
|
@@ -425,10 +425,10 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
|
||||
//CNN-> FF, FF->RNN - InputType.convolutional instead of convolutionalFlat
|
||||
NeuralNetConfiguration conf2a = NeuralNetConfiguration.builder()
|
||||
.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder().nOut(10)
|
||||
.layer(0, org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder().nOut(10)
|
||||
.kernelSize(5, 5).stride(1, 1).build())
|
||||
.layer(1, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nOut(6).build())
|
||||
.layer(2, new RnnOutputLayer.Builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.layer(1, org.deeplearning4j.nn.conf.layers.DenseLayer.builder().nOut(6).build())
|
||||
.layer(2, RnnOutputLayer.builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutional(28, 28, 1)).build();
|
||||
//Expect preprocessors: 1: CNN->FF; 2: FF->RNN
|
||||
assertEquals(2, conf2a.getInputPreProcessors().size());
|
||||
|
@@ -438,10 +438,10 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
|
||||
//FF->CNN and CNN->RNN:
|
||||
NeuralNetConfiguration conf3 = NeuralNetConfiguration.builder().list()
|
||||
.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder().nOut(10)
|
||||
.layer(0, org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder().nOut(10)
|
||||
.kernelSize(5, 5).stride(1, 1).build())
|
||||
.layer(1, new GravesLSTM.Builder().nOut(6).build())
|
||||
.layer(2, new RnnOutputLayer.Builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.layer(1, GravesLSTM.builder().nOut(6).build())
|
||||
.layer(2, RnnOutputLayer.builder().nIn(6).nOut(5).activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutionalFlat(28, 28, 1)).build();
|
||||
//Expect preprocessors: 0: FF->CNN, 1: CNN->RNN;
|
||||
assertEquals(2, conf3.getInputPreProcessors().size());
|
||||
|
@@ -454,16 +454,16 @@ public class TestPreProcessors extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf =
|
||||
NeuralNetConfiguration.builder()
|
||||
.list().layer(0,
|
||||
new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
|
||||
org.deeplearning4j.nn.conf.layers.ConvolutionLayer.builder(
|
||||
4, 4) // 28*28*1 => 15*15*10
|
||||
.nIn(1).nOut(10).padding(2, 2)
|
||||
.stride(2, 2)
|
||||
.weightInit(WeightInit.RELU)
|
||||
.activation(Activation.RELU)
|
||||
.build())
|
||||
.layer(1, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder()
|
||||
.layer(1, org.deeplearning4j.nn.conf.layers.DenseLayer.builder()
|
||||
.activation(Activation.RELU).nOut(200).build())
|
||||
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(200)
|
||||
.layer(2, OutputLayer.builder().lossFunction(LossFunctions.LossFunction.MCXENT).nIn(200)
|
||||
.nOut(5).weightInit(WeightInit.RELU)
|
||||
.activation(Activation.SOFTMAX).build())
|
||||
.inputType(InputType.convolutionalFlat(28, 28, 1))
|
||||
|
|
|
@@ -67,9 +67,9 @@ public class TestWeightNoise extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.weightNoise(wn)
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10).build())
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(new DropConnect(0.25)).build())
|
||||
.layer(new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10).build())
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10).weightNoise(new DropConnect(0.25)).build())
|
||||
.layer(OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@@ -86,9 +86,9 @@ public class TestWeightNoise extends BaseDL4JTest {
|
|||
.weightNoise(wn)
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.layer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
|
||||
.layer("1", new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(new DropConnect(0.25)).build(), "0")
|
||||
.layer("2", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "1")
|
||||
.layer("0", DenseLayer.builder().nIn(10).nOut(10).build(), "in")
|
||||
.layer("1", DenseLayer.builder().nIn(10).nOut(10).weightNoise(new DropConnect(0.25)).build(), "0")
|
||||
.layer("2", OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "1")
|
||||
.setOutputs("2")
|
||||
.build();
|
||||
|
||||
|
@@ -145,9 +145,9 @@ public class TestWeightNoise extends BaseDL4JTest {
|
|||
|
||||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(wn1).build())
|
||||
.layer(new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(wn2).build())
|
||||
.layer(new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).weightNoise(wn3).build())
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10).weightNoise(wn1).build())
|
||||
.layer(DenseLayer.builder().nIn(10).nOut(10).weightNoise(wn2).build())
|
||||
.layer(OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).weightNoise(wn3).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@@ -170,9 +170,9 @@ public class TestWeightNoise extends BaseDL4JTest {
|
|||
ComputationGraphConfiguration conf2 = NeuralNetConfiguration.builder()
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.layer("0", new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(wn1).build(), "in")
|
||||
.layer("1", new DenseLayer.Builder().nIn(10).nOut(10).weightNoise(wn2).build(), "0")
|
||||
.layer("2", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).weightNoise(wn3).build(), "1")
|
||||
.layer("0", DenseLayer.builder().nIn(10).nOut(10).weightNoise(wn1).build(), "in")
|
||||
.layer("1", DenseLayer.builder().nIn(10).nOut(10).weightNoise(wn2).build(), "0")
|
||||
.layer("2", OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).weightNoise(wn3).build(), "1")
|
||||
.setOutputs("2")
|
||||
.build();
|
||||
|
||||
|
@@ -249,7 +249,7 @@ public class TestWeightNoise extends BaseDL4JTest {
|
|||
NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
|
||||
.weightInit(WeightInit.ONES)
|
||||
|
||||
.layer(new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.layer(OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build())
|
||||
.build();
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
net.init();
|
||||
|
|
|
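A short sketch of the weight-noise configuration exercised by TestWeightNoise above, in the new builder style; the DropConnect probabilities are example values and the imports are as in the test file.

NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
        .weightNoise(new DropConnect(0.5))      // network-wide default weight noise
        .layer(DenseLayer.builder().nIn(10).nOut(10).build())
        // Per-layer override of the default weight noise.
        .layer(DenseLayer.builder().nIn(10).nOut(10).weightNoise(new DropConnect(0.25)).build())
        .layer(OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build())
        .build();

MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();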
@@ -305,9 +305,9 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
.updater(new Adam(0.01))
|
||||
.dataType(DataType.DOUBLE)
|
||||
.list()
|
||||
.layer(new DenseLayer.Builder().activation(Activation.TANH).nIn(10).nOut(10).build())
|
||||
.layer(new DenseLayer.Builder().activation(Activation.TANH).nIn(10).nOut(10).build())
|
||||
.layer(new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.layer(DenseLayer.builder().activation(Activation.TANH).nIn(10).nOut(10).build())
|
||||
.layer(DenseLayer.builder().activation(Activation.TANH).nIn(10).nOut(10).build())
|
||||
.layer(OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.build();
|
||||
|
||||
MultiLayerNetwork net = new MultiLayerNetwork(conf);
|
||||
|
@@ -389,9 +389,9 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
.dataType(DataType.DOUBLE)
|
||||
.graphBuilder()
|
||||
.addInputs("in")
|
||||
.layer("l0", new DenseLayer.Builder().activation(Activation.TANH).nIn(10).nOut(10).build(), "in")
|
||||
.layer("l1", new DenseLayer.Builder().activation(Activation.TANH).nIn(10).nOut(10).build(), "l0")
|
||||
.layer("out", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "l1")
|
||||
.layer("l0", DenseLayer.builder().activation(Activation.TANH).nIn(10).nOut(10).build(), "in")
|
||||
.layer("l1", DenseLayer.builder().activation(Activation.TANH).nIn(10).nOut(10).build(), "l0")
|
||||
.layer("out", OutputLayer.builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "l1")
|
||||
.setOutputs("out")
|
||||
.build();
|
||||
|
||||
|
@@ -477,24 +477,24 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
LayerConfiguration secondLast;
|
||||
switch (outputLayer) {
|
||||
case 0:
|
||||
ol = new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = new GlobalPoolingLayer(PoolingType.MAX);
|
||||
ol = OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = GlobalPoolingLayer.builder(PoolingType.MAX).build();
|
||||
break;
|
||||
case 1:
|
||||
ol = new LossLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = new FrozenLayerWithBackprop(new DenseLayer.Builder().nOut(10).activation(Activation.SIGMOID).build());
|
||||
ol = LossLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction()).build();
|
||||
secondLast = FrozenLayerWithBackprop.builder().underlying(DenseLayer.builder().nOut(10).activation(Activation.SIGMOID).build()).build();
|
||||
break;
|
||||
case 2:
|
||||
ol = new CenterLossOutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = new VariationalAutoencoder.Builder().encoderLayerSizes(10).decoderLayerSizes(10).nOut(10).activation(Activation.SIGMOID).build();
|
||||
ol = CenterLossOutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = VariationalAutoencoder.builder().encoderLayerSizes(10).decoderLayerSizes(10).nOut(10).activation(Activation.SIGMOID).build();
|
||||
break;
|
||||
case 3:
|
||||
ol = new CnnLossLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
|
||||
secondLast = new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(3).activation(Activation.TANH).build();
|
||||
ol = CnnLossLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction()).build();
|
||||
secondLast = ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(3).activation(Activation.TANH).build();
|
||||
break;
|
||||
case 4:
|
||||
ol = new Yolo2OutputLayer.Builder().boundingBoxPriors(Nd4j.create(new double[][]{{1.0, 1.0}, {2.0, 2.0}}).castTo(networkDtype)).build();
|
||||
secondLast = new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(14).activation(Activation.TANH).build();
|
||||
ol = Yolo2OutputLayer.builder().boundingBoxes(Nd4j.create(new double[][]{{1.0, 1.0}, {2.0, 2.0}}).castTo(networkDtype)).build();
|
||||
secondLast = ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(14).activation(Activation.TANH).build();
|
||||
break;
|
||||
default:
|
||||
throw new RuntimeException();
|
||||
|
@@ -505,28 +505,28 @@ public class DTypeTests extends BaseDL4JTest {
.dataType(networkDtype)
.convolutionMode(ConvolutionMode.Same)
.updater(new Adam(1e-2))
.list()
.layer(new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(3).activation(Activation.TANH).build())
.layer(new LocalResponseNormalization())
.layer(new DropoutLayer(0.5))
.layer(new DropoutLayer(new AlphaDropout(0.5)))
.layer(new DropoutLayer(new GaussianDropout(0.5)))
.layer(new DropoutLayer(new GaussianNoise(0.1)))
.layer(new DropoutLayer(new SpatialDropout(0.5)))
.layer(new SubsamplingLayer.Builder().poolingType(SubsamplingLayer.PoolingType.AVG).kernelSize(3, 3).stride(2, 2).build())
.layer(new Pooling2D.Builder().poolingType(SubsamplingLayer.PoolingType.AVG).kernelSize(2, 2).stride(1, 1).build())
.layer(new Deconvolution2D.Builder().kernelSize(2, 2).stride(2, 2).nOut(3).activation(Activation.TANH).build())
// .layer(new LocallyConnected2D.Builder().nOut(3).kernelSize(2,2).stride(1,1).activation(Activation.SIGMOID).build()) //EXCEPTION
.layer(new ZeroPaddingLayer(1, 1))
.layer(new Cropping2D(1, 1))
.layer(new IdentityLayer())
.layer(new Upsampling2D.Builder().size(2).build())
.layer(new SubsamplingLayer.Builder().kernelSize(2, 2).stride(2, 2).build())
.layer(new DepthwiseConvolution2D.Builder().nOut(3).activation(Activation.RELU).build())
.layer(new SeparableConvolution2D.Builder().nOut(3).activation(Activation.HARDTANH).build())
.layer(new MaskLayer())
.layer(new BatchNormalization.Builder().build())
.layer(new ActivationLayer(Activation.LEAKYRELU))

.layer(ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(3).activation(Activation.TANH).build())
.layer(LocalResponseNormalization.builder())
.layer(DropoutLayer.builder(0.5).build())
.layer(DropoutLayer.builder(new AlphaDropout(0.5)).build())
.layer(DropoutLayer.builder(new GaussianDropout(0.5)).build())
.layer(DropoutLayer.builder(new GaussianNoise(0.1)).build())
.layer(DropoutLayer.builder(new SpatialDropout(0.5)).build())
.layer(SubsamplingLayer.builder().poolingType(SubsamplingLayer.PoolingType.AVG.toPoolingType()).kernelSize(3, 3).stride(2, 2).build())
.layer(Pooling2D.builder().poolingType(SubsamplingLayer.PoolingType.AVG.toPoolingType()).kernelSize(2, 2).stride(1, 1).build())
.layer(Deconvolution2D.builder().kernelSize(2, 2).stride(2, 2).nOut(3).activation(Activation.TANH).build())
// .layer(LocallyConnected2D.builder().nOut(3).kernelSize(2,2).stride(1,1).activation(Activation.SIGMOID).build()) //EXCEPTION
.layer(ZeroPaddingLayer.builder(1, 1).build())
.layer(Cropping2D.builder(1, 1).build())
.layer(IdentityLayer.builder())
.layer(Upsampling2D.builder().size(2).build())
.layer(SubsamplingLayer.builder().kernelSize(2, 2).stride(2, 2).build())
.layer(DepthwiseConvolution2D.builder().nOut(3).activation(Activation.RELU).build())
.layer(SeparableConvolution2D.builder().nOut(3).activation(Activation.HARDTANH).build())
.layer(MaskLayer.builder())
.layer(BatchNormalization.builder().build())
.layer(ActivationLayer.builder().activation(Activation.LEAKYRELU).build())
.layer(secondLast)
.layer(ol)
.inputType(InputType.convolutionalFlat(8, 8, 1))
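// Illustrative sketch, not part of the commit: the hunk above also converts layers that used to be
// constructed directly (new DropoutLayer(...), new ZeroPaddingLayer(1, 1)) and, for the pooling
// builders, routes the legacy SubsamplingLayer.PoolingType enum through toPoolingType(). A minimal
// self-contained example under those assumptions; the class name DropoutPoolingSketch and the
// standard DL4J/ND4J package names are illustrative only.
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.dropout.GaussianDropout;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.DropoutLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.conf.layers.SubsamplingLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class DropoutPoolingSketch {
    public static void main(String[] args) {
        NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                .list()
                .layer(DropoutLayer.builder(0.5).build())                      // plain dropout probability
                .layer(DropoutLayer.builder(new GaussianDropout(0.5)).build()) // any IDropout implementation
                .layer(SubsamplingLayer.builder()
                        .poolingType(SubsamplingLayer.PoolingType.AVG.toPoolingType()) // legacy enum converted
                        .kernelSize(3, 3).stride(2, 2).build())
                .layer(OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                .inputType(InputType.convolutionalFlat(8, 8, 1))
                .build();
        System.out.println(conf); // only builds the configuration
    }
}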
@@ -603,16 +603,16 @@ public class DTypeTests extends BaseDL4JTest {
LayerConfiguration secondLast;
switch (outputLayer) {
case 0:
ol = new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = new GlobalPoolingLayer(PoolingType.AVG);
ol = OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = GlobalPoolingLayer.builder(PoolingType.AVG).build();
break;
case 1:
ol = new Cnn3DLossLayer.Builder(Convolution3D.DataFormat.NCDHW).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = new Convolution3D.Builder().nOut(3).activation(Activation.ELU).build();
ol = Cnn3DLossLayer.builder().dataFormat(Convolution3D.DataFormat.NCDHW).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction()).build();
secondLast = Convolution3D.builder().nOut(3).activation(Activation.ELU).build();
break;
case 2:
ol = new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = new Convolution3D.Builder().nOut(3).activation(Activation.ELU).build();
ol = OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = Convolution3D.builder().nOut(3).activation(Activation.ELU).build();
break;
default:
throw new RuntimeException();
@@ -623,15 +623,15 @@ public class DTypeTests extends BaseDL4JTest {
.dataType(networkDtype)
.convolutionMode(ConvolutionMode.Same)
.updater(new Nesterovs(1e-2, 0.9))
.list()
.layer(new Convolution3D.Builder().kernelSize(2, 2, 2).stride(1, 1, 1).nOut(3).activation(Activation.TANH).build())
.layer(new Convolution3D.Builder().kernelSize(2, 2, 2).stride(1, 1, 1).nOut(3).activation(Activation.TANH).build())
.layer(new Subsampling3DLayer.Builder().poolingType(PoolingType.AVG).kernelSize(2, 2, 2).stride(2, 2, 2).build())
.layer(new Deconvolution3D.Builder().kernelSize(2,2,2).stride(1,1,1).nIn(3).nOut(3).activation(Activation.TANH).build())
.layer(new Cropping3D.Builder(1, 1, 1, 1, 1, 1).build())
.layer(new ZeroPadding3DLayer.Builder(1, 1, 1, 1, 1, 1).build())
.layer(new ActivationLayer(Activation.LEAKYRELU))
.layer(new Upsampling3D.Builder().size(2).build())

.layer(Convolution3D.builder().kernelSize(2, 2, 2).stride(1, 1, 1).nOut(3).activation(Activation.TANH).build())
.layer(Convolution3D.builder().kernelSize(2, 2, 2).stride(1, 1, 1).nOut(3).activation(Activation.TANH).build())
.layer(Subsampling3DLayer.builder().poolingType(PoolingType.AVG).kernelSize(2, 2, 2).stride(2, 2, 2).build())
.layer(Deconvolution3D.builder().kernelSize(2,2,2).stride(1,1,1).nIn(3).nOut(3).activation(Activation.TANH).build())
.layer(Cropping3D.builder(1, 1, 1, 1, 1, 1).build())
.layer(ZeroPadding3DLayer.builder(1, 1, 1, 1, 1, 1).build())
.layer(ActivationLayer.builder(Activation.LEAKYRELU).build())
.layer(Upsampling3D.builder().size(2).build())
.layer(secondLast)
.layer(ol)
.inputType(InputType.convolutional3D(Convolution3D.DataFormat.NCDHW, 8, 8, 8, 1))
@@ -714,16 +714,16 @@ public class DTypeTests extends BaseDL4JTest {
LayerConfiguration secondLast;
switch (outputLayer) {
case 0:
ol = new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = new GlobalPoolingLayer(PoolingType.MAX);
ol = OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = GlobalPoolingLayer.builder(PoolingType.MAX).build();
break;
case 1:
ol = new RnnOutputLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).nOut(5).build();
secondLast = new Convolution1D.Builder().kernelSize(2).nOut(5).build();
ol = RnnOutputLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).nOut(5).build();
secondLast = Convolution1D.builder().kernelSize(2).nOut(5).build();
break;
case 2:
ol = new RnnLossLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = new Convolution1D.Builder().kernelSize(2).nOut(5).build();
ol = RnnLossLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction()).build();
secondLast = Convolution1D.builder().kernelSize(2).nOut(5).build();
break;
default:
throw new RuntimeException();
@@ -737,14 +737,14 @@ public class DTypeTests extends BaseDL4JTest {
.convolutionMode(ConvolutionMode.Same)
.updater(new Adam(1e-2))
.list()
.layer(new Convolution1D.Builder()
.layer(Convolution1D.builder()
.kernelSize(2)
.stride(1).nOut(3).
activation(Activation.TANH).build())
.layer(new Subsampling1DLayer.Builder().poolingType(PoolingType.MAX).kernelSize(5).stride(1).build())
.layer(new Cropping1D.Builder(1).build())
.layer(new ZeroPadding1DLayer(1))
.layer(new Upsampling1D.Builder(2).build())
.layer(Subsampling1DLayer.builder().poolingType(PoolingType.MAX).kernelSize(5).stride(1).build())
.layer(Cropping1D.builder(1).build())
.layer(ZeroPadding1DLayer.builder(1).build())
.layer(Upsampling1D.builder(2).build())
.layer(secondLast)
.layer(ol)
.inputType(InputType.recurrent(5, 10,RNNFormat.NCW))
@@ -817,9 +817,9 @@ public class DTypeTests extends BaseDL4JTest {
.convolutionMode(ConvolutionMode.Same)
.updater(new Adam(1e-2))
.list()
.layer(new SpaceToBatchLayer.Builder().blocks(1, 1).build())
.layer(new SpaceToDepthLayer.Builder().blocks(2).build())
.layer(new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
.layer(SpaceToBatchLayer.builder().blockSize(1, 1).build())
.layer(SpaceToDepthLayer.builder().blockSize(2).build())
.layer(OutputLayer.builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
.inputType(InputType.convolutional(28, 28, 5))
.build();
@@ -880,16 +880,16 @@ public class DTypeTests extends BaseDL4JTest {
LayerConfiguration secondLast;
switch (outputLayer) {
case 0:
ol = new RnnOutputLayer.Builder().nOut(5).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = new SimpleRnn.Builder().nOut(5).activation(Activation.TANH).build();
ol = RnnOutputLayer.builder().nOut(5).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = SimpleRnn.builder().nOut(5).activation(Activation.TANH).build();
break;
case 1:
ol = new RnnLossLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = new SimpleRnn.Builder().nOut(5).activation(Activation.TANH).build();
ol = RnnLossLayer.builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
secondLast = SimpleRnn.builder().nOut(5).activation(Activation.TANH).build();
break;
case 2:
ol = new OutputLayer.Builder().nOut(5).build();
secondLast = new LastTimeStep(new SimpleRnn.Builder().nOut(5).activation(Activation.TANH).build());
ol = OutputLayer.builder().nOut(5).build();
secondLast = LastTimeStep.builder().underlying(SimpleRnn.builder().nOut(5).activation(Activation.TANH).build()).build();
break;
default:
throw new RuntimeException();
@@ -899,15 +899,15 @@ public class DTypeTests extends BaseDL4JTest {
.dataType(networkDtype)
.convolutionMode(ConvolutionMode.Same)
.updater(new Adam(1e-2))
.list()
.layer(new LSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build())
.layer(new GravesLSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build())
.layer(new DenseLayer.Builder().nOut(5).build())
.layer(new GravesBidirectionalLSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build())
.layer(new Bidirectional(new LSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build()))
.layer(new TimeDistributed(new DenseLayer.Builder().nIn(10).nOut(5).activation(Activation.TANH).build()))
.layer(new SimpleRnn.Builder().nIn(5).nOut(5).build())
.layer(new MaskZeroLayer.Builder().underlying(new SimpleRnn.Builder().nIn(5).nOut(5).build()).maskValue(0.0).build())

.layer(LSTM.builder().nIn(5).nOut(5).activation(Activation.TANH).build())
.layer(GravesLSTM.builder().nIn(5).nOut(5).activation(Activation.TANH).build())
.layer(DenseLayer.builder().nOut(5).build())
.layer(GravesBidirectionalLSTM.builder().nIn(5).nOut(5).activation(Activation.TANH).build())
.layer(Bidirectional.builder(LSTM.builder().nIn(5).nOut(5).activation(Activation.TANH).build()).build())
.layer(TimeDistributed.builder().underlying(DenseLayer.builder().nIn(10).nOut(5).activation(Activation.TANH).build()).build())
.layer(SimpleRnn.builder().nIn(5).nOut(5).build())
.layer(MaskZeroLayer.builder().underlying(SimpleRnn.builder().nIn(5).nOut(5).build()).maskingValue(0.0).build())
.layer(secondLast)
.layer(ol)
.build();
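// Illustrative sketch, not part of the commit: the recurrent hunk above also changes how wrapper
// layers receive the layer they wrap. Constructor wrapping (new Bidirectional(...), new
// TimeDistributed(...), new MaskZeroLayer.Builder().underlying(...).maskValue(0.0)) becomes either
// a builder argument (Bidirectional.builder(inner)) or .underlying(inner), and maskValue is renamed
// maskingValue. A minimal self-contained example under those assumptions; the class name
// WrapperLayerSketch and the standard DL4J/ND4J package names are illustrative only.
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.LSTM;
import org.deeplearning4j.nn.conf.layers.recurrent.Bidirectional;
import org.deeplearning4j.nn.conf.layers.recurrent.SimpleRnn;
import org.deeplearning4j.nn.conf.layers.recurrent.TimeDistributed;
import org.deeplearning4j.nn.conf.layers.util.MaskZeroLayer;
import org.nd4j.linalg.activations.Activation;

public class WrapperLayerSketch {
    public static void main(String[] args) {
        NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                .list()
                .layer(Bidirectional.builder(LSTM.builder().nIn(5).nOut(5).activation(Activation.TANH).build()).build())
                .layer(TimeDistributed.builder()
                        .underlying(DenseLayer.builder().nIn(10).nOut(5).activation(Activation.TANH).build()).build())
                .layer(MaskZeroLayer.builder()
                        .underlying(SimpleRnn.builder().nIn(5).nOut(5).build())
                        .maskingValue(0.0).build())
                .build();
        System.out.println(conf); // only builds the configuration
    }
}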
@@ -986,14 +986,14 @@ public class DTypeTests extends BaseDL4JTest {
.updater(new NoOp())
.dist(new UniformDistribution(-6, 6))

.layer(new PrimaryCapsules.Builder(primaryCapsDim, primarpCapsChannel)
.layer(PrimaryCapsules.builder(primaryCapsDim, primarpCapsChannel)
.kernelSize(3, 3)
.stride(2, 2)
.build())
.layer(new CapsuleLayer.Builder(capsule, capsuleDim, routing).build())
.layer(new CapsuleStrengthLayer.Builder().build())
.layer(new ActivationLayer.Builder(new ActivationSoftmax()).build())
.layer(new LossLayer.Builder(new LossNegativeLogLikelihood()).build())
.layer(CapsuleLayer.builder(capsule, capsuleDim, routing).build())
.layer(CapsuleStrengthLayer.builder().build())
.layer(ActivationLayer.builder(new ActivationSoftmax()).build())
.layer(LossLayer.builder().lossFunction(new LossNegativeLogLikelihood()).build())
.inputType(InputType.convolutional(height, width, inputDepth))
.build();
@@ -1062,33 +1062,33 @@ public class DTypeTests extends BaseDL4JTest {
INDArray input;
if (test == 0) {
if (frozen) {
conf.layer("0", new FrozenLayer(new EmbeddingLayer.Builder().nIn(5).nOut(5).build()), "in");
conf.layer("0", FrozenLayer.builder(EmbeddingLayer.builder().nIn(5).nOut(5).build()).build(), "in");
} else {
conf.layer("0", new EmbeddingLayer.Builder().nIn(5).nOut(5).build(), "in");
conf.layer("0", EmbeddingLayer.builder().nIn(5).nOut(5).build(), "in");
}

input = Nd4j.zeros(networkDtype, 10, 1).muli(5).castTo(DataType.INT);
conf.setInputTypes(InputType.feedForward(1));
} else if (test == 1) {
if (frozen) {
conf.layer("0", new FrozenLayer(new EmbeddingSequenceLayer.Builder().nIn(5).nOut(5).build()), "in");
conf.layer("0", FrozenLayer.builder(EmbeddingSequenceLayer.builder().nIn(5).nOut(5).build()).build(), "in");
} else {
conf.layer("0", new EmbeddingSequenceLayer.Builder().nIn(5).nOut(5).build(), "in");
conf.layer("0", EmbeddingSequenceLayer.builder().nIn(5).nOut(5).build(), "in");
}
conf.layer("gp", new GlobalPoolingLayer.Builder(PoolingType.PNORM).pnorm(2).poolingDimensions(2).build(), "0");
conf.layer("gp", GlobalPoolingLayer.builder(PoolingType.PNORM).pnorm(2).poolingDimensions(2).build(), "0");
input = Nd4j.zeros(networkDtype, 10, 1, 5).muli(5).castTo(DataType.INT);
conf.setInputTypes(InputType.recurrent(1));
} else {
conf.layer("0", new RepeatVector.Builder().repetitionFactor(5).nOut(5).build(), "in");
conf.layer("gp", new GlobalPoolingLayer.Builder(PoolingType.SUM).build(), "0");
conf.layer("0", RepeatVector.builder().repetitionFactor(5).nOut(5).build(), "in");
conf.layer("gp", GlobalPoolingLayer.builder(PoolingType.SUM).build(), "0");
input = Nd4j.zeros(networkDtype, 10, 5);
conf.setInputTypes(InputType.feedForward(5));
}

conf.appendLayer("el", new ElementWiseMultiplicationLayer.Builder().nOut(5).build())
.appendLayer("ae", new AutoEncoder.Builder().nOut(5).build())
.appendLayer("prelu", new PReLULayer.Builder().nOut(5).inputShape(5).build())
.appendLayer("out", new OutputLayer.Builder().nOut(10).build());
conf.appendLayer("el", ElementWiseMultiplicationLayer.builder().nOut(5).build())
.appendLayer("ae", AutoEncoder.builder().nOut(5).build())
.appendLayer("prelu", PReLULayer.builder().nOut(5).inputShape(5).build())
.appendLayer("out", OutputLayer.builder().nOut(10).build());

ComputationGraph net = new ComputationGraph(conf.build());
net.init();
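// Illustrative sketch, not part of the commit: frozen wrappers in the hunk above follow the same
// pattern, passing the wrapped configuration to FrozenLayer.builder(...) instead of the
// FrozenLayer(layer) constructor (FrozenLayerWithBackprop uses .underlying(...) in an earlier
// hunk). Self-contained example under those assumptions; the class name FrozenLayerSketch and the
// standard DL4J package names are illustrative only.
import org.deeplearning4j.nn.conf.layers.EmbeddingLayer;
import org.deeplearning4j.nn.conf.layers.misc.FrozenLayer;

public class FrozenLayerSketch {
    public static void main(String[] args) {
        // old: new FrozenLayer(new EmbeddingLayer.Builder().nIn(5).nOut(5).build())
        // new: FrozenLayer.builder(EmbeddingLayer.builder().nIn(5).nOut(5).build()).build()
        System.out.println(FrozenLayer.builder(EmbeddingLayer.builder().nIn(5).nOut(5).build()).build());
    }
}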
@@ -1153,34 +1153,34 @@ public class DTypeTests extends BaseDL4JTest {
switch (test) {
case 0:
b.addInputs("in")
.addLayer("l", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(1).build(), "in")
.addLayer("l", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(1).build(), "in")
.addVertex("preproc", new PreprocessorVertex(new CnnToRnnPreProcessor(28, 28, 1)), "l")
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "preproc")
.addLayer("out", OutputLayer.builder().nOut(10).build(), "preproc")
.setInputTypes(InputType.convolutional(28, 28, 1))
.setOutputs("out");
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 28, 28)};
break;
case 1:
b.addInputs("in")
.addLayer("l", new DenseLayer.Builder().nOut(16).build(), "in")
.addLayer("l", DenseLayer.builder().nOut(16).build(), "in")
.addVertex("preproc", new PreprocessorVertex(new FeedForwardToCnn3DPreProcessor(2, 2, 2, 2, true)), "l")
.addVertex("preproc2", new PreprocessorVertex(new PermutePreprocessor(0, 2, 3, 4, 1)), "preproc")
.addVertex("preproc3", new PreprocessorVertex(new ReshapePreprocessor(new long[]{2, 2, 2, 2}, new long[]{16}, false)), "preproc2")
.addLayer("out", new OutputLayer.Builder().nIn(16).nOut(10).build(), "preproc3")
.addLayer("out", OutputLayer.builder().nIn(16).nOut(10).build(), "preproc3")
.setInputTypes(InputType.feedForward(5))
.setOutputs("out");
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5)};
break;
case 2:
b.addInputs("in")
.addLayer("1", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(1).build(), "in")
.addLayer("1", ConvolutionLayer.builder().kernelSize(2, 2).stride(1, 1).nOut(1).build(), "in")
.addVertex("1a", new PoolHelperVertex(), "1")
.addVertex("2", new ShiftVertex(1), "1a")
.addVertex("3", new ScaleVertex(2), "2")
.addVertex("4", new ReshapeVertex(2, -1), "3")
.addVertex("5", new SubsetVertex(0, 99), "4")
.addVertex("6", new L2NormalizeVertex(), "5")
.addLayer("out", new OCNNOutputLayer.Builder().hiddenLayerSize(10).nIn(100).build(), "6")
.addLayer("out",OCNNOutputLayer.builder().hiddenLayerSize(10).nIn(100).build(), "6")
.setInputTypes(InputType.convolutional(28, 28, 1))
.setOutputs("out");
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 28, 28)};
|
@ -1193,23 +1193,23 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
.addVertex("3", new StackVertex(), "2a", "2b")
|
||||
.addVertex("4", new DuplicateToTimeSeriesVertex("in3"), "3")
|
||||
.addVertex("5", new ReverseTimeSeriesVertex(), "4")
|
||||
.addLayer("6", new GlobalPoolingLayer(PoolingType.AVG), "5")
|
||||
.addLayer("6", GlobalPoolingLayer.builder(PoolingType.AVG).build(), "5")
|
||||
.addVertex("7", new LastTimeStepVertex("in3"), "in3")
|
||||
.addVertex("8", new MergeVertex(), "6", "7")
|
||||
.addVertex("9", new PreprocessorVertex(new ComposableInputPreProcessor()), "8")
|
||||
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "9")
|
||||
.addLayer("out", OutputLayer.builder().nOut(10).build(), "9")
|
||||
.setInputTypes(InputType.feedForward(8), InputType.feedForward(8), InputType.recurrent(8))
|
||||
.setOutputs("out");
|
||||
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 8), Nd4j.rand(networkDtype, 2, 8), Nd4j.rand(networkDtype, 2, 8, 5)};
|
||||
break;
|
||||
case 4:
|
||||
b.addInputs("in1", "in2")
|
||||
.addLayer("1", new LSTM.Builder().nOut(8).build(), "in1")
|
||||
.addLayer("1", LSTM.builder().nOut(8).build(), "in1")
|
||||
.addVertex("preproc1", new PreprocessorVertex(new RnnToCnnPreProcessor(2, 2, 2)), "1")
|
||||
.addVertex("preproc2", new PreprocessorVertex(new CnnToRnnPreProcessor(2, 2, 2)), "preproc1")
|
||||
.addLayer("pool", new GlobalPoolingLayer(), "preproc2")
|
||||
.addLayer("pool2", new GlobalPoolingLayer(), "in2")
|
||||
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "pool", "pool2")
|
||||
.addLayer("pool", GlobalPoolingLayer.builder().build(), "preproc2")
|
||||
.addLayer("pool2", GlobalPoolingLayer.builder().build(), "in2")
|
||||
.addLayer("out", OutputLayer.builder().nOut(10).build(), "pool", "pool2")
|
||||
.setInputTypes(InputType.recurrent(8), InputType.convolutional(28, 28, 1))
|
||||
.setOutputs("out");
|
||||
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 8, 5), Nd4j.rand(networkDtype, 2, 1, 28, 28)};
|
||||
|
@@ -1217,28 +1217,28 @@ public class DTypeTests extends BaseDL4JTest {
case 5:
b.addInputs("in1", "in2")
.addVertex("fv", new FrozenVertex(new ScaleVertex(2.0)), "in1")
.addLayer("1", new DenseLayer.Builder().nOut(5).build(), "fv")
.addLayer("2", new DenseLayer.Builder().nOut(5).build(), "in2")
.addLayer("1", DenseLayer.builder().nOut(5).build(), "fv")
.addLayer("2", DenseLayer.builder().nOut(5).build(), "in2")
.addVertex("v", new L2Vertex(), "1", "2")
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "v")
.addLayer("out", OutputLayer.builder().nOut(10).build(), "v")
.setInputTypes(InputType.feedForward(5), InputType.feedForward(5))
.setOutputs("out");
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5), Nd4j.rand(networkDtype, 2, 5)};
break;
case 6:
b.addInputs("in")
.addLayer("1", new LSTM.Builder().nOut(5).build(), "in")
.addLayer("1", LSTM.builder().nOut(5).build(), "in")
.addVertex("2", new PreprocessorVertex(new KerasFlattenRnnPreprocessor(5, 4)), "1")
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "2")
.addLayer("out", OutputLayer.builder().nOut(10).build(), "2")
.setOutputs("out")
.setInputTypes(InputType.recurrent(5, 4));
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5, 4)};
break;
case 7:
b.addInputs("in")
.addLayer("1", new ConvolutionLayer.Builder().kernelSize(2, 2).nOut(5).convolutionMode(ConvolutionMode.Same).build(), "in")
.addLayer("1", ConvolutionLayer.builder().kernelSize(2, 2).nOut(5).convolutionMode(ConvolutionMode.Same).build(), "in")
.addVertex("2", new PreprocessorVertex(new CnnToFeedForwardPreProcessor(28, 28, 5)), "1")
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "2")
.addLayer("out", OutputLayer.builder().nOut(10).build(), "2")
.setOutputs("out")
.setInputTypes(InputType.convolutional(28, 28, 1));
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 28, 28)};
|
@ -1311,9 +1311,9 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
switch (test) {
|
||||
case 0:
|
||||
b.addInputs("in")
|
||||
.addLayer("1", new LSTM.Builder().nOut(5).build(), "in")
|
||||
.addLayer("2", new LocallyConnected1D.Builder().kernelSize(2).nOut(4).build(), "1")
|
||||
.addLayer("out", new RnnOutputLayer.Builder().nOut(10).build(), "2")
|
||||
.addLayer("1", LSTM.builder().nOut(5).build(), "in")
|
||||
.addLayer("2", LocallyConnected1D.builder().kernelSize(2).nOut(4).build(), "1")
|
||||
.addLayer("out", RnnOutputLayer.builder().nOut(10).build(), "2")
|
||||
.setOutputs("out")
|
||||
.setInputTypes(InputType.recurrent(5, 2));
|
||||
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5, 2)};
|
||||
|
@ -1321,9 +1321,9 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
break;
|
||||
case 1:
|
||||
b.addInputs("in")
|
||||
.addLayer("1", new ConvolutionLayer.Builder().kernelSize(2, 2).nOut(5).convolutionMode(ConvolutionMode.Same).build(), "in")
|
||||
.addLayer("2", new LocallyConnected2D.Builder().kernelSize(2, 2).nOut(5).build(), "1")
|
||||
.addLayer("out", new OutputLayer.Builder().nOut(10).build(), "2")
|
||||
.addLayer("1", ConvolutionLayer.builder().kernelSize(2, 2).nOut(5).convolutionMode(ConvolutionMode.Same).build(), "in")
|
||||
.addLayer("2", LocallyConnected2D.builder().kernelSize(2, 2).nOut(5).build(), "1")
|
||||
.addLayer("out", OutputLayer.builder().nOut(10).build(), "2")
|
||||
.setOutputs("out")
|
||||
.setInputTypes(InputType.convolutional(8, 8, 1));
|
||||
in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 8, 8)};
|
||||
|
@ -1399,12 +1399,12 @@ public class DTypeTests extends BaseDL4JTest {
|
|||
.updater(new NoOp())
|
||||
.weightInit(WeightInit.XAVIER)
|
||||
.list()
|
||||
.layer(new LSTM.Builder().nOut(layerSize).build())
|
||||
.layer(new SelfAttentionLayer.Builder().nOut(8).nHeads(2).projectInput(true).build())
|
||||
.layer(new LearnedSelfAttentionLayer.Builder().nOut(8).nHeads(2).nQueries(numQueries).projectInput(true).build())
|
||||
.layer(new RecurrentAttentionLayer.Builder().nIn(layerSize).nOut(layerSize).nHeads(1).projectInput(false).hasBias(false).build())
|
||||
.layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build())
|
||||
.layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.layer(LSTM.builder().nOut(layerSize).build())
|
||||
.layer(SelfAttentionLayer.builder().nOut(8).nHeads(2).projectInput(true).build())
|
||||
.layer(LearnedSelfAttentionLayer.builder().nOut(8).nHeads(2).nQueries(numQueries).projectInput(true).build())
|
||||
.layer(RecurrentAttentionLayer.builder().nIn(layerSize).nOut(layerSize).nHeads(1).projectInput(false).hasBias(false).build())
|
||||
.layer(GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build())
|
||||
.layer(OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX)
|
||||
.lossFunction(LossFunctions.LossFunction.MCXENT).build())
|
||||
.inputType(InputType.recurrent(nIn))
|
||||
.build();
|
||||
|
@@ -1487,12 +1487,12 @@ public class DTypeTests extends BaseDL4JTest {
.weightInit(WeightInit.XAVIER)
.graphBuilder()
.addInputs("input")
.addLayer("lstmKeys", new LSTM.Builder().nOut(layerSize).build(), "input")
.addLayer("lstmQueries", new LSTM.Builder().nOut(layerSize).build(), "input")
.addLayer("lstmValues", new LSTM.Builder().nOut(layerSize).build(), "input")
.addLayer("lstmKeys", LSTM.builder().nOut(layerSize).build(), "input")
.addLayer("lstmQueries", LSTM.builder().nOut(layerSize).build(), "input")
.addLayer("lstmValues", LSTM.builder().nOut(layerSize).build(), "input")
.addVertex("attention", new AttentionVertex.Builder().nOut(8).nHeads(2).projectInput(true).nInQueries(layerSize).nInKeys(layerSize).nInValues(layerSize).build(), "lstmQueries", "lstmKeys", "lstmValues")
.addLayer("pooling", new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build(), "attention")
.addLayer("output", new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
.addLayer("pooling", GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build(), "attention")
.addLayer("output", OutputLayer.builder().nOut(nOut).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
.setOutputs("output")
.setInputTypes(InputType.recurrent(nIn))
.build();
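// Illustrative sketch, not part of the commit: in the ComputationGraph hunks above, layer
// configurations move to the static builder() style while graph vertices (AttentionVertex,
// PreprocessorVertex, MergeVertex, ...) keep their existing constructors and Builder classes.
// A minimal self-contained graph under those assumptions; the class name GraphBuilderSketch,
// the sizes, and the standard DL4J/ND4J package names are illustrative only.
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.GlobalPoolingLayer;
import org.deeplearning4j.nn.conf.layers.LSTM;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.conf.layers.PoolingType;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class GraphBuilderSketch {
    public static void main(String[] args) {
        ComputationGraph net = new ComputationGraph(NeuralNetConfiguration.builder()
                .graphBuilder()
                .addInputs("input")
                .addLayer("lstm", LSTM.builder().nOut(8).build(), "input")
                .addLayer("pooling", GlobalPoolingLayer.builder().poolingType(PoolingType.MAX).build(), "lstm")
                .addLayer("output", OutputLayer.builder().nOut(3).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "pooling")
                .setOutputs("output")
                .setInputTypes(InputType.recurrent(4))
                .build());
        net.init();
    }
}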