diff --git a/.old/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/test/java/org/deeplearning4j/spark/impl/misc/TestFrozenLayers.java b/.old/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/test/java/org/deeplearning4j/spark/impl/misc/TestFrozenLayers.java
index f0d15745d..87493404e 100644
--- a/.old/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/test/java/org/deeplearning4j/spark/impl/misc/TestFrozenLayers.java
+++ b/.old/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/test/java/org/deeplearning4j/spark/impl/misc/TestFrozenLayers.java
@@ -74,7 +74,7 @@ public class TestFrozenLayers extends BaseSparkTest {
         MultiLayerNetwork withFrozen = new TransferLearning.Builder(origModel).fineTuneConfiguration(finetune)
                         .setFeatureExtractor(1).build();
 
-        Map m = withFrozen.paramTable();
+        Map m = withFrozen.getParamTable();
         Map pCopy = new HashMap<>();
         for (Map.Entry entry : m.entrySet()) {
             pCopy.put(entry.getKey(), entry.getValue().dup());
@@ -110,7 +110,7 @@ public class TestFrozenLayers extends BaseSparkTest {
 
         MultiLayerNetwork fitted = sNet.getNetwork();
 
-        Map fittedParams = fitted.paramTable();
+        Map fittedParams = fitted.getParamTable();
         for (Map.Entry entry : fittedParams.entrySet()) {
             INDArray orig = pCopy.get(entry.getKey());
@@ -151,7 +151,7 @@ public class TestFrozenLayers extends BaseSparkTest {
         ComputationGraph withFrozen = new TransferLearning.GraphBuilder(origModel).fineTuneConfiguration(finetune)
                         .setFeatureExtractor("1").build();
 
-        Map m = withFrozen.paramTable();
+        Map m = withFrozen.getParamTable();
         Map pCopy = new HashMap<>();
         for (Map.Entry entry : m.entrySet()) {
             pCopy.put(entry.getKey(), entry.getValue().dup());
@@ -187,7 +187,7 @@ public class TestFrozenLayers extends BaseSparkTest {
 
         ComputationGraph fitted = sNet.getNetwork();
 
-        Map fittedParams = fitted.paramTable();
+        Map fittedParams = fitted.getParamTable();
         for (Map.Entry entry : fittedParams.entrySet()) {
             INDArray orig = pCopy.get(entry.getKey());
diff --git a/brutex-extended-tests/src/test/java/net/brutex/gan/GAN.java b/brutex-extended-tests/src/test/java/net/brutex/gan/GAN.java
index 659c6ab32..b1e780d59 100644
--- a/brutex-extended-tests/src/test/java/net/brutex/gan/GAN.java
+++ b/brutex-extended-tests/src/test/java/net/brutex/gan/GAN.java
@@ -200,8 +200,8 @@ public class GAN {
         Layer[] disLayers = ganDiscriminator.getLayers();
         Layer[] layers = ArrayUtils.addAll(genLayers, disLayers);
 
-        NeuralNetConfiguration genConf = generator.getConfiguration();
-        NeuralNetConfiguration disConf = ganDiscriminator.getConfiguration();
+        NeuralNetConfiguration genConf = generator.getNetConfiguration();
+        NeuralNetConfiguration disConf = ganDiscriminator.getNetConfiguration();
 
         LayerConfiguration[] confLayers = new LayerConfiguration[layers.length];
         Map preProcessors = new HashMap<>();
diff --git a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/IntegrationTestRunner.java b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/IntegrationTestRunner.java
index 870f4022a..e68751c1b 100644
--- a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/IntegrationTestRunner.java
+++ b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/IntegrationTestRunner.java
@@ -190,7 +190,7 @@ public class IntegrationTestRunner {
             m = mln;
 
             MultiLayerNetwork loaded = MultiLayerNetwork.load(savedModel, true);
-            assertEquals(loaded.getConfiguration(), mln.getConfiguration(), "Configs not equal");
+            assertEquals(loaded.getNetConfiguration(), mln.getNetConfiguration(), "Configs not equal");
             assertEquals( loaded.params(), mln.params(), "Params not equal");
             assertEquals( loaded.getParamTable(), mln.getParamTable(), "Param table not equal");
         } else if(config instanceof ComputationGraphConfiguration ){
@@ -202,7 +202,7 @@ public class IntegrationTestRunner {
             ComputationGraph loaded = ComputationGraph.load(savedModel, true);
             assertEquals(loaded.getComputationGraphConfiguration(), cg.getComputationGraphConfiguration(), "Configs not equal" );
             assertEquals( loaded.params(), cg.params(), "Params not equal");
-            assertEquals(loaded.paramTable(), cg.paramTable(), "Param table not equal");
+            assertEquals(loaded.getParamTable(), cg.getParamTable(), "Param table not equal");
         } else if(config instanceof SameDiff){
             sd = (SameDiff)config;
             SameDiff loaded = SameDiff.load(savedModel, true);
@@ -426,8 +426,8 @@ public class IntegrationTestRunner {
         boolean isTbptt;
         int tbpttLength;
         if(modelType == ModelType.MLN){
-            isTbptt = mln.getConfiguration().getBackpropType() == BackpropType.TruncatedBPTT;
-            tbpttLength = mln.getConfiguration().getTbpttFwdLength();
+            isTbptt = mln.getNetConfiguration().getBackpropType() == BackpropType.TruncatedBPTT;
+            tbpttLength = mln.getNetConfiguration().getTbpttFwdLength();
         } else if(modelType == ModelType.CG) {
             isTbptt = cg.getComputationGraphConfiguration().getBackpropType() == BackpropType.TruncatedBPTT;
             tbpttLength = cg.getComputationGraphConfiguration().getTbpttFwdLength();
@@ -606,7 +606,7 @@ public class IntegrationTestRunner {
         if (modelType == ModelType.MLN) {
             ModelSerializer.writeModel(m, f, true);
             MultiLayerNetwork restored = MultiLayerNetwork.load(f, true);
-            assertEquals(mln.getConfiguration(), restored.getConfiguration());
+            assertEquals(mln.getNetConfiguration(), restored.getNetConfiguration());
             assertEquals(mln.params(), restored.params());
         } else if(modelType == ModelType.CG){
             ModelSerializer.writeModel(m, f, true);
@@ -742,7 +742,7 @@ public class IntegrationTestRunner {
         //Collect preprocessor coverage information:
         Collection preProcessors;
         if (isMLN) {
-            preProcessors = mln.getConfiguration().getInputPreProcessors().values();
+            preProcessors = mln.getNetConfiguration().getInputPreProcessors().values();
         } else {
             preProcessors = new ArrayList<>();
             for (org.deeplearning4j.nn.conf.graph.GraphVertex gv : cg.getComputationGraphConfiguration().getVertices().values()) {
@@ -834,7 +834,7 @@ public class IntegrationTestRunner {
             } else {
                 paramPrefix = l.getLayerConfiguration().getLayerName() + "_";
             }
-            Map paramTable = l.paramTable();
+            Map paramTable = l.getParamTable();
             for(Map.Entry e : paramTable.entrySet()){
                 out.put(paramPrefix + e.getKey(), e.getValue().dup());
             }
@@ -1088,7 +1088,7 @@ public class IntegrationTestRunner {
                 if(pSoFar + n < i){
                     pSoFar += n;
                 } else {
-                    for(Map.Entry e : l.paramTable().entrySet()){
+                    for(Map.Entry e : l.getParamTable().entrySet()){
                         pSoFar += e.getValue().length();
                         if(pSoFar >= i){
                             pName = e.getKey();
diff --git a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/TestUtils.java b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/TestUtils.java
index bbe38a662..5bdae5d39 100644
--- a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/TestUtils.java
+++ b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/TestUtils.java
@@ -48,7 +48,7 @@ public class TestUtils {
             ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
             restored = ModelSerializer.restoreMultiLayerNetwork(bais, true);
 
-            assertEquals(net.getConfiguration(), restored.getConfiguration());
+            assertEquals(net.getNetConfiguration(), restored.getNetConfiguration());
             assertEquals(net.params(), restored.params());
         } catch (IOException e){
             //Should never happen
@@ -56,7 +56,7 @@
         }
 
         //Also check the NeuralNetConfiguration is serializable (required by Spark etc)
-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         serializeDeserializeJava(conf);
 
         return restored;
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/LayerHelperValidationUtil.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/LayerHelperValidationUtil.java
index 8da3ff4e5..db11f8cc7 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/LayerHelperValidationUtil.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/LayerHelperValidationUtil.java
@@ -109,12 +109,12 @@ public class LayerHelperValidationUtil {
         }
 
-        MultiLayerNetwork net1NoHelper = new MultiLayerNetwork(netOrig.getConfiguration().clone());
+        MultiLayerNetwork net1NoHelper = new MultiLayerNetwork(netOrig.getNetConfiguration().clone());
         net1NoHelper.init();
         log.info("Removing all layer helpers from network copy 1");
         removeHelpers(net1NoHelper.getLayers(), null);
 
-        MultiLayerNetwork net2With = new MultiLayerNetwork(netOrig.getConfiguration().clone());
+        MultiLayerNetwork net2With = new MultiLayerNetwork(netOrig.getNetConfiguration().clone());
         net2With.init();
         net2With.params().assign(netOrig.params());
         log.info("Removing all except for specified helpers from network copy 2: " + t.getAllowHelpersForClasses());
@@ -253,7 +253,7 @@
         Preconditions.checkNotNull(t.getData(), "DataSetIterator is not set (null)");
 
         log.info("Testing run-to-run consistency of training with layer helper");
-        net2With = new MultiLayerNetwork(netOrig.getConfiguration().clone());
+        net2With = new MultiLayerNetwork(netOrig.getNetConfiguration().clone());
         net2With.init();
         net2With.params().assign(netOrig.params());
         log.info("Removing all except for specified layer helpers from network copy 2: " + t.getAllowHelpersForClasses());
@@ -265,7 +265,7 @@
 
         for( int i=0; i<2; i++ ) {
 
-            net2With = new MultiLayerNetwork(netOrig.getConfiguration().clone());
+            net2With = new MultiLayerNetwork(netOrig.getNetConfiguration().clone());
             net2With.init();
             net2With.params().assign(netOrig.params());
             log.info("Removing all except for specified layer helpers from network copy 2: " + t.getAllowHelpersForClasses());
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/TestUtils.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/TestUtils.java
index 6e4456ef2..374724ae5 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/TestUtils.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/TestUtils.java
@@ -66,7 +66,7 @@ public class TestUtils {
             ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
             restored = ModelSerializer.restoreMultiLayerNetwork(bais, true);
 
-            assertEquals(net.getConfiguration(), restored.getConfiguration());
+            assertEquals(net.getNetConfiguration(), restored.getNetConfiguration());
             assertEquals(net.params(), restored.params());
         } catch (IOException e){
             //Should never happen
@@ -74,7 +74,7 @@
         }
 
         //Also check the NeuralNetConfiguration is serializable (required by Spark etc)
-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         serializeDeserializeJava(conf);
 
         return restored;
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/eval/EvalTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/eval/EvalTest.java
index 8f69cf1d9..8b5f5d46b 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/eval/EvalTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/eval/EvalTest.java
@@ -622,7 +622,7 @@ public class EvalTest extends BaseDL4JTest {
 
         //Disable validation, and check same thing:
-        net.getConfiguration().setValidateOutputLayerConfig(false);
+        net.getNetConfiguration().setValidateOutputLayerConfig(false);
         net.evaluate(iter);
         net.evaluateROCMultiClass(iter, 0);
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTests.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTests.java
index 90f927d66..6cefb32aa 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTests.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTests.java
@@ -511,7 +511,7 @@ public class GradientCheckTests extends BaseDL4JTest {
         ComputationGraph netGraph = new ComputationGraph(conf);
         netGraph.init();
 
-        log.info("params before learning: " + netGraph.getLayer(1).paramTable());
+        log.info("params before learning: " + netGraph.getLayer(1).getParamTable());
 
         //Run a number of iterations of learning manually make some pseudo data
         //the ides is simple: since we do a element wise multiplication layer (just a scaling), we want the cos sim
@@ -538,7 +538,7 @@
         assertTrue( scoreAfter < 0.8 * scoreBefore, msg);
 
         // expectation in case linear regression(with only element wise multiplication layer): large weight for the fourth weight
-        log.info("params after learning: " + netGraph.getLayer(1).paramTable());
+        log.info("params after learning: " + netGraph.getLayer(1).getParamTable());
 
         boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(netGraph).inputs(new INDArray[]{features})
                 .labels(new INDArray[]{labels}));
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/NeuralNetConfigurationTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/NeuralNetConfigurationTest.java
index 64a9fba11..6a7ec6408 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/NeuralNetConfigurationTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/NeuralNetConfigurationTest.java
@@ -100,14 +100,14 @@ public class NeuralNetConfigurationTest extends BaseDL4JTest {
     @Test
     public void testClone() {
         NeuralNetConfiguration conf = getConfig(1, 1, new WeightInitUniform(), true);
-        BaseLayer bl = (BaseLayer) conf.getFirstLayer();
+        BaseLayer bl = (BaseLayer) conf.getFlattenedLayerConfigurations().get(0);
         conf.setStepFunction(new DefaultStepFunction());
 
         NeuralNetConfiguration conf2 = conf.clone();
 
         assertEquals(conf, conf2);
         assertNotSame(conf, conf2);
-        assertNotSame(conf.getFirstLayer(), conf2.getFirstLayer());
+        assertNotSame(conf.getFlattenedLayerConfigurations().get(0), conf2.getFlattenedLayerConfigurations().get(0));
         assertNotSame(conf.getStepFunction(), conf2.getStepFunction());
     }
@@ -119,9 +119,9 @@
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
                         .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).layer(layer).build();
 
-        long numParams = conf.getFirstLayer().initializer().numParams(conf);
+        long numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        Layer model = conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
+        Layer model = conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());
 
         INDArray modelWeights = model.getParam(DefaultParamInitializer.WEIGHT_KEY);
@@ -130,9 +130,9 @@
         NeuralNetConfiguration conf2 = NeuralNetConfiguration.builder().seed(123)
                         .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).layer(layer2).build();
 
-        long numParams2 = conf2.getFirstLayer().initializer().numParams(conf);
+        long numParams2 = conf2.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params2 = Nd4j.create(1, numParams);
-        Layer model2 = conf2.getFirstLayer().instantiate(conf2, null, 0, params2, true, params.dataType());
+        Layer model2 = conf2.getFlattenedLayerConfigurations().get(0).instantiate(conf2, null, 0, params2, true, params.dataType());
         INDArray modelWeights2 = model2.getParam(DefaultParamInitializer.WEIGHT_KEY);
 
         assertEquals(modelWeights, modelWeights2);
@@ -208,9 +208,9 @@
     private static Layer getLayer(int nIn, int nOut, IWeightInit weightInit, boolean preTrain) {
         NeuralNetConfiguration conf = getConfig(nIn, nOut, weightInit, preTrain);
-        long numParams = conf.getFirstLayer().initializer().numParams(conf);
+        long numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        return conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
+        return conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());
     }
@@ -235,7 +235,7 @@
         MultiLayerNetwork net = new MultiLayerNetwork(conf);
         net.init();
 
-        ConvexOptimizer opt = new StochasticGradientDescent(net.getConfiguration(),
+        ConvexOptimizer opt = new StochasticGradientDescent(net.getNetConfiguration(),
                         new NegativeDefaultStepFunction(), null, net);
         assertEquals(lr, ((Sgd)net.getLayer(0).getLayerConfiguration().getUpdaterByParam("W")).getLearningRate(), 1e-4);
         assertEquals(biasLr, ((Sgd)net.getLayer(0).getLayerConfiguration().getUpdaterByParam("b")).getLearningRate(), 1e-4);
@@ -295,7 +295,7 @@
         MultiLayerNetwork net = new MultiLayerNetwork(conf);
         net.init();
 
-        ConvexOptimizer opt = new StochasticGradientDescent(net.getConfiguration(),
+        ConvexOptimizer opt = new StochasticGradientDescent(net.getNetConfiguration(),
                         new NegativeDefaultStepFunction(), null, net);
         assertEquals(l1, TestUtils.getL1(net.getLayer(0).getLayerConfiguration().getRegularizationByParam("W")), 1e-4);
         List r = net.getLayer(0).getLayerConfiguration().getRegularizationByParam("b");
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/constraints/TestConstraints.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/constraints/TestConstraints.java
index afbb64726..d1aae72e9 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/constraints/TestConstraints.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/constraints/TestConstraints.java
@@ -456,7 +456,7 @@ public class TestConstraints extends BaseDL4JTest {
             INDArray label = Nd4j.rand(1, 1);
 
             g.fit(new INDArray[]{in1, in2}, new INDArray[]{label});
 
-            for(Map.Entry e : g.paramTable().entrySet()){
+            for(Map.Entry e : g.getParamTable().entrySet()){
                 if(!e.getKey().contains("W")){
                     continue;
                 }
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/dropout/TestDropout.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/dropout/TestDropout.java
index 26c266dc7..f574ae089 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/dropout/TestDropout.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/dropout/TestDropout.java
@@ -82,9 +82,9 @@ public class TestDropout extends BaseDL4JTest {
                         .setOutputs("2")
                         .build();
 
-        assertEquals(new Dropout(0.6), ((LayerVertex)conf2.getVertices().get("0")).getNetConfiguration().getFirstLayer().getIDropout());
-        assertEquals(new Dropout(0.7), ((LayerVertex)conf2.getVertices().get("1")).getNetConfiguration().getFirstLayer().getIDropout());
-        assertEquals(new AlphaDropout(0.5), ((LayerVertex)conf2.getVertices().get("2")).getNetConfiguration().getFirstLayer().getIDropout());
+        assertEquals(new Dropout(0.6), ((LayerVertex)conf2.getVertices().get("0")).getLayerConfiguration().getIDropout());
+        assertEquals(new Dropout(0.7), ((LayerVertex)conf2.getVertices().get("1")).getLayerConfiguration().getIDropout());
+        assertEquals(new AlphaDropout(0.5), ((LayerVertex)conf2.getVertices().get("2")).getLayerConfiguration().getIDropout());
     }
 
     @Test
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/graph/ElementWiseVertexTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/graph/ElementWiseVertexTest.java
index 02babc8bc..c3ec4a87c 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/graph/ElementWiseVertexTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/graph/ElementWiseVertexTest.java
@@ -232,7 +232,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest {
         cg.computeGradientAndScore();
 
         // Let's figure out what our params are now.
-        Map params = cg.paramTable();
+        Map params = cg.getParamTable();
         INDArray dense1_W = nullsafe(params.get("dense1_W"));
         INDArray dense1_b = nullsafe(params.get("dense1_b"));
         INDArray dense2_W = nullsafe(params.get("dense2_W"));
@@ -408,7 +408,7 @@
         cg.computeGradientAndScore();
 
         // Let's figure out what our params are now.
-        Map params = cg.paramTable();
+        Map params = cg.getParamTable();
         INDArray dense1_W = nullsafe(params.get("dense1_W"));
         INDArray dense1_b = nullsafe(params.get("dense1_b"));
         INDArray dense2_W = nullsafe(params.get("dense2_W"));
@@ -578,7 +578,7 @@
         cg.computeGradientAndScore();
 
         // Let's figure out what our params are now.
-        Map params = cg.paramTable();
+        Map params = cg.getParamTable();
         INDArray dense1_W = nullsafe(params.get("dense1_W"));
         INDArray dense1_b = nullsafe(params.get("dense1_b"));
         INDArray dense2_W = nullsafe(params.get("dense2_W"));
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/graph/ShiftVertexTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/graph/ShiftVertexTest.java
index cf0e743e6..9cf99a89c 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/graph/ShiftVertexTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/graph/ShiftVertexTest.java
@@ -159,7 +159,7 @@ public class ShiftVertexTest extends BaseDL4JTest {
         cg.setLabel(0, target);
         cg.computeGradientAndScore();
         double score_dl4j = cg.score();
-        Map weights = cg.paramTable();
+        Map weights = cg.getParamTable();
         Gradient g = cg.gradient();
         Map gradients = g.gradientForVariable();
         Map manual_gradients = new TreeMap();
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/layers/LayerBuilderTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/layers/LayerBuilderTest.java
index e4e7ce73c..3ae5d8bd0 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/layers/LayerBuilderTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/layers/LayerBuilderTest.java
@@ -212,21 +212,21 @@ public class LayerBuilderTest extends BaseDL4JTest {
         try (ByteArrayInputStream bis = new ByteArrayInputStream(data); ObjectInput in = new ObjectInputStream(bis)) {
             confActual = (NeuralNetConfiguration) in.readObject();
         }
-        assertEquals(confExpected.getFirstLayer(), confActual.getFirstLayer(), "unequal Java serialization");
+        assertEquals(confExpected.getFlattenedLayerConfigurations().get(0), confActual.getFlattenedLayerConfigurations().get(0), "unequal Java serialization");
 
         // check JSON
         String json = confExpected.toJson();
         confActual = NeuralNetConfiguration.fromJson(json);
-        assertEquals(confExpected.getFirstLayer(), confActual.getFirstLayer(), "unequal JSON serialization");
+        assertEquals(confExpected.getFlattenedLayerConfigurations().get(0), confActual.getFlattenedLayerConfigurations().get(0), "unequal JSON serialization");
 
         // check YAML
         String yaml = confExpected.toYaml();
         confActual = NeuralNetConfiguration.fromYaml(yaml);
-        assertEquals(confExpected.getFirstLayer(), confActual.getFirstLayer(), "unequal YAML serialization");
+        assertEquals(confExpected.getFlattenedLayerConfigurations().get(0), confActual.getFlattenedLayerConfigurations().get(0), "unequal YAML serialization");
 
         // check the layer's use of callSuper on equals method
-        confActual.getFirstLayer().setIDropout(new Dropout(new java.util.Random().nextDouble()));
-        assertNotEquals( confExpected.getFirstLayer(), confActual.getFirstLayer(), "broken equals method (missing callSuper?)");
+        confActual.getFlattenedLayerConfigurations().get(0).setIDropout(new Dropout(new java.util.Random().nextDouble()));
+        assertNotEquals( confExpected, confActual, "broken equals method (missing callSuper?)");
     }
 }
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/preprocessor/TestPreProcessors.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/preprocessor/TestPreProcessors.java
index 1f279a762..798762556 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/preprocessor/TestPreProcessors.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/preprocessor/TestPreProcessors.java
@@ -62,9 +62,9 @@ public class TestPreProcessors extends BaseDL4JTest {
                             .nOut(layerSize).build())
                             .build();
 
-            long numParams = nnc.getFirstLayer().initializer().numParams(nnc);
+            long numParams = nnc.getFlattenedLayerConfigurations().get(0).initializer().numParams(nnc);
             INDArray params = Nd4j.create(1, numParams);
-            DenseLayer layer = (DenseLayer) nnc.getFirstLayer().instantiate(nnc, null, 0, params, true, params.dataType());
+            DenseLayer layer = (DenseLayer) nnc.getFlattenedLayerConfigurations().get(0).instantiate(nnc, null, 0, params, true, params.dataType());
             layer.setInputMiniBatchSize(miniBatchSize);
 
             INDArray activations3dc = Nd4j.create(new int[] {miniBatchSize, layerSize, timeSeriesLength}, 'c');
@@ -147,9 +147,9 @@
                             .nOut(layerSize).build())
                             .build();
 
-            val numParams = nnc.getFirstLayer().initializer().numParams(nnc);
+            val numParams = nnc.getFlattenedLayerConfigurations().get(0).initializer().numParams(nnc);
             INDArray params = Nd4j.create(1, numParams);
-            DenseLayer layer = (DenseLayer) nnc.getFirstLayer().instantiate(nnc, null, 0, params, true, params.dataType());
+            DenseLayer layer = (DenseLayer) nnc.getFlattenedLayerConfigurations().get(0).instantiate(nnc, null, 0, params, true, params.dataType());
             layer.setInputMiniBatchSize(miniBatchSize);
 
             INDArray rand = Nd4j.rand(miniBatchSize * timeSeriesLength, layerSize);
@@ -232,10 +232,10 @@
                             .nOut(nChannels).build())
                             .build();
 
-            val numParams = nnc.getFirstLayer().initializer().numParams(nnc);
+            val numParams = nnc.getFlattenedLayerConfigurations().get(0).initializer().numParams(nnc);
             INDArray params = Nd4j.create(1, numParams);
             ConvolutionLayer layer =
-                            (ConvolutionLayer) nnc.getFirstLayer().instantiate(nnc, null, 0, params, true, params.dataType());
+                            (ConvolutionLayer) nnc.getFlattenedLayerConfigurations().get(0).instantiate(nnc, null, 0, params, true, params.dataType());
             layer.setInputMiniBatchSize(miniBatchSize);
 
             INDArray activationsCnn = Nd4j.rand(miniBatchSize * timeSeriesLength, nChannels,
@@ -314,10 +314,10 @@
                             .nOut(nChannels).build())
                             .build();
 
-            val numParams = nnc.getFirstLayer().initializer().numParams(nnc);
+            val numParams = nnc.getFlattenedLayerConfigurations().get(0).initializer().numParams(nnc);
             INDArray params = Nd4j.create(1, numParams);
             ConvolutionLayer layer =
-                            (ConvolutionLayer) nnc.getFirstLayer().instantiate(nnc, null, 0, params, true, params.dataType());
+                            (ConvolutionLayer) nnc.getFlattenedLayerConfigurations().get(0).instantiate(nnc, null, 0, params, true, params.dataType());
             layer.setInputMiniBatchSize(miniBatchSize);
 
             val shape_rnn = new long[] {miniBatchSize, nChannels * inputHeight * inputWidth,
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/dtypes/DTypeTests.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/dtypes/DTypeTests.java
index 9002ba2af..e37b7b7cb 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/dtypes/DTypeTests.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/dtypes/DTypeTests.java
@@ -256,9 +256,9 @@ public class DTypeTests extends BaseDL4JTest {
     }
 
     public static void logUsedClasses(MultiLayerNetwork net) {
-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         for (NeuralNetConfiguration nnc : conf.getNetConfigurations()) {
-            LayerConfiguration l = nnc.getFirstLayer();
+            LayerConfiguration l = nnc.getFlattenedLayerConfigurations().get(0);
             seenLayers.add(l.getClass());
             if (l instanceof BaseWrapperLayer) {
                 BaseWrapperLayer bwl = (BaseWrapperLayer) l;
@@ -281,7 +281,7 @@
         for (GraphVertex gv : conf.getVertices().values()) {
             seenVertices.add(gv.getClass());
             if (gv instanceof LayerVertex) {
-                seenLayers.add(((LayerVertex) gv).getNetConfiguration().getFirstLayer().getClass());
+                seenLayers.add(((LayerVertex) gv).getLayerConfiguration().getClass());
                 InputPreProcessor ipp = ((LayerVertex) gv).getPreProcessor();
                 if (ipp != null) {
                     seenPreprocs.add(ipp.getClass());
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestCompGraphUnsupervised.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestCompGraphUnsupervised.java
index f4da77575..b24dc76ed 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestCompGraphUnsupervised.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestCompGraphUnsupervised.java
@@ -96,11 +96,11 @@ public class TestCompGraphUnsupervised extends BaseDL4JTest {
         Map paramsBefore = new HashMap<>();
 
         //Pretrain first layer
-        for(Map.Entry e : cg.paramTable().entrySet()){
+        for(Map.Entry e : cg.getParamTable().entrySet()){
             paramsBefore.put(e.getKey(), e.getValue().dup());
         }
         cg.pretrainLayer("vae1", ds);
-        for(Map.Entry e : cg.paramTable().entrySet()){
+        for(Map.Entry e : cg.getParamTable().entrySet()){
             if(e.getKey().startsWith("vae1")){
                 assertNotEquals(paramsBefore.get(e.getKey()), e.getValue());
             } else {
@@ -113,11 +113,11 @@
 
         //Pretrain second layer
-        for(Map.Entry e : cg.paramTable().entrySet()){
+        for(Map.Entry e : cg.getParamTable().entrySet()){
             paramsBefore.put(e.getKey(), e.getValue().dup());
         }
         cg.pretrainLayer("vae2", ds);
-        for(Map.Entry e : cg.paramTable().entrySet()){
+        for(Map.Entry e : cg.getParamTable().entrySet()){
             if(e.getKey().startsWith("vae2")){
                 assertNotEquals(paramsBefore.get(e.getKey()), e.getValue());
             } else {
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestComputationGraphNetwork.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestComputationGraphNetwork.java
index adf347260..7feb29ddb 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestComputationGraphNetwork.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestComputationGraphNetwork.java
@@ -406,9 +406,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest {
                         .addLayer("rnn", new GravesLSTM.Builder().nOut(5).build(), "in")
                         .addLayer("out", new RnnOutputLayer.Builder().nOut(5).activation(Activation.SOFTMAX).build(), "rnn").setOutputs("out").build();
 
-        assertEquals(5, ((FeedForwardLayer) ((LayerVertex) conf1.getVertices().get("rnn")).getNetConfiguration().getFirstLayer())
+        assertEquals(5, ((FeedForwardLayer) ((LayerVertex) conf1.getVertices().get("rnn")).getNetConfiguration().getFlattenedLayerConfigurations().get(0))
                         .getNIn());
-        assertEquals(5, ((FeedForwardLayer) ((LayerVertex) conf1.getVertices().get("out")).getNetConfiguration().getFirstLayer())
+        assertEquals(5, ((FeedForwardLayer) ((LayerVertex) conf1.getVertices().get("out")).getNetConfiguration().getFlattenedLayerConfigurations().get(0))
                        .getNIn());
 
         LayerVertex lv1 = (LayerVertex) conf1.getVertices().get("rnn");
@@ -423,9 +423,9 @@
                         .addLayer("out", new RnnOutputLayer.Builder().nOut(5).activation(Activation.SOFTMAX).build(), "ff")
                         .setOutputs("out").build();
 
-        assertEquals(5, ((FeedForwardLayer) ((LayerVertex) conf2.getVertices().get("ff")).getNetConfiguration().getFirstLayer())
+        assertEquals(5, ((FeedForwardLayer) ((LayerVertex) conf2.getVertices().get("ff")).getNetConfiguration().getFlattenedLayerConfigurations().get(0))
                        .getNIn());
-        assertEquals(5, ((FeedForwardLayer) ((LayerVertex) conf2.getVertices().get("out")).getNetConfiguration().getFirstLayer())
+        assertEquals(5, ((FeedForwardLayer) ((LayerVertex) conf2.getVertices().get("out")).getNetConfiguration().getFlattenedLayerConfigurations().get(0))
                        .getNIn());
 
         lv1 = (LayerVertex) conf2.getVertices().get("ff");
@@ -460,7 +460,7 @@
         LayerVertex lv4 = (LayerVertex) conf3.getVertices().get("out");
         assertNull(lv4.getPreProcessor());
         //Check nIns:
-        assertEquals(7 * 7 * 3, ((FeedForwardLayer) lv3.getNetConfiguration().getFirstLayer()).getNIn());
+        assertEquals(7 * 7 * 3, ((FeedForwardLayer) lv3.getNetConfiguration().getFlattenedLayerConfigurations().get(0)).getNIn());
 
         //CNN->Dense, RNN->Dense, Dense->RNN
         ComputationGraphConfiguration conf4 =
@@ -495,9 +495,9 @@
         LayerVertex lv5 = (LayerVertex) conf4.getVertices().get("out");
         assertTrue(lv5.getPreProcessor() instanceof FeedForwardToRnnPreProcessor);
         //Check nIns:
-        assertEquals(7 * 7 * 3, ((FeedForwardLayer) lv3.getNetConfiguration().getFirstLayer()).getNIn());
-        assertEquals(5, ((FeedForwardLayer) lv4.getNetConfiguration().getFirstLayer()).getNIn());
-        assertEquals(20, ((FeedForwardLayer) lv5.getNetConfiguration().getFirstLayer()).getNIn()); //10+10 out of the merge vertex -> 20 in to output layer vertex
+        assertEquals(7 * 7 * 3, ((FeedForwardLayer) lv3.getNetConfiguration().getFlattenedLayerConfigurations().get(0)).getNIn());
+        assertEquals(5, ((FeedForwardLayer) lv4.getNetConfiguration().getFlattenedLayerConfigurations().get(0)).getNIn());
+        assertEquals(20, ((FeedForwardLayer) lv5.getNetConfiguration().getFlattenedLayerConfigurations().get(0)).getNIn()); //10+10 out of the merge vertex -> 20 in to output layer vertex
 
         //Input to 2 CNN layers:
@@ -903,7 +903,7 @@
                         .build();
 
         LayerVertex lv = (LayerVertex) conf.getVertices().get("layer");
-        FeedForwardLayer l = ((FeedForwardLayer) (lv).getNetConfiguration().getFirstLayer());
+        FeedForwardLayer l = ((FeedForwardLayer) (lv).getNetConfiguration().getFlattenedLayerConfigurations().get(0));
         assertEquals(3, l.getNIn());
         assertNull(lv.getPreProcessor());
@@ -920,7 +920,7 @@
                         .build();
 
         lv = (LayerVertex) conf.getVertices().get("layer");
-        l = ((FeedForwardLayer) (lv).getNetConfiguration().getFirstLayer());
+        l = ((FeedForwardLayer) (lv).getNetConfiguration().getFlattenedLayerConfigurations().get(0));
         assertEquals(3, l.getNIn());
         assertNotNull(lv.getPreProcessor());
         InputPreProcessor preProcessor = lv.getPreProcessor();
@@ -945,7 +945,7 @@
 
         //Check subsampling layer:
         lv = (LayerVertex) conf.getVertices().get("l0");
-        SubsamplingLayer sl = ((SubsamplingLayer) (lv).getNetConfiguration().getFirstLayer());
+        SubsamplingLayer sl = ((SubsamplingLayer) (lv).getNetConfiguration().getFlattenedLayerConfigurations().get(0));
         assertNotNull(lv.getPreProcessor());
         preProcessor = lv.getPreProcessor();
         assertTrue(preProcessor instanceof FeedForwardToCnnPreProcessor);
@@ -955,7 +955,7 @@
         assertEquals(3, preproc.getNumChannels());
         //Check dense layer
         lv = (LayerVertex) conf.getVertices().get("layer");
-        l = ((FeedForwardLayer) (lv).getNetConfiguration().getFirstLayer());
+        l = ((FeedForwardLayer) (lv).getNetConfiguration().getFlattenedLayerConfigurations().get(0));
         assertEquals(3, l.getNIn());
         assertNull(lv.getPreProcessor());
@@ -1673,7 +1673,7 @@
         ComputationGraph g = new ComputationGraph(conf2);
         g.init();
 
-        g.setParamTable(cg.paramTable());
+        g.setParamTable(cg.getParamTable());
 
         int[] origOrder = g.topologicalSortOrder();
         INDArray[] out4 = g.output(in);
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestSetGetParameters.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestSetGetParameters.java
index ce8019133..2f752b316 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestSetGetParameters.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestSetGetParameters.java
@@ -72,9 +72,9 @@ public class TestSetGetParameters extends BaseDL4JTest {
         assertSame(params, net3.params()); //Same object due to clone
 
-        Map paramsMap = net.paramTable();
-        Map paramsMap2 = net2.paramTable();
-        Map paramsMap3 = net3.paramTable();
+        Map paramsMap = net.getParamTable();
+        Map paramsMap2 = net2.getParamTable();
+        Map paramsMap3 = net3.getParamTable();
         for (String s : paramsMap.keySet()) {
             assertEquals(paramsMap.get(s), paramsMap2.get(s));
             assertEquals(paramsMap.get(s), paramsMap3.get(s));
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/BaseLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/BaseLayerTest.java
index 3162ed209..189467ab4 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/BaseLayerTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/BaseLayerTest.java
@@ -57,10 +57,10 @@ public class BaseLayerTest extends BaseDL4JTest {
     @Test
     public void testSetExistingParamsConvolutionSingleLayer() {
         Layer layer = configureSingleLayer();
-        assertNotEquals(paramTable, layer.paramTable());
+        assertNotEquals(paramTable, layer.getParamTable());
 
         layer.setParamTable(paramTable);
-        assertEquals(paramTable, layer.paramTable());
+        assertEquals(paramTable, layer.getParamTable());
     }
 
@@ -69,9 +69,9 @@
         MultiLayerNetwork net = configureMultiLayer();
 
         for (Layer layer : net.getLayers()) {
-            assertNotEquals(paramTable, layer.paramTable());
+            assertNotEquals(paramTable, layer.getParamTable());
 
             layer.setParamTable(paramTable);
-            assertEquals(paramTable, layer.paramTable());
+            assertEquals(paramTable, layer.getParamTable());
         }
     }
@@ -83,9 +83,9 @@
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                         .layer(new ConvolutionLayer.Builder().nIn(nIn).nOut(nOut).build()).build();
 
-        val numParams = conf.getFirstLayer().initializer().numParams(conf);
+        val numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        return conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
+        return conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());
     }
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/FrozenLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/FrozenLayerTest.java
index 1e83adaf2..2b8977ed0 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/FrozenLayerTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/FrozenLayerTest.java
@@ -133,7 +133,7 @@ public class FrozenLayerTest extends BaseDL4JTest {
         MultiLayerNetwork clonedModel = modelNow.clone();
 
         //Check json
-        assertEquals(modelNow.getConfiguration().toJson(), clonedModel.getConfiguration().toJson());
+        assertEquals(modelNow.getNetConfiguration().toJson(), clonedModel.getNetConfiguration().toJson());
 
         //Check params
         assertEquals(modelNow.params(), clonedModel.params());
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/OutputLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/OutputLayerTest.java
index 0bdf441ac..0d4f0d710 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/OutputLayerTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/OutputLayerTest.java
@@ -64,9 +64,9 @@ public class OutputLayerTest extends BaseDL4JTest {
                         .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                         .build();
 
-        long numParams = conf.getFirstLayer().initializer().numParams(conf);
+        long numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        OutputLayer l = (OutputLayer) conf.getFirstLayer().instantiate(conf,
+        OutputLayer l = (OutputLayer) conf.getFlattenedLayerConfigurations().get(0).instantiate(conf,
                         Collections.singletonList(new ScoreIterationListener(1)), 0, params, true, params.dataType());
         params = l.params();
         l.setParamsTable(params);
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/RepeatVectorTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/RepeatVectorTest.java
index 483e34572..a62ccdcf0 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/RepeatVectorTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/RepeatVectorTest.java
@@ -43,7 +43,7 @@ public class RepeatVectorTest extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().seed(123)
                         .dataType(DataType.DOUBLE)
                         .layer(new RepeatVector.Builder(REPEAT).build()).build();
-        return conf.getFirstLayer().instantiate(conf, null, 0,
+        return conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0,
                         null, false, DataType.DOUBLE);
     }
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/SeedTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/SeedTest.java
index db7d4525c..6306c333b 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/SeedTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/SeedTest.java
@@ -52,9 +52,9 @@ public class SeedTest extends BaseDL4JTest {
         NeuralNetConfiguration conf =
                         NeuralNetConfiguration.builder().layer(layerType).seed(123).build();
 
-        long numParams = conf.getFirstLayer().initializer().numParams(conf);
+        long numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        Layer layer = conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
+        Layer layer = conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());
         layer.setBackpropGradientsViewArray(Nd4j.create(1, numParams));
 
         layer.fit(data.getFeatures(), LayerWorkspaceMgr.noWorkspaces());
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Convolution3DTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Convolution3DTest.java
index d4a685a3a..c8137f4a6 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Convolution3DTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Convolution3DTest.java
@@ -90,9 +90,9 @@ public class Convolution3DTest extends BaseDL4JTest {
                         .dataFormat(Convolution3D.DataFormat.NCDHW).convolutionMode(mode).hasBias(false)
                         .build())
                         .build();
-        long numParams = conf.getFirstLayer().initializer().numParams(conf);
+        long numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.ones(1, numParams);
-        return conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
+        return conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());
     }
 
     public INDArray getData() throws Exception {
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/ConvolutionLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/ConvolutionLayerTest.java
index 0c58b8703..f234d3b78 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/ConvolutionLayerTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/ConvolutionLayerTest.java
@@ -258,9 +258,9 @@ public class ConvolutionLayerTest extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().layer(cnn).build();
 
-        val numParams = conf.getFirstLayer().initializer().numParams(conf);
+        val numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        Layer layer = conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
+        Layer layer = conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());
 
         assertEquals(1, layer.getParam("b").size(0));
     }
@@ -319,9 +319,9 @@
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().layer(layer).build();
 
-        val numParams = conf.getFirstLayer().initializer().numParams(conf);
+        val numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        return conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
+        return conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());
     }
 
     public Layer getMNISTConfig() {
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SpaceToDepthTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SpaceToDepthTest.java
index ed8e8c99d..1c47e1b2d 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SpaceToDepthTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SpaceToDepthTest.java
@@ -62,7 +62,7 @@ public class SpaceToDepthTest extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
                         .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer).seed(123)
                         .layer(new SpaceToDepthLayer.Builder(blockSize, dataFormat).build()).build();
-        return conf.getFirstLayer().instantiate(conf, null, 0, null, true, Nd4j.defaultFloatingPointType());
+        return conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, null, true, Nd4j.defaultFloatingPointType());
     }
 
     @Test
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SubsamplingLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SubsamplingLayerTest.java
index 9fda734eb..4cc8341cc 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SubsamplingLayerTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SubsamplingLayerTest.java
@@ -172,7 +172,7 @@ public class SubsamplingLayerTest extends BaseDL4JTest {
                         .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer).seed(123)
                         .layer(new SubsamplingLayer.Builder(pooling, new int[] {2, 2}).build()).build();
 
-        return conf.getFirstLayer().instantiate(conf, null, 0, null, true, Nd4j.defaultFloatingPointType());
+        return conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, null, true, Nd4j.defaultFloatingPointType());
     }
 
     public INDArray getData() throws Exception {
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/TestConvolutionModes.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/TestConvolutionModes.java
index 61f937cec..8cdc85768 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/TestConvolutionModes.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/TestConvolutionModes.java
@@ -287,28 +287,28 @@ public class TestConvolutionModes extends BaseDL4JTest {
                                         .activation(Activation.SOFTMAX).nOut(3).build(), "7")
                         .setOutputs("8").build();
 
-            assertEquals(cm, ((ConvolutionLayer) ((LayerVertex) conf.getVertices().get("0")).getNetConfiguration().getFirstLayer())
+            assertEquals(cm, ((ConvolutionLayer) ((LayerVertex) conf.getVertices().get("0")).getNetConfiguration().getFlattenedLayerConfigurations().get(0))
                             .getConvolutionMode());
             assertEquals(ConvolutionMode.Strict,
-                            ((ConvolutionLayer) ((LayerVertex) conf.getVertices().get("1")).getNetConfiguration().getFirstLayer())
+                            ((ConvolutionLayer) ((LayerVertex) conf.getVertices().get("1")).getNetConfiguration().getFlattenedLayerConfigurations().get(0))
                                             .getConvolutionMode());
             assertEquals(ConvolutionMode.Truncate,
-                            ((ConvolutionLayer) ((LayerVertex) conf.getVertices().get("2")).getNetConfiguration().getFirstLayer())
+                            ((ConvolutionLayer) ((LayerVertex) conf.getVertices().get("2")).getNetConfiguration().getFlattenedLayerConfigurations().get(0))
                                             .getConvolutionMode());
             assertEquals(ConvolutionMode.Same,
-                            ((ConvolutionLayer) ((LayerVertex) conf.getVertices().get("3")).getNetConfiguration().getFirstLayer())
+                            ((ConvolutionLayer) ((LayerVertex) conf.getVertices().get("3")).getNetConfiguration().getFlattenedLayerConfigurations().get(0))
conf.getVertices().get("3")).getNetConfiguration().getFlattenedLayerConfigurations().get(0)) .getConvolutionMode()); - assertEquals(cm, ((SubsamplingLayer) ((LayerVertex) conf.getVertices().get("4")).getNetConfiguration().getFirstLayer()) + assertEquals(cm, ((SubsamplingLayer) ((LayerVertex) conf.getVertices().get("4")).getNetConfiguration().getFlattenedLayerConfigurations().get(0)) .getConvolutionMode()); assertEquals(ConvolutionMode.Strict, - ((SubsamplingLayer) ((LayerVertex) conf.getVertices().get("5")).getNetConfiguration().getFirstLayer()) + ((SubsamplingLayer) ((LayerVertex) conf.getVertices().get("5")).getNetConfiguration().getFlattenedLayerConfigurations().get(0)) .getConvolutionMode()); assertEquals(ConvolutionMode.Truncate, - ((SubsamplingLayer) ((LayerVertex) conf.getVertices().get("6")).getNetConfiguration().getFirstLayer()) + ((SubsamplingLayer) ((LayerVertex) conf.getVertices().get("6")).getNetConfiguration().getFlattenedLayerConfigurations().get(0)) .getConvolutionMode()); assertEquals(ConvolutionMode.Same, - ((SubsamplingLayer) ((LayerVertex) conf.getVertices().get("7")).getNetConfiguration().getFirstLayer()) + ((SubsamplingLayer) ((LayerVertex) conf.getVertices().get("7")).getNetConfiguration().getFlattenedLayerConfigurations().get(0)) .getConvolutionMode()); } } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Upsampling1DTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Upsampling1DTest.java index 5d74b94fa..064464d67 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Upsampling1DTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Upsampling1DTest.java @@ -107,7 +107,7 @@ public class Upsampling1DTest extends BaseDL4JTest { NeuralNetConfiguration conf = NeuralNetConfiguration.builder() .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer).seed(123) .layer(new Upsampling1D.Builder(size).build()).build(); - return conf.getFirstLayer().instantiate(conf, null, 0, + return conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, null, true, Nd4j.defaultFloatingPointType()); } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Upsampling2DTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Upsampling2DTest.java index bfb872ba8..286259904 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Upsampling2DTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Upsampling2DTest.java @@ -111,7 +111,7 @@ public class Upsampling2DTest extends BaseDL4JTest { NeuralNetConfiguration conf = NeuralNetConfiguration.builder() .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer).seed(123) .layer(new Upsampling2D.Builder(size).build()).build(); - return conf.getFirstLayer().instantiate(conf, null, 0, null, true, Nd4j.defaultFloatingPointType()); + return conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, null, true, Nd4j.defaultFloatingPointType()); } public INDArray getData() throws Exception { diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/custom/testclasses/CustomLayer.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/custom/testclasses/CustomLayer.java index f3b201d63..ea59a8091 100644 --- 
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/custom/testclasses/CustomLayer.java
@@ -27,6 +27,7 @@ import org.deeplearning4j.nn.conf.InputPreProcessor;
 import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
 import org.deeplearning4j.nn.conf.inputs.InputType;
 import org.deeplearning4j.nn.conf.layers.FeedForwardLayer;
+import org.deeplearning4j.nn.conf.layers.LayerConfiguration;
 import org.deeplearning4j.nn.conf.memory.LayerMemoryReport;
 import org.deeplearning4j.nn.params.DefaultParamInitializer;
 import org.deeplearning4j.optimize.api.TrainingListener;
@@ -53,13 +54,14 @@ public org.deeplearning4j.nn.api.Layer instantiate(NeuralNetConfiguration conf,
                     Collection trainingListeners, int layerIndex, INDArray layerParamsView,
                     boolean initializeParams, DataType networkDataType) {
-        CustomLayerImpl ret = new CustomLayerImpl(conf, networkDataType);
+        LayerConfiguration lconf = conf.getFlattenedLayerConfigurations().get(layerIndex);
+        CustomLayerImpl ret = new CustomLayerImpl(lconf, networkDataType);
         ret.setListeners(trainingListeners);
         ret.setIndex(layerIndex);
         ret.setParamsViewArray(layerParamsView);
 
         Map paramTable = initializer().init(this, layerParamsView, initializeParams);
         ret.setParamTable(paramTable);
-        ret.setLayerConfiguration(conf);
+        ret.setLayerConfiguration(lconf);
         return ret;
     }
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/custom/testclasses/CustomLayerImpl.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/custom/testclasses/CustomLayerImpl.java
index e0f582a52..38a7d215b 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/custom/testclasses/CustomLayerImpl.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/custom/testclasses/CustomLayerImpl.java
@@ -21,11 +21,12 @@ package org.deeplearning4j.nn.layers.custom.testclasses;
 
 import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
+import org.deeplearning4j.nn.conf.layers.LayerConfiguration;
 import org.deeplearning4j.nn.layers.BaseLayer;
 import org.nd4j.linalg.api.buffer.DataType;
 
 public class CustomLayerImpl extends BaseLayer {
-    public CustomLayerImpl(NeuralNetConfiguration conf, DataType dataType) {
+    public CustomLayerImpl(LayerConfiguration conf, DataType dataType) {
         super(conf, dataType);
     }
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/custom/testclasses/CustomOutputLayer.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/custom/testclasses/CustomOutputLayer.java
index b64a341d8..80c983589 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/custom/testclasses/CustomOutputLayer.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/custom/testclasses/CustomOutputLayer.java
@@ -29,6 +29,7 @@ import org.deeplearning4j.nn.api.Layer;
 import org.deeplearning4j.nn.api.ParamInitializer;
 import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
 import org.deeplearning4j.nn.conf.layers.BaseOutputLayer;
+import org.deeplearning4j.nn.conf.layers.LayerConfiguration;
 import org.deeplearning4j.nn.params.DefaultParamInitializer;
 import org.deeplearning4j.optimize.api.TrainingListener;
 import org.nd4j.linalg.api.buffer.DataType;
@@ -51,13 +52,14 @@ public class CustomOutputLayer extends BaseOutputLayer {
     @Override
     public Layer instantiate(NeuralNetConfiguration conf, Collection trainingListeners,
                     int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
-        CustomOutputLayerImpl ret = new CustomOutputLayerImpl(conf, networkDataType);
+        LayerConfiguration lconf = conf.getFlattenedLayerConfigurations().get(layerIndex);
+        CustomOutputLayerImpl ret = new CustomOutputLayerImpl(lconf, networkDataType);
         ret.setListeners(trainingListeners);
         ret.setIndex(layerIndex);
         ret.setParamsViewArray(layerParamsView);
 
         Map paramTable = initializer().init(this, layerParamsView, initializeParams);
         ret.setParamTable(paramTable);
-        ret.setLayerConfiguration(conf);
+        ret.setLayerConfiguration(lconf);
         return ret;
     }
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/custom/testclasses/CustomOutputLayerImpl.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/custom/testclasses/CustomOutputLayerImpl.java
index 349adba9d..f48f35038 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/custom/testclasses/CustomOutputLayerImpl.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/custom/testclasses/CustomOutputLayerImpl.java
@@ -21,6 +21,7 @@ package org.deeplearning4j.nn.layers.custom.testclasses;
 
 import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
+import org.deeplearning4j.nn.conf.layers.LayerConfiguration;
 import org.deeplearning4j.nn.layers.BaseOutputLayer;
 import org.deeplearning4j.nn.workspace.ArrayType;
 import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr;
@@ -28,7 +29,7 @@ import org.nd4j.linalg.api.buffer.DataType;
 import org.nd4j.linalg.api.ndarray.INDArray;
 
 public class CustomOutputLayerImpl extends BaseOutputLayer {
-    public CustomOutputLayerImpl(NeuralNetConfiguration conf, DataType dataType) {
+    public CustomOutputLayerImpl(LayerConfiguration conf, DataType dataType) {
         super(conf, dataType);
     }
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/dense/DenseTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/dense/DenseTest.java
index 382476fc9..ba1129cef 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/dense/DenseTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/dense/DenseTest.java
@@ -53,9 +53,9 @@ public class DenseTest extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().layer(build).build();
 
-        long numParams = conf.getFirstLayer().initializer().numParams(conf);
+        long numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        Layer layer = conf.getFirstLayer().instantiate(conf, null, 0, params, true, Nd4j.defaultFloatingPointType());
+        Layer layer = conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, Nd4j.defaultFloatingPointType());
 
         assertEquals(1, layer.getParam("b").size(0));
     }
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/normalization/BatchNormalizationTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/normalization/BatchNormalizationTest.java
index e6f85611a..eb76c88f2 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/normalization/BatchNormalizationTest.java
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/dense/DenseTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/dense/DenseTest.java
index 382476fc9..ba1129cef 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/dense/DenseTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/dense/DenseTest.java
@@ -53,9 +53,9 @@ public class DenseTest extends BaseDL4JTest {
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().layer(build).build();

-        long numParams = conf.getFirstLayer().initializer().numParams(conf);
+        long numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        Layer layer = conf.getFirstLayer().instantiate(conf, null, 0, params, true, Nd4j.defaultFloatingPointType());
+        Layer layer = conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, Nd4j.defaultFloatingPointType());

         assertEquals(1, layer.getParam("b").size(0));
     }
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/normalization/BatchNormalizationTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/normalization/BatchNormalizationTest.java
index e6f85611a..eb76c88f2 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/normalization/BatchNormalizationTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/normalization/BatchNormalizationTest.java
@@ -130,12 +130,12 @@ public class BatchNormalizationTest extends BaseDL4JTest {
         BatchNormalization bN = b.build();
         NeuralNetConfiguration conf = NeuralNetConfiguration.builder().layer(bN).build();

-        long numParams = conf.getFirstLayer().initializer().numParams(conf);
+        long numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = null;
         if (numParams > 0) {
             params = Nd4j.create(1, numParams);
         }
-        Layer layer = conf.getFirstLayer().instantiate(conf, null, 0, params, true, params == null ? Nd4j.defaultFloatingPointType() : params.dataType());
+        Layer layer = conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params == null ? Nd4j.defaultFloatingPointType() : params.dataType());
         if (numParams > 0) {
             layer.setBackpropGradientsViewArray(Nd4j.create(1, numParams));
         }
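// Annotation (not part of the patch): in the test sources, every conf.getFirstLayer() call
// becomes conf.getFlattenedLayerConfigurations().get(0). A sketch of the single-layer test
// idiom under the new accessor, hoisting the lookup into a local instead of repeating it
// per call as the mechanical rewrite above does (assumes a layer config "build" as in
// DenseTest):
NeuralNetConfiguration conf = NeuralNetConfiguration.builder().layer(build).build();
LayerConfiguration first = conf.getFlattenedLayerConfigurations().get(0); // was: conf.getFirstLayer()
long numParams = first.initializer().numParams(conf);
INDArray params = Nd4j.create(1, numParams);
Layer layer = first.instantiate(conf, null, 0, params, true, params.dataType());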
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/ocnn/OCNNOutputLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/ocnn/OCNNOutputLayerTest.java
index c989d0bf5..c0f6fa24c 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/ocnn/OCNNOutputLayerTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/ocnn/OCNNOutputLayerTest.java
@@ -123,7 +123,7 @@ public class OCNNOutputLayerTest extends BaseDL4JTest {
         DataSet filtered = next.filterBy(new int[]{0, 1});
         for (int i = 0; i < 10; i++) {
             network.setEpochCount(i);
-            network.getConfiguration().setEpochCount(i);
+            network.getNetConfiguration().setEpochCount(i);
             network.fit(filtered);
         }
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesBidirectionalLSTMTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesBidirectionalLSTMTest.java
index d51fc5280..7d8dd8977 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesBidirectionalLSTMTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesBidirectionalLSTMTest.java
@@ -68,10 +68,10 @@ public class GravesBidirectionalLSTMTest extends BaseDL4JTest {
                         .nOut(nHiddenUnits).dataFormat(rnnDataFormat).activation(Activation.TANH).build())
                 .build();

-        val numParams = conf.getFirstLayer().initializer().numParams(conf);
+        val numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
         final GravesBidirectionalLSTM layer =
-                (GravesBidirectionalLSTM) conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
+                (GravesBidirectionalLSTM) conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());

         //Data: has shape [miniBatchSize,nIn,timeSeriesLength];
         //Output/activations has shape [miniBatchsize,nHiddenUnits,timeSeriesLength];
@@ -135,11 +135,11 @@
                         .dist(new UniformDistribution(0, 1)).activation(Activation.TANH).build())
                 .build();

-        long numParams = conf.getFirstLayer().initializer().numParams(conf);
+        long numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
         GravesBidirectionalLSTM lstm =
-                (GravesBidirectionalLSTM) conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
-        lstm.setBackpropGradientsViewArray(Nd4j.create(1, conf.getFirstLayer().initializer().numParams(conf)));
+                (GravesBidirectionalLSTM) conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());
+        lstm.setBackpropGradientsViewArray(Nd4j.create(1, conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf)));
         //Set input, do a forward pass:
         lstm.activate(inputData, false, LayerWorkspaceMgr.noWorkspaces());
         assertNotNull(lstm.input());
@@ -207,10 +207,10 @@
                         .dist(new UniformDistribution(0, 1)).activation(Activation.TANH).build())
                 .build();

-        long numParams = conf.getFirstLayer().initializer().numParams(conf);
+        long numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
         final GravesBidirectionalLSTM lstm =
-                (GravesBidirectionalLSTM) conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
+                (GravesBidirectionalLSTM) conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());
         final INDArray input = Nd4j.rand(miniBatchSize, nIn, timeSeriesLength);
         lstm.setInput(input, LayerWorkspaceMgr.noWorkspaces());
@@ -266,9 +266,9 @@
                 .build();

-        long numParams = confBidirectional.getFirstLayer().initializer().numParams(confBidirectional);
+        long numParams = confBidirectional.getFlattenedLayerConfigurations().get(0).initializer().numParams(confBidirectional);
         INDArray params = Nd4j.create(1, numParams);
-        final GravesBidirectionalLSTM bidirectionalLSTM = (GravesBidirectionalLSTM) confBidirectional.getFirstLayer()
+        final GravesBidirectionalLSTM bidirectionalLSTM = (GravesBidirectionalLSTM) confBidirectional.getFlattenedLayerConfigurations().get(0)
                         .instantiate(confBidirectional, null, 0, params, true, params.dataType());
@@ -311,19 +311,19 @@
                         .weightInit(WeightInit.ZERO).activation(Activation.TANH).build())
                 .build();

-        long numParams = confForwards.getFirstLayer().initializer().numParams(confForwards);
+        long numParams = confForwards.getFlattenedLayerConfigurations().get(0).initializer().numParams(confForwards);
         INDArray params = Nd4j.create(1, numParams);
-        long numParamsBD = confBidirectional.getFirstLayer().initializer().numParams(confBidirectional);
+        long numParamsBD = confBidirectional.getFlattenedLayerConfigurations().get(0).initializer().numParams(confBidirectional);
         INDArray paramsBD = Nd4j.create(1, numParamsBD);
-        final GravesBidirectionalLSTM bidirectionalLSTM = (GravesBidirectionalLSTM) confBidirectional.getFirstLayer()
+        final GravesBidirectionalLSTM bidirectionalLSTM = (GravesBidirectionalLSTM) confBidirectional.getFlattenedLayerConfigurations().get(0)
                         .instantiate(confBidirectional, null, 0, paramsBD, true, params.dataType());
         final GravesLSTM forwardsLSTM =
-                (GravesLSTM) confForwards.getFirstLayer().instantiate(confForwards, null, 0, params, true, params.dataType());
+                (GravesLSTM) confForwards.getFlattenedLayerConfigurations().get(0).instantiate(confForwards, null, 0, params, true, params.dataType());

         bidirectionalLSTM.setBackpropGradientsViewArray(
-                Nd4j.create(1, confBidirectional.getFirstLayer().initializer().numParams(confBidirectional)));
+                Nd4j.create(1, confBidirectional.getFlattenedLayerConfigurations().get(0).initializer().numParams(confBidirectional)));
         forwardsLSTM.setBackpropGradientsViewArray(
-                Nd4j.create(1, confForwards.getFirstLayer().initializer().numParams(confForwards)));
+                Nd4j.create(1, confForwards.getFlattenedLayerConfigurations().get(0).initializer().numParams(confForwards)));

         final INDArray sig = (rnnDataFormat == RNNFormat.NCW)?Nd4j.rand(miniBatchSize, nIn, timeSeriesLength):
@@ -546,7 +546,7 @@
         net.init();

         assertEquals(gateAfn, ((org.deeplearning4j.nn.conf.layers.GravesBidirectionalLSTM) net.getLayer(0).getNetConfiguration()
-                        .getFirstLayer()).getGateActivationFn().toString());
+                        .getFlattenedLayerConfigurations().get(0)).getGateActivationFn().toString());

         INDArray in = Nd4j.rand(3, 2, 5);
         INDArray labels = Nd4j.rand(3, 2, 5);
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesLSTMTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesLSTMTest.java
index 2868c08d8..791ff8fa6 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesLSTMTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesLSTMTest.java
@@ -63,9 +63,9 @@ public class GravesLSTMTest extends BaseDL4JTest {
                         .nOut(nHiddenUnits).activation(Activation.TANH).build())
                 .build();

-        val numParams = conf.getFirstLayer().initializer().numParams(conf);
+        val numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        GravesLSTM layer = (GravesLSTM) conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
+        GravesLSTM layer = (GravesLSTM) conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());

         //Data: has shape [miniBatchSize,nIn,timeSeriesLength];
         //Output/activations has shape [miniBatchsize,nHiddenUnits,timeSeriesLength];
@@ -109,10 +109,10 @@
                         .dist(new UniformDistribution(0, 1)).activation(Activation.TANH).build())
                 .build();

-        val numParams = conf.getFirstLayer().initializer().numParams(conf);
+        val numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        GravesLSTM lstm = (GravesLSTM) conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
-        lstm.setBackpropGradientsViewArray(Nd4j.create(1, conf.getFirstLayer().initializer().numParams(conf)));
+        GravesLSTM lstm = (GravesLSTM) conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());
+        lstm.setBackpropGradientsViewArray(Nd4j.create(1, conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf)));
         //Set input, do a forward pass:
         lstm.activate(inputData, false, LayerWorkspaceMgr.noWorkspaces());
         assertNotNull(lstm.input());
@@ -160,9 +160,9 @@
                         .activation(Activation.TANH).build())
                 .build();

-        val numParams = conf.getFirstLayer().initializer().numParams(conf);
+        val numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        GravesLSTM lstm = (GravesLSTM) conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
+        GravesLSTM lstm = (GravesLSTM) conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());

         INDArray input = Nd4j.rand(miniBatchSize, nIn, timeSeriesLength);
         lstm.setInput(input, LayerWorkspaceMgr.noWorkspaces());
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffConv.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffConv.java
index 690c07f37..f0b23e335 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffConv.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffConv.java
@@ -73,7 +73,7 @@ public class TestSameDiffConv extends BaseDL4JTest {
         MultiLayerNetwork net = new MultiLayerNetwork(conf);
         net.init();

-        Map<String, INDArray> pt1 = net.getLayer(0).paramTable();
+        Map<String, INDArray> pt1 = net.getLayer(0).getParamTable();
         assertNotNull(pt1);
         assertEquals(2, pt1.size());
         assertNotNull(pt1.get(ConvolutionParamInitializer.WEIGHT_KEY));
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffDense.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffDense.java
index 64d59c84b..60446d43f 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffDense.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffDense.java
@@ -71,7 +71,7 @@ public class TestSameDiffDense extends BaseDL4JTest {
         MultiLayerNetwork net = new MultiLayerNetwork(conf);
         net.init();

-        Map<String, INDArray> pt1 = net.getLayer(0).paramTable();
+        Map<String, INDArray> pt1 = net.getLayer(0).getParamTable();
         assertNotNull(pt1);
         assertEquals(2, pt1.size());
         assertNotNull(pt1.get(DefaultParamInitializer.WEIGHT_KEY));
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffDenseVertex.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffDenseVertex.java
index f70c4de92..5e67862ff 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffDenseVertex.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffDenseVertex.java
@@ -104,7 +104,7 @@ public class TestSameDiffDenseVertex extends BaseDL4JTest {
                 //Check params:
                 assertEquals(netStandard.params(), netSD.params());
-                assertEquals(netStandard.paramTable(), netSD.paramTable());
+                assertEquals(netStandard.getParamTable(), netSD.getParamTable());

                 INDArray in = Nd4j.rand(minibatch, nIn);
                 INDArray l = TestUtils.randomOneHot(minibatch, nOut, 12345);
@@ -159,7 +159,7 @@
                 netSD.fit(ds);
                 netStandard.fit(ds);

-                assertEquals(netStandard.paramTable(), netSD.paramTable());
+                assertEquals(netStandard.getParamTable(), netSD.getParamTable());
                 assertEquals(netStandard.params(), netSD.params());
                 assertEquals(netStandard.getFlattenedGradients(), netSD.getFlattenedGradients());
             }
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/variational/TestVAE.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/variational/TestVAE.java
index 3da4abed5..639520492 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/variational/TestVAE.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/variational/TestVAE.java
@@ -63,7 +63,7 @@ public class TestVAE extends BaseDL4JTest {
                         .build())
                 .build();

-        LayerConfiguration c = mlc.getFirstLayer();
+        LayerConfiguration c = mlc.getFlattenedLayerConfigurations().get(0);
         org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder vae =
                 (VariationalAutoencoder) c;
@@ -78,7 +78,7 @@
         System.out.println("Exp num params: " + expNumParams);
         assertEquals(expNumParams, net.getLayer(0).params().length());

-        Map<String, INDArray> paramTable = net.getLayer(0).paramTable();
+        Map<String, INDArray> paramTable = net.getLayer(0).getParamTable();
         int count = 0;
         for (INDArray arr : paramTable.values()) {
             count += arr.length();
@@ -135,7 +135,7 @@
         net.init();
         net.initGradientsView(); //TODO this should happen automatically

-        Map<String, INDArray> paramTable = net.getLayer(0).paramTable();
+        Map<String, INDArray> paramTable = net.getLayer(0).getParamTable();
         Map<String, INDArray> gradTable =
                 ((org.deeplearning4j.nn.layers.variational.VariationalAutoencoder) net.getLayer(0))
                         .getGradientViews();
@@ -175,7 +175,7 @@
         org.deeplearning4j.nn.layers.variational.VariationalAutoencoder layer =
                 (org.deeplearning4j.nn.layers.variational.VariationalAutoencoder) net.getLayer(0);

-        Map<String, INDArray> layerParams = layer.paramTable();
+        Map<String, INDArray> layerParams = layer.getParamTable();
         Map<String, INDArray> layerGradViews = layer.getGradientViews();

         layer.setInput(Nd4j.rand(3, 10), LayerWorkspaceMgr.noWorkspaces());
@@ -239,7 +239,7 @@
             net.pretrainLayer(0, input);

             //Get a snapshot of the pretrain params after fitting:
-            Map<String, INDArray> layerParams = layer.paramTable();
+            Map<String, INDArray> layerParams = layer.getParamTable();
             Map<String, INDArray> pretrainParamsBefore = new HashMap<>();
             for (String s : layerParams.keySet()) {
                 if (layer.isPretrainParam(s)) {
@@ -255,7 +255,7 @@
                 net.fit(features, labels);
             }

-            Map<String, INDArray> layerParamsAfter = layer.paramTable();
+            Map<String, INDArray> layerParamsAfter = layer.getParamTable();

             for (String s : pretrainParamsBefore.keySet()) {
                 INDArray before = pretrainParamsBefore.get(s);
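// Annotation (not part of the patch): paramTable() is renamed getParamTable() on layers and
// networks alike. The snapshot-and-compare idiom used throughout TestVAE above looks like
// this under the new name (a sketch; "layer" is any org.deeplearning4j.nn.api.Layer):
Map<String, INDArray> snapshot = new HashMap<>();
for (Map.Entry<String, INDArray> e : layer.getParamTable().entrySet()) {
    snapshot.put(e.getKey(), e.getValue().dup()); // dup(): table values are views of the params array
}
// ...fit the network, then diff the current getParamTable() against the snapshot...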
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/misc/WorkspaceTests.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/misc/WorkspaceTests.java
index 9649adffd..794b45411 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/misc/WorkspaceTests.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/misc/WorkspaceTests.java
@@ -104,13 +104,13 @@ public class WorkspaceTests extends BaseDL4JTest {
         MultiLayerNetwork net = new MultiLayerNetwork(conf.clone());
         net.init();
-        net.getConfiguration().setInferenceWorkspaceMode(WorkspaceMode.ENABLED);
-        net.getConfiguration().setTrainingWorkspaceMode(WorkspaceMode.ENABLED);
+        net.getNetConfiguration().setInferenceWorkspaceMode(WorkspaceMode.ENABLED);
+        net.getNetConfiguration().setTrainingWorkspaceMode(WorkspaceMode.ENABLED);

         MultiLayerNetwork net2 = new MultiLayerNetwork(conf.clone());
         net2.init();
-        net2.getConfiguration().setInferenceWorkspaceMode(WorkspaceMode.NONE);
-        net2.getConfiguration().setTrainingWorkspaceMode(WorkspaceMode.NONE);
+        net2.getNetConfiguration().setInferenceWorkspaceMode(WorkspaceMode.NONE);
+        net2.getNetConfiguration().setTrainingWorkspaceMode(WorkspaceMode.NONE);

         INDArray in = Nd4j.rand(1, 2, 5, 5);
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java
index cad0cfd50..4fb1c3fad 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java
@@ -817,15 +817,15 @@ public class MultiLayerTest extends BaseDL4JTest {
         DataSetIterator iter = new IrisDataSetIterator(50, 150);

-        assertEquals(0, network.getConfiguration().getIterationCount());
+        assertEquals(0, network.getNetConfiguration().getIterationCount());
         network.fit(iter);
-        assertEquals(3, network.getConfiguration().getIterationCount());
+        assertEquals(3, network.getNetConfiguration().getIterationCount());
         iter.reset();
         network.fit(iter);
-        assertEquals(6, network.getConfiguration().getIterationCount());
+        assertEquals(6, network.getNetConfiguration().getIterationCount());
         iter.reset();
         network.fit(iter.next());
-        assertEquals(7, network.getConfiguration().getIterationCount());
+        assertEquals(7, network.getNetConfiguration().getIterationCount());

         ByteArrayOutputStream baos = new ByteArrayOutputStream();
         ModelSerializer.writeModel(network, baos, true);
@@ -833,7 +833,7 @@
         ByteArrayInputStream bais = new ByteArrayInputStream(asBytes);
         MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(bais, true);
-        assertEquals(7, net.getConfiguration().getIterationCount());
+        assertEquals(7, net.getNetConfiguration().getIterationCount());
     }
@@ -1072,20 +1072,20 @@
         MultiLayerNetwork net = new MultiLayerNetwork(conf);
         net.init();

-        assertEquals(0, net.getConfiguration().getEpochCount());
+        assertEquals(0, net.getNetConfiguration().getEpochCount());

         DataSetIterator iter = new IrisDataSetIterator(150, 150);

         for (int i = 0; i < 4; i++) {
-            assertEquals(i, net.getConfiguration().getEpochCount());
+            assertEquals(i, net.getNetConfiguration().getEpochCount());
             net.fit(iter);
-            assertEquals(i + 1, net.getConfiguration().getEpochCount());
+            assertEquals(i + 1, net.getNetConfiguration().getEpochCount());
         }

-        assertEquals(4, net.getConfiguration().getEpochCount());
+        assertEquals(4, net.getNetConfiguration().getEpochCount());

         MultiLayerNetwork restored = TestUtils.testModelSerialization(net);
-        assertEquals(4, restored.getConfiguration().getEpochCount());
+        assertEquals(4, restored.getNetConfiguration().getEpochCount());
     }

     @Test
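// Annotation (not part of the patch): MultiLayerNetwork.getConfiguration() is renamed
// getNetConfiguration(); the iteration and epoch counters still live on that net-level
// configuration and survive (de)serialization. Sketch of the counting contract the test
// above asserts (Iris: 150 examples, batch size 50, so 3 iterations per epoch):
MultiLayerNetwork network = new MultiLayerNetwork(conf);
network.init();
DataSetIterator iter = new IrisDataSetIterator(50, 150);
network.fit(iter); // one pass over the iterator = one epoch
assert network.getNetConfiguration().getIterationCount() == 3;
assert network.getNetConfiguration().getEpochCount() == 1;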
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTestRNN.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTestRNN.java
index 1a6175cde..99c1c6077 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTestRNN.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTestRNN.java
@@ -86,7 +86,7 @@ public class MultiLayerTestRNN extends BaseDL4JTest {
         Layer layer = network.getLayer(0);
         assertTrue(layer instanceof GravesLSTM);

-        Map<String, INDArray> paramTable = layer.paramTable();
+        Map<String, INDArray> paramTable = layer.getParamTable();
         assertEquals(3, paramTable.size()); //2 sets of weights, 1 set of biases

         INDArray recurrentWeights = paramTable.get(GravesLSTMParamInitializer.RECURRENT_WEIGHT_KEY);
@@ -131,7 +131,7 @@
             Layer layer = network.getLayer(i);
             assertTrue(layer instanceof GravesLSTM);

-            Map<String, INDArray> paramTable = layer.paramTable();
+            Map<String, INDArray> paramTable = layer.getParamTable();
             assertEquals(3, paramTable.size()); //2 sets of weights, 1 set of biases

             int layerNIn = (i == 0 ? nIn : nHiddenUnits[i - 1]);
@@ -458,9 +458,9 @@
         mlnTBPTT.clearTbpttState = false;

-        assertEquals(BackpropType.TruncatedBPTT, mlnTBPTT.getConfiguration().getBackpropType());
-        assertEquals(timeSeriesLength, mlnTBPTT.getConfiguration().getTbpttFwdLength());
-        assertEquals(timeSeriesLength, mlnTBPTT.getConfiguration().getTbpttBackLength());
+        assertEquals(BackpropType.TruncatedBPTT, mlnTBPTT.getNetConfiguration().getBackpropType());
+        assertEquals(timeSeriesLength, mlnTBPTT.getNetConfiguration().getTbpttFwdLength());
+        assertEquals(timeSeriesLength, mlnTBPTT.getNetConfiguration().getTbpttBackLength());

         INDArray inputData = Nd4j.rand(miniBatchSize, nIn, timeSeriesLength);
         INDArray labels = Nd4j.rand(miniBatchSize, nOut, timeSeriesLength);
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/rl/TestMultiModelGradientApplication.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/rl/TestMultiModelGradientApplication.java
index fe80d1e24..19360abb7 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/rl/TestMultiModelGradientApplication.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/rl/TestMultiModelGradientApplication.java
@@ -124,8 +124,8 @@ public class TestMultiModelGradientApplication extends BaseDL4JTest {
                             net2GradUpd.getUpdater().getStateViewArray());

             //Remove the next 2 lines: fails - as net 1 is 1 iteration ahead
-            net1GradCalc.getConfiguration().setIterationCount(0);
-            net2GradUpd.getConfiguration().setIterationCount(0);
+            net1GradCalc.getNetConfiguration().setIterationCount(0);
+            net2GradUpd.getNetConfiguration().setIterationCount(0);

             for (int i = 0; i < 100; i++) {
                 net1GradCalc.fit(f, l);
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TestFrozenLayers.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TestFrozenLayers.java
index d35b46911..5c5fb204e 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TestFrozenLayers.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TestFrozenLayers.java
@@ -127,7 +127,7 @@ public class TestFrozenLayers extends BaseDL4JTest {
             }

             Map<String, INDArray> paramsBefore = new LinkedHashMap<>();
-            for(Map.Entry<String, INDArray> entry : transfer.paramTable().entrySet()){
+            for(Map.Entry<String, INDArray> entry : transfer.getParamTable().entrySet()){
                 paramsBefore.put(entry.getKey(), entry.getValue().dup());
             }
@@ -137,7 +137,7 @@
                 transfer.fit(new INDArray[]{f},new INDArray[]{l});
             }

-            for(Map.Entry<String, INDArray> entry : transfer.paramTable().entrySet()){
+            for(Map.Entry<String, INDArray> entry : transfer.getParamTable().entrySet()){
                 String s = msg + " - " + entry.getKey();
                 if(entry.getKey().startsWith("5_")){
                     //Non-frozen layer
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TestTransferLearningModelSerializer.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TestTransferLearningModelSerializer.java
index b328c8dff..6d6ce41c0 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TestTransferLearningModelSerializer.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TestTransferLearningModelSerializer.java
@@ -70,9 +70,9 @@ public class TestTransferLearningModelSerializer extends BaseDL4JTest {
         assertTrue(withFrozen.getLayer(0) instanceof FrozenLayer);
         assertTrue(withFrozen.getLayer(1) instanceof FrozenLayer);

-        assertTrue(withFrozen.getConfiguration().getConf(0)
+        assertTrue(withFrozen.getNetConfiguration().getConf(0)
                         .getLayer() instanceof org.deeplearning4j.nn.conf.layers.misc.FrozenLayer);
-        assertTrue(withFrozen.getConfiguration().getConf(1)
+        assertTrue(withFrozen.getNetConfiguration().getConf(1)
                         .getLayer() instanceof org.deeplearning4j.nn.conf.layers.misc.FrozenLayer);

         MultiLayerNetwork restored = TestUtils.testModelSerialization(withFrozen);
@@ -120,8 +120,8 @@
         assertTrue(withFrozen.getLayer(1) instanceof FrozenLayer);

         Map<String, GraphVertex> m = withFrozen.getComputationGraphConfiguration().getVertices();
-        LayerConfiguration l0 = ((LayerVertex) m.get("0")).getNetConfiguration().getFirstLayer();
-        LayerConfiguration l1 = ((LayerVertex) m.get("1")).getNetConfiguration().getFirstLayer();
+        LayerConfiguration l0 = ((LayerVertex) m.get("0")).getLayerConfiguration();
+        LayerConfiguration l1 = ((LayerVertex) m.get("1")).getLayerConfiguration();
         assertTrue(l0 instanceof org.deeplearning4j.nn.conf.layers.misc.FrozenLayer);
         assertTrue(l1 instanceof org.deeplearning4j.nn.conf.layers.misc.FrozenLayer);
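// Annotation (not part of the patch): for graph vertices, the detour through
// getNetConfiguration().getFirstLayer() is gone; a LayerVertex hands out its layer's
// configuration directly. Sketch of the frozen-layer check above under the new accessor
// ("graph" is a ComputationGraph whose vertices are named "0", "1", ...):
Map<String, GraphVertex> vertices = graph.getComputationGraphConfiguration().getVertices();
LayerConfiguration l0 = ((LayerVertex) vertices.get("0")).getLayerConfiguration();
boolean frozen = l0 instanceof org.deeplearning4j.nn.conf.layers.misc.FrozenLayer;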
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TransferLearningCompGraphTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TransferLearningCompGraphTest.java
index 0f75f1426..195ee2f6d 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TransferLearningCompGraphTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TransferLearningCompGraphTest.java
@@ -605,13 +605,13 @@ public class TransferLearningCompGraphTest extends BaseDL4JTest {
         cg2.output(arr);

-        Map<String, INDArray> m = new HashMap<>(cg.paramTable());
+        Map<String, INDArray> m = new HashMap<>(cg.getParamTable());
         m.put("newOut_W", m.remove("out_W"));
         m.put("newOut_b", m.remove("out_b"));
         cg2.setParamTable(m);

-        Map<String, INDArray> p1 = cg.paramTable();
-        Map<String, INDArray> p2 = cg2.paramTable();
+        Map<String, INDArray> p1 = cg.getParamTable();
+        Map<String, INDArray> p2 = cg2.getParamTable();
         for(String s : p1.keySet()){
             INDArray i1 = p1.get(s);
             INDArray i2 = p2.get(s.replaceAll("out", "newOut"));
@@ -651,13 +651,13 @@
         cg2.output(arr);

-        Map<String, INDArray> m = new HashMap<>(cg.paramTable());
+        Map<String, INDArray> m = new HashMap<>(cg.getParamTable());
         m.put("newOut_W", m.remove("out_W"));
         m.put("newOut_b", m.remove("out_b"));
         cg2.setParamTable(m);

-        Map<String, INDArray> p1 = cg.paramTable();
-        Map<String, INDArray> p2 = cg2.paramTable();
+        Map<String, INDArray> p1 = cg.getParamTable();
+        Map<String, INDArray> p2 = cg2.getParamTable();
         for(String s : p1.keySet()){
             INDArray i1 = p1.get(s);
             INDArray i2 = p2.get(s.replaceAll("out", "newOut"));
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TransferLearningMLNTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TransferLearningMLNTest.java
index cda7da0b4..f33c48738 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TransferLearningMLNTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TransferLearningMLNTest.java
@@ -112,8 +112,8 @@ public class TransferLearningMLNTest extends BaseDL4JTest {
         assertEquals(expectedModel.params(), modelNow.params());

         //Check json
-        NeuralNetConfiguration expectedConf = expectedModel.getConfiguration();
-        assertEquals(expectedConf.toJson(), modelNow.getConfiguration().toJson());
+        NeuralNetConfiguration expectedConf = expectedModel.getNetConfiguration();
+        assertEquals(expectedConf.toJson(), modelNow.getNetConfiguration().toJson());

         //Check params after fit
         modelNow.fit(randomData);
@@ -160,9 +160,9 @@
         //Will fail - expected because of dist and weight init changes
         //assertEquals(modelExpectedArch.getConfiguration().toJson(), modelNow.getConfiguration().toJson());

-        BaseLayer bl0 = ((BaseLayer) modelNow.getConfiguration().getConf(0).getLayer());
-        BaseLayer bl1 = ((BaseLayer) modelNow.getConfiguration().getConf(1).getLayer());
-        BaseLayer bl3 = ((BaseLayer) modelNow.getConfiguration().getConf(3).getLayer());
+        BaseLayer bl0 = ((BaseLayer) modelNow.getNetConfiguration().getConf(0).getLayer());
+        BaseLayer bl1 = ((BaseLayer) modelNow.getNetConfiguration().getConf(1).getLayer());
+        BaseLayer bl3 = ((BaseLayer) modelNow.getNetConfiguration().getConf(3).getLayer());
         assertEquals(bl0.getWeightInitFn().getClass(), WeightInitXavier.class);
         try {
             assertEquals(JsonMappers.getMapper().writeValueAsString(bl1.getWeightInitFn()),
@@ -357,18 +357,18 @@
                         .setInputPreProcessor(4, new FeedForwardToRnnPreProcessor()).build();

         //modelNow should have the same architecture as modelExpectedArch
-        assertEquals(modelExpectedArch.getConfiguration().getConf(0).toJson(),
-                modelNow.getConfiguration().getConf(0).toJson());
+        assertEquals(modelExpectedArch.getNetConfiguration().getConf(0).toJson(),
+                modelNow.getNetConfiguration().getConf(0).toJson());
         //some learning related info the subsampling layer will not be overwritten
         //assertTrue(modelExpectedArch.getConfiguration().getConf(1).toJson().equals(modelNow.getConfiguration().getConf(1).toJson()));
-        assertEquals(modelExpectedArch.getConfiguration().getConf(2).toJson(),
-                modelNow.getConfiguration().getConf(2).toJson());
-        assertEquals(modelExpectedArch.getConfiguration().getConf(3).toJson(),
-                modelNow.getConfiguration().getConf(3).toJson());
-        assertEquals(modelExpectedArch.getConfiguration().getConf(4).toJson(),
-                modelNow.getConfiguration().getConf(4).toJson());
-        assertEquals(modelExpectedArch.getConfiguration().getConf(5).toJson(),
-                modelNow.getConfiguration().getConf(5).toJson());
+        assertEquals(modelExpectedArch.getNetConfiguration().getConf(2).toJson(),
+                modelNow.getNetConfiguration().getConf(2).toJson());
+        assertEquals(modelExpectedArch.getNetConfiguration().getConf(3).toJson(),
+                modelNow.getNetConfiguration().getConf(3).toJson());
+        assertEquals(modelExpectedArch.getNetConfiguration().getConf(4).toJson(),
+                modelNow.getNetConfiguration().getConf(4).toJson());
+        assertEquals(modelExpectedArch.getNetConfiguration().getConf(5).toJson(),
+                modelNow.getNetConfiguration().getConf(5).toJson());

         assertArrayEquals(modelExpectedArch.params().shape(), modelNow.params().shape());
         assertArrayEquals(modelExpectedArch.getLayer(0).params().shape(), modelNow.getLayer(0).params().shape());
@@ -530,7 +530,7 @@
         assertEquals(new WeightInitRelu(), l1.getWeightInitFn());
         assertEquals(0.2, TestUtils.getL2(l1), 1e-6);

-        assertEquals(BackpropType.TruncatedBPTT, net2.getConfiguration().getBackpropType());
+        assertEquals(BackpropType.TruncatedBPTT, net2.getNetConfiguration().getBackpropType());
     }

     @Test
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/updater/TestGradientNormalization.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/updater/TestGradientNormalization.java
index 63c936b17..54d3a3174 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/updater/TestGradientNormalization.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/updater/TestGradientNormalization.java
@@ -52,9 +52,9 @@ public class TestGradientNormalization extends BaseDL4JTest {
                         .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer).build())
                 .build();

-        long numParams = conf.getFirstLayer().initializer().numParams(conf);
+        long numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        Layer layer = conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
+        Layer layer = conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());
         INDArray gradArray = Nd4j.rand(1, 220).muli(10).subi(5);
         layer.setBackpropGradientsViewArray(gradArray);
         INDArray weightGrad = Shape.newShapeNoCopy(gradArray.get(NDArrayIndex.point(0), NDArrayIndex.interval(0, 200)),
@@ -98,9 +98,9 @@
                         .gradientNormalization(GradientNormalization.RenormalizeL2PerParamType).build())
                 .build();

-        long numParams = conf.getFirstLayer().initializer().numParams(conf);
+        long numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        Layer layer = conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
+        Layer layer = conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());
         layer.setBackpropGradientsViewArray(Nd4j.create(params.shape()));
         Updater updater = UpdaterCreator.getUpdater(layer);
         INDArray weightGrad = Nd4j.rand(10, 20);
@@ -131,9 +131,9 @@
                         .gradientNormalizationThreshold(threshold).build())
                 .build();

-        long numParams = conf.getFirstLayer().initializer().numParams(conf);
+        long numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        Layer layer = conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
+        Layer layer = conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());
         INDArray gradArray = Nd4j.rand(1, 220).muli(10).subi(5);
         layer.setBackpropGradientsViewArray(gradArray);
         INDArray weightGrad = Shape.newShapeNoCopy(gradArray.get(NDArrayIndex.point(0), NDArrayIndex.interval(0, 200)),
@@ -187,9 +187,9 @@
                         .gradientNormalizationThreshold(threshold).build())
                 .build();

-        val numParams = conf.getFirstLayer().initializer().numParams(conf);
+        val numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        Layer layer = conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
+        Layer layer = conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());
         INDArray gradArray = Nd4j.rand(1, 220).muli(t == 0 ? 0.05 : 10).subi(t == 0 ? 0 : 5);
         layer.setBackpropGradientsViewArray(gradArray);
         INDArray weightGrad =
@@ -242,9 +242,9 @@
                         .gradientNormalizationThreshold(threshold).build())
                 .build();

-        val numParams = conf.getFirstLayer().initializer().numParams(conf);
+        val numParams = conf.getFlattenedLayerConfigurations().get(0).initializer().numParams(conf);
         INDArray params = Nd4j.create(1, numParams);
-        Layer layer = conf.getFirstLayer().instantiate(conf, null, 0, params, true, params.dataType());
+        Layer layer = conf.getFlattenedLayerConfigurations().get(0).instantiate(conf, null, 0, params, true, params.dataType());
         layer.setBackpropGradientsViewArray(Nd4j.create(params.shape()));
         Updater updater = UpdaterCreator.getUpdater(layer);
         INDArray weightGrad = Nd4j.rand(10, 20).muli(0.05);
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimize/solver/TestOptimizers.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimize/solver/TestOptimizers.java
index 7753fae33..69afb6330 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimize/solver/TestOptimizers.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimize/solver/TestOptimizers.java
@@ -20,6 +20,7 @@
 package org.deeplearning4j.optimize.solver;

+import lombok.NonNull;
 import lombok.val;
 import net.brutex.ai.dnn.api.IModel;
 import org.deeplearning4j.BaseDL4JTest;
@@ -44,6 +45,7 @@
 import org.deeplearning4j.optimize.solvers.LineGradientDescent;
 import org.deeplearning4j.optimize.solvers.StochasticGradientDescent;
 import org.deeplearning4j.optimize.stepfunctions.NegativeDefaultStepFunction;
 import org.junit.jupiter.api.Test;
+import org.nd4j.evaluation.IEvaluation;
 import org.nd4j.linalg.activations.Activation;
 import org.nd4j.linalg.api.buffer.DataType;
 import org.nd4j.linalg.api.ndarray.INDArray;
@@ -52,7 +54,9 @@
 import org.nd4j.linalg.api.ops.impl.transforms.strict.Sin;
 import org.nd4j.linalg.api.rng.DefaultRandom;
 import org.nd4j.linalg.api.rng.Random;
 import org.nd4j.linalg.dataset.DataSet;
+import org.nd4j.linalg.dataset.api.MultiDataSet;
 import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
+import org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator;
 import org.nd4j.linalg.exception.ND4JArraySizeException;
 import org.nd4j.linalg.factory.Nd4j;
 import org.nd4j.linalg.indexing.conditions.Condition;
@@ -317,6 +321,90 @@ public class TestOptimizers extends BaseDL4JTest {
         }

+        /**
+         * This method returns updater state (if applicable), null otherwise
+         *
+         * @return
+         */
+        @Override
+        public INDArray updaterState() {
+            return null;
+        }
+
+        /**
+         * This method fits model with a given DataSet
+         *
+         * @param dataSet
+         */
+        @Override
+        public void fit(org.nd4j.linalg.dataset.api.DataSet dataSet) {
+
+        }
+
+        /**
+         * This method fits model with a given MultiDataSet
+         *
+         * @param dataSet
+         */
+        @Override
+        public void fit(MultiDataSet dataSet) {
+
+        }
+
+        /**
+         * This method fits model with a given DataSetIterator
+         *
+         * @param iterator
+         */
+        @Override
+        public void fit(DataSetIterator iterator) {
+
+        }
+
+        /**
+         * This method fits model with a given MultiDataSetIterator
+         *
+         * @param iterator
+         */
+        @Override
+        public void fit(MultiDataSetIterator iterator) {
+
+        }
+
+        /**
+         * This method executes evaluation of the model against given iterator and evaluation
+         * implementations
+         *
+         * @param iterator
+         * @param evaluations
+         */
+        @Override
+        public <T extends IEvaluation> T[] doEvaluation(DataSetIterator iterator, T... evaluations) {
+            return null;
+        }
+
+        /**
+         * This method executes evaluation of the model against given iterator and evaluation
+         * implementations
+         *
+         * @param iterator
+         * @param evaluations
+         */
+        @Override
+        public <T extends IEvaluation> T[] doEvaluation(MultiDataSetIterator iterator, T... evaluations) {
+            return null;
+        }
+
+        /**
+         * @param netConfiguration
+         */
+        @Override
+        public void setNetConfiguration(@NonNull NeuralNetConfiguration netConfiguration) {
+
+        }
+
         @Override
         public void computeGradientAndScore(LayerWorkspaceMgr workspaceMgr) {
             // Gradients: d(x^2)/dx = 2x
@@ -464,6 +552,90 @@
         }

+        /**
+         * This method returns updater state (if applicable), null otherwise
+         *
+         * @return
+         */
+        @Override
+        public INDArray updaterState() {
+            return null;
+        }
+
+        /**
+         * This method fits model with a given DataSet
+         *
+         * @param dataSet
+         */
+        @Override
+        public void fit(org.nd4j.linalg.dataset.api.DataSet dataSet) {
+
+        }
+
+        /**
+         * This method fits model with a given MultiDataSet
+         *
+         * @param dataSet
+         */
+        @Override
+        public void fit(MultiDataSet dataSet) {
+
+        }
+
+        /**
+         * This method fits model with a given DataSetIterator
+         *
+         * @param iterator
+         */
+        @Override
+        public void fit(DataSetIterator iterator) {
+
+        }
+
+        /**
+         * This method fits model with a given MultiDataSetIterator
+         *
+         * @param iterator
+         */
+        @Override
+        public void fit(MultiDataSetIterator iterator) {
+
+        }
+
+        /**
+         * This method executes evaluation of the model against given iterator and evaluation
+         * implementations
+         *
+         * @param iterator
+         * @param evaluations
+         */
+        @Override
+        public <T extends IEvaluation> T[] doEvaluation(DataSetIterator iterator, T... evaluations) {
+            return null;
+        }
+
+        /**
+         * This method executes evaluation of the model against given iterator and evaluation
+         * implementations
+         *
+         * @param iterator
+         * @param evaluations
+         */
+        @Override
+        public <T extends IEvaluation> T[] doEvaluation(MultiDataSetIterator iterator, T... evaluations) {
+            return null;
+        }
+
+        /**
+         * @param netConfiguration
+         */
+        @Override
+        public void setNetConfiguration(@NonNull NeuralNetConfiguration netConfiguration) {
+
+        }
+
         @Override
         public void computeGradientAndScore(LayerWorkspaceMgr workspaceMgr) {
             //Gradient decomposes due to sum, so:
@@ -649,6 +821,90 @@
             return dist.sample(new int[] {1, nDimensions});
         }

+        /**
+         * This method returns updater state (if applicable), null otherwise
+         *
+         * @return
+         */
+        @Override
+        public INDArray updaterState() {
+            return null;
+        }
+
+        /**
+         * This method fits model with a given DataSet
+         *
+         * @param dataSet
+         */
+        @Override
+        public void fit(org.nd4j.linalg.dataset.api.DataSet dataSet) {
+
+        }
+
+        /**
+         * This method fits model with a given MultiDataSet
+         *
+         * @param dataSet
+         */
+        @Override
+        public void fit(MultiDataSet dataSet) {
+
+        }
+
+        /**
+         * This method fits model with a given DataSetIterator
+         *
+         * @param iterator
+         */
+        @Override
+        public void fit(DataSetIterator iterator) {
+
+        }
+
+        /**
+         * This method fits model with a given MultiDataSetIterator
+         *
+         * @param iterator
+         */
+        @Override
+        public void fit(MultiDataSetIterator iterator) {
+
+        }
+
+        /**
+         * This method executes evaluation of the model against given iterator and evaluation
+         * implementations
+         *
+         * @param iterator
+         * @param evaluations
+         */
+        @Override
+        public <T extends IEvaluation> T[] doEvaluation(DataSetIterator iterator, T... evaluations) {
+            return null;
+        }
+
+        /**
+         * This method executes evaluation of the model against given iterator and evaluation
+         * implementations
+         *
+         * @param iterator
+         * @param evaluations
+         */
+        @Override
+        public <T extends IEvaluation> T[] doEvaluation(MultiDataSetIterator iterator, T... evaluations) {
+            return null;
+        }
+
+        /**
+         * @param netConfiguration
+         */
+        @Override
+        public void setNetConfiguration(@NonNull NeuralNetConfiguration netConfiguration) {
+
+        }
+
         @Override
         public void computeGradientAndScore(LayerWorkspaceMgr workspaceMgr) {
             val nDims = parameters.length();
@@ -912,7 +1168,7 @@
         }

         @Override
-        public void setLayerConfiguration(NeuralNetConfiguration layerConfiguration) {
+        public void setLayerConfiguration(LayerConfiguration layerConfiguration) {
             throw new UnsupportedOperationException();
         }
@@ -934,13 +1190,13 @@
         }

         @Override
-        public Map<String, INDArray> paramTable() {
+        public Map<String, INDArray> getParamTable() {
             return Collections.singletonMap("W", getParam("W"));
         }

         @Override
-        public Map<String, INDArray> paramTable(boolean backpropParamsOnly) {
-            return paramTable();
+        public Map<String, INDArray> getParamTable(boolean backpropParamsOnly) {
+            return getParamTable();
         }

         @Override
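// Annotation (not part of the patch): the three in-test model stubs above absorb the wider
// IModel surface: no-op fit(..) overloads, doEvaluation(..), setNetConfiguration(..), plus
// the paramTable(boolean) -> getParamTable(boolean) rename. Sketch of the minimal table pair
// a single-parameter stub now provides:
@Override
public Map<String, INDArray> getParamTable() {
    return Collections.singletonMap("W", getParam("W"));
}

@Override
public Map<String, INDArray> getParamTable(boolean backpropParamsOnly) {
    return getParamTable(); // only one table here, so no backprop-only filtering
}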
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest050.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest050.java
index 50c177332..df6f1e0cb 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest050.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest050.java
@@ -65,7 +65,7 @@ public class RegressionTest050 extends BaseDL4JTest {
         MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         assertEquals(2, conf.getNetConfigurations().size());

         DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
@@ -99,7 +99,7 @@
         MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         assertEquals(2, conf.getNetConfigurations().size());

         DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
@@ -138,7 +138,7 @@
         MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         assertEquals(3, conf.getNetConfigurations().size());

         ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest060.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest060.java
index 9b0870b0f..d6c88b4d3 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest060.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest060.java
@@ -67,7 +67,7 @@ public class RegressionTest060 extends BaseDL4JTest {
         MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         assertEquals(2, conf.getNetConfigurations().size());

         DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
@@ -101,7 +101,7 @@
         MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         assertEquals(2, conf.getNetConfigurations().size());

         DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
@@ -144,7 +144,7 @@
         MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         assertEquals(3, conf.getNetConfigurations().size());

         ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
@@ -190,7 +190,7 @@
         MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         assertEquals(3, conf.getNetConfigurations().size());

         GravesLSTM l0 = (GravesLSTM) conf.getConf(0).getLayer();
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest071.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest071.java
index e21f75680..bf14dba46 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest071.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest071.java
@@ -68,7 +68,7 @@ public class RegressionTest071 extends BaseDL4JTest {
         MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         assertEquals(2, conf.getNetConfigurations().size());

         DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
@@ -102,7 +102,7 @@
         MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         assertEquals(2, conf.getNetConfigurations().size());

         DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
@@ -145,7 +145,7 @@
         MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         assertEquals(3, conf.getNetConfigurations().size());

         ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
@@ -191,7 +191,7 @@
         MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         assertEquals(3, conf.getNetConfigurations().size());

         GravesLSTM l0 = (GravesLSTM) conf.getConf(0).getLayer();
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest080.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest080.java
index 06af06ff4..4cc26f05a 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest080.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest080.java
@@ -67,7 +67,7 @@ public class RegressionTest080 extends BaseDL4JTest {
         MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         assertEquals(2, conf.getNetConfigurations().size());

         DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
@@ -106,7 +106,7 @@
         MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         assertEquals(2, conf.getNetConfigurations().size());

         DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
@@ -155,7 +155,7 @@
         MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         assertEquals(3, conf.getNetConfigurations().size());

         ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
@@ -206,7 +206,7 @@
         MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

-        NeuralNetConfiguration conf = net.getConfiguration();
+        NeuralNetConfiguration conf = net.getNetConfiguration();
         assertEquals(3, conf.getNetConfigurations().size());

         GravesLSTM l0 = (GravesLSTM) conf.getConf(0).getLayer();
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest100a.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest100a.java
index a847a85ef..6b6558c48 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest100a.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest100a.java
@@ -107,9 +107,9 @@ public class RegressionTest100a extends BaseDL4JTest {
         assertEquals(new WeightDecay(0.001, false), TestUtils.getWeightDecayReg(l0));
         assertEquals(new RmsProp(0.1), l0.getIUpdater());

-        assertEquals(BackpropType.TruncatedBPTT, net.getConfiguration().getBackpropType());
-        assertEquals(50, net.getConfiguration().getTbpttBackLength());
-        assertEquals(50, net.getConfiguration().getTbpttFwdLength());
+        assertEquals(BackpropType.TruncatedBPTT, net.getNetConfiguration().getBackpropType());
+        assertEquals(50, net.getNetConfiguration().getTbpttBackLength());
+        assertEquals(50, net.getNetConfiguration().getTbpttFwdLength());

         INDArray outExp;
         File f2 = Resources.asFile("regression_testing/100a/GravesLSTMCharModelingExample_Output_100a.bin");
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest100b3.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest100b3.java
index 23ae5d5bd..6d73c1074 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest100b3.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest100b3.java
@@ -108,7 +108,7 @@ public class RegressionTest100b3 extends BaseDL4JTest {
             List<INDArray> activations = net.feedForward(in);

-            assertEquals(dt, net.getConfiguration().getDataType());
+            assertEquals(dt, net.getNetConfiguration().getDataType());
             assertEquals(dt, net.params().dataType());
             assertEquals(outExp, outAct, dtype);
         }
@@ -142,9 +142,9 @@
         assertEquals(new WeightDecay(0.0001, false), TestUtils.getWeightDecayReg(l0));
         assertEquals(new Adam(0.005), l0.getIUpdater());

-        assertEquals(BackpropType.TruncatedBPTT, net.getConfiguration().getBackpropType());
-        assertEquals(50, net.getConfiguration().getTbpttBackLength());
-        assertEquals(50, net.getConfiguration().getTbpttFwdLength());
+        assertEquals(BackpropType.TruncatedBPTT, net.getNetConfiguration().getBackpropType());
+        assertEquals(50, net.getNetConfiguration().getTbpttBackLength());
+        assertEquals(50, net.getNetConfiguration().getTbpttFwdLength());

         INDArray outExp;
         File f2 = Resources.asFile("regression_testing/100b3/GravesLSTMCharModelingExample_Output_100b3.bin");
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest100b4.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest100b4.java
index fbbe55592..bd2f231d2 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest100b4.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest100b4.java
@@ -125,7 +125,7 @@ public class RegressionTest100b4 extends BaseDL4JTest {
             INDArray outAct = net.output(in);
             assertEquals(dtype, outAct.dataType());

-            assertEquals(dtype, net.getConfiguration().getDataType());
+            assertEquals(dtype, net.getNetConfiguration().getDataType());
             assertEquals(dtype, net.params().dataType());
             boolean eq = outExp.equalsWithEps(outAct, 0.01);
             assertTrue(eq, "Test for dtype: " + dtypeName + "\n" + outExp + " vs " + outAct);
@@ -160,9 +160,9 @@
         assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l2));
         assertEquals(new Adam(0.005), l2.getIUpdater());

-        assertEquals(BackpropType.TruncatedBPTT, net.getConfiguration().getBackpropType());
-        assertEquals(50, net.getConfiguration().getTbpttBackLength());
-        assertEquals(50, net.getConfiguration().getTbpttFwdLength());
+        assertEquals(BackpropType.TruncatedBPTT, net.getNetConfiguration().getBackpropType());
+        assertEquals(50, net.getNetConfiguration().getTbpttBackLength());
+        assertEquals(50, net.getNetConfiguration().getTbpttFwdLength());

         INDArray outExp;
         File f2 = Resources.asFile("regression_testing/100b4/GravesLSTMCharModelingExample_Output_100b4.bin");
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest100b6.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest100b6.java
index 979518196..bf13cff1b 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest100b6.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/RegressionTest100b6.java
@@ -107,7 +107,7 @@ public class RegressionTest100b6 extends BaseDL4JTest {
             INDArray outAct = net.output(in);
             assertEquals(dtype, outAct.dataType());

-            assertEquals(dtype, net.getConfiguration().getDataType());
+            assertEquals(dtype, net.getNetConfiguration().getDataType());
             assertEquals(dtype, net.params().dataType());
             boolean eq = outExp.equalsWithEps(outAct, 0.01);
             assertTrue(eq, "Test for dtype: " + dtypeName + " - " + outExp + " vs " + outAct);
@@ -142,9 +142,9 @@
         assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l2));
         assertEquals(new Adam(0.005), l2.getIUpdater());

-        assertEquals(BackpropType.TruncatedBPTT, net.getConfiguration().getBackpropType());
-        assertEquals(50, net.getConfiguration().getTbpttBackLength());
-        assertEquals(50, net.getConfiguration().getTbpttFwdLength());
+        assertEquals(BackpropType.TruncatedBPTT, net.getNetConfiguration().getBackpropType());
+        assertEquals(50, net.getNetConfiguration().getTbpttBackLength());
+        assertEquals(50, net.getNetConfiguration().getTbpttFwdLength());

         INDArray outExp;
         File f2 = Resources.asFile("regression_testing/100b6/GravesLSTMCharModelingExample_Output_100b6.bin");
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/customlayer100a/CustomLayer.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/customlayer100a/CustomLayer.java
index b8b3cdad6..72b55f9e6 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/customlayer100a/CustomLayer.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/customlayer100a/CustomLayer.java
@@ -28,6 +28,7 @@
 import org.deeplearning4j.nn.api.ParamInitializer;
 import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
 import org.deeplearning4j.nn.conf.inputs.InputType;
 import org.deeplearning4j.nn.conf.layers.FeedForwardLayer;
+import org.deeplearning4j.nn.conf.layers.LayerConfiguration;
 import org.deeplearning4j.nn.conf.memory.LayerMemoryReport;
 import org.deeplearning4j.nn.conf.memory.MemoryReport;
 import org.deeplearning4j.nn.params.DefaultParamInitializer;
@@ -68,11 +69,13 @@ public class CustomLayer extends FeedForwardLayer {
     @Override
     public Layer instantiate(NeuralNetConfiguration conf, Collection<TrainingListener> iterationListeners,
                     int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
+
+        LayerConfiguration lconf = conf.getFlattenedLayerConfigurations().get(0);
         //The instantiate method is how we go from the configuration class (i.e., this class) to the implementation class
         // (i.e., a CustomLayerImpl instance)
         //For the most part, it's the same for each type of layer

-        CustomLayerImpl myCustomLayer = new CustomLayerImpl(conf, networkDataType);
+        CustomLayerImpl myCustomLayer = new CustomLayerImpl(lconf, networkDataType);
         myCustomLayer.setListeners(iterationListeners); //Set the iteration listeners, if any
         myCustomLayer.setIndex(layerIndex); //Integer index of the layer
@@ -87,7 +90,7 @@
         // are in turn a view of the 'layerParamsView' array.

         Map<String, INDArray> paramTable = initializer().init(this, layerParamsView, initializeParams);
         myCustomLayer.setParamTable(paramTable);
-        myCustomLayer.setLayerConfiguration(conf);
+        myCustomLayer.setLayerConfiguration(lconf);
         return myCustomLayer;
     }
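// Annotation (not part of the patch): the hunks just above and below retrofit the 100a
// custom-layer example. Note that its instantiate(..) resolves get(0) rather than
// get(layerIndex), unlike the testclasses variant further up; inside the implementation,
// the layer now reads its own config back via getLayerConfiguration() instead of
// layerConfiguration.getFirstLayer(). Sketch of that accessor pattern (CustomLayer carries
// a second activation function):
IActivation activation2 =
        ((CustomLayer) getLayerConfiguration()).getSecondActivationFunction();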
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/customlayer100a/CustomLayerImpl.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/customlayer100a/CustomLayerImpl.java
index 42b91d908..d233a5da3 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/customlayer100a/CustomLayerImpl.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/regressiontest/customlayer100a/CustomLayerImpl.java
@@ -21,6 +21,7 @@ package org.deeplearning4j.regressiontest.customlayer100a;
 
 import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
+import org.deeplearning4j.nn.conf.layers.LayerConfiguration;
 import org.deeplearning4j.nn.gradient.DefaultGradient;
 import org.deeplearning4j.nn.gradient.Gradient;
 import org.deeplearning4j.nn.layers.BaseLayer;
@@ -35,7 +36,7 @@ import org.nd4j.common.primitives.Pair;
 public class CustomLayerImpl extends BaseLayer { //Generic parameter here: the configuration class type
 
-    public CustomLayerImpl(NeuralNetConfiguration conf, DataType dataType) {
+    public CustomLayerImpl(LayerConfiguration conf, DataType dataType) {
         super(conf, dataType);
     }
 
@@ -56,7 +57,7 @@ public class CustomLayerImpl extends BaseLayer { //Generic paramete
         INDArray secondHalf = output.get(NDArrayIndex.all(), NDArrayIndex.interval(columns / 2, columns));
 
         IActivation activation1 = layerConf().getActivationFn();
-        IActivation activation2 = ((CustomLayer) layerConfiguration.getFirstLayer()).getSecondActivationFunction();
+        IActivation activation2 = ((CustomLayer) getLayerConfiguration()).getSecondActivationFunction();
 
         //IActivation function instances modify the activation functions in-place
         activation1.getActivation(firstHalf, training);
@@ -105,7 +106,7 @@ public class CustomLayerImpl extends BaseLayer { //Generic paramete
         INDArray epsilonSecondHalf = epsilon.get(NDArrayIndex.all(), NDArrayIndex.interval(columns / 2, columns));
 
         IActivation activation1 = layerConf().getActivationFn();
-        IActivation activation2 = ((CustomLayer) layerConfiguration.getFirstLayer()).getSecondActivationFunction();
+        IActivation activation2 = ((CustomLayer) getLayerConfiguration()).getSecondActivationFunction();
 
         //IActivation backprop method modifies the 'firstHalf' and 'secondHalf' arrays in-place, to contain dL/dz
         activation1.backprop(firstHalf, epsilonFirstHalf);
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ModelGuesserTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ModelGuesserTest.java
index 02a1fdaf5..4415c5455 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ModelGuesserTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ModelGuesserTest.java
@@ -155,7 +155,7 @@ public class ModelGuesserTest extends BaseDL4JTest {
         ModelSerializer.writeModel(net, tempFile, true);
         MultiLayerNetwork network = (MultiLayerNetwork) ModelGuesser.loadModelGuess(tempFile.getAbsolutePath());
 
-        assertEquals(network.getConfiguration().toJson(), net.getConfiguration().toJson());
+        assertEquals(network.getNetConfiguration().toJson(), net.getNetConfiguration().toJson());
         assertEquals(net.params(), network.params());
         assertEquals(net.getUpdater().getStateViewArray(), network.getUpdater().getStateViewArray());
 
@@ -172,7 +172,7 @@ public class ModelGuesserTest extends BaseDL4JTest {
         try (InputStream inputStream = new FileInputStream(tempFile)) {
             MultiLayerNetwork network = (MultiLayerNetwork) ModelGuesser.loadModelGuess(inputStream);
             Assertions.assertNotNull(network);
-            assertEquals(network.getConfiguration().toJson(), net.getConfiguration().toJson());
+            assertEquals(network.getNetConfiguration().toJson(), net.getNetConfiguration().toJson());
             assertEquals(net.params(), network.params());
             assertEquals(net.getUpdater().getStateViewArray(), network.getUpdater().getStateViewArray());
         }
diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ModelSerializerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ModelSerializerTest.java
index 9f52ae300..5124e15ac 100644
--- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ModelSerializerTest.java
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ModelSerializerTest.java
@@ -80,7 +80,7 @@ public class ModelSerializerTest extends BaseDL4JTest {
 
         MultiLayerNetwork network = ModelSerializer.restoreMultiLayerNetwork(tempFile);
 
-        assertEquals(network.getConfiguration().toJson(), net.getConfiguration().toJson());
+        assertEquals(network.getNetConfiguration().toJson(), net.getNetConfiguration().toJson());
         assertEquals(net.params(), network.params());
         assertEquals(net.getUpdater().getStateViewArray(), network.getUpdater().getStateViewArray());
     }
@@ -124,7 +124,7 @@ public class ModelSerializerTest extends BaseDL4JTest {
 
         MultiLayerNetwork network = ModelSerializer.restoreMultiLayerNetwork(fis);
 
-        assertEquals(network.getConfiguration().toJson(), net.getConfiguration().toJson());
+        assertEquals(network.getNetConfiguration().toJson(), net.getNetConfiguration().toJson());
         assertEquals(net.params(), network.params());
         assertEquals(net.getUpdater().getStateViewArray(), network.getUpdater().getStateViewArray());
     }
diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/KerasModelImportTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/KerasModelImportTest.java
index 20721371b..02c478093 100644
--- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/KerasModelImportTest.java
+++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/KerasModelImportTest.java
@@ -24,7 +24,6 @@ import java.util.List;
 import lombok.extern.slf4j.Slf4j;
 import org.deeplearning4j.BaseDL4JTest;
 import org.deeplearning4j.nn.conf.CNN2DFormat;
-import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
 import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
 import org.deeplearning4j.nn.conf.layers.LayerConfiguration;
 import org.deeplearning4j.nn.conf.layers.SubsamplingLayer;
@@ -57,7 +56,7 @@ public class KerasModelImportTest extends BaseDL4JTest {
     @Test
     public void testNCHWNWHCChangeImport() {
         MultiLayerNetwork model = loadModel("modelimport/keras/weights/conv2dnchw/simpleconv2d.hdf5");
-        List layerConfigs = model.getConfiguration().getFlattenedLayerConfigurations();
+        List layerConfigs = model.getNetConfiguration().getFlattenedLayerConfigurations();
         ConvolutionLayer convolutionLayer = (ConvolutionLayer) layerConfigs.get(0);
         assertEquals(CNN2DFormat.NCHW,convolutionLayer.getCnn2dDataFormat());
         SubsamplingLayer subsamplingLayer = (SubsamplingLayer) layerConfigs.get(1);
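The serialization tests above all switch from getConfiguration() to getNetConfiguration(). The round-trip they exercise, reduced to a short sketch (assuming net is a built and initialized MultiLayerNetwork):

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ModelSerializer.writeModel(net, baos, true);   // true: also save updater state
    MultiLayerNetwork restored = ModelSerializer.restoreMultiLayerNetwork(
            new ByteArrayInputStream(baos.toByteArray()), true);
    // compare configurations via JSON, as the tests above do
    assertEquals(net.getNetConfiguration().toJson(), restored.getNetConfiguration().toJson());
    assertEquals(net.params(), restored.params());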
diff --git a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/word2vec/Word2VecTestsSmall.java b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/word2vec/Word2VecTestsSmall.java
index 1d7144b20..49237b20c 100644
--- a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/word2vec/Word2VecTestsSmall.java
+++ b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/word2vec/Word2VecTestsSmall.java
@@ -208,7 +208,7 @@ public class Word2VecTestsSmall extends BaseDL4JTest {
         ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
         final MultiLayerNetwork restored = ModelSerializer.restoreMultiLayerNetwork(bais, true);
 
-        assertEquals(net.getConfiguration(), restored.getConfiguration());
+        assertEquals(net.getNetConfiguration(), restored.getNetConfiguration());
         assertTrue(net.params().equalsWithEps(restored.params(), 2e-3));
     }
 }
diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/net/brutex/ai/dnn/api/IModel.java b/cavis-dnn/cavis-dnn-nn/src/main/java/net/brutex/ai/dnn/api/IModel.java
index 2c31319fc..9f81fd3d8 100644
--- a/cavis-dnn/cavis-dnn-nn/src/main/java/net/brutex/ai/dnn/api/IModel.java
+++ b/cavis-dnn/cavis-dnn-nn/src/main/java/net/brutex/ai/dnn/api/IModel.java
@@ -46,6 +46,15 @@ import org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator;
 
 public interface IModel {
 
+  /**
+   * The param table
+   *
+   * @return
+   */
+
+  Map getParamTable();
+  Map getParamTable(boolean backpropOnly);
+
   /**
    * This method returns updater state (if applicable), null otherwise
    *
@@ -273,6 +282,7 @@ public interface IModel {
    * @param listeners new listeners
    */
   void setListeners(TrainingListener... listeners);
+  void setListeners(Collection listeners);
 
   /**
    * Add TrainingListeners to the model
diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/NeuralNetConfiguration.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/NeuralNetConfiguration.java
index 5c221222c..cb87e885c 100644
--- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/NeuralNetConfiguration.java
+++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/NeuralNetConfiguration.java
@@ -1126,6 +1126,17 @@ public class NeuralNetConfiguration extends NeuralNetBaseBuilderConfiguration {
     return getFlattenedLayerConfigurations().get(index);
   }
 
+  /**
+   * Deprecated, do not use. Workaround for old tests
+   * and getFlattenedLayerConfigurations().get(0);
+   * @return
+   */
+  @Deprecated
+  public LayerConfiguration getFirstLayer() {
+    log.warn("This getFirstLayer method is an ugly workaround and will be removed.");
+    return getFlattenedLayerConfigurations().get(0);
+  }
+
   public static abstract class NeuralNetConfigurationBuilder> extends NeuralNetBaseBuilderConfigurationBuilder {
 
diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/LayerConfiguration.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/LayerConfiguration.java
index a41870c3d..bb98be57d 100644
--- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/LayerConfiguration.java
+++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/LayerConfiguration.java
@@ -258,6 +258,9 @@ public abstract class LayerConfiguration implements TrainingConfig, Serializable
         "Not supported: all layers with parameters should override this method");
   }
 
+  @Getter
+  private IUpdater iUpdater;
+
   @Override
   public void setDataType(DataType dataType) {
     //No-op for most layers
diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/multilayer/MultiLayerNetwork.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/multilayer/MultiLayerNetwork.java
index 7da7f837c..575ee27e9 100644
--- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/multilayer/MultiLayerNetwork.java
+++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/multilayer/MultiLayerNetwork.java
@@ -2443,6 +2443,14 @@ public class MultiLayerNetwork extends ArtificialNeuralNetwork implements Serial
         }
     }
 
+    /**
+     * @param listeners
+     */
+    @Override
+    public void setListeners(Collection listeners) {
+        setListeners(listeners.toArray(new TrainingListener[]{}));
+    }
+
     /**
      * @deprecated Use {@link #getListeners()}
      */
@@ -4525,4 +4533,5 @@ public class MultiLayerNetwork extends ArtificialNeuralNetwork implements Serial
     public String toString() {
         return getNetConfiguration().toString();
     }
+
 }
diff --git a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/EarlyStoppingParallelTrainer.java b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/EarlyStoppingParallelTrainer.java
index a7b4a98bc..73261f155 100644
--- a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/EarlyStoppingParallelTrainer.java
+++ b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/EarlyStoppingParallelTrainer.java
@@ -94,13 +94,13 @@ public class EarlyStoppingParallelTrainer implements IEarlySto
             Collection listeners = ((MultiLayerNetwork) model).getListeners();
             Collection newListeners = new LinkedList<>(listeners);
             newListeners.add(trainerListener);
-            model.setListeners(newListeners);
+            model.setListeners(newListeners.toArray(new TrainingListener[]{}));
 
         } else if (model instanceof ComputationGraph) {
             Collection listeners = ((ComputationGraph) model).getListeners();
             Collection newListeners = new LinkedList<>(listeners);
             newListeners.add(trainerListener);
-            model.setListeners(newListeners);
+            model.setListeners(newListeners.toArray(new TrainingListener[]{}));
         }
 
         this.wrapper = new ParallelWrapper.Builder<>(model).workers(workers).prefetchBuffer(prefetchBuffer)
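IModel now declares a Collection overload of setListeners() alongside the varargs one, and call sites that used to pass a Collection directly (as in EarlyStoppingParallelTrainer above) are rewritten to convert explicitly. Where the overload is implemented, as in MultiLayerNetwork above, delegating to the varargs method keeps a single code path; a minimal sketch:

    @Override
    public void setListeners(Collection listeners) {
        // delegate to the varargs overload so listener handling lives in one place
        setListeners(listeners.toArray(new TrainingListener[]{}));
    }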
diff --git a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/InplaceParallelInference.java b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/InplaceParallelInference.java
index 33009e994..0c1515109 100644
--- a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/InplaceParallelInference.java
+++ b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/InplaceParallelInference.java
@@ -204,7 +204,7 @@ public class InplaceParallelInference extends ParallelInference {
                 if (loadBalanceMode == LoadBalanceMode.FIFO)
                     queue.add(model);
             } else if (sourceModel instanceof MultiLayerNetwork) {
-                val model = new MultiLayerNetwork(NeuralNetConfiguration.fromJson(((MultiLayerNetwork) sourceModel).getConfiguration().toJson()));
+                val model = new MultiLayerNetwork(NeuralNetConfiguration.fromJson(((MultiLayerNetwork) sourceModel).getNetConfiguration().toJson()));
                 model.init(params, false);
                 Nd4j.getExecutioner().commit();
 
diff --git a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/ParallelInference.java b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/ParallelInference.java
index ea2e02ad7..9d2c76a23 100644
--- a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/ParallelInference.java
+++ b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/ParallelInference.java
@@ -472,7 +472,7 @@ public class ParallelInference {
             } else if (protoModel instanceof MultiLayerNetwork) {
                 if (!rootDevice) {
                     this.replicatedModel = new MultiLayerNetwork(NeuralNetConfiguration.fromJson(
-                                    ((MultiLayerNetwork) protoModel).getConfiguration().toJson()));
+                                    ((MultiLayerNetwork) protoModel).getNetConfiguration().toJson()));
                     this.replicatedModel.init();
 
                     synchronized (locker) {
diff --git a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/ParallelWrapper.java b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/ParallelWrapper.java
index e2a621508..5a880872d 100644
--- a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/ParallelWrapper.java
+++ b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/ParallelWrapper.java
@@ -957,10 +957,10 @@ public class ParallelWrapper implements AutoCloseable {
             List modelListeners = null;
             if (model instanceof MultiLayerNetwork) {
                 modelListeners = new ArrayList<>(((MultiLayerNetwork) model).getListeners());
-                model.setListeners(Collections.emptyList());
+                model.setListeners(new TrainingListener[]{});
             } else if (model instanceof ComputationGraph) {
                 modelListeners = new ArrayList<>(((ComputationGraph) model).getListeners());
-                model.setListeners(Collections.emptyList());
+                model.setListeners(new TrainingListener[]{});
             }
 
             if (modelListeners != null && !modelListeners.isEmpty()) {
diff --git a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/trainer/DefaultTrainer.java b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/trainer/DefaultTrainer.java
index dd7cda946..2a1cf4d4e 100644
--- a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/trainer/DefaultTrainer.java
+++ b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/trainer/DefaultTrainer.java
@@ -278,7 +278,7 @@ public class DefaultTrainer extends Thread implements Trainer {
         }
 
         configureListeners(uuid, oldListeners, replicatedListeners);
-        this.replicatedModel.setListeners(replicatedListeners);
+        this.replicatedModel.setListeners(replicatedListeners.toArray(new TrainingListener[]{}));
     }
 
     @Override
@@ -296,7 +296,7 @@
         if (originalModel instanceof MultiLayerNetwork) {
             if (!onRootModel) {
                 NeuralNetConfiguration conf = NeuralNetConfiguration.fromJson(
-                                ((MultiLayerNetwork) originalModel).getConfiguration().toJson());
+                                ((MultiLayerNetwork) originalModel).getNetConfiguration().toJson());
                 conf.setTrainingWorkspaceMode(workspaceMode);
 
                 this.replicatedModel = new MultiLayerNetwork(conf);
@@ -323,7 +323,7 @@
                     if (!((MultiLayerNetwork) replicatedModel).isInitCalled())
                         this.replicatedModel.init();
 
-                    ((MultiLayerNetwork) replicatedModel).getConfiguration()
+                    ((MultiLayerNetwork) replicatedModel).getNetConfiguration()
                                     .setTrainingWorkspaceMode(workspaceMode);
                 }
             } else if (originalModel instanceof ComputationGraph) {
diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/multilayer/SparkDl4jMultiLayer.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/multilayer/SparkDl4jMultiLayer.java
index 2e50414da..2a0c7b655 100644
--- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/multilayer/SparkDl4jMultiLayer.java
+++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/multilayer/SparkDl4jMultiLayer.java
@@ -122,7 +122,7 @@ public class SparkDl4jMultiLayer extends SparkListenable {
     public SparkDl4jMultiLayer(JavaSparkContext javaSparkContext, MultiLayerNetwork network,
                     TrainingMaster trainingMaster) {
         sc = javaSparkContext;
-        this.conf = network.getConfiguration().clone();
+        this.conf = network.getNetConfiguration().clone();
         this.network = network;
         if (!network.isInitCalled())
             network.init();
@@ -315,8 +315,8 @@ public class SparkDl4jMultiLayer extends SparkListenable {
      * @return the multi layer network that was fitDataSet
      */
     public MultiLayerNetwork fitLabeledPoint(JavaRDD rdd) {
-        int nLayers = network.getConfiguration().getFlattenedLayerConfigurations().size();
-        FeedForwardLayer ffl = (FeedForwardLayer) network.getConfiguration().getFlattenedLayerConfigurations().get(nLayers - 1);
+        int nLayers = network.getNetConfiguration().getFlattenedLayerConfigurations().size();
+        FeedForwardLayer ffl = (FeedForwardLayer) network.getNetConfiguration().getFlattenedLayerConfigurations().get(nLayers - 1);
         JavaRDD ds = MLLibUtil.fromLabeledPoint(sc, rdd, ffl.getNOut());
         return fit(ds);
     }
diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/paramavg/ParameterAveragingTrainingMaster.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/paramavg/ParameterAveragingTrainingMaster.java
index 1dc1d4f1b..d3fb3355f 100644
--- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/paramavg/ParameterAveragingTrainingMaster.java
+++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/paramavg/ParameterAveragingTrainingMaster.java
@@ -275,7 +275,7 @@ public class ParameterAveragingTrainingMaster
     @Override
     public ParameterAveragingTrainingWorker getWorkerInstance(SparkDl4jMultiLayer network) {
-        NetBroadcastTuple tuple = new NetBroadcastTuple(network.getNetwork().getConfiguration(),
+        NetBroadcastTuple tuple = new NetBroadcastTuple(network.getNetwork().getNetConfiguration(),
                         network.getNetwork().params(), network.getNetwork().getUpdater().getStateViewArray());
 
         if (collectTrainingStats)
@@ -727,7 +727,7 @@ public class ParameterAveragingTrainingMaster
         if (params != null) {
             //Params may be null for edge case (empty RDD)
             if (network != null) {
-                NeuralNetConfiguration conf = network.getNetwork().getConfiguration();
+                NeuralNetConfiguration conf = network.getNetwork().getNetConfiguration();
                 int numUpdates = averagingFrequency;
                 conf.setIterationCount(conf.getIterationCount() + numUpdates);
             } else {
diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/paramavg/ParameterAveragingTrainingWorker.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/paramavg/ParameterAveragingTrainingWorker.java
index 4820e938f..2322ba5c2 100644
--- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/paramavg/ParameterAveragingTrainingWorker.java
+++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/paramavg/ParameterAveragingTrainingWorker.java
@@ -172,9 +172,9 @@ public class ParameterAveragingTrainingWorker extends BaseTrainingWorker
             trainingListeners, int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
-        CustomLayerImpl ret = new CustomLayerImpl(conf, networkDataType);
+        LayerConfiguration lconf = conf.getFlattenedLayerConfigurations().get(layerIndex);
+        CustomLayerImpl ret = new CustomLayerImpl(lconf, networkDataType);
         ret.setListeners(trainingListeners);
         ret.setIndex(layerIndex);
         ret.setParamsViewArray(layerParamsView);
 
         Map paramTable = initializer().init(this, layerParamsView, initializeParams);
         ret.setParamTable(paramTable);
-        ret.setLayerConfiguration(conf);
+        ret.setLayerConfiguration(lconf);
         return ret;
     }
diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/customlayer/layer/CustomLayerImpl.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/customlayer/layer/CustomLayerImpl.java
index 55b32d1dc..610f4079c 100644
--- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/customlayer/layer/CustomLayerImpl.java
+++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/customlayer/layer/CustomLayerImpl.java
@@ -21,11 +21,12 @@ package org.deeplearning4j.spark.impl.customlayer.layer;
 
 import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
+import org.deeplearning4j.nn.conf.layers.LayerConfiguration;
 import org.deeplearning4j.nn.layers.BaseLayer;
 import org.nd4j.linalg.api.buffer.DataType;
 
 public class CustomLayerImpl extends BaseLayer {
-    public CustomLayerImpl(NeuralNetConfiguration conf, DataType dataType) {
+    public CustomLayerImpl(LayerConfiguration conf, DataType dataType) {
         super(conf, dataType);
     }
 
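Several replication paths above clone a network by round-tripping its configuration through JSON and then binding parameters, now via getNetConfiguration(). The pattern in isolation (assuming source is an initialized MultiLayerNetwork):

    MultiLayerNetwork replica = new MultiLayerNetwork(
            NeuralNetConfiguration.fromJson(source.getNetConfiguration().toJson()));
    replica.init(source.params(), false);   // false: use the given parameter array, don't clone it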
diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/misc/TestFrozenLayers.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/misc/TestFrozenLayers.java
index 2e01cc17d..688135888 100644
--- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/misc/TestFrozenLayers.java
+++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/misc/TestFrozenLayers.java
@@ -154,7 +154,7 @@ public class TestFrozenLayers extends BaseSparkTest {
         ComputationGraph withFrozen = new TransferLearning.GraphBuilder(origModel).fineTuneConfiguration(finetune)
                         .setFeatureExtractor("1").build();
 
-        Map m = withFrozen.paramTable();
+        Map m = withFrozen.getParamTable();
         Map pCopy = new HashMap<>();
         for (Map.Entry entry : m.entrySet()) {
             pCopy.put(entry.getKey(), entry.getValue().dup());
@@ -190,7 +190,7 @@ public class TestFrozenLayers extends BaseSparkTest {
 
             ComputationGraph fitted = sNet.getNetwork();
 
-            Map fittedParams = fitted.paramTable();
+            Map fittedParams = fitted.getParamTable();
             for (Map.Entry entry : fittedParams.entrySet()) {
                 INDArray orig = pCopy.get(entry.getKey());
diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/paramavg/TestSparkMultiLayerParameterAveraging.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/paramavg/TestSparkMultiLayerParameterAveraging.java
index 8376638f3..42fc1112c 100644
--- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/paramavg/TestSparkMultiLayerParameterAveraging.java
+++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/paramavg/TestSparkMultiLayerParameterAveraging.java
@@ -784,13 +784,13 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 
         JavaRDD rdd = sc.parallelize(list);
 
-        assertEquals(0, sparkNet.getNetwork().getConfiguration().getIterationCount());
+        assertEquals(0, sparkNet.getNetwork().getNetConfiguration().getIterationCount());
         sparkNet.fit(rdd);
         assertEquals(minibatchesPerWorkerPerEpoch,
-                        sparkNet.getNetwork().getConfiguration().getIterationCount());
+                        sparkNet.getNetwork().getNetConfiguration().getIterationCount());
         sparkNet.fit(rdd);
         assertEquals(2 * minibatchesPerWorkerPerEpoch,
-                        sparkNet.getNetwork().getConfiguration().getIterationCount());
+                        sparkNet.getNetwork().getNetConfiguration().getIterationCount());
 
         sparkNet.getTrainingMaster().deleteTempFiles(sc);
     }
@@ -1074,11 +1074,11 @@ public class TestSparkMultiLayerParameterAveraging extends BaseSparkTest {
 
         for(int i=0; i<3; i++ ){
-            assertEquals(i, sn1.getNetwork().getConfiguration().getEpochCount());
+            assertEquals(i, sn1.getNetwork().getNetConfiguration().getEpochCount());
             assertEquals(i, sn2.getNetwork().getComputationGraphConfiguration().getEpochCount());
             sn1.fit(rdd);
             sn2.fit(rdd);
-            assertEquals(i+1, sn1.getNetwork().getConfiguration().getEpochCount());
+            assertEquals(i+1, sn1.getNetwork().getNetConfiguration().getEpochCount());
             assertEquals(i+1, sn2.getNetwork().getComputationGraphConfiguration().getEpochCount());
         }
     }
diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-parameterserver/src/main/java/org/deeplearning4j/spark/parameterserver/pw/SharedTrainingWrapper.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-parameterserver/src/main/java/org/deeplearning4j/spark/parameterserver/pw/SharedTrainingWrapper.java
index 0265837bd..5bb21442c 100644
--- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-parameterserver/src/main/java/org/deeplearning4j/spark/parameterserver/pw/SharedTrainingWrapper.java
+++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-parameterserver/src/main/java/org/deeplearning4j/spark/parameterserver/pw/SharedTrainingWrapper.java
@@ -239,7 +239,7 @@ public class SharedTrainingWrapper {
                     List listeners = worker.getListeners();
                     if(listeners != null){
-                        model.setListeners(listeners);
+                        model.setListeners(listeners.toArray(new TrainingListener[]{}));
                         StatsStorageRouter r = worker.getRouter();
                         if(r != null){
                             for(TrainingListener l : listeners){
@@ -425,7 +425,7 @@ public class SharedTrainingWrapper {
                                     .setTrainingWorkspaceMode(trainingConfiguration.getWorkspaceMode());
                     ((ComputationGraph) originalModel).setGradientsAccumulator(accumulator);
                 } else if (model instanceof MultiLayerNetwork) {
-                    ((MultiLayerNetwork) originalModel).getConfiguration()
+                    ((MultiLayerNetwork) originalModel).getNetConfiguration()
                                     .setTrainingWorkspaceMode(trainingConfiguration.getWorkspaceMode());
                     ((MultiLayerNetwork) originalModel).setGradientsAccumulator(accumulator);
                 }
diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-parameterserver/src/main/java/org/deeplearning4j/spark/parameterserver/training/SharedTrainingMaster.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-parameterserver/src/main/java/org/deeplearning4j/spark/parameterserver/training/SharedTrainingMaster.java
index ef252470b..bb291c0b8 100644
--- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-parameterserver/src/main/java/org/deeplearning4j/spark/parameterserver/training/SharedTrainingMaster.java
+++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-parameterserver/src/main/java/org/deeplearning4j/spark/parameterserver/training/SharedTrainingMaster.java
@@ -262,7 +262,7 @@ public class SharedTrainingMaster extends BaseTrainingMaster T[] doEvaluation(DataSetIterator iterator, T... evaluations) {
+        return null;
+    }
+
+    /**
+     * This method executes evaluation of the model against given iterator and evaluation
+     * implementations
+     *
+     * @param iterator
+     * @param evaluations
+     */
+    @Override
+    public T[] doEvaluation(MultiDataSetIterator iterator,
+                    T... evaluations) {
+        return null;
+    }
+
     @Override
     public INDArray getParam(String param) {
         return null;
     }
 
     @Override
-    public void addListeners(TrainingListener... listener) {//no op
     }
 
-    @Override
-    public Map paramTable() {
+    public Map getParamTable() {
         return null;
     }
 
-    @Override
-    public Map paramTable(boolean backprapParamsOnly) {
+    public Map getParamTable(boolean backprapParamsOnly) {
         return null;
     }
 
-    @Override
+
     public void setParamTable(Map paramTable) {
 
     }
 
@@ -490,7 +569,7 @@
      *
      * @param listeners
      */
-    @Override
+
     public void setListeners(Collection listeners) {
 
     }
 
@@ -901,8 +980,15 @@
         return null;
     }
 
+    /**
+     * @param netConfiguration
+     */
     @Override
-    public void setLayerConfiguration(NeuralNetConfiguration layerConfiguration) {
+    public void setNetConfiguration(@NonNull NeuralNetConfiguration netConfiguration) {
+
+    }
+
+    public void setLayerConfiguration(LayerConfiguration layerConfiguration) {
 
     }
 
@@ -1060,4 +1146,14 @@ public class BarnesHutTsne implements IModel {
     public void close(){
         //No-op
     }
+
+    /**
+     * Get the TrainingListeners
+     *
+     * @return training listener
+     */
+    @Override
+    public Collection getListeners() {
+        return null;
+    }
 }
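The frozen-layer tests above snapshot the parameter table before fitting and compare afterwards, now through getParamTable(). The pattern in isolation (assuming net is an initialized MultiLayerNetwork, data is a DataSetIterator, and the "0_" prefix marking the frozen layer is illustrative):

    Map<String, INDArray> before = new HashMap<>();
    for (Map.Entry<String, INDArray> e : net.getParamTable().entrySet()) {
        before.put(e.getKey(), e.getValue().dup());   // dup(): detach from the live parameter view
    }
    net.fit(data);
    for (Map.Entry<String, INDArray> e : net.getParamTable().entrySet()) {
        boolean frozen = e.getKey().startsWith("0_");   // illustrative: layer 0 was frozen
        assertEquals(frozen, before.get(e.getKey()).equals(e.getValue()));
    }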
diff --git a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/stats/BaseStatsListener.java b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/stats/BaseStatsListener.java
index b9a7e985d..3797b6550 100644
--- a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/stats/BaseStatsListener.java
+++ b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/stats/BaseStatsListener.java
@@ -30,6 +30,7 @@ import org.deeplearning4j.core.storage.StorageMetaData;
 import org.deeplearning4j.core.storage.listener.RoutingIterationListener;
 import org.deeplearning4j.nn.api.Layer;
 import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
+import org.deeplearning4j.nn.conf.layers.LayerConfiguration;
 import org.deeplearning4j.nn.gradient.Gradient;
 import org.deeplearning4j.nn.graph.ComputationGraph;
 import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
@@ -426,10 +427,10 @@ public abstract class BaseStatsListener implements RoutingIterationListener {
             //Need to append "0_", "1_" etc to param names from layers...
             int layerIdx = 0;
             for (Layer l : ((MultiLayerNetwork) model).getLayers()) {
-                NeuralNetConfiguration conf = l.getNetConfiguration();
+                LayerConfiguration conf = l.getLayerConfiguration();
                 List paramkeys = l.getLayerConfiguration().initializer().paramKeys(l.getLayerConfiguration());
                 for (String s : paramkeys) {
-                    double lr = conf.getFirstLayer().getUpdaterByParam(s).getLearningRate(l.getIterationCount(), l.getEpochCount());
+                    double lr = conf.getUpdaterByParam(s).getLearningRate(l.getIterationCount(), l.getEpochCount());
                     if (Double.isNaN(lr)) {
                         //Edge case: No-Op updater, AdaDelta etc - don't have a LR hence return NaN for IUpdater.getLearningRate
                         lr = 0.0;
@@ -440,11 +441,11 @@ public abstract class BaseStatsListener implements RoutingIterationListener {
             }
         } else if (model instanceof ComputationGraph) {
             for (Layer l : ((ComputationGraph) model).getLayers()) {
-                NeuralNetConfiguration conf = l.getNetConfiguration();
-                String layerName = conf.getFirstLayer().getLayerName();
+                LayerConfiguration conf = l.getLayerConfiguration();
+                String layerName = conf.getLayerName();
                 List paramkeys = l.getLayerConfiguration().initializer().paramKeys(l.getLayerConfiguration());
                 for (String s : paramkeys) {
-                    double lr = conf.getFirstLayer().getUpdaterByParam(s).getLearningRate(l.getIterationCount(), l.getEpochCount());
+                    double lr = conf.getUpdaterByParam(s).getLearningRate(l.getIterationCount(), l.getEpochCount());
                     if (Double.isNaN(lr)) {
                         //Edge case: No-Op updater, AdaDelta etc - don't have a LR hence return NaN for IUpdater.getLearningRate
                         lr = 0.0;
@@ -467,7 +468,7 @@ public abstract class BaseStatsListener implements RoutingIterationListener {
 
         //--- Histograms ---
         if (updateConfig.collectHistograms(StatsType.Parameters)) {
-            Map paramHistograms = getHistograms(model.paramTable(backpropParamsOnly),
+            Map paramHistograms = getHistograms(model.getParamTable(backpropParamsOnly),
                             updateConfig.numHistogramBins(StatsType.Parameters));
             report.reportHistograms(StatsType.Parameters, paramHistograms);
         }
@@ -490,7 +491,7 @@ public abstract class BaseStatsListener implements RoutingIterationListener {
 
         //--- Summary Stats: Mean, Variance, Mean Magnitudes ---
         if (updateConfig.collectMean(StatsType.Parameters)) {
-            Map meanParams = calculateSummaryStats(model.paramTable(backpropParamsOnly), StatType.Mean);
+            Map meanParams = calculateSummaryStats(model.getParamTable(backpropParamsOnly), StatType.Mean);
             report.reportMean(StatsType.Parameters, meanParams);
         }
 
@@ -511,7 +512,7 @@ public abstract class BaseStatsListener implements RoutingIterationListener {
 
         if (updateConfig.collectStdev(StatsType.Parameters)) {
             Map stdevParams =
-                            calculateSummaryStats(model.paramTable(backpropParamsOnly), StatType.Stdev);
+                            calculateSummaryStats(model.getParamTable(backpropParamsOnly), StatType.Stdev);
             report.reportStdev(StatsType.Parameters, stdevParams);
         }
 
@@ -532,7 +533,7 @@ public abstract class BaseStatsListener implements RoutingIterationListener {
 
         if (updateConfig.collectMeanMagnitudes(StatsType.Parameters)) {
             Map meanMagParams =
-                            calculateSummaryStats(model.paramTable(backpropParamsOnly), StatType.MeanMagnitude);
+                            calculateSummaryStats(model.getParamTable(backpropParamsOnly), StatType.MeanMagnitude);
             report.reportMeanMagnitudes(StatsType.Parameters, meanMagParams);
         }
 
@@ -652,7 +653,7 @@ public abstract class BaseStatsListener implements RoutingIterationListener {
         long numParams;
         if (model instanceof MultiLayerNetwork) {
             MultiLayerNetwork net = ((MultiLayerNetwork) model);
-            jsonConf = net.getConfiguration().toJson();
+            jsonConf = net.getNetConfiguration().toJson();
             numLayers = net.getnLayers();
             numParams = net.numParams();
         } else if (model instanceof ComputationGraph) {
@@ -670,7 +671,7 @@ public abstract class BaseStatsListener implements RoutingIterationListener {
                             + (model == null ? null : model.getClass()));
         }
 
-        Map paramMap = model.paramTable(backpropParamsOnly);
+        Map paramMap = model.getParamTable(backpropParamsOnly);
         String[] paramNames = new String[paramMap.size()];
         int i = 0;
         for (String s : paramMap.keySet()) { //Assuming sensible iteration order - LinkedHashMaps are used in MLN/CG for example
diff --git a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/train/TrainModule.java b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/train/TrainModule.java
index 7e384dec5..2ca083a4d 100644
--- a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/train/TrainModule.java
+++ b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/train/TrainModule.java
@@ -1129,8 +1129,8 @@ public class TrainModule implements UIModule {
                 NeuralNetConfiguration conf = NeuralNetConfiguration.fromJson(configJson);
                 int confIdx = layerIdx - 1; //-1 because of input
                 if (confIdx >= 0) {
-                    nnc = conf.getNetConfigurations().get(confIdx);
-                    layer = nnc.getFirstLayer();
+                    layer = conf.getFlattenedLayerConfigurations().get(confIdx);
+                    nnc = layer.getNetConfiguration();
                 } else {
                     //Input layer
                     layerType = "Input";
@@ -1144,7 +1144,7 @@ public class TrainModule implements UIModule {
                 if (vertices.containsKey(vertexName) && vertices.get(vertexName) instanceof LayerVertex) {
                     LayerVertex lv = (LayerVertex) vertices.get(vertexName);
                     nnc = lv.getNetConfiguration();
-                    layer = nnc.getFirstLayer();
+                    layer = lv.getLayerConfiguration();
                 } else if (conf.getNetworkInputs().contains(vertexName)) {
                     layerType = "Input";
                 } else {
@@ -1177,7 +1177,7 @@ public class TrainModule implements UIModule {
             if (layer instanceof BaseLayer) {
                 BaseLayer bl = (BaseLayer) layer;
                 activationFn = bl.getActivationFn().toString();
-                long nParams = layer.initializer().numParams(nnc.getFirstLayer());
+                long nParams = layer.initializer().numParams(bl.getLayer());
                 layerInfoRows.add(new String[]{i18N.getMessage("train.model.layerinfotable.layerNParams"),
                                 String.valueOf(nParams)});
                 if (nParams > 0) {
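BaseStatsListener above now reads per-parameter learning rates directly off each layer's LayerConfiguration instead of detouring through getFirstLayer(). The lookup in isolation (assuming net is an initialized MultiLayerNetwork):

    for (Layer l : net.getLayers()) {
        LayerConfiguration conf = l.getLayerConfiguration();
        for (String key : conf.initializer().paramKeys(conf)) {
            double lr = conf.getUpdaterByParam(key)
                    .getLearningRate(l.getIterationCount(), l.getEpochCount());
            if (Double.isNaN(lr)) {
                lr = 0.0;   // No-op updater, AdaDelta etc. report NaN for the learning rate
            }
            System.out.println(key + " -> " + lr);
        }
    }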
diff --git a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/train/TrainModuleUtils.java b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/train/TrainModuleUtils.java
index 34b6563f1..aebfaffa7 100644
--- a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/train/TrainModuleUtils.java
+++ b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/train/TrainModuleUtils.java
@@ -62,24 +62,24 @@ public class TrainModuleUtils {
         layerInfo.add(Collections.emptyMap());
 
-        List list = config.getNetConfigurations();
+        List list = config.getFlattenedLayerConfigurations();
         int layerIdx = 1;
-        for (NeuralNetConfiguration c : list) {
-            LayerConfiguration layer = c.getFirstLayer();
+        for (LayerConfiguration c : list) {
+            LayerConfiguration layer = c;
             String layerName = layer.getLayerName();
             if (layerName == null)
                 layerName = "layer" + layerIdx;
             vertexNames.add(layerName);
             originalVertexName.add(String.valueOf(layerIdx - 1));
 
-            String layerType = c.getFirstLayer().getClass().getSimpleName().replaceAll("Layer$", "");
+            String layerType = c.getClass().getSimpleName().replaceAll("Layer$", "");
             layerTypes.add(layerType);
 
             layerInputs.add(Collections.singletonList(layerIdx - 1));
             layerIdx++;
 
             //Extract layer info
-            Map map = getLayerInfo(c, layer);
+            Map map = getLayerInfo(c.getNetConfiguration(), layer);
             layerInfo.add(map);
         }
diff --git a/cavis-ui/cavis-ui-vertx/src/main/resources/templates/SameDiffUI.html b/cavis-ui/cavis-ui-vertx/src/main/resources/templates/SameDiffUI.html
index 951aabeb5..2ecadd3ee 100644
--- a/cavis-ui/cavis-ui-vertx/src/main/resources/templates/SameDiffUI.html
+++ b/cavis-ui/cavis-ui-vertx/src/main/resources/templates/SameDiffUI.html
@@ -143,7 +143,7 @@
+                    Spread
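TrainModuleUtils above now iterates getFlattenedLayerConfigurations() directly rather than unwrapping one layer per nested NeuralNetConfiguration. The traversal in isolation (assuming config is a NeuralNetConfiguration):

    List<LayerConfiguration> layers = config.getFlattenedLayerConfigurations();
    int layerIdx = 1;
    for (LayerConfiguration layer : layers) {
        String name = layer.getLayerName() != null ? layer.getLayerName() : "layer" + layerIdx;
        // strip the "Layer" suffix for display, as the UI code does
        String type = layer.getClass().getSimpleName().replaceAll("Layer$", "");
        System.out.println(layerIdx + ": " + name + " (" + type + ")");
        layerIdx++;
    }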
diff --git a/cavis-zoo/cavis-zoo-models/src/test/java/org/deeplearning4j/zoo/TestUtils.java b/cavis-zoo/cavis-zoo-models/src/test/java/org/deeplearning4j/zoo/TestUtils.java
index 240cabfcc..7a26046cd 100644
--- a/cavis-zoo/cavis-zoo-models/src/test/java/org/deeplearning4j/zoo/TestUtils.java
+++ b/cavis-zoo/cavis-zoo-models/src/test/java/org/deeplearning4j/zoo/TestUtils.java
@@ -45,7 +45,7 @@ public class TestUtils {
             ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
             MultiLayerNetwork restored = ModelSerializer.restoreMultiLayerNetwork(bais, true);
 
-            assertEquals(net.getConfiguration(), restored.getConfiguration());
+            assertEquals(net.getNetConfiguration(), restored.getNetConfiguration());
             assertEquals(net.params(), restored.params());
 
             return restored;