Playing with some new code 2 - clean build/test
Signed-off-by: brian <brian@brutex.de>
Branch: master
parent 1f2e82d3ef
commit 9d4939ccfd
@@ -57,6 +57,7 @@ public class AutoEncoder extends BasePretrainNetwork {
             int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
         this.setNetConfiguration(conf);
         LayerConfiguration lconf = conf.getFlattenedLayerConfigurations().get(layerIndex);
+        runInheritance();
         org.deeplearning4j.nn.layers.feedforward.autoencoder.AutoEncoder ret =
                 new org.deeplearning4j.nn.layers.feedforward.autoencoder.AutoEncoder(lconf, networkDataType);
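The fork's actual runInheritance() body is not shown in this commit, but the BaseLayerConfiguration hunk below suggests what it does: copy still-unset fields from the enclosing network configuration into the layer configuration before the runtime layer is built. A minimal, self-contained sketch of that idea; every name except runInheritance() is invented for the illustration.

// Sketch only: an inherit-if-unset step in the style of runInheritance(),
// inferred from the null-check pattern in the BaseLayerConfiguration hunk.
class NetworkDefaults {
    Double learningRate() { return 1e-3; }
    String activation()   { return "relu"; }
}

class LayerSettings {
    Double learningRate;                      // null means "not set on this layer"
    String activation;                        // null means "not set on this layer"
    final NetworkDefaults net = new NetworkDefaults();

    void runInheritance() {
        // Fill only the fields the layer left unset; explicit settings win.
        if (learningRate == null) learningRate = net.learningRate();
        if (activation == null)   activation   = net.activation();
    }
}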
@@ -520,6 +520,7 @@ public abstract class BaseLayerConfiguration extends LayerConfiguration implemen
         if(this.regularizationBias == null) this.regularizationBias = conf.getRegularizationBias();
         if(this.regularization == null ) this.regularization = conf.getRegularization();
         if(this.gradientNormalization == null) this.gradientNormalization = conf.getGradientNormalization();
+        if(this.weightInit == null) this.weightInit = conf.getWeightInit();
     }

 }
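The added weightInit line extends the existing inherit-if-unset chain: a layer that never set its own weight initializer now picks up the network-level one. A self-contained illustration of the pattern; only the weightInit/getWeightInit names come from the diff, the rest are stand-ins.

// Standalone demo of the inherit-if-unset pattern used above.
class NetConf {
    private final String weightInit;
    NetConf(String weightInit) { this.weightInit = weightInit; }
    String getWeightInit() { return weightInit; }
}

class LayerConf {
    String weightInit;                        // null until set explicitly or inherited

    void inheritFrom(NetConf conf) {
        // Mirrors the added line: fill the field only if the layer left it unset.
        if (this.weightInit == null) this.weightInit = conf.getWeightInit();
    }
}

public class InheritDemo {
    public static void main(String[] args) {
        LayerConf layer = new LayerConf();        // weightInit not set on the layer
        layer.inheritFrom(new NetConf("XAVIER")); // inherited from the network config
        System.out.println(layer.weightInit);     // prints XAVIER
    }
}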
@@ -92,8 +92,9 @@ public class BatchNormalization extends FeedForwardLayer {
     public Layer instantiate(NeuralNetConfiguration conf, Collection<TrainingListener> trainingListeners,
             int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
         this.setNetConfiguration(conf);

         LayerValidation.assertNOutSet("BatchNormalization", getLayerName(), layerIndex, getNOut());
+        runInheritance();

         LayerConfiguration lconf = conf.getFlattenedLayerConfigurations().get(layerIndex);
         org.deeplearning4j.nn.layers.normalization.BatchNormalization ret =
                 new org.deeplearning4j.nn.layers.normalization.BatchNormalization(lconf, networkDataType);
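Note that LayerValidation.assertNOutSet runs before the new runInheritance() call here; guards of this kind typically reject a layer whose output size was never configured. A hedged sketch of such a check follows; the real method's signature and message text may differ.

// Hedged sketch of an nOut guard in the spirit of LayerValidation.assertNOutSet.
final class LayerChecks {
    private LayerChecks() {}

    static void assertNOutSet(String layerType, String layerName, long layerIndex, long nOut) {
        if (nOut <= 0) {
            String name = (layerName == null) ? "(unnamed)" : layerName;
            throw new IllegalStateException("Invalid configuration for layer (index=" + layerIndex
                    + ", name=" + name + ", type=" + layerType + "): nOut is not set or is <= 0");
        }
    }
}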
@@ -54,6 +54,7 @@ public class OutputLayer extends BaseOutputLayer {
             int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
         LayerValidation.assertNInNOutSet("OutputLayer", getLayerName(), layerIndex, getNIn(), getNOut());
         LayerConfiguration lconf = conf.getFlattenedLayerConfigurations().get(layerIndex);
+        runInheritance();

         org.deeplearning4j.nn.layers.OutputLayer ret = new org.deeplearning4j.nn.layers.OutputLayer(lconf, networkDataType);
         ret.addTrainingListeners(trainingListeners);
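All three instantiate(...) overrides touched by this commit gain the same call, converging on one ordering: validate, resolve the flattened layer configuration, run inheritance, construct the runtime layer, attach listeners. A generic sketch of that flow; the types are simplified stand-ins, and only the names taken from the diff (runInheritance, addTrainingListeners) are meaningful.

// Generic sketch of the instantiate(...) flow this commit converges on.
interface RuntimeLayer {
    void addTrainingListeners(java.util.Collection<Runnable> listeners);
}

abstract class AbstractLayerConf {
    abstract void validate(int layerIndex);   // e.g. assertNInNOutSet / assertNOutSet
    abstract void runInheritance();           // inherit unset fields from the net config
    abstract RuntimeLayer build();            // construct the runtime layer

    RuntimeLayer instantiate(int layerIndex, java.util.Collection<Runnable> listeners) {
        validate(layerIndex);
        runInheritance();                     // the call added in this commit
        RuntimeLayer ret = build();
        ret.addTrainingListeners(listeners);
        return ret;
    }
}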
@@ -210,7 +210,7 @@ public class DefaultParamInitializer extends AbstractParamInitializer {
     }

     protected INDArray createWeightMatrix(long nIn, long nOut,
-            @NonNull IWeightInit weightInit,
+            IWeightInit weightInit,
             INDArray weightParamView, boolean initializeParameters) {
         val shape = new long[] {nIn, nOut};
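Dropping @NonNull on the weightInit parameter removes Lombok's generated eager null check at method entry; one plausible reading is that weightInit may now legitimately arrive unset, since the inheritance step added above can fill it in from the network configuration. A minimal demonstration of what the annotation does (requires Lombok on the classpath):

import lombok.NonNull;

public class NonNullDemo {
    // Lombok inserts a null check that throws NullPointerException
    // before the method body runs.
    static int checkedLength(@NonNull String s) { return s.length(); }

    // Without the annotation, null flows in and must be handled explicitly.
    static int uncheckedLength(String s) { return s == null ? 0 : s.length(); }

    public static void main(String[] args) {
        System.out.println(uncheckedLength(null)); // prints 0
        try {
            checkedLength(null);                   // throws at entry
        } catch (NullPointerException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}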