Propagate layer configuration inheritance: call runInheritance() in layer instantiate() methods, inherit weightInit in BaseLayerConfiguration, and relax @NonNull on createWeightMatrix's weightInit parameter — clean build/test

Signed-off-by: brian <brian@brutex.de>
master
Brian Rosenberger 2023-04-15 12:50:26 +02:00
parent 1f2e82d3ef
commit 9d4939ccfd
5 changed files with 6 additions and 2 deletions

View File

@@ -57,6 +57,7 @@ public class AutoEncoder extends BasePretrainNetwork {
int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
this.setNetConfiguration(conf);
LayerConfiguration lconf = conf.getFlattenedLayerConfigurations().get(layerIndex);
runInheritance();
org.deeplearning4j.nn.layers.feedforward.autoencoder.AutoEncoder ret =
new org.deeplearning4j.nn.layers.feedforward.autoencoder.AutoEncoder(lconf, networkDataType);

View File

@@ -520,6 +520,7 @@ public abstract class BaseLayerConfiguration extends LayerConfiguration implemen
if(this.regularizationBias == null) this.regularizationBias = conf.getRegularizationBias();
if(this.regularization == null ) this.regularization = conf.getRegularization();
if(this.gradientNormalization == null) this.gradientNormalization = conf.getGradientNormalization();
if(this.weightInit == null) this.weightInit = conf.getWeightInit();
}
}

View File

@@ -92,8 +92,9 @@ public class BatchNormalization extends FeedForwardLayer {
public Layer instantiate(NeuralNetConfiguration conf, Collection<TrainingListener> trainingListeners,
int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
this.setNetConfiguration(conf);
LayerValidation.assertNOutSet("BatchNormalization", getLayerName(), layerIndex, getNOut());
runInheritance();
LayerConfiguration lconf = conf.getFlattenedLayerConfigurations().get(layerIndex);
org.deeplearning4j.nn.layers.normalization.BatchNormalization ret =
new org.deeplearning4j.nn.layers.normalization.BatchNormalization(lconf, networkDataType);

View File

@@ -54,6 +54,7 @@ public class OutputLayer extends BaseOutputLayer {
int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
LayerValidation.assertNInNOutSet("OutputLayer", getLayerName(), layerIndex, getNIn(), getNOut());
LayerConfiguration lconf = conf.getFlattenedLayerConfigurations().get(layerIndex);
runInheritance();
org.deeplearning4j.nn.layers.OutputLayer ret = new org.deeplearning4j.nn.layers.OutputLayer(lconf, networkDataType);
ret.addTrainingListeners(trainingListeners);

View File

@@ -210,7 +210,7 @@ public class DefaultParamInitializer extends AbstractParamInitializer {
}
protected INDArray createWeightMatrix(long nIn, long nOut,
@NonNull IWeightInit weightInit,
IWeightInit weightInit,
INDArray weightParamView, boolean initializeParameters) {
val shape = new long[] {nIn, nOut};