parent ea504bff41
commit 1c39dbee52
@@ -36,7 +36,8 @@ pom.xml.versionsBackup
 pom.xml.next
 release.properties
 *dependency-reduced-pom.xml
-*/build/*
+**/build/*
+.gradle/*
 
 # Specific for Nd4j
 *.md5
@@ -84,3 +85,14 @@ bruai4j-native-common/cmake*
 /bruai4j-native/bruai4j-native-common/blasbuild/
 /bruai4j-native/bruai4j-native-common/build/
 /cavis-native/cavis-native-lib/blasbuild/
+/cavis-dnn/cavis-dnn-core/build/reports/tests/cudaTest/classes/org.deeplearning4j.gradientcheck.AttentionLayerTest.html
+/cavis-dnn/cavis-dnn-core/build/reports/tests/cudaTest/css/base-style.css
+/cavis-dnn/cavis-dnn-core/build/reports/tests/cudaTest/css/style.css
+/cavis-dnn/cavis-dnn-core/build/reports/tests/cudaTest/js/report.js
+/cavis-dnn/cavis-dnn-core/build/reports/tests/cudaTest/packages/org.deeplearning4j.gradientcheck.html
+/cavis-dnn/cavis-dnn-core/build/reports/tests/cudaTest/index.html
+/cavis-dnn/cavis-dnn-core/build/resources/main/iris.dat
+/cavis-dnn/cavis-dnn-core/build/resources/test/junit-platform.properties
+/cavis-dnn/cavis-dnn-core/build/resources/test/logback-test.xml
+/cavis-dnn/cavis-dnn-core/build/test-results/cudaTest/TEST-org.deeplearning4j.gradientcheck.AttentionLayerTest.xml
+/cavis-dnn/cavis-dnn-core/build/tmp/jar/MANIFEST.MF
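
Note on the pattern change above: "*/build/*" only ignores build directories exactly one level below the repository root, while "**/build/*" matches at any depth, which is what this multi-module Gradle tree needs. For example:

    # matched by **/build/* but not by */build/*
    cavis-dnn/cavis-dnn-core/build/classes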
@@ -309,7 +309,7 @@ public class TestInvalidConfigurations extends BaseDL4JTest {
 
         try {
             NeuralNetConfiguration conf = NeuralNetConfiguration.builder().convolutionMode(ConvolutionMode.Strict)
-                    .list()
                     .layer(0, ConvolutionLayer.builder().kernelSize(2, 3).stride(2, 2).padding(0, 0).nOut(5)
                             .build())
                     .layer(1, OutputLayer.builder().nOut(10).build())
@@ -114,7 +114,6 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
         .dataType(DataType.DOUBLE)
         .updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL)
         .dist(new NormalDistribution(0, 1))
-        .list()
         .layer(0, Convolution3D.builder().activation(afn).kernelSize(kernel)
                 .stride(stride).nIn(convNIn).nOut(convNOut1).hasBias(false)
                 .convolutionMode(mode).dataFormat(df)
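
The two test hunks above drop the now-redundant .list() call (deprecated to a no-op further down in this commit). A sketch of the resulting builder style, reusing names from the diff; illustrative only, not a compiling test:

    NeuralNetConfiguration conf = NeuralNetConfiguration.builder()
        .dataType(DataType.DOUBLE)
        .updater(new NoOp())
        // layers are now chained directly on the configuration builder:
        .layer(0, Convolution3D.builder().kernelSize(2, 2, 2).stride(1, 1, 1)
            .nIn(1).nOut(3).build())
        .layer(1, OutputLayer.builder().nOut(10).build())
        .build();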
@@ -565,6 +565,7 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
       this.activation = activation;
       return self();
     }
+    @JsonIgnore
     public B activation(IActivation activation) {
       this.activation = activation;
       return self();
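
The @JsonIgnore added above presumably keeps Jackson from seeing two competing one-argument "activation" properties on the builder. A minimal sketch of the conflict, assuming standard Jackson setter resolution (class and member names here are illustrative, not the project's):

    import com.fasterxml.jackson.annotation.JsonIgnore;

    class ActivationHolder {
      private Object activation;
      // canonical variant Jackson should bind during (de)serialization:
      public void setActivation(String name) { this.activation = name; }
      // secondary overload hidden so the property is not ambiguous:
      @JsonIgnore
      public void setActivation(Object fn) { this.activation = fn; }
    }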
@@ -583,7 +584,7 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
     public B constrainWeights(LayerConstraint... constraints) {
       constrainWeights$value = Arrays.asList(constraints);
       constrainWeights$set = true;
-      return (B) this;
+      return self();
     }
 
     /**
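
This is the first of many "(B) this" -> "self()" changes below. Lombok's @SuperBuilder generates an abstract self() that returns the concrete builder type, so the unchecked cast is unnecessary. A minimal hand-rolled sketch of the idiom (names illustrative):

    abstract class ConfBuilder<C, B extends ConfBuilder<C, B>> {
      protected abstract B self(); // what @SuperBuilder generates
      public B exampleSetter() {
        // ...mutate builder state, then return the most-derived builder type:
        return self(); // type-safe; "return (B) this;" needed an unchecked cast
      }
    }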
@@ -618,7 +619,7 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
     public B constrainAllParameters(LayerConstraint... constraints) {
       allParamConstraints$value = Arrays.asList(constraints);
       allParamConstraints$set = true;
-      return (B) this;
+      return self();
     }
 
     /**
@@ -635,7 +636,7 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
     public B constrainBias(LayerConstraint... constraints) {
       biasConstraints$value = Arrays.asList(constraints);
       biasConstraints$set = true;
-      return (B) this;
+      return self();
     }
 
     /**
@@ -645,10 +646,11 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
      * @param processor what to use to preProcess the data.
      * @return builder pattern
      */
-    public B inputPreProcessor(Integer layer, InputPreProcessor processor) {
+    public B inputPreProcessor(@NonNull Integer layer, @NonNull InputPreProcessor processor) {
+      if (inputPreProcessors$value == null) inputPreProcessors$value = new LinkedHashMap<>();
       inputPreProcessors$value.put(layer, processor);
       inputPreProcessors$set = true;
-      return (B) this;
+      return self();
     }
 
     /**
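
The added null guard makes the preprocessor map lazily initialized, so callers need no prior setup before the first inputPreProcessor(...) call. A self-contained sketch of the pattern, mirroring the $value/$set naming from the diff:

    import java.util.LinkedHashMap;
    import java.util.Map;

    class PreProcessorHolder<T> {
      private Map<Integer, T> inputPreProcessors$value; // may start out null
      private boolean inputPreProcessors$set;

      PreProcessorHolder<T> inputPreProcessor(Integer layer, T processor) {
        if (inputPreProcessors$value == null) inputPreProcessors$value = new LinkedHashMap<>();
        inputPreProcessors$value.put(layer, processor);
        inputPreProcessors$set = true;
        return this;
      }
    }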
@@ -658,7 +660,7 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
      * @param layer the layer
      * @return builder
      */
-    public B layer(Integer index, @NonNull LayerConfiguration layer) {
+    public B layer(@NonNull Integer index, @NonNull LayerConfiguration layer) {
       innerConfigurations$value.add(index, layer);
       innerConfigurations$set = true;
       return self();
@@ -680,10 +682,11 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
      * @param layer the layer
      * @return builder
      */
+    @JsonIgnore
     public B layer(@NonNull LayerConfiguration layer) {
       innerConfigurations$value.add(layer);
       innerConfigurations$set = true;
-      return (B) this;
+      return self();
     }
     public B layer(@NonNull LayerConfiguration.LayerConfigurationBuilder<?, ?> layer) {
       return this.layer(layer.build());
@@ -699,7 +702,7 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
     public B layersFromArray(@NonNull LayerConfiguration[] arrLayers) {
       innerConfigurations$value.addAll(List.of(arrLayers));
       innerConfigurations$set = true;
-      return (B) this;
+      return self();
     }
 
     /** Specify additional layer configurations */
@@ -707,7 +710,7 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
     public B layersFromList(@NonNull List<LayerConfiguration> listLayers) {
       innerConfigurations$value.addAll(listLayers);
       innerConfigurations$set = true;
-      return (B) this;
+      return self();
     }
 
     /**
@@ -723,7 +726,7 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
         regularization$value.add(new L1Regularization(l1));
       }
       regularization$set = true;
-      return (B) this;
+      return self();
     }
 
     /**
@@ -751,7 +754,7 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
         regularization$value.add(new L2Regularization(l2));
       }
       regularization$set = true;
-      return (B) this;
+      return self();
     }
 
     /**
@@ -766,7 +769,7 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
         regularizationBias$value.add(new L1Regularization(l1Bias));
       }
       regularizationBias$set = true;
-      return (B) this;
+      return self();
     }
 
     /**
@@ -791,7 +794,7 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
             "L2 bias regularization removed: incompatible with added WeightDecay regularization");
         regularizationBias$value.add(new L2Regularization(l2Bias));
       }
-      return (B) this;
+      return self();
     }
 
     /**
@@ -833,7 +836,7 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
         regularization$value.add(new WeightDecay(coefficient, applyLR));
       }
       regularization$set = true;
-      return (B) this;
+      return self();
     }
 
     /**
@@ -870,7 +873,7 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
         regularizationBias$value.add(new WeightDecay(coefficient, applyLR));
       }
       regularization$set = true;
-      return (B) this;
+      return self();
     }
 
 
@@ -881,7 +884,7 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
      */
     @Deprecated
     public B list() {
-      return (B) this;
+      return self();
     }
 
     /**
@@ -897,19 +900,19 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
     public B weightInit(Distribution distribution) {
       this.weightInit$value = new WeightInitDistribution(distribution);
       this.weightInit$set = true;
-      return (B) this;
+      return self();
     }
     @JsonIgnore
     public B weightInit(WeightInit weightInit) {
       this.weightInit$value = weightInit.getWeightInitFunction();
       this.weightInit$set = true;
-      return (B) this;
+      return self();
     }
 
     public B weightInit(IWeightInit iWeightInit) {
       this.weightInit$value = iWeightInit;
       this.weightInit$set = true;
-      return (B) this;
+      return self();
     }
 
     /**
@@ -919,11 +922,11 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
      * @return
      */
     public B dist(@NonNull Distribution distribution) {
-      return (B) weightInit(distribution);
+      return weightInit(distribution);
     }
 
     public B dropOut(@NonNull IDropout dropout) {
-      return (B) idropOut(dropout);
+      return idropOut(dropout);
     }
 
     /**
@@ -933,7 +936,7 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
      * @return builder
      */
     public B dropOut(double dropout) {
-      return (B) idropOut(new Dropout(dropout));
+      return idropOut(new Dropout(dropout));
     }
 
     /**
@@ -946,7 +949,7 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
     public B confs(@NonNull List<NeuralNetConfiguration> confs) {
       innerConfigurations$value.addAll(confs);
       innerConfigurations$set = true;
-      return (B) this;
+      return self();
     }
   }
 }
@@ -38,6 +38,7 @@ import org.nd4j.linalg.api.buffer.DataType;
 import org.nd4j.linalg.api.ndarray.INDArray;
 
 @Data
+@NoArgsConstructor
 @ToString(callSuper = true)
 @EqualsAndHashCode(callSuper = true)
 @SuperBuilder(builderMethodName = "innerBuilder")
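
@NoArgsConstructor shows up on several @SuperBuilder classes in this commit, presumably because @SuperBuilder defines a constructor taking the builder and so suppresses Java's implicit default constructor, which Jackson needs for deserialization. A sketch under that assumption:

    import lombok.NoArgsConstructor;
    import lombok.experimental.SuperBuilder;

    @NoArgsConstructor // without this, @SuperBuilder leaves no default constructor
    @SuperBuilder
    class LayerConf {
      public int nOut;
    }

    // Jackson can then instantiate before populating fields, e.g.:
    //   new ObjectMapper().readValue("{\"nOut\":10}", LayerConf.class);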
@@ -47,6 +47,7 @@ import org.nd4j.linalg.api.ndarray.INDArray;
  * size, the stride and padding The pooling layer takes the kernel size
  */
 @ToString(callSuper = true)
+@NoArgsConstructor
 @EqualsAndHashCode(callSuper = true)
 @SuperBuilder(builderMethodName = "innerBuilder")
 public class ConvolutionLayer extends FeedForwardLayer {
@@ -361,7 +362,7 @@ public class ConvolutionLayer extends FeedForwardLayer {
      * @param kernelSize kernel size
      */
     public B kernelSize(int... kernelSize) {
-      this.kernelSize$value = ValidationUtils.validate2NonNegative(kernelSize, false, "kernelSize");
+      this.kernelSize$value = ValidationUtils.validate3NonNegative(kernelSize, "kernelSize");
       this.kernelSize$set = true;
       return self();
     }
@@ -371,7 +372,7 @@ public class ConvolutionLayer extends FeedForwardLayer {
      * @param stride kernel size
      */
     public B stride(int... stride) {
-      this.stride$value = ValidationUtils.validate2NonNegative(stride, false, "stride");
+      this.stride$value = ValidationUtils.validate3NonNegative(stride, "stride");
       this.stride$set = true;
       return self();
     }
@@ -382,7 +383,7 @@ public class ConvolutionLayer extends FeedForwardLayer {
      * @param padding kernel size
      */
     public B padding(int... padding) {
-      this.padding$value = ValidationUtils.validate2NonNegative(padding, false, "padding");
+      this.padding$value = ValidationUtils.validate3NonNegative(padding, "padding");
       this.padding$set = true;
       return self();
     }
@@ -392,7 +393,7 @@ public class ConvolutionLayer extends FeedForwardLayer {
      * @param dilation kernel size
      */
     public B dilation(int... dilation) {
-      this.dilation$value = ValidationUtils.validate2NonNegative(dilation, false, "dilation");
+      this.dilation$value = ValidationUtils.validate3NonNegative(dilation, "dilation");
       this.dilation$set = true;
       return self();
     }
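
The four hunks above switch ConvolutionLayer's geometry setters from validate2NonNegative to validate3NonNegative, so each setter now validates three components. An illustrative call under that assumption, reusing names from the diff:

    ConvolutionLayer layer = ConvolutionLayer.builder()
        .kernelSize(2, 2, 2) // three values now expected per setter
        .stride(1, 1, 1)
        .padding(0, 0, 0)
        .dilation(1, 1, 1)
        .nOut(5)
        .build();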
@@ -38,6 +38,7 @@ import org.nd4j.linalg.api.ndarray.INDArray;
 
 /** Dense Layer Uses WeightInitXavier as default */
 @Data
+@NoArgsConstructor
 @ToString(callSuper = true)
 @EqualsAndHashCode(callSuper = true)
 @SuperBuilder
@@ -20,6 +20,7 @@
 
 package org.deeplearning4j.nn.conf.layers;
 
+import com.fasterxml.jackson.annotation.JsonIgnore;
 import lombok.*;
 import lombok.experimental.SuperBuilder;
 import org.deeplearning4j.nn.conf.DataFormat;
@@ -44,7 +45,7 @@ public abstract class FeedForwardLayer extends BaseLayerConfiguration {
      */
     @Getter
     protected long nIn;
+    @JsonIgnore
     public void setNIn(int in) {
       this.nIn = in;
     }
@@ -326,7 +326,7 @@ public abstract class LayerConfiguration
     log.warn("Calling getUpdater() in {} will always return no-Op Updater.", LayerConfiguration.class.getSimpleName());
     return Updater.NONE.getIUpdaterWithDefaultConfig();
   }
-  @Deprecated
+  @Deprecated @JsonIgnore
   public void setUpdater(Updater updater) {
     setUpdater(updater.getIUpdaterWithDefaultConfig());
   }
@@ -35,6 +35,7 @@ import org.nd4j.linalg.api.ndarray.INDArray;
 import org.nd4j.linalg.lossfunctions.LossFunctions;
 
 @Data
+@NoArgsConstructor
 @ToString(callSuper = true)
 @EqualsAndHashCode(callSuper = true)
 @SuperBuilder(builderMethodName = "innerBuilder")
@@ -48,7 +48,7 @@ public class OCNNOutputLayer extends BaseOutputLayer {
      * The hidden layer size for the one class neural network. Note this would be nOut on a dense
      * layer. NOut in this neural net is always set to 1 though.
      */
-    @Builder.Default @Getter private int hiddenLayerSize; // embedded hidden layer size aka "K"
+    @Getter private int hiddenLayerSize; // embedded hidden layer size aka "K"
     /** For nu definition see the paper */
     @Builder.Default @Getter private double nu = 0.04;
     /**
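
Dropping @Builder.Default from hiddenLayerSize looks deliberate: Lombok requires an initializing expression alongside @Builder.Default, and this field has none, so the annotation is removed and the builder default is simply Java's zero value. A minimal sketch:

    import lombok.Builder;
    import lombok.Getter;

    @Builder
    class Ocnn {
      @Getter private int hiddenLayerSize;                // builder default: 0
      @Builder.Default @Getter private double nu = 0.04;  // keeps 0.04 if unset
    }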