From c758cf918f4bd7f6f92adc30924cd18a6b31e5a5 Mon Sep 17 00:00:00 2001
From: brian
Date: Mon, 8 May 2023 19:12:46 +0200
Subject: [PATCH] Fixing tests

Signed-off-by: brian
---
 .../conf/NeuralNetBaseBuilderConfiguration.java   |  3 ++-
 .../nn/conf/NeuralNetConfiguration.java           | 13 +++++++++++--
 .../deeplearning4j/nn/conf/inputs/InputType.java  |  3 ++-
 .../nn/conf/layers/CapsuleLayer.java              |  1 +
 .../nn/conf/layers/LayerConfiguration.java        |  1 +
 .../layers/variational/LossFunctionWrapper.java   | 16 ++++++++--------
 .../nn/multilayer/MultiLayerNetwork.java          |  2 +-
 7 files changed, 26 insertions(+), 13 deletions(-)

diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/NeuralNetBaseBuilderConfiguration.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/NeuralNetBaseBuilderConfiguration.java
index 05a56ea1a..e19006f51 100644
--- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/NeuralNetBaseBuilderConfiguration.java
+++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/NeuralNetBaseBuilderConfiguration.java
@@ -27,6 +27,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import lombok.*;
 import lombok.experimental.SuperBuilder;
+import lombok.extern.jackson.Jacksonized;
 import lombok.extern.slf4j.Slf4j;
 import net.brutex.ai.dnn.api.INeuralNetworkConfiguration;
 import org.deeplearning4j.nn.api.OptimizationAlgorithm;
@@ -89,6 +90,7 @@ import java.util.*;
 @Slf4j
 // The inner builder, that we can then extend ...
 @SuperBuilder // TODO fix access
+@Jacksonized
 @EqualsAndHashCode
 public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetworkConfiguration {

@@ -895,7 +897,6 @@ public abstract class NeuralNetBaseBuilderConfiguration implements INeuralNetwor
    *
    * @param distribution Distribution to use for weight initialization
    */
-  @JsonIgnore
   public B weightInit(Distribution distribution) {
     this.weightInit$value = new WeightInitDistribution(distribution);
     this.weightInit$set = true;
diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/NeuralNetConfiguration.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/NeuralNetConfiguration.java
index c9a767342..5739a6710 100644
--- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/NeuralNetConfiguration.java
+++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/NeuralNetConfiguration.java
@@ -24,8 +24,12 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.MapperFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializationFeature;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 import com.fasterxml.jackson.databind.exc.InvalidTypeIdException;
+import com.fasterxml.jackson.databind.json.JsonMapper;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import lombok.*;
 import lombok.experimental.SuperBuilder;
@@ -165,7 +169,8 @@ public class NeuralNetConfiguration extends NeuralNetBaseBuilderConfiguration {
    * @return {@link NeuralNetConfiguration}
    */
  public static NeuralNetConfiguration fromJson(String json) {
-    ObjectMapper mapper = NeuralNetConfiguration.mapper();
+    //ObjectMapper mapper = NeuralNetConfiguration.mapper();
+    JsonMapper mapper = JsonMapper.builder().build();
    try {
      return mapper.readValue(json, NeuralNetConfiguration.class);
    } catch (JsonProcessingException e) {
@@ -439,7 +444,11 @@ public class NeuralNetConfiguration extends NeuralNetBaseBuilderConfiguration {
    * @return JSON representation of NN configuration
    */
  public String toJson() {
-    ObjectMapper mapper = NeuralNetConfiguration.mapper();
+    JsonMapper mapper = JsonMapper.builder()
+        .enable(SerializationFeature.INDENT_OUTPUT)
+        .enable(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY)
+        .build();
+    //ObjectMapper mapper = NeuralNetConfiguration.mapper();
    synchronized (mapper) {
      //JSON mappers are supposed to be thread safe: however, in practice they seem to miss fields occasionally
      //when writeValueAsString is used by multiple threads. This results in invalid JSON. See issue #3243
diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/inputs/InputType.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/inputs/InputType.java
index db98572c0..6284c9d08 100644
--- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/inputs/InputType.java
+++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/inputs/InputType.java
@@ -90,6 +90,7 @@ public abstract class InputType implements Serializable {
    *
    * @return int[]
    */
+  @JsonIgnore
   public long[] getShape() {
     return getShape(false);
   }
@@ -431,7 +432,7 @@
       return height * width * depth * channels;
     }

-    @Override
+    @Override @JsonIgnore
     public long[] getShape(boolean includeBatchDim) {
       if(dataFormat == Convolution3D.DataFormat.NDHWC){
         if(includeBatchDim) return new long[]{-1, depth, height, width, channels};
diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/CapsuleLayer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/CapsuleLayer.java
index 32b0d8efe..151e5ec4e 100644
--- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/CapsuleLayer.java
+++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/CapsuleLayer.java
@@ -40,6 +40,7 @@ import org.nd4j.linalg.factory.Nd4j;

 @EqualsAndHashCode(callSuper = true)
 @SuperBuilder(builderMethodName = "innerBuilder")
+@NoArgsConstructor
 public class CapsuleLayer extends SameDiffLayer {

     private static final String WEIGHT_PARAM = "weight";
diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/LayerConfiguration.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/LayerConfiguration.java
index 9df926c34..230e6f815 100644
--- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/LayerConfiguration.java
+++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/LayerConfiguration.java
@@ -92,6 +92,7 @@ public abstract class LayerConfiguration
    *
    * @return activation function
    */
+  @JsonIgnore
   public IActivation getActivationFn() {
     if (activation == null)
       throw new RuntimeException(
diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/variational/LossFunctionWrapper.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/variational/LossFunctionWrapper.java
index 3622018b6..f542a315e 100644
--- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/variational/LossFunctionWrapper.java
+++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/variational/LossFunctionWrapper.java
@@ -30,12 +30,12 @@ import com.fasterxml.jackson.annotation.JsonProperty;

 @Data
 public class LossFunctionWrapper implements ReconstructionDistribution {
-    private final IActivation activationFn;
+    private final IActivation activation;
     private final ILossFunction lossFunction;

-    public LossFunctionWrapper(@JsonProperty("activationFn") IActivation activationFn,
+    public LossFunctionWrapper(@JsonProperty("activation") IActivation activation,
                     @JsonProperty("lossFunction") ILossFunction lossFunction) {
-        this.activationFn = activationFn;
+        this.activation = activation;
         this.lossFunction = lossFunction;
     }

@@ -59,17 +59,17 @@ public class LossFunctionWrapper implements ReconstructionDistribution {

         //NOTE: The returned value here is NOT negative log probability, but it (the loss function value)
         // is equivalent, in terms of being something we want to minimize...
-        return lossFunction.computeScore(x, preOutDistributionParams, activationFn, null, average);
+        return lossFunction.computeScore(x, preOutDistributionParams, activation, null, average);
     }

     @Override
     public INDArray exampleNegLogProbability(INDArray x, INDArray preOutDistributionParams) {
-        return lossFunction.computeScoreArray(x, preOutDistributionParams, activationFn, null);
+        return lossFunction.computeScoreArray(x, preOutDistributionParams, activation, null);
     }

     @Override
     public INDArray gradient(INDArray x, INDArray preOutDistributionParams) {
-        return lossFunction.computeGradient(x, preOutDistributionParams, activationFn, null);
+        return lossFunction.computeGradient(x, preOutDistributionParams, activation, null);
     }

     @Override
@@ -82,11 +82,11 @@ public class LossFunctionWrapper implements ReconstructionDistribution {
     public INDArray generateAtMean(INDArray preOutDistributionParams) {
         //Loss functions: not probabilistic -> not random
         INDArray out = preOutDistributionParams.dup();
-        return activationFn.getActivation(out, true);
+        return activation.getActivation(out, true);
     }

     @Override
     public String toString() {
-        return "LossFunctionWrapper(afn=" + activationFn + "," + lossFunction + ")";
+        return "LossFunctionWrapper(afn=" + activation + "," + lossFunction + ")";
     }
 }
diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/multilayer/MultiLayerNetwork.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/multilayer/MultiLayerNetwork.java
index f3af995b5..126130bf9 100644
--- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/multilayer/MultiLayerNetwork.java
+++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/multilayer/MultiLayerNetwork.java
@@ -118,7 +118,7 @@ import org.nd4j.linalg.workspace.WorkspaceUtils;
  */
 @Slf4j
 // @JsonIdentityInfo(generator = ObjectIdGenerators.IntSequenceGenerator.class, property = "@id")
-@JsonIgnoreProperties({"helper", "net", "initCalled", "iupdater", "activationFn"})
+@JsonIgnoreProperties({"helper", "net", "initCalled", "iupdater"})
 public class MultiLayerNetwork extends ArtificialNeuralNetwork
     implements Serializable, Classifier, Layer, ITrainableLayer {
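
Note on the serialization changes: adding @Jacksonized next to @SuperBuilder lets Jackson
deserialize NeuralNetBaseBuilderConfiguration subclasses through their Lombok-generated
builders, and toJson()/fromJson() now use a plain JsonMapper instead of the previous
NeuralNetConfiguration.mapper(). A minimal round-trip sketch of what the touched methods
should support; the bare builder().build() call is an assumption, since the fields a valid
configuration requires are not visible in this patch:

    // Sketch only: assumes the @SuperBuilder-generated builder yields a usable
    // configuration from defaults (required fields are an assumption here).
    NeuralNetConfiguration conf = NeuralNetConfiguration.builder().build();
    // Serialized with INDENT_OUTPUT and SORT_PROPERTIES_ALPHABETICALLY enabled.
    String json = conf.toJson();
    // Deserialized with a plain JsonMapper, binding through the @Jacksonized builder.
    NeuralNetConfiguration restored = NeuralNetConfiguration.fromJson(json);
    // Meaningful comparison because the class carries @EqualsAndHashCode.
    assert conf.equals(restored);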
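The activationFn -> activation rename in LossFunctionWrapper also renames the JSON property,
so configurations serialized before this patch (under the "activationFn" key) would no longer
bind to the constructor parameter. If old payloads must keep loading, Jackson's standard
@JsonAlias annotation (com.fasterxml.jackson.annotation.JsonAlias) could accept both keys;
a hypothetical sketch, not part of this patch:

    // Hypothetical backward-compatible constructor: "activation" is the new
    // property name, @JsonAlias additionally accepts the pre-rename key.
    public LossFunctionWrapper(
            @JsonProperty("activation") @JsonAlias("activationFn") IActivation activation,
            @JsonProperty("lossFunction") ILossFunction lossFunction) {
        this.activation = activation;
        this.lossFunction = lossFunction;
    }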