diff --git a/brutex-extended-tests/src/test/java/net/brutex/spark/BrianTest.java b/brutex-extended-tests/src/test/java/net/brutex/spark/BrianTest.java index d3a7179f6..7813efe6a 100644 --- a/brutex-extended-tests/src/test/java/net/brutex/spark/BrianTest.java +++ b/brutex-extended-tests/src/test/java/net/brutex/spark/BrianTest.java @@ -139,7 +139,6 @@ public class BrianTest /*extends BaseDL4JTest*/ { //.setExecutorEnv("spark.executor.cores", "2") //.setExecutorEnv("spark.executor.memory", "2g") //.set("spark.submit.deployMode", "client") - ; /* SparkSession spark = SparkSession @@ -240,7 +239,7 @@ public class BrianTest /*extends BaseDL4JTest*/ { */ TransformProcess tp = new TransformProcess.Builder(inputSchema) .removeAllColumnsExceptFor("country_code", "lat", "lon") - .stringToCategorical("country_code", Arrays.asList(new String[] {"GR", "FR", "DE", "CH"})) + .stringToCategorical("country_code", Arrays.asList("GR", "FR", "DE", "CH")) .filter(new FilterInvalidValues()) .categoricalToOneHot("country_code") .build(); diff --git a/brutex-extended-tests/src/test/java/net/brutex/spark/BrianTest2.java b/brutex-extended-tests/src/test/java/net/brutex/spark/BrianTest2.java index 436016352..be62228c1 100644 --- a/brutex-extended-tests/src/test/java/net/brutex/spark/BrianTest2.java +++ b/brutex-extended-tests/src/test/java/net/brutex/spark/BrianTest2.java @@ -225,7 +225,7 @@ public class BrianTest2 /*extends BaseDL4JTest*/ { */ TransformProcess tp = new TransformProcess.Builder(inputSchema) .removeAllColumnsExceptFor("country_code", "lat", "lon") - .stringToCategorical("country_code", Arrays.asList(new String[] {"GR", "FR", "DE", "CH"})) + .stringToCategorical("country_code", Arrays.asList("GR", "FR", "DE", "CH")) .filter(new FilterInvalidValues()) .categoricalToOneHot("country_code") .build(); diff --git a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/IntegrationTestRunner.java 
b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/IntegrationTestRunner.java index 29e80ce99..fbc0d60a3 100644 --- a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/IntegrationTestRunner.java +++ b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/IntegrationTestRunner.java @@ -91,10 +91,10 @@ public class IntegrationTestRunner { public static final double MAX_REL_ERROR_SCORES = 1e-4; - private static List> layerClasses = new ArrayList<>(); - private static List> preprocClasses = new ArrayList<>(); - private static List> graphVertexClasses = new ArrayList<>(); - private static List> evaluationClasses = new ArrayList<>(); + private static final List> layerClasses = new ArrayList<>(); + private static final List> preprocClasses = new ArrayList<>(); + private static final List> graphVertexClasses = new ArrayList<>(); + private static final List> evaluationClasses = new ArrayList<>(); private static Map, Integer> layerConfClassesSeen = new HashMap<>(); private static Map, Integer> preprocessorConfClassesSeen = new HashMap<>(); diff --git a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/CNN1DTestCases.java b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/CNN1DTestCases.java index 4ecc4dd2a..d65a0a9cc 100644 --- a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/CNN1DTestCases.java +++ b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/CNN1DTestCases.java @@ -67,8 +67,8 @@ public class CNN1DTestCases { testOverfitting = false; } - int miniBatchSize = 16; - int exampleLength = 128; + final int miniBatchSize = 16; + final int exampleLength = 128; @Override public ModelType modelType() { diff --git a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/CNN2DTestCases.java 
b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/CNN2DTestCases.java index 8b5cf6358..3b351e277 100644 --- a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/CNN2DTestCases.java +++ b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/CNN2DTestCases.java @@ -271,11 +271,11 @@ public class CNN2DTestCases { public static TestCase getYoloHouseNumbers() { return new TestCase() { - private int width = 416; - private int height = 416; - private int nChannels = 3; - private int gridWidth = 13; - private int gridHeight = 13; + private final int width = 416; + private final int height = 416; + private final int nChannels = 3; + private final int gridWidth = 13; + private final int gridHeight = 13; { testName = "YOLOHouseNumbers"; diff --git a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/CNN3DTestCases.java b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/CNN3DTestCases.java index 4c8448c63..f856d5159 100644 --- a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/CNN3DTestCases.java +++ b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/CNN3DTestCases.java @@ -108,7 +108,7 @@ public class CNN3DTestCases { public MultiDataSet getGradientsTestData() throws Exception { Nd4j.getRandom().setSeed(12345); //NCDHW format - INDArray arr = Nd4j.rand(new int[]{2, 3, 8, 8, 8}); + INDArray arr = Nd4j.rand(2, 3, 8, 8, 8); INDArray labels = org.deeplearning4j.integration.TestUtils.randomOneHot(2, 10); return new org.nd4j.linalg.dataset.MultiDataSet(arr, labels); } @@ -135,6 +135,6 @@ public class CNN3DTestCases { } }; - }; + } } diff --git a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/RNNTestCases.java b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/RNNTestCases.java index 
025f1ab54..a2cf437fe 100644 --- a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/RNNTestCases.java +++ b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/RNNTestCases.java @@ -93,8 +93,8 @@ public class RNNTestCases { minAbsErrorParamsPostTraining = 2e-3; } - private int miniBatchSize = 32; - private int exampleLength = 200; + private final int miniBatchSize = 32; + private final int exampleLength = 200; @Override diff --git a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/misc/CharacterIterator.java b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/misc/CharacterIterator.java index a7be40676..4d038abf8 100644 --- a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/misc/CharacterIterator.java +++ b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/dl4j/misc/CharacterIterator.java @@ -31,23 +31,24 @@ import java.io.File; import java.io.IOException; import java.net.URL; import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.util.*; public class CharacterIterator implements DataSetIterator { //Valid characters - private char[] validCharacters; + private final char[] validCharacters; //Maps each character to an index ind the input/output - private Map charToIdxMap; + private final Map charToIdxMap; //All characters of the input file (after filtering to only those that are valid - private char[] fileCharacters; + private final char[] fileCharacters; //Length of each example/minibatch (number of characters) - private int exampleLength; + private final int exampleLength; //Size of each minibatch (number of examples) - private int miniBatchSize; - private Random rng; + private final int miniBatchSize; + private final Random rng; //Offsets for the start of each example - private LinkedList exampleStartOffsets = new 
LinkedList<>(); + private final LinkedList exampleStartOffsets = new LinkedList<>(); /** * @param textFilePath Path to text file to use for generating samples @@ -299,7 +300,7 @@ public class CharacterIterator implements DataSetIterator { if (!f.exists()) throw new IOException("File does not exist: " + fileLocation); //Download problem? char[] validCharacters = CharacterIterator.getMinimalCharacterSet(); //Which characters are allowed? Others will be removed - return new CharacterIterator(fileLocation, Charset.forName("UTF-8"), + return new CharacterIterator(fileLocation, StandardCharsets.UTF_8, miniBatchSize, sequenceLength, validCharacters, new Random(12345)); } diff --git a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/samediff/SameDiffCNNCases.java b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/samediff/SameDiffCNNCases.java index 98ec32dd0..2c28a2d3e 100644 --- a/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/samediff/SameDiffCNNCases.java +++ b/brutex-extended-tests/src/test/java/org/deeplearning4j/integration/testcases/samediff/SameDiffCNNCases.java @@ -305,7 +305,7 @@ public class SameDiffCNNCases { // [minibatch,8,1,1,1] - int channels_height_width_depth = 8 * 1 * 1 * 1; + int channels_height_width_depth = 8; SDVariable layer1_reshaped = layer1.reshape(-1, channels_height_width_depth); @@ -331,7 +331,7 @@ public class SameDiffCNNCases { public Map getGradientsTestDataSameDiff() throws Exception { Nd4j.getRandom().setSeed(12345); //NCDHW format - INDArray arr = Nd4j.rand(new int[]{2, 3, 8, 8, 8}); + INDArray arr = Nd4j.rand(2, 3, 8, 8, 8); INDArray labels = org.deeplearning4j.integration.TestUtils.randomOneHot(2, 10); Map map = new HashMap<>(); @@ -357,7 +357,7 @@ public class SameDiffCNNCases { Nd4j.getRandom().setSeed(12345); List> list = new ArrayList<>(); - INDArray arr = Nd4j.rand(new int[]{2, 3, 8, 8, 8}); + INDArray arr = Nd4j.rand(2, 3, 8, 8, 8); 
list.add(Collections.singletonMap("in", arr)); @@ -368,7 +368,7 @@ public class SameDiffCNNCases { public MultiDataSet getGradientsTestData() throws Exception { Nd4j.getRandom().setSeed(12345); //NCDHW format - INDArray arr = Nd4j.rand(new int[]{2, 3, 8, 8, 8}); + INDArray arr = Nd4j.rand(2, 3, 8, 8, 8); INDArray labels = org.deeplearning4j.integration.TestUtils.randomOneHot(2, 10); return new org.nd4j.linalg.dataset.MultiDataSet(arr, labels); } diff --git a/build_requirements.md b/build_requirements.md index db6532203..77d54050b 100644 --- a/build_requirements.md +++ b/build_requirements.md @@ -129,4 +129,20 @@ echo "nameserver 8.8.8.8" | sudo tee -a /etc/resolv.conf # Buildparameter: # -P\\ - CAVIS_AVX_EXTENSION = {avx2 | avx512}, default is avx2 \ No newline at end of file + CAVIS_AVX_EXTENSION = {avx2 | avx512}, default is avx2 + +# Zeppelin Spark dependencies # +3 + + +To add the dependency to the language models, use the following format in the Dependencies section of the Spark Interpreter configuration (Interpreters -> Spark -> Edit -> Dependencies): + +groupId:artifactId:packaging:classifier:version + +In your case it should work with + +edu.stanford.nlp:stanford-corenlp:jar:models:3.8.0 + + +Native cpu code under linux needs libc6-dev +/lib/x86_64-linux-gnu/libm.so.6: version `GLIBC_2.29' not found \ No newline at end of file diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/conf/Configuration.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/conf/Configuration.java index 71b7f7c2a..922b31aed 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/conf/Configuration.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/conf/Configuration.java @@ -266,7 +266,7 @@ public class Configuration implements Iterable>, Writa reloadConfiguration(); } - private static Pattern varPat = Pattern.compile("\\$\\{[^\\}\\$\u0020]+\\}"); + private static final Pattern varPat = 
Pattern.compile("\\$\\{[^\\}\\$\u0020]+\\}"); private String substituteVars(String expr) { if (expr == null) { @@ -555,7 +555,7 @@ public class Configuration implements Iterable>, Writa } /** - * Get the value of the name property as a Pattern. + * Get the value of the name property as a {@code Pattern}. * If no such property is specified, or if the specified value is not a valid * Pattern, then DefaultValue is returned. * diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/formats/output/OutputFormat.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/formats/output/OutputFormat.java index e66e37a6d..14322d888 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/formats/output/OutputFormat.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/formats/output/OutputFormat.java @@ -27,7 +27,7 @@ import org.datavec.api.records.writer.RecordWriter; public interface OutputFormat { - public static final String OUTPUT_PATH = "org.nd4j.outputpath"; + String OUTPUT_PATH = "org.nd4j.outputpath"; /** * Create a record writer diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/BinaryComparable.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/BinaryComparable.java index 4f19f0b78..a75fe0b30 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/BinaryComparable.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/BinaryComparable.java @@ -34,7 +34,7 @@ public abstract class BinaryComparable implements Comparable { /** * Compare bytes from {#getBytes()}. 
- * @see org.apache.hadoop.io.WritableComparator#compareBytes(byte[],int,int,byte[],int,int) + * {@code org.apache.hadoop.io.WritableComparator#compareBytes(byte[], int, int, byte[], int, int)} */ public int compareTo(BinaryComparable other) { if (this == other) @@ -63,7 +63,7 @@ public abstract class BinaryComparable implements Comparable { /** * Return a hash of the bytes returned from {#getBytes()}. - * @see org.apache.hadoop.io.WritableComparator#hashBytes(byte[],int) + * {@code org.apache.hadoop.io.WritableComparator#hashBytes(byte[],int)} */ public int hashCode() { return WritableComparator.hashBytes(getBytes(), getLength()); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/DataInputBuffer.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/DataInputBuffer.java index 7491f95bd..be57e50a3 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/DataInputBuffer.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/DataInputBuffer.java @@ -50,7 +50,7 @@ public class DataInputBuffer extends DataInputStream { } } - private Buffer buffer; + private final Buffer buffer; /** Constructs a new empty buffer. 
*/ public DataInputBuffer() { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/DataOutputBuffer.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/DataOutputBuffer.java index 105ee2717..a43022885 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/DataOutputBuffer.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/DataOutputBuffer.java @@ -44,7 +44,7 @@ public class DataOutputBuffer extends DataOutputStream { public void write(DataInput in, int len) throws IOException { int newcount = count + len; if (newcount > buf.length) { - byte newbuf[] = new byte[Math.max(buf.length << 1, newcount)]; + byte[] newbuf = new byte[Math.max(buf.length << 1, newcount)]; System.arraycopy(buf, 0, newbuf, 0, count); buf = newbuf; } @@ -53,7 +53,7 @@ public class DataOutputBuffer extends DataOutputStream { } } - private Buffer buffer; + private final Buffer buffer; /** Constructs a new empty buffer. */ public DataOutputBuffer() { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/RawComparator.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/RawComparator.java index 4e3d056eb..0b4f83662 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/RawComparator.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/RawComparator.java @@ -25,6 +25,6 @@ import java.util.Comparator; public interface RawComparator extends Comparator { - public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2); + int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2); } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/WritableComparator.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/WritableComparator.java index 16cc4f35b..bcd2b8074 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/WritableComparator.java +++ 
b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/WritableComparator.java @@ -31,7 +31,7 @@ import java.util.HashMap; public class WritableComparator implements RawComparator { - private static HashMap comparators = new HashMap<>(); // registry + private static final HashMap comparators = new HashMap<>(); // registry /** Get a comparator for a {@link WritableComparable} implementation. */ public static synchronized WritableComparator get(Class c) { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/WritableUtils.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/WritableUtils.java index ebac8c856..7070ce47b 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/WritableUtils.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/WritableUtils.java @@ -229,7 +229,7 @@ public final class WritableUtils { /** * Serializes an integer to a binary stream with zero-compressed encoding. - * For -120 <= i <= 127, only one byte is used with the actual value. + * For -120 <= i <= 127, only one byte is used with the actual value. * For other values of i, the first byte value indicates whether the * integer is positive or negative, and the number of bytes that follow. * If the first byte value v is between -121 and -124, the following integer @@ -248,7 +248,7 @@ public final class WritableUtils { /** * Serializes a long to a binary stream with zero-compressed encoding. - * For -112 <= i <= 127, only one byte is used with the actual value. + * For -112 <= i <= 127, only one byte is used with the actual value. * For other values of i, the first byte value indicates whether the * long is positive or negative, and the number of bytes that follow. 
* If the first byte value v is between -113 and -120, the following long diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/converters/LabelWriterConverter.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/converters/LabelWriterConverter.java index 6dc7c807e..470f88417 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/converters/LabelWriterConverter.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/converters/LabelWriterConverter.java @@ -27,7 +27,7 @@ import org.datavec.api.writable.Writable; import java.util.List; public class LabelWriterConverter implements WritableConverter { - private List labels; + private final List labels; public LabelWriterConverter(List labels) { this.labels = labels; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/labels/PathLabelGenerator.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/labels/PathLabelGenerator.java index 5995c4967..d5bb50d2a 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/labels/PathLabelGenerator.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/labels/PathLabelGenerator.java @@ -35,7 +35,7 @@ public interface PathLabelGenerator extends Serializable { * If true: infer the set of possible label classes, and convert these to integer indexes. If when true, the * returned Writables should be text writables.
*
- * For regression use cases (or PathLabelGenerator classification instances that do their own label -> integer + * For regression use cases (or PathLabelGenerator classification instances that do their own label -> integer * assignment), this should return false. * * @return whether label classes should be inferred diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/serializers/SerializationFactory.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/serializers/SerializationFactory.java index fc60b8fdc..b57ee475e 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/serializers/SerializationFactory.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/io/serializers/SerializationFactory.java @@ -35,7 +35,7 @@ public class SerializationFactory extends Configured { private static final Logger LOG = LoggerFactory.getLogger(SerializationFactory.class.getName()); - private List> serializations = new ArrayList<>(); + private final List> serializations = new ArrayList<>(); /** *

@@ -47,7 +47,7 @@ public class SerializationFactory extends Configured { public SerializationFactory(Configuration conf) { super(conf); for (String serializerName : conf.getStrings("io.serializations", - new String[] {"org.apache.hadoop.io.serializer.WritableSerialization"})) { + "org.apache.hadoop.io.serializer.WritableSerialization")) { add(conf, serializerName); } } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/Buffer.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/Buffer.java index 8c6dbfa1f..496af6f72 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/Buffer.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/Buffer.java @@ -113,7 +113,7 @@ public class Buffer implements Comparable, Cloneable { /** * Change the capacity of the backing storage. - * The data is preserved if newCapacity >= getCount(). + * The data is preserved if newCapacity >= getCount(). * @param newCapacity The new capacity in bytes. */ public void setCapacity(int newCapacity) { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/IOUtils.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/IOUtils.java index 2a4793ada..dc1c6cae1 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/IOUtils.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/IOUtils.java @@ -209,9 +209,7 @@ public class IOUtils { * @return */ static String toCSVBuffer(Buffer buf) { - StringBuilder sb = new StringBuilder("#"); - sb.append(buf.toString()); - return sb.toString(); + return "#" + buf.toString(); } /** @@ -441,7 +439,7 @@ public class IOUtils { /** * Serializes a long to a binary stream with zero-compressed encoding. - * For -112 <= i <= 127, only one byte is used with the actual value. + * For -112 <= i <= 127, only one byte is used with the actual value. 
* For other values of i, the first byte value indicates whether the * long is positive or negative, and the number of bytes that follow. * If the first byte value v is between -113 and -120, the following long diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/RecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/RecordReader.java index 14d1da31d..a72793529 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/RecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/RecordReader.java @@ -99,8 +99,6 @@ public interface RecordReader extends AutoCloseable, Serializable, Configurable /** * Reset record reader iterator - * - * @return */ void reset(); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/ComposableRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/ComposableRecordReader.java index 96693a1a3..905854f03 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/ComposableRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/ComposableRecordReader.java @@ -39,7 +39,7 @@ import java.util.List; */ public class ComposableRecordReader extends BaseRecordReader { - private RecordReader[] readers; + private final RecordReader[] readers; public ComposableRecordReader(RecordReader... 
readers) { this.readers = readers; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/ConcatenatingRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/ConcatenatingRecordReader.java index e01d93ed1..ab436407a 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/ConcatenatingRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/ConcatenatingRecordReader.java @@ -35,7 +35,7 @@ import java.util.List; public class ConcatenatingRecordReader extends BaseRecordReader { - private RecordReader[] readers; + private final RecordReader[] readers; public ConcatenatingRecordReader(RecordReader... readers) { this.readers = readers; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/FileRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/FileRecordReader.java index 33a79b0c2..a9448b981 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/FileRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/FileRecordReader.java @@ -23,14 +23,14 @@ package org.datavec.api.records.reader.impl; import lombok.Getter; import lombok.Setter; import org.datavec.api.conf.Configuration; -import org.datavec.api.Record; +import org.datavec.api.records.Record; import org.datavec.api.records.metadata.RecordMetaData; import org.datavec.api.records.metadata.RecordMetaDataURI; import org.datavec.api.records.reader.BaseRecordReader; import org.datavec.api.split.InputSplit; import org.datavec.api.writable.IntWritable; import org.datavec.api.writable.Text; -import org.datavec.api.Writable; +import org.datavec.api.writable.Writable; import java.io.*; import java.net.URI; @@ -40,206 +40,202 @@ import java.util.*; /** * File reader/writer + * + * 
@author Adam Gibson */ public class FileRecordReader extends BaseRecordReader { - protected Iterator locationsIterator; - protected Configuration conf; - protected URI currentUri; - protected List labels; - protected boolean appendLabel = false; - @Getter - @Setter - protected String charset = StandardCharsets.UTF_8.name(); //Using String as StandardCharsets.UTF_8 is not serializable + protected Iterator locationsIterator; + protected Configuration conf; + protected URI currentUri; + protected List labels; + protected boolean appendLabel = false; + @Getter @Setter + protected String charset = StandardCharsets.UTF_8.name(); //Using String as StandardCharsets.UTF_8 is not serializable - public FileRecordReader() { - } + public FileRecordReader() {} - @Override - public void initialize(InputSplit split) throws IOException, InterruptedException { - super.initialize(split); - doInitialize(split); - } + @Override + public void initialize(InputSplit split) throws IOException, InterruptedException { + super.initialize(split); + doInitialize(split); + } - protected void doInitialize(InputSplit split) { + protected void doInitialize(InputSplit split) { - if (labels == null && appendLabel) { - URI[] locations = split.locations(); - if (locations.length > 0) { - Set labels = new HashSet<>(); - for (URI u : locations) { - String[] pathSplit = u.toString().split("[/\\\\]"); - labels.add(pathSplit[pathSplit.length - 2]); + if (labels == null && appendLabel) { + URI[] locations = split.locations(); + if (locations.length > 0) { + Set labels = new HashSet<>(); + for(URI u : locations){ + String[] pathSplit = u.toString().split("[/\\\\]"); + labels.add(pathSplit[pathSplit.length-2]); + } + this.labels = new ArrayList<>(labels); + Collections.sort(this.labels); + } } - this.labels = new ArrayList<>(labels); - Collections.sort(this.labels); - } - } - locationsIterator = split.locationsIterator(); - } - - @Override - public void initialize(Configuration conf, InputSplit split) - throws 
IOException, InterruptedException { - appendLabel = conf.getBoolean(APPEND_LABEL, true); - doInitialize(split); - this.inputSplit = split; - this.conf = conf; - } - - @Override - public List next() { - return nextRecord().getRecord(); - } - - private List loadFromStream(URI uri, InputStream next, Charset charset) { - List ret = new ArrayList<>(); - try { - if (!(next instanceof BufferedInputStream)) { - next = new BufferedInputStream(next); - } - String s = org.apache.commons.io.IOUtils.toString(next, charset); - ret.add(new Text(s)); - if (appendLabel) { - int idx = getLabel(uri); - ret.add(new IntWritable(idx)); - } - } catch (IOException e) { - throw new IllegalStateException("Error reading from input stream: " + uri); - } - return ret; - } - - /** - * Return the current label. The index of the current file's parent directory in the label list - * - * @return The index of the current file's parent directory - */ - public int getCurrentLabel() { - return getLabel(currentUri); - } - - public int getLabel(URI uri) { - String s = uri.toString(); - int lastIdx = Math.max(s.lastIndexOf('/'), - s.lastIndexOf('\\')); //Note: if neither are found, -1 is fine here - String sub = s.substring(0, lastIdx); - int secondLastIdx = Math.max(sub.lastIndexOf('/'), sub.lastIndexOf('\\')); - String name = s.substring(secondLastIdx + 1, lastIdx); - return labels.indexOf(name); - } - - public List getLabels() { - return labels; - } - - public void setLabels(List labels) { - this.labels = labels; - } - - @Override - public boolean hasNext() { - return locationsIterator.hasNext(); - } - - @Override - public void close() throws IOException { - - } - - @Override - public void setConf(Configuration conf) { - this.conf = conf; - } - - @Override - public Configuration getConf() { - return conf; - } - - @Override - public List> next(int num) { - List> ret = new ArrayList<>(num); - int numBatches = 0; - while (hasNext() && numBatches < num) { - ret.add(next()); + locationsIterator = 
split.locationsIterator(); } - return ret; - } - - @Override - public void reset() { - if (inputSplit == null) { - throw new UnsupportedOperationException("Cannot reset without first initializing"); - } - try { - doInitialize(inputSplit); - } catch (Exception e) { - throw new RuntimeException("Error during LineRecordReader reset", e); - } - } - - @Override - public boolean resetSupported() { - if (inputSplit != null) { - return inputSplit.resetSupported(); - } - return false; //reset() throws exception on reset() if inputSplit is null - } - - @Override - public List record(URI uri, DataInputStream dataInputStream) throws IOException { - invokeListeners(uri); - //Here: reading the entire file to a Text writable - BufferedReader br = new BufferedReader(new InputStreamReader(dataInputStream)); - StringBuilder sb = new StringBuilder(); - String line; - while ((line = br.readLine()) != null) { - sb.append(line).append("\n"); - } - return Collections.singletonList(new Text(sb.toString())); - } - - @Override - public Record nextRecord() { - URI next = locationsIterator.next(); - invokeListeners(next); - - List ret; - try (InputStream s = streamCreatorFn.apply(next)) { - ret = loadFromStream(next, s, Charset.forName(charset)); - } catch (IOException e) { - throw new RuntimeException("Error reading from stream for URI: " + next); + @Override + public void initialize(Configuration conf, InputSplit split) throws IOException, InterruptedException { + appendLabel = conf.getBoolean(APPEND_LABEL, true); + doInitialize(split); + this.inputSplit = split; + this.conf = conf; } - return new org.datavec.api.records.impl.Record(ret, - new RecordMetaDataURI(next, FileRecordReader.class)); - } - - @Override - public Record loadFromMetaData(RecordMetaData recordMetaData) throws IOException { - return loadFromMetaData(Collections.singletonList(recordMetaData)).get(0); - } - - @Override - public List loadFromMetaData(List recordMetaDatas) throws IOException { - List out = new ArrayList<>(); 
- - for (RecordMetaData meta : recordMetaDatas) { - URI uri = meta.getURI(); - - List list; - try (InputStream s = streamCreatorFn.apply(uri)) { - list = loadFromStream(uri, s, Charset.forName(charset)); - } catch (IOException e) { - throw new RuntimeException("Error reading from stream for URI: " + uri); - } - - out.add(new org.datavec.api.records.impl.Record(list, meta)); + @Override + public List next() { + return nextRecord().getRecord(); } - return out; - } + private List loadFromStream(URI uri, InputStream next, Charset charset) { + List ret = new ArrayList<>(); + try { + if(!(next instanceof BufferedInputStream)){ + next = new BufferedInputStream(next); + } + String s = org.apache.commons.io.IOUtils.toString(next, charset); + ret.add(new Text(s)); + if (appendLabel) { + int idx = getLabel(uri); + ret.add(new IntWritable(idx)); + } + } catch (IOException e) { + throw new IllegalStateException("Error reading from input stream: " + uri); + } + return ret; + } + + /** + * Return the current label. 
+ * The index of the current file's parent directory + * in the label list + * @return The index of the current file's parent directory + */ + public int getCurrentLabel() { + return getLabel(currentUri); + } + + public int getLabel(URI uri){ + String s = uri.toString(); + int lastIdx = Math.max(s.lastIndexOf('/'), s.lastIndexOf('\\')); //Note: if neither are found, -1 is fine here + String sub = s.substring(0, lastIdx); + int secondLastIdx = Math.max(sub.lastIndexOf('/'), sub.lastIndexOf('\\')); + String name = s.substring(secondLastIdx+1, lastIdx); + return labels.indexOf(name); + } + + public List getLabels() { + return labels; + } + + public void setLabels(List labels) { + this.labels = labels; + } + + @Override + public boolean hasNext() { + return locationsIterator.hasNext(); + } + + @Override + public void close() throws IOException { + + } + + @Override + public void setConf(Configuration conf) { + this.conf = conf; + } + + @Override + public Configuration getConf() { + return conf; + } + + @Override + public List> next(int num) { + List> ret = new ArrayList<>(num); + int numBatches = 0; + while (hasNext() && numBatches < num) { + ret.add(next()); + } + + return ret; + } + @Override + public void reset() { + if (inputSplit == null) + throw new UnsupportedOperationException("Cannot reset without first initializing"); + try { + doInitialize(inputSplit); + } catch (Exception e) { + throw new RuntimeException("Error during LineRecordReader reset", e); + } + } + + @Override + public boolean resetSupported() { + if(inputSplit != null){ + return inputSplit.resetSupported(); + } + return false; //reset() throws exception on reset() if inputSplit is null + } + + @Override + public List record(URI uri, DataInputStream dataInputStream) throws IOException { + invokeListeners(uri); + //Here: reading the entire file to a Text writable + BufferedReader br = new BufferedReader(new InputStreamReader(dataInputStream)); + StringBuilder sb = new StringBuilder(); + String line; 
+ while ((line = br.readLine()) != null) { + sb.append(line).append("\n"); + } + return Collections.singletonList(new Text(sb.toString())); + } + + @Override + public Record nextRecord() { + URI next = locationsIterator.next(); + invokeListeners(next); + + List ret; + try(InputStream s = streamCreatorFn.apply(next)) { + ret = loadFromStream(next, s, Charset.forName(charset)); + } catch (IOException e){ + throw new RuntimeException("Error reading from stream for URI: " + next); + } + + return new org.datavec.api.records.impl.Record(ret,new RecordMetaDataURI(next, FileRecordReader.class)); + } + + @Override + public Record loadFromMetaData(RecordMetaData recordMetaData) throws IOException { + return loadFromMetaData(Collections.singletonList(recordMetaData)).get(0); + } + + @Override + public List loadFromMetaData(List recordMetaDatas) throws IOException { + List out = new ArrayList<>(); + + for (RecordMetaData meta : recordMetaDatas) { + URI uri = meta.getURI(); + + List list; + try(InputStream s = streamCreatorFn.apply(uri)) { + list = loadFromStream(uri, s, Charset.forName(charset)); + } catch (IOException e){ + throw new RuntimeException("Error reading from stream for URI: " + uri); + } + + out.add(new org.datavec.api.records.impl.Record(list, meta)); + } + + return out; + } } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/LineRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/LineRecordReader.java index 94314393e..b05b739df 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/LineRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/LineRecordReader.java @@ -200,7 +200,7 @@ public class LineRecordReader extends BaseRecordReader { //Here: we are reading a single line from the DataInputStream BufferedReader br = new BufferedReader(new InputStreamReader(dataInputStream)); 
String line = br.readLine(); - return Collections.singletonList((Writable) new Text(line)); + return Collections.singletonList(new Text(line)); } protected Iterator getIterator(int location) { @@ -265,7 +265,7 @@ public class LineRecordReader extends BaseRecordReader { throw new IllegalArgumentException( "Invalid metadata; expected RecordMetaDataLine instance; got: " + rmd); } - list.add(new Triple<>(count++, (RecordMetaDataLine) rmd, (List) null)); + list.add(new Triple<>(count++, (RecordMetaDataLine) rmd, null)); if (rmd.getURI() != null) uris.add(rmd.getURI()); } @@ -332,7 +332,7 @@ public class LineRecordReader extends BaseRecordReader { throw new IllegalStateException("Could not get line " + nextLineIdx + " from URI " + currentURI + ": has only " + currentLineIdx + " lines"); } - t.setThird(Collections.singletonList(new Text(line))); + t.setThird(Collections.singletonList(new Text(line))); } } else { //Not URI based: String split, etc @@ -347,7 +347,7 @@ public class LineRecordReader extends BaseRecordReader { line = iterator.next(); currentLineIdx++; } - t.setThird(Collections.singletonList(new Text(line))); + t.setThird(Collections.singletonList(new Text(line))); } closeIfRequired(iterator); } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/collection/CollectionSequenceRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/collection/CollectionSequenceRecordReader.java index c87b541f8..e33f0a9ec 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/collection/CollectionSequenceRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/collection/CollectionSequenceRecordReader.java @@ -43,7 +43,7 @@ public class CollectionSequenceRecordReader extends BaseRecordReader implements /** * - * @param records Collection of sequences. 
For example, List>> where the inner two lists + * @param records Collection of sequences. For example, {@code List>>} where the inner two lists * are a sequence, and the outer list/collection is a list of sequences */ public CollectionSequenceRecordReader(Collection>> records) { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/csv/CSVMultiSequenceRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/csv/CSVMultiSequenceRecordReader.java index abcc113ae..5e4571f81 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/csv/CSVMultiSequenceRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/csv/CSVMultiSequenceRecordReader.java @@ -45,9 +45,9 @@ public class CSVMultiSequenceRecordReader extends CSVRecordReader implements Seq PAD } - private String sequenceSeparatorRegex; - private Mode mode; - private Writable padValue; + private final String sequenceSeparatorRegex; + private final Mode mode; + private final Writable padValue; /** * Create a sequence reader using the default value for skip lines (0), the default delimiter (',') and the default diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/csv/CSVNLinesSequenceRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/csv/CSVNLinesSequenceRecordReader.java index 71faf9d81..86e9c3c64 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/csv/CSVNLinesSequenceRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/csv/CSVNLinesSequenceRecordReader.java @@ -41,7 +41,7 @@ public class CSVNLinesSequenceRecordReader extends CSVRecordReader implements Se public static final String LINES_PER_SEQUENCE = NAME_SPACE + ".nlinespersequence"; private int 
nLinesPerSequence; - private String delimiter; + private final String delimiter; /** * No-arg constructor with the default number of lines per sequence (10) @@ -124,7 +124,7 @@ public class CSVNLinesSequenceRecordReader extends CSVRecordReader implements Se "Invalid metadata; expected RecordMetaDataLineInterval instance; got: " + rmd); } list.add(new Triple<>(count++, (RecordMetaDataLineInterval) rmd, - (List>) new ArrayList>())); + new ArrayList>())); } //Sort by starting line number: diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/csv/CSVVariableSlidingWindowRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/csv/CSVVariableSlidingWindowRecordReader.java index 1a25a2ab4..02a94f8d8 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/csv/CSVVariableSlidingWindowRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/csv/CSVVariableSlidingWindowRecordReader.java @@ -39,8 +39,8 @@ public class CSVVariableSlidingWindowRecordReader extends CSVRecordReader implem public static final String LINES_PER_SEQUENCE = NAME_SPACE + ".nlinespersequence"; private int maxLinesPerSequence; - private String delimiter; - private int stride; + private final String delimiter; + private final int stride; private LinkedList> queue; private boolean exhausted; @@ -60,7 +60,7 @@ public class CSVVariableSlidingWindowRecordReader extends CSVRecordReader implem /** * @param maxLinesPerSequence Number of lines in each sequence, use default delemiter(,) between entries in the same line - * @param stride Number of lines between records (increment window > 1 line) + * @param stride Number of lines between records (increment window > 1 line) */ public CSVVariableSlidingWindowRecordReader(int maxLinesPerSequence, int stride) { this(maxLinesPerSequence, 0, stride, String.valueOf(CSVRecordReader.DEFAULT_DELIMITER)); 
@@ -68,7 +68,7 @@ public class CSVVariableSlidingWindowRecordReader extends CSVRecordReader implem /** * @param maxLinesPerSequence Number of lines in each sequence, use default delemiter(,) between entries in the same line - * @param stride Number of lines between records (increment window > 1 line) + * @param stride Number of lines between records (increment window > 1 line) */ public CSVVariableSlidingWindowRecordReader(int maxLinesPerSequence, int stride, String delimiter) { this(maxLinesPerSequence, 0, stride, String.valueOf(CSVRecordReader.DEFAULT_DELIMITER)); @@ -78,7 +78,7 @@ public class CSVVariableSlidingWindowRecordReader extends CSVRecordReader implem * * @param maxLinesPerSequence Number of lines in each sequences * @param skipNumLines Number of lines to skip at the start of the file (only skipped once, not per sequence) - * @param stride Number of lines between records (increment window > 1 line) + * @param stride Number of lines between records (increment window > 1 line) * @param delimiter Delimiter between entries in the same line, for example "," */ public CSVVariableSlidingWindowRecordReader(int maxLinesPerSequence, int skipNumLines, int stride, String delimiter) { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/csv/SerializableCSVParser.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/csv/SerializableCSVParser.java index f8b033633..f9222fb42 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/csv/SerializableCSVParser.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/csv/SerializableCSVParser.java @@ -302,7 +302,7 @@ public class SerializableCSVParser implements Serializable { } /** - * precondition: sb.length() > 0 + * precondition: sb.length() > 0 * * @param sb A sequence of characters to examine * @return true if every character in the sequence is whitespace diff --git 
a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/inmemory/InMemoryRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/inmemory/InMemoryRecordReader.java index 105b2068a..d9023b46a 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/inmemory/InMemoryRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/inmemory/InMemoryRecordReader.java @@ -114,8 +114,6 @@ public class InMemoryRecordReader implements RecordReader { /** * Reset record reader iterator - * - * @return */ @Override public void reset() { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/inmemory/InMemorySequenceRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/inmemory/InMemorySequenceRecordReader.java index f97e5f28e..76be03200 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/inmemory/InMemorySequenceRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/inmemory/InMemorySequenceRecordReader.java @@ -195,8 +195,6 @@ public class InMemorySequenceRecordReader implements SequenceRecordReader { /** * Reset record reader iterator - * - * @return */ @Override public void reset() { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/jackson/FieldSelection.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/jackson/FieldSelection.java index 08644df9a..e3c36bb53 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/jackson/FieldSelection.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/jackson/FieldSelection.java @@ -31,8 +31,8 @@ public class FieldSelection implements Serializable { 
public static final Writable DEFAULT_MISSING_VALUE = new Text(""); - private List fieldPaths; - private List valueIfMissing; + private final List fieldPaths; + private final List valueIfMissing; private FieldSelection(Builder builder) { this.fieldPaths = builder.fieldPaths; @@ -53,8 +53,8 @@ public class FieldSelection implements Serializable { public static class Builder { - private List fieldPaths = new ArrayList<>(); - private List valueIfMissing = new ArrayList<>(); + private final List fieldPaths = new ArrayList<>(); + private final List valueIfMissing = new ArrayList<>(); /** diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/jackson/JacksonLineRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/jackson/JacksonLineRecordReader.java index 17f348e54..e759b6aa6 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/jackson/JacksonLineRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/jackson/JacksonLineRecordReader.java @@ -29,8 +29,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; public class JacksonLineRecordReader extends LineRecordReader { - private FieldSelection selection; - private ObjectMapper mapper; + private final FieldSelection selection; + private final ObjectMapper mapper; public JacksonLineRecordReader(FieldSelection selection, ObjectMapper mapper) { this.selection = selection; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/jackson/JacksonLineSequenceRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/jackson/JacksonLineSequenceRecordReader.java index 7b27cae0f..3c2d81f69 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/jackson/JacksonLineSequenceRecordReader.java +++ 
b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/jackson/JacksonLineSequenceRecordReader.java @@ -39,8 +39,8 @@ import java.util.NoSuchElementException; public class JacksonLineSequenceRecordReader extends FileRecordReader implements SequenceRecordReader { - private FieldSelection selection; - private ObjectMapper mapper; + private final FieldSelection selection; + private final ObjectMapper mapper; /** * diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/jackson/JacksonRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/jackson/JacksonRecordReader.java index 8e5e571e7..de0d41573 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/jackson/JacksonRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/jackson/JacksonRecordReader.java @@ -45,12 +45,12 @@ public class JacksonRecordReader extends BaseRecordReader { private static final TypeReference> typeRef = new TypeReference>() {}; - private FieldSelection selection; - private ObjectMapper mapper; - private boolean shuffle; - private long rngSeed; - private PathLabelGenerator labelGenerator; - private int labelPosition; + private final FieldSelection selection; + private final ObjectMapper mapper; + private final boolean shuffle; + private final long rngSeed; + private final PathLabelGenerator labelGenerator; + private final int labelPosition; private InputSplit is; private Random r; @Getter @Setter diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/misc/MatlabRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/misc/MatlabRecordReader.java index b9e52f33a..419c82c4d 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/misc/MatlabRecordReader.java +++ 
b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/misc/MatlabRecordReader.java @@ -35,7 +35,7 @@ import java.util.List; public class MatlabRecordReader extends FileRecordReader { - private List> records = new ArrayList<>(); + private final List> records = new ArrayList<>(); private Iterator> currIter; @Override diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/misc/SVMLightRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/misc/SVMLightRecordReader.java index 4534162bd..6d5bc5ea1 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/misc/SVMLightRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/misc/SVMLightRecordReader.java @@ -96,8 +96,6 @@ public class SVMLightRecordReader extends LineRecordReader { * Set configuration. * * @param conf DataVec configuration - * @throws IOException - * @throws InterruptedException */ @Override public void setConf(Configuration conf) { @@ -181,7 +179,7 @@ public class SVMLightRecordReader extends LineRecordReader { if (index < 0) throw new NumberFormatException(""); } catch (NumberFormatException e) { - String msg = String.format("Feature index must be positive integer (found %s)", featureTokens[i].toString()); + String msg = String.format("Feature index must be positive integer (found %s)", featureTokens[i]); throw new NumberFormatException(msg); } @@ -218,7 +216,7 @@ public class SVMLightRecordReader extends LineRecordReader { if (index < 0) throw new NumberFormatException(""); } catch (NumberFormatException e) { - String msg = String.format("Multilabel index must be positive integer (found %s)", labelTokens[i].toString()); + String msg = String.format("Multilabel index must be positive integer (found %s)", labelTokens[i]); throw new NumberFormatException(msg); } diff --git 
a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/regex/RegexLineRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/regex/RegexLineRecordReader.java index 298a5d931..3a216d784 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/regex/RegexLineRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/regex/RegexLineRecordReader.java @@ -41,11 +41,11 @@ import java.util.regex.Pattern; public class RegexLineRecordReader extends LineRecordReader { public final static String SKIP_NUM_LINES = NAME_SPACE + ".skipnumlines"; - private String regex; + private final String regex; private int skipNumLines; - private Pattern pattern; + private final Pattern pattern; private int numLinesSkipped; - private int currLine = 0; + private final int currLine = 0; public RegexLineRecordReader(String regex, int skipNumLines) { this.regex = regex; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/regex/RegexSequenceRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/regex/RegexSequenceRecordReader.java index 41b9f2e1b..ebf685d50 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/regex/RegexSequenceRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/regex/RegexSequenceRecordReader.java @@ -61,11 +61,11 @@ public class RegexSequenceRecordReader extends FileRecordReader implements Seque public static final Logger LOG = LoggerFactory.getLogger(RegexSequenceRecordReader.class); - private String regex; + private final String regex; private int skipNumLines; - private Pattern pattern; + private final Pattern pattern; private transient Charset charset; - private LineErrorHandling errorHandling; + private final LineErrorHandling 
errorHandling; public RegexSequenceRecordReader(String regex, int skipNumLines) { this(regex, skipNumLines, DEFAULT_CHARSET, DEFAULT_ERROR_HANDLING); @@ -92,7 +92,7 @@ public class RegexSequenceRecordReader extends FileRecordReader implements Seque @Override public List> sequenceRecord(URI uri, DataInputStream dataInputStream) throws IOException { - String fileContents = IOUtils.toString(new BufferedInputStream(dataInputStream), charset.name()); + String fileContents = IOUtils.toString(new BufferedInputStream(dataInputStream), charset); return loadSequence(fileContents, uri); } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/transform/TransformProcessRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/transform/TransformProcessRecordReader.java index 160b2c134..2b8a38d58 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/transform/TransformProcessRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/transform/TransformProcessRecordReader.java @@ -145,8 +145,6 @@ public class TransformProcessRecordReader implements RecordReader { /** * Reset record reader iterator - * - * @return */ @Override public void reset() { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/transform/TransformProcessSequenceRecordReader.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/transform/TransformProcessSequenceRecordReader.java index 7023e70b4..cb9213dea 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/transform/TransformProcessSequenceRecordReader.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/reader/impl/transform/TransformProcessSequenceRecordReader.java @@ -195,8 +195,6 @@ public class TransformProcessSequenceRecordReader 
implements SequenceRecordReade /** * Reset record reader iterator - * - * @return */ @Override public void reset() { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/writer/impl/misc/SVMLightRecordWriter.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/writer/impl/misc/SVMLightRecordWriter.java index 56db1df58..c15f9ede6 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/writer/impl/misc/SVMLightRecordWriter.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/records/writer/impl/misc/SVMLightRecordWriter.java @@ -94,7 +94,7 @@ public class SVMLightRecordWriter extends FileRecordWriter { @Override public PartitionMetaData write(List record) throws IOException { if (!record.isEmpty()) { - List recordList = record instanceof List ? (List) record : new ArrayList<>(record); + List recordList = record instanceof List ? record : new ArrayList<>(record); /* Infer label columns, if necessary. 
The default is * to assume that last column is a label and that the @@ -198,7 +198,7 @@ public class SVMLightRecordWriter extends FileRecordWriter { } // Remove extra label delimiter at beginning - String line = result.substring(1).toString(); + String line = result.substring(1); out.write(line.getBytes()); out.write(NEW_LINE.getBytes()); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/BaseInputSplit.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/BaseInputSplit.java index 428a1df2e..7b26e65cf 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/BaseInputSplit.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/BaseInputSplit.java @@ -124,9 +124,7 @@ public abstract class BaseInputSplit implements InputSplit { for (int i = 0; i < weights.length; i++) { List uris = new ArrayList<>(); - for (int j = partitions[i]; j < partitions[i + 1]; j++) { - uris.add(paths[j]); - } + uris.addAll(Arrays.asList(paths).subList(partitions[i], partitions[i + 1])); splits[i] = new CollectionInputSplit(uris); } return splits; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/FileSplit.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/FileSplit.java index 97183f346..c6fb48d3d 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/FileSplit.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/FileSplit.java @@ -138,7 +138,7 @@ public class FileSplit extends BaseInputSplit { return addNewLocation(new File(rootDir, UUID.randomUUID().toString()).toURI().toString()); else { //add a file in the same directory as the file with the same extension as the original file - return addNewLocation(new File(rootDir.getParent(), UUID.randomUUID().toString() + "." 
+ FilenameUtils.getExtension(rootDir.getAbsolutePath())).toURI().toString()); + return addNewLocation(new File(rootDir.getParent(), UUID.randomUUID() + "." + FilenameUtils.getExtension(rootDir.getAbsolutePath())).toURI().toString()); } } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/InputStreamInputSplit.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/InputStreamInputSplit.java index 7bd514745..fadb215cb 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/InputStreamInputSplit.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/InputStreamInputSplit.java @@ -31,7 +31,7 @@ import java.util.Iterator; public class InputStreamInputSplit implements InputSplit { private InputStream is; - private URI[] location; + private final URI[] location; /** * Instantiate with the given @@ -130,7 +130,7 @@ public class InputStreamInputSplit implements InputSplit { public Iterator locationsPathIterator() { if(location.length >= 1) return Collections.singletonList(location[0].getPath()).iterator(); - return Arrays.asList("").iterator(); + return Collections.singletonList("").iterator(); } @Override diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/ListStringSplit.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/ListStringSplit.java index d979bdad7..0d714e603 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/ListStringSplit.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/ListStringSplit.java @@ -33,7 +33,7 @@ import java.util.List; * has delimited data of some kind. 
*/ public class ListStringSplit implements InputSplit { - private List> data; + private final List> data; public ListStringSplit(List> data) { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/NumberedFileInputSplit.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/NumberedFileInputSplit.java index c61b1d591..b534e8d12 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/NumberedFileInputSplit.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/NumberedFileInputSplit.java @@ -43,12 +43,12 @@ public class NumberedFileInputSplit implements InputSplit { * the index of the file, possibly zero-padded to x digits if the pattern is in the form %0xd. * @param minIdxInclusive Minimum index/number (starting number in sequence of files, inclusive) * @param maxIdxInclusive Maximum index/number (last number in sequence of files, inclusive) - * @see {NumberedFileInputSplitTest} + * */ public NumberedFileInputSplit(String baseString, int minIdxInclusive, int maxIdxInclusive) { Matcher m = p.matcher(baseString); if (baseString == null || !m.find()) { - throw new IllegalArgumentException("Base String must match this regular expression: " + p.toString()); + throw new IllegalArgumentException("Base String must match this regular expression: " + p); } this.baseString = baseString; this.minIdx = minIdxInclusive; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/StringSplit.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/StringSplit.java index 8db924475..21f97fef7 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/StringSplit.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/split/StringSplit.java @@ -31,7 +31,7 @@ import java.util.Iterator; * @author Adam Gibson */ public class StringSplit implements InputSplit { - private String data; + private final String data; 
public StringSplit(String data) { this.data = data; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/TransformProcess.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/TransformProcess.java index dfd848ec3..9673c9a4f 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/TransformProcess.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/TransformProcess.java @@ -449,7 +449,7 @@ public class TransformProcess implements Serializable { /** * Infer the categories for the given record reader for a particular column * Note that each "column index" is a column in the context of: - * List record = ...; + * {@code List record = ...;} * record.get(columnIndex); * * Note that anything passed in as a column will be automatically converted to a @@ -483,7 +483,7 @@ public class TransformProcess implements Serializable { * if you have more than one column you plan on inferring categories for) * * Note that each "column index" is a column in the context of: - * List record = ...; + * {@code List record = ...;} * record.get(columnIndex); * * @@ -607,8 +607,8 @@ public class TransformProcess implements Serializable { */ public static class Builder { - private List actionList = new ArrayList<>(); - private Schema initialSchema; + private final List actionList = new ArrayList<>(); + private final Schema initialSchema; public Builder(Schema initialSchema) { this.initialSchema = initialSchema; @@ -1274,7 +1274,7 @@ public class TransformProcess implements Serializable { * not be modified. 
* * @param columnName Name of the column in which to do replacement - * @param mapping Map of oldValues -> newValues + * @param mapping Map of oldValues -> newValues */ public Builder stringMapTransform(String columnName, Map mapping) { return transform(new StringMapTransform(columnName, mapping)); @@ -1358,7 +1358,8 @@ public class TransformProcess implements Serializable { * Keys in the map are the regular expressions; the Values in the map are their String replacements. * For example: *

- * + *
+ * * * * @@ -1378,7 +1379,7 @@ public class TransformProcess implements Serializable { * * * - * + * * * * diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/columns/NDArrayAnalysis.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/columns/NDArrayAnalysis.java index c97d7c744..f4fff17c7 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/columns/NDArrayAnalysis.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/columns/NDArrayAnalysis.java @@ -55,7 +55,7 @@ public class NDArrayAnalysis implements ColumnAnalysis { public String toString() { Map sortedCountsByRank = new LinkedHashMap<>(); List keys = - new ArrayList<>(countsByRank == null ? Collections.emptySet() : countsByRank.keySet()); + new ArrayList<>(countsByRank == null ? Collections.emptySet() : countsByRank.keySet()); Collections.sort(keys); for (Integer i : keys) { sortedCountsByRank.put(i, countsByRank.get(i)); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/counter/IntegerAnalysisCounter.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/counter/IntegerAnalysisCounter.java index 0028e5e1d..0a37ac1d4 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/counter/IntegerAnalysisCounter.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/counter/IntegerAnalysisCounter.java @@ -101,8 +101,8 @@ public class IntegerAnalysisCounter implements AnalysisCounter countsByRank = new HashMap<>(); + private final Map countsByRank = new HashMap<>(); private double minValue = Double.MAX_VALUE; private double maxValue = -Double.MAX_VALUE; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/counter/StringAnalysisCounter.java 
b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/counter/StringAnalysisCounter.java index c31f35ad8..a18237513 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/counter/StringAnalysisCounter.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/counter/StringAnalysisCounter.java @@ -83,7 +83,7 @@ public class StringAnalysisCounter implements AnalysisCounter counts = new HashMap<>(); + private final HashMap counts = new HashMap<>(); - private List stateNames; + private final List stateNames; public CategoricalHistogramCounter(List stateNames) { this.stateNames = stateNames; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/json/TDigestDeserializer.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/json/TDigestDeserializer.java index dd4289906..9b2650e94 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/json/TDigestDeserializer.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/json/TDigestDeserializer.java @@ -34,8 +34,8 @@ import java.io.ObjectInputStream; public class TDigestDeserializer extends JsonDeserializer { @Override - public TDigest deserialize(JsonParser jp, DeserializationContext d) throws IOException, JsonProcessingException { - JsonNode node = (JsonNode)jp.getCodec().readTree(jp); + public TDigest deserialize(JsonParser jp, DeserializationContext d) throws IOException { + JsonNode node = jp.getCodec().readTree(jp); String field = node.get("digest").asText(); Base64 b = new Base64(); byte[] bytes = b.decode(field); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/json/TDigestSerializer.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/json/TDigestSerializer.java index c3bd4517a..e2ad09f0a 100644 
--- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/json/TDigestSerializer.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/json/TDigestSerializer.java @@ -33,7 +33,7 @@ import java.io.ObjectOutputStream; public class TDigestSerializer extends JsonSerializer { @Override - public void serialize(TDigest td, JsonGenerator j, SerializerProvider sp) throws IOException, JsonProcessingException { + public void serialize(TDigest td, JsonGenerator j, SerializerProvider sp) throws IOException { try(ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(baos)){ oos.writeObject(td); oos.close(); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/quality/bytes/BytesQualityAnalysisState.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/quality/bytes/BytesQualityAnalysisState.java index 409387600..f6c6e8c3c 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/quality/bytes/BytesQualityAnalysisState.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/quality/bytes/BytesQualityAnalysisState.java @@ -29,7 +29,7 @@ import org.datavec.api.writable.Writable; public class BytesQualityAnalysisState implements QualityAnalysisState { @Getter - private BytesQuality bytesQuality; + private final BytesQuality bytesQuality; public BytesQualityAnalysisState() { this.bytesQuality = new BytesQuality(); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/quality/categorical/CategoricalQualityAnalysisState.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/quality/categorical/CategoricalQualityAnalysisState.java index 5dc13406a..44aaac563 100644 --- 
a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/quality/categorical/CategoricalQualityAnalysisState.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/analysis/quality/categorical/CategoricalQualityAnalysisState.java @@ -31,8 +31,8 @@ public class CategoricalQualityAnalysisState implements QualityAnalysisState=, !=, etc) + * @param op Operation {@code (<, >=, !=, etc)} * @param value Value to use in the condition */ public DoubleColumnCondition(String columnName, ConditionOp op, double value) { @@ -54,7 +54,7 @@ public class DoubleColumnCondition extends BaseColumnCondition { * * @param column Column to check for the condition * @param sequenceConditionMode Mode for handling sequence data - * @param op Operation (<, >=, !=, etc) + * @param op Operation {@code (<, >=, !=, etc)} * @param value Value to use in the condition */ public DoubleColumnCondition(String column, SequenceConditionMode sequenceConditionMode, ConditionOp op, diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/condition/column/FloatColumnCondition.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/condition/column/FloatColumnCondition.java index be8ab40e6..f4d40b45e 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/condition/column/FloatColumnCondition.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/condition/column/FloatColumnCondition.java @@ -42,7 +42,7 @@ public class FloatColumnCondition extends BaseColumnCondition { * Uses default sequence condition mode, {@link BaseColumnCondition#DEFAULT_SEQUENCE_CONDITION_MODE} * * @param columnName Column to check for the condition - * @param op Operation (<, >=, !=, etc) + * @param op Operation {@code (<, >=, !=, etc)} * @param value Value to use in the condition */ public FloatColumnCondition(String columnName, ConditionOp op, float value) { @@ -54,7 +54,7 
@@ public class FloatColumnCondition extends BaseColumnCondition { * * @param column Column to check for the condition * @param sequenceConditionMode Mode for handling sequence data - * @param op Operation (<, >=, !=, etc) + * @param op Operation {@code (<, >=, !=, etc)} * @param value Value to use in the condition */ public FloatColumnCondition(String column, SequenceConditionMode sequenceConditionMode, ConditionOp op, diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/condition/column/IntegerColumnCondition.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/condition/column/IntegerColumnCondition.java index bd55caed5..0029eb044 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/condition/column/IntegerColumnCondition.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/condition/column/IntegerColumnCondition.java @@ -42,7 +42,7 @@ public class IntegerColumnCondition extends BaseColumnCondition { * Uses default sequence condition mode, {@link BaseColumnCondition#DEFAULT_SEQUENCE_CONDITION_MODE} * * @param columnName Column to check for the condition - * @param op Operation (<, >=, !=, etc) + * @param op Operation {@code (<, >=, !=, etc)} * @param value Value to use in the condition */ public IntegerColumnCondition(String columnName, ConditionOp op, int value) { @@ -54,7 +54,7 @@ public class IntegerColumnCondition extends BaseColumnCondition { * * @param column Column to check for the condition * @param sequenceConditionMode Mode for handling sequence data - * @param op Operation (<, >=, !=, etc) + * @param op Operation {@code (<, >=, !=, etc)} * @param value Value to use in the condition */ public IntegerColumnCondition(String column, SequenceConditionMode sequenceConditionMode, ConditionOp op, diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/condition/column/LongColumnCondition.java 
b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/condition/column/LongColumnCondition.java index 5855628fa..a83be4fcf 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/condition/column/LongColumnCondition.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/condition/column/LongColumnCondition.java @@ -42,7 +42,7 @@ public class LongColumnCondition extends BaseColumnCondition { * Uses default sequence condition mode, {@link BaseColumnCondition#DEFAULT_SEQUENCE_CONDITION_MODE} * * @param columnName Column to check for the condition - * @param op Operation (<, >=, !=, etc) + * @param op Operation {@code (<, >=, !=, etc)} * @param value Value to use in the condition */ public LongColumnCondition(String columnName, ConditionOp op, long value) { @@ -54,7 +54,7 @@ public class LongColumnCondition extends BaseColumnCondition { * * @param column Column to check for the condition * @param sequenceConditionMode Mode for handling sequence data - * @param op Operation (<, >=, !=, etc) + * @param op Operation {@code (<, >=, !=, etc)} * @param value Value to use in the condition */ public LongColumnCondition(String column, SequenceConditionMode sequenceConditionMode, ConditionOp op, long value) { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/condition/column/TimeColumnCondition.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/condition/column/TimeColumnCondition.java index 590ef4522..00c2714ce 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/condition/column/TimeColumnCondition.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/condition/column/TimeColumnCondition.java @@ -42,7 +42,7 @@ public class TimeColumnCondition extends BaseColumnCondition { * Uses default sequence condition mode, {@link BaseColumnCondition#DEFAULT_SEQUENCE_CONDITION_MODE} * * 
@param columnName Column to check for the condition - * @param op Operation (<, >=, !=, etc) + * @param op Operation {@code (<, >=, !=, etc)} * @param value Time value (in epoch millisecond format) to use in the condition */ public TimeColumnCondition(String columnName, ConditionOp op, long value) { @@ -54,7 +54,7 @@ public class TimeColumnCondition extends BaseColumnCondition { * * @param column Column to check for the condition * @param sequenceConditionMode Mode for handling sequence data - * @param op Operation (<, >=, !=, etc) + * @param op Operation {@code (<, >=, !=, etc)} * @param value Time value (in epoch millisecond format) to use in the condition */ public TimeColumnCondition(String column, SequenceConditionMode sequenceConditionMode, ConditionOp op, long value) { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/filter/FilterInvalidValues.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/filter/FilterInvalidValues.java index 54b6cfe07..3a5a35b68 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/filter/FilterInvalidValues.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/filter/FilterInvalidValues.java @@ -111,24 +111,18 @@ public class FilterInvalidValues implements Filter { private boolean filterColumn(List row, int i) { ColumnMetaData meta = schema.getMetaData(i); if (row.get(i) instanceof Float) { - if (!meta.isValid(new FloatWritable((Float) row.get(i)))) - return true; + return !meta.isValid(new FloatWritable((Float) row.get(i))); } else if (row.get(i) instanceof Double) { - if (!meta.isValid(new DoubleWritable((Double) row.get(i)))) - return true; + return !meta.isValid(new DoubleWritable((Double) row.get(i))); } else if (row.get(i) instanceof String) { - if (!meta.isValid(new Text(((String) row.get(i)).toString()))) - return true; + return !meta.isValid(new Text(((String) row.get(i)))); } else if (row.get(i) 
instanceof Integer) { - if (!meta.isValid(new IntWritable((Integer) row.get(i)))) - return true; + return !meta.isValid(new IntWritable((Integer) row.get(i))); } else if (row.get(i) instanceof Long) { - if (!meta.isValid(new LongWritable((Long) row.get(i)))) - return true; + return !meta.isValid(new LongWritable((Long) row.get(i))); } else if (row.get(i) instanceof Boolean) { - if (!meta.isValid(new BooleanWritable((Boolean) row.get(i)))) - return true; + return !meta.isValid(new BooleanWritable((Boolean) row.get(i))); } return false; } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/join/Join.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/join/Join.java index d723c1448..d71b3c0c5 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/join/Join.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/join/Join.java @@ -96,7 +96,7 @@ public class Join implements Serializable { public static class Builder { - private JoinType joinType; + private final JoinType joinType; private Schema leftSchema; private Schema rightSchema; private String[] joinColumnsLeft; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/BinaryMetaData.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/BinaryMetaData.java index 3acb56ded..91a9238f1 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/BinaryMetaData.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/BinaryMetaData.java @@ -84,9 +84,8 @@ public class BinaryMetaData extends BaseColumnMetaData { @Override public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("BinaryMetaData(name=\"").append(name).append("\","); - sb.append(")"); - return sb.toString(); + String sb = "BinaryMetaData(name=\"" + name + "\"," + + ")"; + return sb; 
} } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/BooleanMetaData.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/BooleanMetaData.java index 5fae67985..66d8872b1 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/BooleanMetaData.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/BooleanMetaData.java @@ -84,9 +84,8 @@ public class BooleanMetaData extends BaseColumnMetaData { @Override public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("BooleanMetaData(name=\"").append(name).append("\","); - sb.append(")"); - return sb.toString(); + String sb = "BooleanMetaData(name=\"" + name + "\"," + + ")"; + return sb; } } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/DoubleMetaData.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/DoubleMetaData.java index 6a3aee77c..aaa85a489 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/DoubleMetaData.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/DoubleMetaData.java @@ -84,10 +84,7 @@ public class DoubleMetaData extends BaseColumnMetaData { if (minAllowedValue != null && d < minAllowedValue) return false; - if (maxAllowedValue != null && d > maxAllowedValue) - return false; - - return true; + return maxAllowedValue == null || !(d > maxAllowedValue); } /** @@ -115,10 +112,7 @@ public class DoubleMetaData extends BaseColumnMetaData { if (minAllowedValue != null && d < minAllowedValue) return false; - if (maxAllowedValue != null && d > maxAllowedValue) - return false; - - return true; + return maxAllowedValue == null || !(d > maxAllowedValue); } @Override diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/FloatMetaData.java 
b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/FloatMetaData.java index 69f087433..7bcb7abe2 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/FloatMetaData.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/FloatMetaData.java @@ -84,10 +84,7 @@ public class FloatMetaData extends BaseColumnMetaData { if (minAllowedValue != null && d < minAllowedValue) return false; - if (maxAllowedValue != null && d > maxAllowedValue) - return false; - - return true; + return maxAllowedValue == null || d <= maxAllowedValue; } /** @@ -115,10 +112,7 @@ public class FloatMetaData extends BaseColumnMetaData { if (minAllowedValue != null && d < minAllowedValue) return false; - if (maxAllowedValue != null && d > maxAllowedValue) - return false; - - return true; + return maxAllowedValue == null || d <= maxAllowedValue; } @Override diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/IntegerMetaData.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/IntegerMetaData.java index 2bf3a2bdc..c856da307 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/IntegerMetaData.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/IntegerMetaData.java @@ -65,9 +65,7 @@ public class IntegerMetaData extends BaseColumnMetaData { if (minAllowedValue != null && value < minAllowedValue) return false; - if (maxAllowedValue != null && value > maxAllowedValue) - return false; - return true; + return maxAllowedValue == null || value <= maxAllowedValue; } /** @@ -90,9 +88,7 @@ public class IntegerMetaData extends BaseColumnMetaData { if (minAllowedValue != null && value < minAllowedValue) return false; - if (maxAllowedValue != null && value > maxAllowedValue) - return false; - return true; + return maxAllowedValue == null || value <= 
maxAllowedValue; } @Override diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/LongMetaData.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/LongMetaData.java index 66a49874d..01119430e 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/LongMetaData.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/metadata/LongMetaData.java @@ -66,10 +66,7 @@ public class LongMetaData extends BaseColumnMetaData { } if (minAllowedValue != null && value < minAllowedValue) return false; - if (maxAllowedValue != null && value > maxAllowedValue) - return false; - - return true; + return maxAllowedValue == null || value <= maxAllowedValue; } /** @@ -92,10 +89,7 @@ public class LongMetaData extends BaseColumnMetaData { if (minAllowedValue != null && value < minAllowedValue) return false; - if (maxAllowedValue != null && value > maxAllowedValue) - return false; - - return true; + return maxAllowedValue == null || value <= maxAllowedValue; } @Override diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/ops/AggregatorImpls.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/ops/AggregatorImpls.java index 18fbf9af6..ce1b2b94d 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/ops/AggregatorImpls.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/ops/AggregatorImpls.java @@ -97,9 +97,9 @@ public class AggregatorImpls { } else if (a instanceof Float || b instanceof Float) { return new Float(a.floatValue() + b.floatValue()); } else if (a instanceof Long || b instanceof Long) { - return new Long(a.longValue() + b.longValue()); + return Long.valueOf(a.longValue() + b.longValue()); } else { - return new Integer(a.intValue() + b.intValue()); + return Integer.valueOf(a.intValue() + b.intValue()); } } @@ -146,9 +146,9 @@ 
public class AggregatorImpls { } else if (a instanceof Float || b instanceof Float) { return new Float(a.floatValue() * b.floatValue()); } else if (a instanceof Long || b instanceof Long) { - return new Long(a.longValue() * b.longValue()); + return Long.valueOf(a.longValue() * b.longValue()); } else { - return new Integer(a.intValue() * b.intValue()); + return Integer.valueOf(a.intValue() * b.intValue()); } } @@ -347,7 +347,7 @@ public class AggregatorImpls { * of the square root of the arithmetic mean of squared differences to the mean, corrected with Bessel's correction. * * See https://en.wikipedia.org/wiki/Unbiased_estimation_of_standard_deviation - * This is computed with Welford's method for increased numerical stability & aggregability. + * This is computed with Welford's method for increased numerical stability & aggregability. */ public static class AggregableStdDev implements IAggregableReduceOp { @@ -402,7 +402,7 @@ public class AggregatorImpls { * of the square root of the arithmetic mean of squared differences to the mean. * * See https://en.wikipedia.org/wiki/Unbiased_estimation_of_standard_deviation - * This is computed with Welford's method for increased numerical stability & aggregability. + * This is computed with Welford's method for increased numerical stability & aggregability. */ public static class AggregableUncorrectedStdDev extends AggregableStdDev { @@ -418,7 +418,7 @@ public class AggregatorImpls { * of the arithmetic mean of squared differences to the mean, corrected with Bessel's correction. * * See https://en.wikipedia.org/wiki/Unbiased_estimation_of_standard_deviation - * This is computed with Welford's method for increased numerical stability & aggregability. + * This is computed with Welford's method for increased numerical stability & aggregability. 
*/ public static class AggregableVariance implements IAggregableReduceOp { @@ -474,7 +474,7 @@ public class AggregatorImpls { * of the arithmetic mean of squared differences to the mean. * * See https://en.wikipedia.org/wiki/Variance#Population_variance_and_sample_variance - * This is computed with Welford's method for increased numerical stability & aggregability. + * This is computed with Welford's method for increased numerical stability & aggregability. */ public static class AggregablePopulationVariance extends AggregableVariance { @@ -491,7 +491,7 @@ public class AggregatorImpls { * here. * * The relative accuracy is approximately `1.054 / sqrt(2^p)`. Setting - * a nonzero `sp > p` in HyperLogLogPlus(p, sp) would trigger sparse + * a nonzero `sp > p` in HyperLogLogPlus(p, sp) would trigger sparse * representation of registers, which may reduce the memory consumption * and increase accuracy when the cardinality is small. * @param @@ -501,7 +501,7 @@ public class AggregatorImpls { private float p = 0.05f; @Getter - private HyperLogLogPlus hll = new HyperLogLogPlus((int) Math.ceil(2.0 * Math.log(1.054 / p) / Math.log(2)), 0); + private final HyperLogLogPlus hll = new HyperLogLogPlus((int) Math.ceil(2.0 * Math.log(1.054 / p) / Math.log(2)), 0); public AggregableCountUnique(float precision) { this.p = precision; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/ops/DispatchWithConditionOp.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/ops/DispatchWithConditionOp.java index 6f44cac42..eb25b7b56 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/ops/DispatchWithConditionOp.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/ops/DispatchWithConditionOp.java @@ -36,7 +36,7 @@ public class DispatchWithConditionOp extends DispatchOp @Getter @NonNull - private List conditions; + private final List conditions; public 
DispatchWithConditionOp(List>> ops, List conds) { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/reduce/AggregableColumnReduction.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/reduce/AggregableColumnReduction.java index 6ed205b8b..b6db27c0f 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/reduce/AggregableColumnReduction.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/reduce/AggregableColumnReduction.java @@ -37,14 +37,13 @@ public interface AggregableColumnReduction extends Serializable, ColumnOp { * and NOT the single row * (as is usually the case for {@code List} instances * - * @param columnData The Writable objects for a column * @return Writable containing the reduced data */ IAggregableReduceOp> reduceOp(); /** * Post-reduce: what is the name of the column? - * For example, "myColumn" -> "mean(myColumn)" + * For example, "myColumn" -> "mean(myColumn)" * * @param columnInputName Name of the column before reduction * @return Name of the column after the reduction diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/reduce/ColumnReduction.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/reduce/ColumnReduction.java index 96a066c39..57a9fecf3 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/reduce/ColumnReduction.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/reduce/ColumnReduction.java @@ -43,7 +43,7 @@ public interface ColumnReduction extends Serializable, ColumnOp { /** * Post-reduce: what is the name of the column? 
- * For example, "myColumn" -> "mean(myColumn)" + * For example, "myColumn" -> "mean(myColumn)" * * @param columnInputName Name of the column before reduction * @return Name of the column after the reduction diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/reduce/Reducer.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/reduce/Reducer.java index 8536198f9..0979773a3 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/reduce/Reducer.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/reduce/Reducer.java @@ -291,11 +291,11 @@ public class Reducer implements IAssociativeReducer { public static class Builder { - private ReduceOp defaultOp; - private Map> opMap = new HashMap<>(); - private Map customReductions = new HashMap<>(); - private Map conditionalReductions = new HashMap<>(); - private Set ignoreInvalidInColumns = new HashSet<>(); + private final ReduceOp defaultOp; + private final Map> opMap = new HashMap<>(); + private final Map customReductions = new HashMap<>(); + private final Map conditionalReductions = new HashMap<>(); + private final Set ignoreInvalidInColumns = new HashSet<>(); private String[] keyColumns; @@ -480,7 +480,6 @@ public class Reducer implements IAssociativeReducer { * ignored/excluded. 
* * @param column Name of the column to execute the conditional reduction on - * @param outputName Name of the column, after the reduction has been executed * @param reductions Reductions to execute * @param condition Condition to use in the reductions */ @@ -500,7 +499,6 @@ public class Reducer implements IAssociativeReducer { * * @param column Name of the column to execute the conditional reduction on * @param outputName Name of the column, after the reduction has been executed - * @param reductions Reductions to execute * @param condition Condition to use in the reductions */ public Builder conditionalReduction(String column, String outputName, ReduceOp reduction, Condition condition) { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/reduce/impl/GeographicMidpointReduction.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/reduce/impl/GeographicMidpointReduction.java index 27933596f..b3538c8a7 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/reduce/impl/GeographicMidpointReduction.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/reduce/impl/GeographicMidpointReduction.java @@ -69,7 +69,7 @@ public class GeographicMidpointReduction implements AggregableColumnReduction { @Override public List getColumnOutputMetaData(List newColumnName, ColumnMetaData columnInputMeta) { - return Collections.singletonList(new StringMetaData(newColumnName.get(0))); + return Collections.singletonList(new StringMetaData(newColumnName.get(0))); } @Override @@ -111,7 +111,7 @@ public class GeographicMidpointReduction implements AggregableColumnReduction { public static class AverageCoordinateReduceOp implements IAggregableReduceOp> { private static final double PI_180 = Math.PI / 180.0; - private String delim; + private final String delim; private double sumx; private double sumy; @@ -186,7 +186,7 @@ public class GeographicMidpointReduction implements 
AggregableColumnReduction { Preconditions.checkState(!Double.isNaN(longDeg), "Final longitude is NaN"); String str = latDeg + delim + longDeg; - return Collections.singletonList(new Text(str)); + return Collections.singletonList(new Text(str)); } } } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/schema/conversion/TypeConversion.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/schema/conversion/TypeConversion.java index 1e6b4c87c..bc7fa2a98 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/schema/conversion/TypeConversion.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/schema/conversion/TypeConversion.java @@ -24,7 +24,7 @@ import org.datavec.api.writable.Writable; public class TypeConversion { - private static TypeConversion SINGLETON = new TypeConversion(); + private static final TypeConversion SINGLETON = new TypeConversion(); private TypeConversion() {} diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/sequence/split/SplitMaxLengthSequence.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/sequence/split/SplitMaxLengthSequence.java index 2dca4077e..1d80c1f5c 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/sequence/split/SplitMaxLengthSequence.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/sequence/split/SplitMaxLengthSequence.java @@ -44,7 +44,7 @@ public class SplitMaxLengthSequence implements SequenceSplit { /** * @param maxSequenceLength max length of sequences * @param equalSplits if true: split larger sequences into equal sized subsequences. 
If false: split into - * n maxSequenceLength sequences, and (if necessary) 1 with 1 <= length < maxSequenceLength + * n maxSequenceLength sequences, and (if necessary) 1 with 1 <= length < maxSequenceLength */ public SplitMaxLengthSequence(@JsonProperty("maxSequenceLength") int maxSequenceLength, @JsonProperty("equalSplits") boolean equalSplits) { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/BaseSerializer.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/BaseSerializer.java index 169b2b174..b5c1e7ceb 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/BaseSerializer.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/BaseSerializer.java @@ -295,7 +295,7 @@ public abstract class BaseSerializer { /** * Deserialize an IStringReducer List serialized using {@link #serializeReducerList(List)}, or - * an array serialized using {@link #serialize(IReducer[])} + * an array serialized using {@code #serialize(IReducer[])} * * @param str String representation (YAML/JSON) of the IStringReducer list * @return {@code List} diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/JsonMappers.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/JsonMappers.java index 7b28c2991..e70c6cb0c 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/JsonMappers.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/JsonMappers.java @@ -34,8 +34,8 @@ import com.fasterxml.jackson.datatype.joda.JodaModule; @Slf4j public class JsonMappers { - private static ObjectMapper jsonMapper; - private static ObjectMapper yamlMapper; + private static final ObjectMapper jsonMapper; + private static final ObjectMapper yamlMapper; private static ObjectMapper legacyMapper; //For 1.0.0-alpha and earlier TransformProcess 
etc static { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/JsonSerializer.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/JsonSerializer.java index 90d36ec1c..9733f9d8d 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/JsonSerializer.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/JsonSerializer.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; public class JsonSerializer extends BaseSerializer { - private ObjectMapper om; + private final ObjectMapper om; public JsonSerializer() { this.om = JsonMappers.getMapper(); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/ListWrappers.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/ListWrappers.java index 8e3b2ac56..efec02086 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/ListWrappers.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/ListWrappers.java @@ -37,7 +37,7 @@ public class ListWrappers { @Getter public static class TransformList { - private List list; + private final List list; public TransformList(@JsonProperty("list") List list) { this.list = list; @@ -46,7 +46,7 @@ public class ListWrappers { @Getter public static class FilterList { - private List list; + private final List list; public FilterList(@JsonProperty("list") List list) { this.list = list; @@ -55,7 +55,7 @@ public class ListWrappers { @Getter public static class ConditionList { - private List list; + private final List list; public ConditionList(@JsonProperty("list") List list) { this.list = list; @@ -64,7 +64,7 @@ public class ListWrappers { @Getter public static class ReducerList { - private List list; + private final List list; public ReducerList(@JsonProperty("list") List list) { this.list = list; @@ 
-73,7 +73,7 @@ public class ListWrappers { @Getter public static class SequenceComparatorList { - private List list; + private final List list; public SequenceComparatorList(@JsonProperty("list") List list) { this.list = list; @@ -82,7 +82,7 @@ public class ListWrappers { @Getter public static class DataActionList { - private List list; + private final List list; public DataActionList(@JsonProperty("list") List list) { this.list = list; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/YamlSerializer.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/YamlSerializer.java index 2afe02937..1e7a20846 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/YamlSerializer.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/serde/YamlSerializer.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; public class YamlSerializer extends BaseSerializer { - private ObjectMapper om; + private final ObjectMapper om; public YamlSerializer() { this.om = JsonMappers.getMapperYaml(); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/stringreduce/StringReducer.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/stringreduce/StringReducer.java index 907bd7d0c..17d3ef39b 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/stringreduce/StringReducer.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/stringreduce/StringReducer.java @@ -177,10 +177,10 @@ public class StringReducer implements IStringReducer { public static class Builder { - private StringReduceOp defaultOp; - private Map opMap = new HashMap<>(); - private Map customReductions = new HashMap<>(); - private Set ignoreInvalidInColumns = new HashSet<>(); + private final StringReduceOp defaultOp; + private final Map opMap = new HashMap<>(); + 
private final Map customReductions = new HashMap<>(); + private final Set ignoreInvalidInColumns = new HashSet<>(); private String outputColumnName; private List inputColumns; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/BaseColumnTransform.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/BaseColumnTransform.java index 6bea20d6c..67ef0ea43 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/BaseColumnTransform.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/BaseColumnTransform.java @@ -80,7 +80,7 @@ public abstract class BaseColumnTransform extends BaseTransform implements Colum if (writables.size() != inputSchema.numColumns()) { throw new IllegalStateException("Cannot execute transform: input writables list length (" + writables.size() + ") does not " + "match expected number of elements (schema: " + inputSchema.numColumns() - + "). Transform = " + toString()); + + "). 
Transform = " + this); } int n = writables.size(); List out = new ArrayList<>(n); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/categorical/CategoricalToIntegerTransform.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/categorical/CategoricalToIntegerTransform.java index 5afd6564e..236e0cc8e 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/categorical/CategoricalToIntegerTransform.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/categorical/CategoricalToIntegerTransform.java @@ -96,7 +96,7 @@ public class CategoricalToIntegerTransform extends BaseTransform { if (writables.size() != inputSchema.numColumns()) { throw new IllegalStateException("Cannot execute transform: input writables list length (" + writables.size() + ") does not " + "match expected number of elements (schema: " + inputSchema.numColumns() - + "). Transform = " + toString()); + + "). 
Transform = " + this); } int idx = getColumnIdx(); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/categorical/CategoricalToOneHotTransform.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/categorical/CategoricalToOneHotTransform.java index 56687431c..9a43b80fc 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/categorical/CategoricalToOneHotTransform.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/categorical/CategoricalToOneHotTransform.java @@ -123,7 +123,7 @@ public class CategoricalToOneHotTransform extends BaseTransform { if (writables.size() != inputSchema.numColumns()) { throw new IllegalStateException("Cannot execute transform: input writables list length (" + writables.size() + ") does not " + "match expected number of elements (schema: " + inputSchema.numColumns() - + "). Transform = " + toString()); + + "). Transform = " + this); } int idx = getColumnIdx(); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/categorical/IntegerToCategoricalTransform.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/categorical/IntegerToCategoricalTransform.java index e4f9debf9..881b88013 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/categorical/IntegerToCategoricalTransform.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/categorical/IntegerToCategoricalTransform.java @@ -89,7 +89,7 @@ public class IntegerToCategoricalTransform extends BaseColumnTransform { IntegerToCategoricalTransform o2 = (IntegerToCategoricalTransform) o; - return map != null ? 
map.equals(o2.map) : o2.map == null; + return Objects.equals(map, o2.map); } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/categorical/PivotTransform.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/categorical/PivotTransform.java index 39bc5c315..04b23f1e9 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/categorical/PivotTransform.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/categorical/PivotTransform.java @@ -169,7 +169,7 @@ public class PivotTransform extends BaseTransform { if (writables.size() != inputSchema.numColumns()) { throw new IllegalStateException("Cannot execute transform: input writables list length (" + writables.size() + ") does not " + "match expected number of elements (schema: " + inputSchema.numColumns() - + "). Transform = " + toString()); + + "). Transform = " + this); } int idxKey = inputSchema.getIndexOfColumn(keyColumn); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/column/DuplicateColumnsTransform.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/column/DuplicateColumnsTransform.java index 41f857c1a..62f419855 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/column/DuplicateColumnsTransform.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/column/DuplicateColumnsTransform.java @@ -112,7 +112,7 @@ public class DuplicateColumnsTransform implements Transform, ColumnOp { if (writables.size() != inputSchema.numColumns()) { throw new IllegalStateException("Cannot execute transform: input writables list length (" + writables.size() + ") does not " + "match expected number of elements (schema: " + inputSchema.numColumns() - + "). Transform = " + toString()); + + "). 
Transform = " + this); } List out = new ArrayList<>(writables.size() + columnsToDuplicate.size()); int i = 0; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/column/RemoveAllColumnsExceptForTransform.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/column/RemoveAllColumnsExceptForTransform.java index f71ab0d99..52e13cc8b 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/column/RemoveAllColumnsExceptForTransform.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/column/RemoveAllColumnsExceptForTransform.java @@ -89,7 +89,7 @@ public class RemoveAllColumnsExceptForTransform extends BaseTransform implements if (writables.size() != inputSchema.numColumns()) { throw new IllegalStateException("Cannot execute transform: input writables list length (" + writables.size() + ") does not " + "match expected number of elements (schema: " + inputSchema.numColumns() - + "). Transform = " + toString()); + + "). 
Transform = " + this); } List outList = new ArrayList<>(columnsToKeep.length); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/column/RemoveColumnsTransform.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/column/RemoveColumnsTransform.java index d5177a055..62de1b280 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/column/RemoveColumnsTransform.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/column/RemoveColumnsTransform.java @@ -123,7 +123,7 @@ public class RemoveColumnsTransform extends BaseTransform implements ColumnOp { String toString = StringUtils.join(list, ","); throw new IllegalStateException("Cannot execute transform: input writables list length (" + writables.size() + ") does not " + "match expected number of elements (schema: " + inputSchema.numColumns() - + "). Transform = " + toString() + " and record " + toString); + + "). 
Transform = " + this + " and record " + toString); } List outList = new ArrayList<>(writables.size() - columnsToRemove.length); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/integer/IntegerToOneHotTransform.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/integer/IntegerToOneHotTransform.java index ca27348ae..1bd907723 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/integer/IntegerToOneHotTransform.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/integer/IntegerToOneHotTransform.java @@ -103,7 +103,7 @@ public class IntegerToOneHotTransform extends BaseTransform { if (writables.size() != inputSchema.numColumns()) { throw new IllegalStateException("Cannot execute transform: input writables list length (" + writables.size() + ") does not " + "match expected number of elements (schema: " + inputSchema.numColumns() - + "). Transform = " + toString()); + + "). 
Transform = " + this); } int idx = getColumnIdx(); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/nlp/TextToCharacterIndexTransform.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/nlp/TextToCharacterIndexTransform.java index c882a76a2..20d1b1c2e 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/nlp/TextToCharacterIndexTransform.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/nlp/TextToCharacterIndexTransform.java @@ -57,7 +57,7 @@ public class TextToCharacterIndexTransform extends BaseSequenceExpansionTransfor @Override protected List expandedColumnMetaDatas(List origColumnMeta, List expandedColumnNames) { - return Collections.singletonList(new IntegerMetaData(expandedColumnNames.get(0), 0, characterIndexMap.size()-1)); + return Collections.singletonList(new IntegerMetaData(expandedColumnNames.get(0), 0, characterIndexMap.size()-1)); } @Override @@ -65,7 +65,7 @@ public class TextToCharacterIndexTransform extends BaseSequenceExpansionTransfor if(writableMap == null){ Map> m = new HashMap<>(); for(Map.Entry entry : characterIndexMap.entrySet()){ - m.put(entry.getKey(), Collections.singletonList(new IntWritable(entry.getValue()))); + m.put(entry.getKey(), Collections.singletonList(new IntWritable(entry.getValue()))); } writableMap = m; } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/nlp/TextToTermIndexSequenceTransform.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/nlp/TextToTermIndexSequenceTransform.java index 9adbf1771..fa2990e78 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/nlp/TextToTermIndexSequenceTransform.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/nlp/TextToTermIndexSequenceTransform.java @@ 
-84,7 +84,7 @@ public class TextToTermIndexSequenceTransform extends BaseSequenceExpansionTrans @Override protected List expandedColumnMetaDatas(List origColumnMeta, List expandedColumnNames) { - return Collections.singletonList(new IntegerMetaData(expandedColumnNames.get(0), 0, wordIndexMap.size()-1)); + return Collections.singletonList(new IntegerMetaData(expandedColumnNames.get(0), 0, wordIndexMap.size()-1)); } @Override @@ -92,7 +92,7 @@ public class TextToTermIndexSequenceTransform extends BaseSequenceExpansionTrans if(writableMap == null){ Map> m = new HashMap<>(); for(Map.Entry entry : wordIndexMap.entrySet()) { - m.put(entry.getKey(), Collections.singletonList(new IntWritable(entry.getValue()))); + m.put(entry.getKey(), Collections.singletonList(new IntWritable(entry.getValue()))); } writableMap = m; } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/sequence/SequenceDifferenceTransform.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/sequence/SequenceDifferenceTransform.java index 61bc30796..4ba0e8968 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/sequence/SequenceDifferenceTransform.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/sequence/SequenceDifferenceTransform.java @@ -68,7 +68,7 @@ public class SequenceDifferenceTransform implements Transform { * * @param columnName Name of the column to perform the operation on. * @param newColumnName New name for the column. May be same as the origina lcolumn name - * @param lookback Lookback period, in number of time steps. Must be > 0 + * @param lookback Lookback period, in number of time steps. 
Must be > 0 */ public SequenceDifferenceTransform(String columnName, String newColumnName, int lookback) { this(columnName, newColumnName, lookback, FirstStepMode.Default, null); @@ -80,7 +80,7 @@ public class SequenceDifferenceTransform implements Transform { * * @param columnName Name of the column to perform the operation on. * @param newColumnName New name for the column. May be same as the origina lcolumn name - * @param lookback Lookback period, in number of time steps. Must be > 0 + * @param lookback Lookback period, in number of time steps. Must be > 0 * @param firstStepMode see {@link FirstStepMode} * @param specifiedValueWritable Must be null if using FirstStepMode.Default, or non-null if using FirstStepMode.SpecifiedValue */ diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/string/StringListToCategoricalSetTransform.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/string/StringListToCategoricalSetTransform.java index 83d56fd7e..108da34e7 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/string/StringListToCategoricalSetTransform.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/string/StringListToCategoricalSetTransform.java @@ -123,7 +123,7 @@ public class StringListToCategoricalSetTransform extends BaseTransform { if (writables.size() != inputSchema.numColumns()) { throw new IllegalStateException("Cannot execute transform: input writables list length (" + writables.size() + ") does not " + "match expected number of elements (schema: " + inputSchema.numColumns() - + "). Transform = " + toString()); + + "). 
Transform = " + this); } int n = writables.size(); List out = new ArrayList<>(n); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/string/StringListToCountsNDArrayTransform.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/string/StringListToCountsNDArrayTransform.java index 9f3ff0dcf..e682dc099 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/string/StringListToCountsNDArrayTransform.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/string/StringListToCountsNDArrayTransform.java @@ -168,7 +168,7 @@ public class StringListToCountsNDArrayTransform extends BaseTransform { if (writables.size() != inputSchema.numColumns()) { throw new IllegalStateException("Cannot execute transform: input writables list length (" + writables.size() + ") does not " + "match expected number of elements (schema: " + inputSchema.numColumns() - + "). Transform = " + toString()); + + "). 
Transform = " + this); } int n = writables.size(); List out = new ArrayList<>(n); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/time/DeriveColumnsFromTimeTransform.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/time/DeriveColumnsFromTimeTransform.java index d1e290f7a..425b4cc68 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/time/DeriveColumnsFromTimeTransform.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/transform/time/DeriveColumnsFromTimeTransform.java @@ -147,7 +147,7 @@ public class DeriveColumnsFromTimeTransform implements Transform { if (writables.size() != inputSchema.numColumns()) { throw new IllegalStateException("Cannot execute transform: input writables list length (" + writables.size() + ") does not " + "match expected number of elements (schema: " + inputSchema.numColumns() - + "). Transform = " + toString()); + + "). 
Transform = " + this); } int i = 0; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/ui/components/RenderableComponentHistogram.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/ui/components/RenderableComponentHistogram.java index 7efa3d894..d18e9489c 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/ui/components/RenderableComponentHistogram.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/ui/components/RenderableComponentHistogram.java @@ -56,9 +56,9 @@ public class RenderableComponentHistogram extends RenderableComponent { public static class Builder { private String title; - private List lowerBounds = new ArrayList<>(); - private List upperBounds = new ArrayList<>(); - private List yValues = new ArrayList<>(); + private final List lowerBounds = new ArrayList<>(); + private final List upperBounds = new ArrayList<>(); + private final List yValues = new ArrayList<>(); private int marginTop = 60; private int marginBottom = 60; private int marginLeft = 60; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/ui/components/RenderableComponentLineChart.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/ui/components/RenderableComponentLineChart.java index f2cb8793b..735de97f6 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/ui/components/RenderableComponentLineChart.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/transform/ui/components/RenderableComponentLineChart.java @@ -65,9 +65,9 @@ public class RenderableComponentLineChart extends RenderableComponent { public static class Builder { private String title; - private List x = new ArrayList<>(); - private List y = new ArrayList<>(); - private List seriesNames = new ArrayList<>(); + private final List x = new ArrayList<>(); + private final List y = new ArrayList<>(); 
+ private final List seriesNames = new ArrayList<>(); private boolean removeAxisHorizontal = false; private boolean legend = true; diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/util/ReflectionUtils.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/util/ReflectionUtils.java index 7bc9618ca..826705f3a 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/util/ReflectionUtils.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/util/ReflectionUtils.java @@ -113,7 +113,7 @@ public class ReflectionUtils { /** * Allocate a buffer for each thread that tries to clone objects. */ - private static ThreadLocal cloneBuffers = new ThreadLocal() { + private static final ThreadLocal cloneBuffers = new ThreadLocal() { protected synchronized CopyInCopyOutBuffer initialValue() { return new CopyInCopyOutBuffer(); } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/util/jackson/DateTimeFieldTypeDeserializer.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/util/jackson/DateTimeFieldTypeDeserializer.java index 198b49755..b2a6e5788 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/util/jackson/DateTimeFieldTypeDeserializer.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/util/jackson/DateTimeFieldTypeDeserializer.java @@ -66,7 +66,7 @@ public class DateTimeFieldTypeDeserializer extends JsonDeserializer { @Override public void serialize(DateTimeFieldType dateTimeFieldType, JsonGenerator jsonGenerator, - SerializerProvider serializerProvider) throws IOException, JsonProcessingException { + SerializerProvider serializerProvider) throws IOException { jsonGenerator.writeStartObject(); jsonGenerator.writeStringField("fieldType", dateTimeFieldType.getName()); jsonGenerator.writeEndObject(); diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/util/ndarray/RecordConverter.java 
b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/util/ndarray/RecordConverter.java index 98cc7d339..360f2aa74 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/util/ndarray/RecordConverter.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/util/ndarray/RecordConverter.java @@ -234,7 +234,7 @@ public class RecordConverter { } /** - * Convert a collection into a `List`, i.e. a record that can be used with other datavec methods. + * Convert a collection into a {@code List}, i.e. a record that can be used with other datavec methods. * Uses a schema to decide what kind of writable to use. * * @return a record diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/vector/Vectorizer.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/vector/Vectorizer.java index 3c1529fa8..55c0dba4c 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/vector/Vectorizer.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/vector/Vectorizer.java @@ -81,7 +81,7 @@ public interface Vectorizer { * This allows for neat inheritance and polymorphism * for fit and fit/transform among other things */ - public static interface RecordCallBack { + interface RecordCallBack { /** * The record callback * @param record diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/BooleanWritable.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/BooleanWritable.java index bec23e5e2..7fe2aadbc 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/BooleanWritable.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/BooleanWritable.java @@ -95,7 +95,7 @@ public class BooleanWritable implements WritableComparable { public int compareTo(Object o) { boolean a = this.value; boolean b = ((BooleanWritable) o).value; - return ((a == b) ? 0 : (a == false) ? 
-1 : 1); + return ((a == b) ? 0 : (!a) ? -1 : 1); } public String toString() { diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/ByteWritable.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/ByteWritable.java index f2f098cd8..68bf9ebd6 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/ByteWritable.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/ByteWritable.java @@ -65,15 +65,15 @@ public class ByteWritable implements WritableComparable { public boolean fuzzyEquals(Writable o, double tolerance) { double other; if (o instanceof IntWritable){ - other = ((IntWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof LongWritable) { - other = ((LongWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof ByteWritable) { - other = ((ByteWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof DoubleWritable) { - other = ((DoubleWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof FloatWritable) { - other = ((FloatWritable) o).toDouble(); + other = o.toDouble(); } else { return false; } return DoubleMath.fuzzyEquals(this.value, other, tolerance); } @@ -90,7 +90,7 @@ public class ByteWritable implements WritableComparable { } public int hashCode() { - return (int)value; + return value; } /** Compares two ByteWritables. 
*/ diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/DoubleWritable.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/DoubleWritable.java index ed795e958..8a6ef79ed 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/DoubleWritable.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/DoubleWritable.java @@ -69,15 +69,15 @@ public class DoubleWritable implements WritableComparable { public boolean fuzzyEquals(Writable o, double tolerance) { double other; if (o instanceof IntWritable){ - other = ((IntWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof LongWritable) { - other = ((LongWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof ByteWritable) { - other = ((ByteWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof DoubleWritable) { - other = ((DoubleWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof FloatWritable) { - other = ((FloatWritable) o).toDouble(); + other = o.toDouble(); } else { return false; } return DoubleMath.fuzzyEquals(this.value, other, tolerance); } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/FloatWritable.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/FloatWritable.java index c98bc78f3..783e77b9a 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/FloatWritable.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/FloatWritable.java @@ -66,15 +66,15 @@ public class FloatWritable implements WritableComparable { public boolean fuzzyEquals(Writable o, double tolerance) { double other; if (o instanceof IntWritable){ - other = ((IntWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof LongWritable) { - other = ((LongWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof 
ByteWritable) { - other = ((ByteWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof DoubleWritable) { - other = ((DoubleWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof FloatWritable) { - other = ((FloatWritable) o).toDouble(); + other = o.toDouble(); } else { return false; } return DoubleMath.fuzzyEquals(this.value, other, tolerance); } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/IntWritable.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/IntWritable.java index 37d74df2f..56739a8f6 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/IntWritable.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/IntWritable.java @@ -66,15 +66,15 @@ public class IntWritable implements WritableComparable { public boolean fuzzyEquals(Writable o, double tolerance) { double other; if (o instanceof IntWritable){ - other = ((IntWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof LongWritable) { - other = ((LongWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof ByteWritable) { - other = ((ByteWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof DoubleWritable) { - other = ((DoubleWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof FloatWritable) { - other = ((FloatWritable) o).toDouble(); + other = o.toDouble(); } else { return false; } return DoubleMath.fuzzyEquals(this.value, other, tolerance); } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/LongWritable.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/LongWritable.java index 4b7dc3d35..599bde104 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/LongWritable.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/LongWritable.java @@ -65,15 +65,15 @@ 
public class LongWritable implements WritableComparable { public boolean fuzzyEquals(Writable o, double tolerance) { double other; if (o instanceof IntWritable){ - other = ((IntWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof LongWritable) { - other = ((LongWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof ByteWritable) { - other = ((ByteWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof DoubleWritable) { - other = ((DoubleWritable) o).toDouble(); + other = o.toDouble(); } else if (o instanceof FloatWritable) { - other = ((FloatWritable) o).toDouble(); + other = o.toDouble(); } else { return false; } return DoubleMath.fuzzyEquals(this.value, other, tolerance); } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/NDArrayWritable.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/NDArrayWritable.java index 383ac3aac..cf3d154e3 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/NDArrayWritable.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/NDArrayWritable.java @@ -184,7 +184,7 @@ public class NDArrayWritable extends ArrayWritable implements WritableComparable } for (int i = 0; i < array.rank(); i++) { - if (Long.compare(array.size(i), other.array.size(i)) != 0) { + if (array.size(i) != other.array.size(i)) { return Long.compare(array.size(i), other.array.size(i)); } } diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/Text.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/Text.java index 43dc58036..b36452a0d 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/Text.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/Text.java @@ -39,14 +39,14 @@ import java.text.StringCharacterIterator; public class Text extends BinaryComparable implements 
WritableComparable { - private static ThreadLocal ENCODER_FACTORY = new ThreadLocal() { + private static final ThreadLocal ENCODER_FACTORY = new ThreadLocal() { protected CharsetEncoder initialValue() { return StandardCharsets.UTF_8.newEncoder().onMalformedInput(CodingErrorAction.REPORT) .onUnmappableCharacter(CodingErrorAction.REPORT); } }; - private static ThreadLocal DECODER_FACTORY = new ThreadLocal() { + private static final ThreadLocal DECODER_FACTORY = new ThreadLocal() { protected CharsetDecoder initialValue() { return StandardCharsets.UTF_8.newDecoder().onMalformedInput(CodingErrorAction.REPORT) .onUnmappableCharacter(CodingErrorAction.REPORT); @@ -106,7 +106,7 @@ public class Text extends BinaryComparable implements WritableComparable> map = new ConcurrentHashMap<>(); - private final Map> constructorMap = new ConcurrentHashMap<>(); + private final Map> map = new ConcurrentHashMap<>(); + private final Map> constructorMap = new ConcurrentHashMap<>(); - private WritableFactory() { - for (WritableType wt : WritableType.values()) { - if (wt.isCoreWritable()) { - registerWritableType((short) wt.ordinal(), wt.getWritableClass()); - } - } - } - - /** - * @return Singleton WritableFactory instance - */ - public static WritableFactory getInstance() { - return INSTANCE; - } - - /** - * Register a writable class with a specific key (as a short). Note that key values must be unique - * for each type of Writable, as they are used as type information in certain types of - * serialisation. Consequently, an exception will be thrown If the key value is not unique or is - * already assigned.
Note that in general, this method needs to only be used for custom - * Writable types; Care should be taken to ensure that the given key does not change once - * assigned. - * - * @param writableTypeKey Key for the Writable - * @param writableClass Class for the given key. Must have a no-arg constructor - */ - public void registerWritableType(short writableTypeKey, - @NonNull Class writableClass) { - if (map.containsKey(writableTypeKey)) { - throw new UnsupportedOperationException( - "Key " + writableTypeKey + " is already registered to type " - + map.get(writableTypeKey) + " and cannot be registered to " + writableClass); + private WritableFactory() { + for (WritableType wt : WritableType.values()) { + if (wt.isCoreWritable()) { + registerWritableType((short) wt.ordinal(), wt.getWritableClass()); + } + } } - Constructor c; - try { - c = writableClass.getDeclaredConstructor(); - } catch (NoSuchMethodException e) { - throw new RuntimeException("Cannot find no-arg constructor for class " + writableClass); + /** + * @return Singleton WritableFactory instance + */ + public static WritableFactory getInstance() { + return INSTANCE; } - map.put(writableTypeKey, writableClass); - constructorMap.put(writableTypeKey, c); - } + /** + * Register a writable class with a specific key (as a short). Note that key values must be unique for each type of + * Writable, as they are used as type information in certain types of serialisation. Consequently, an exception will + * be thrown If the key value is not unique or is already assigned.
+ * Note that in general, this method needs to only be used for custom Writable types; Care should be taken to ensure + * that the given key does not change once assigned. + * + * @param writableTypeKey Key for the Writable + * @param writableClass Class for the given key. Must have a no-arg constructor + */ + public void registerWritableType(short writableTypeKey, @NonNull Class writableClass) { + if (map.containsKey(writableTypeKey)) { + throw new UnsupportedOperationException("Key " + writableTypeKey + " is already registered to type " + + map.get(writableTypeKey) + " and cannot be registered to " + writableClass); + } - /** - * Create a new writable instance (using reflection) given the specified key - * - * @param writableTypeKey Key to create a new writable instance for - * @return A new (empty/default) Writable instance - */ - public Writable newWritable(short writableTypeKey) { - Constructor c = constructorMap.get(writableTypeKey); - if (c == null) { - throw new IllegalStateException("Unknown writable key: " + writableTypeKey); + Constructor c; + try { + c = writableClass.getDeclaredConstructor(); + } catch (NoSuchMethodException e) { + throw new RuntimeException("Cannot find no-arg constructor for class " + writableClass); + } + + map.put(writableTypeKey, writableClass); + constructorMap.put(writableTypeKey, c); } - try { - return c.newInstance(); - } catch (Exception e) { - throw new RuntimeException("Could not create new Writable instance"); + + /** + * Create a new writable instance (using reflection) given the specified key + * + * @param writableTypeKey Key to create a new writable instance for + * @return A new (empty/default) Writable instance + */ + public Writable newWritable(short writableTypeKey) { + Constructor c = constructorMap.get(writableTypeKey); + if (c == null) { + throw new IllegalStateException("Unknown writable key: " + writableTypeKey); + } + try { + return c.newInstance(); + } catch (Exception e) { + throw new 
RuntimeException("Could not create new Writable instance"); + } } - } - /** - * A convenience method for writing a given Writable object to a DataOutput. The key is 1st - * written (a single short) followed by the value from writable. - * - * @param w Writable value - * @param dataOutput DataOutput to write both key and value to - * @throws IOException If an error occurs during writing to the DataOutput - */ - public void writeWithType(Writable w, DataOutput dataOutput) throws IOException { - w.writeType(dataOutput); - w.write(dataOutput); - } + /** + * A convenience method for writing a given Writable object to a DataOutput. The key is 1st written (a single short) + * followed by the value from writable. + * + * @param w Writable value + * @param dataOutput DataOutput to write both key and value to + * @throws IOException If an error occurs during writing to the DataOutput + */ + public void writeWithType(Writable w, DataOutput dataOutput) throws IOException { + w.writeType(dataOutput); + w.write(dataOutput); + } - /** - * Read a Writable From the DataInput, where the Writable was previously written using - * {@link #writeWithType(Writable, DataOutput)} - * - * @param dataInput DataInput to read the Writable from - * @return Writable from the DataInput - * @throws IOException In an error occurs during reading - */ - public Writable readWithType(DataInput dataInput) throws IOException { - Writable w = newWritable(dataInput.readShort()); - w.readFields(dataInput); - return w; - } + /** + * Read a Writable From the DataInput, where the Writable was previously written using {@link #writeWithType(Writable, DataOutput)} + * + * @param dataInput DataInput to read the Writable from + * @return Writable from the DataInput + * @throws IOException In an error occurs during reading + */ + public Writable readWithType(DataInput dataInput) throws IOException { + Writable w = newWritable(dataInput.readShort()); + w.readFields(dataInput); + return w; + } } diff --git 
a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/batch/AbstractWritableRecordBatch.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/batch/AbstractWritableRecordBatch.java index b2d4b7621..715d2a674 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/batch/AbstractWritableRecordBatch.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/batch/AbstractWritableRecordBatch.java @@ -139,7 +139,7 @@ public abstract class AbstractWritableRecordBatch implements List public static class RecordBatchListIterator implements ListIterator> { private int index; - private AbstractWritableRecordBatch underlying; + private final AbstractWritableRecordBatch underlying; public RecordBatchListIterator(AbstractWritableRecordBatch underlying){ this.underlying = underlying; diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVLineSequenceRecordReaderTest.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVLineSequenceRecordReaderTest.java index 0817973a7..bfc531e7c 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVLineSequenceRecordReaderTest.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVLineSequenceRecordReaderTest.java @@ -53,15 +53,15 @@ public class CSVLineSequenceRecordReaderTest extends BaseND4JTest { rr.initialize(new FileSplit(source)); List> exp0 = Arrays.asList( - Collections.singletonList(new Text("a")), - Collections.singletonList(new Text("b")), - Collections.singletonList(new Text("c"))); + Collections.singletonList(new Text("a")), + Collections.singletonList(new Text("b")), + Collections.singletonList(new Text("c"))); List> exp1 = Arrays.asList( - Collections.singletonList(new Text("1")), - Collections.singletonList(new Text("2")), - Collections.singletonList(new Text("3")), - 
Collections.singletonList(new Text("4"))); + Collections.singletonList(new Text("1")), + Collections.singletonList(new Text("2")), + Collections.singletonList(new Text("3")), + Collections.singletonList(new Text("4"))); for( int i=0; i<3; i++ ) { int count = 0; diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVMultiSequenceRecordReaderTest.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVMultiSequenceRecordReaderTest.java index 882fc628f..e52f71cc2 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVMultiSequenceRecordReaderTest.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVMultiSequenceRecordReaderTest.java @@ -78,12 +78,12 @@ public class CSVMultiSequenceRecordReaderTest extends BaseND4JTest { List> exp0 = new ArrayList<>(); for (String s : "a,b,c,1,2,3,4,x,y".split(",")) { - exp0.add(Collections.singletonList(new Text(s))); + exp0.add(Collections.singletonList(new Text(s))); } List> exp1 = new ArrayList<>(); for (String s : "A,B,C".split(",")) { - exp1.add(Collections.singletonList(new Text(s))); + exp1.add(Collections.singletonList(new Text(s))); } assertEquals(exp0, seqRR.sequenceRecord()); @@ -131,10 +131,10 @@ public class CSVMultiSequenceRecordReaderTest extends BaseND4JTest { List> exp0 = Arrays.asList( - Arrays.asList(new Text("a"), new Text("1"), new Text("x")), - Arrays.asList(new Text("b"), new Text("2"), new Text("y"))); + Arrays.asList(new Text("a"), new Text("1"), new Text("x")), + Arrays.asList(new Text("b"), new Text("2"), new Text("y"))); - List> exp1 = Collections.singletonList(Arrays.asList(new Text("A"), new Text("B"), new Text("C"))); + List> exp1 = Collections.singletonList(Arrays.asList(new Text("A"), new Text("B"), new Text("C"))); assertEquals(exp0, seqRR.sequenceRecord()); assertEquals(exp1, seqRR.sequenceRecord()); @@ -181,10 +181,10 @@ public class 
CSVMultiSequenceRecordReaderTest extends BaseND4JTest { List> exp0 = Arrays.asList( - Arrays.asList(new Text("a"), new Text("1"), new Text("x")), - Arrays.asList(new Text("b"), new Text("PAD"), new Text("PAD"))); + Arrays.asList(new Text("a"), new Text("1"), new Text("x")), + Arrays.asList(new Text("b"), new Text("PAD"), new Text("PAD"))); - List> exp1 = Collections.singletonList(Arrays.asList(new Text("A"), new Text("B"), new Text("C"))); + List> exp1 = Collections.singletonList(Arrays.asList(new Text("A"), new Text("B"), new Text("C"))); assertEquals(exp0, seqRR.sequenceRecord()); assertEquals(exp1, seqRR.sequenceRecord()); diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVRecordReaderTest.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVRecordReaderTest.java index 0b54b7147..16ed450df 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVRecordReaderTest.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVRecordReaderTest.java @@ -265,19 +265,19 @@ public class CSVRecordReaderTest extends BaseND4JTest { Assertions.assertThrows(NoSuchElementException.class, () -> { final int numLines = 4; - final List lineList = Arrays.asList((Writable) new IntWritable(numLines - 1), - (Writable) new Text("one"), (Writable) new Text("two"), (Writable) new Text("three")); + final List lineList = Arrays.asList(new IntWritable(numLines - 1), + new Text("one"), new Text("two"), new Text("three")); String header = ",one,two,three"; List lines = new ArrayList<>(); for (int i = 0; i < numLines; i++) - lines.add(Integer.toString(i) + header); + lines.add(i + header); File tempFile = File.createTempFile("csvSkipLines", ".csv"); FileUtils.writeLines(tempFile, lines); CSVRecordReader rr = new CSVRecordReader(numLines, ','); rr.initialize(new FileSplit(tempFile)); rr.reset(); - assertTrue(!rr.hasNext()); + 
assertFalse(rr.hasNext()); rr.next(); }); } @@ -285,12 +285,12 @@ public class CSVRecordReaderTest extends BaseND4JTest { @Test public void testCsvSkipAllButOneLine() throws IOException, InterruptedException { final int numLines = 4; - final List lineList = Arrays.asList(new Text(Integer.toString(numLines - 1)), + final List lineList = Arrays.asList(new Text(Integer.toString(numLines - 1)), new Text("one"), new Text("two"), new Text("three")); String header = ",one,two,three"; List lines = new ArrayList<>(); for (int i = 0; i < numLines; i++) - lines.add(Integer.toString(i) + header); + lines.add(i + header); File tempFile = File.createTempFile("csvSkipLines", ".csv"); FileUtils.writeLines(tempFile, lines); diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/JacksonLineRecordReaderTest.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/JacksonLineRecordReaderTest.java index 883f0e0d4..3fcb9e9f5 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/JacksonLineRecordReaderTest.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/JacksonLineRecordReaderTest.java @@ -100,12 +100,12 @@ public class JacksonLineRecordReaderTest extends BaseND4JTest { rr.initialize(new CollectionInputSplit(u)); List> expSeq0 = new ArrayList<>(); - expSeq0.add(Arrays.asList((Writable) new Text("aValue0"), new Text("bValue0"), new Text("cxValue0"))); - expSeq0.add(Arrays.asList((Writable) new Text("aValue1"), new Text("MISSING_B"), new Text("cxValue1"))); - expSeq0.add(Arrays.asList((Writable) new Text("aValue2"), new Text("bValue2"), new Text("MISSING_CX"))); + expSeq0.add(Arrays.asList(new Text("aValue0"), new Text("bValue0"), new Text("cxValue0"))); + expSeq0.add(Arrays.asList(new Text("aValue1"), new Text("MISSING_B"), new Text("cxValue1"))); + expSeq0.add(Arrays.asList(new Text("aValue2"), new Text("bValue2"), new 
Text("MISSING_CX"))); List> expSeq1 = new ArrayList<>(); - expSeq1.add(Arrays.asList((Writable) new Text("aValue3"), new Text("bValue3"), new Text("cxValue3"))); + expSeq1.add(Arrays.asList(new Text("aValue3"), new Text("bValue3"), new Text("cxValue3"))); int count = 0; diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/JacksonRecordReaderTest.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/JacksonRecordReaderTest.java index b6f13adbd..08b94fdec 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/JacksonRecordReaderTest.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/JacksonRecordReaderTest.java @@ -65,7 +65,7 @@ public class JacksonRecordReaderTest extends BaseND4JTest { //For third JSON file: c:x:value is missing ClassPathResource cpr = new ClassPathResource("datavec-api/json/"); - File f = new File(FileUtils.getTempDirectoryPath()+File.separatorChar+ UUID.randomUUID().toString()); + File f = new File(FileUtils.getTempDirectoryPath()+File.separatorChar+ UUID.randomUUID()); FileUtils.forceMkdir(f); cpr.copyDirectory(f); String path = new File(f, "json_test_%d.txt").getAbsolutePath(); @@ -83,7 +83,7 @@ public class JacksonRecordReaderTest extends BaseND4JTest { //Exact same information as JSON format, but in YAML format ClassPathResource cpr = new ClassPathResource("datavec-api/yaml/"); - File f = new File(FileUtils.getTempDirectoryPath()+File.separatorChar+ UUID.randomUUID().toString()); + File f = new File(FileUtils.getTempDirectoryPath()+File.separatorChar+ UUID.randomUUID()); FileUtils.forceMkdir(f); cpr.copyDirectory(f); String path = new File(f, "yaml_test_%d.txt").getAbsolutePath(); @@ -102,7 +102,7 @@ public class JacksonRecordReaderTest extends BaseND4JTest { //Exact same information as JSON format, but in XML format ClassPathResource cpr = new ClassPathResource("datavec-api/xml/"); - 
File f = new File(FileUtils.getTempDirectoryPath()+File.separatorChar+ UUID.randomUUID().toString()); + File f = new File(FileUtils.getTempDirectoryPath()+File.separatorChar+ UUID.randomUUID()); FileUtils.forceMkdir(f); cpr.copyDirectory(f); String path = new File(f, "xml_test_%d.txt").getAbsolutePath(); @@ -126,17 +126,17 @@ public class JacksonRecordReaderTest extends BaseND4JTest { private static void testJacksonRecordReader(RecordReader rr) { List json0 = rr.next(); - List exp0 = Arrays.asList((Writable) new Text("aValue0"), new Text("bValue0"), new Text("cxValue0")); + List exp0 = Arrays.asList(new Text("aValue0"), new Text("bValue0"), new Text("cxValue0")); assertEquals(exp0, json0); List json1 = rr.next(); List exp1 = - Arrays.asList((Writable) new Text("aValue1"), new Text("MISSING_B"), new Text("cxValue1")); + Arrays.asList(new Text("aValue1"), new Text("MISSING_B"), new Text("cxValue1")); assertEquals(exp1, json1); List json2 = rr.next(); List exp2 = - Arrays.asList((Writable) new Text("aValue2"), new Text("bValue2"), new Text("MISSING_CX")); + Arrays.asList(new Text("aValue2"), new Text("bValue2"), new Text("MISSING_CX")); assertEquals(exp2, json2); assertFalse(rr.hasNext()); @@ -153,7 +153,7 @@ public class JacksonRecordReaderTest extends BaseND4JTest { public void testAppendingLabels() throws Exception { ClassPathResource cpr = new ClassPathResource("datavec-api/json/"); - File f = new File(FileUtils.getTempDirectoryPath()+File.separatorChar+ UUID.randomUUID().toString()); + File f = new File(FileUtils.getTempDirectoryPath()+File.separatorChar+ UUID.randomUUID()); FileUtils.forceMkdir(f); cpr.copyDirectory(f); String path = new File(f, "json_test_%d.txt").getAbsolutePath(); @@ -165,15 +165,15 @@ public class JacksonRecordReaderTest extends BaseND4JTest { new LabelGen()); rr.initialize(is); - List exp0 = Arrays.asList((Writable) new Text("aValue0"), new Text("bValue0"), new Text("cxValue0"), + List exp0 = Arrays.asList(new Text("aValue0"), new 
Text("bValue0"), new Text("cxValue0"), new IntWritable(0)); assertEquals(exp0, rr.next()); - List exp1 = Arrays.asList((Writable) new Text("aValue1"), new Text("MISSING_B"), new Text("cxValue1"), + List exp1 = Arrays.asList(new Text("aValue1"), new Text("MISSING_B"), new Text("cxValue1"), new IntWritable(1)); assertEquals(exp1, rr.next()); - List exp2 = Arrays.asList((Writable) new Text("aValue2"), new Text("bValue2"), new Text("MISSING_CX"), + List exp2 = Arrays.asList(new Text("aValue2"), new Text("bValue2"), new Text("MISSING_CX"), new IntWritable(2)); assertEquals(exp2, rr.next()); @@ -182,15 +182,15 @@ public class JacksonRecordReaderTest extends BaseND4JTest { new LabelGen(), 0); rr.initialize(is); - exp0 = Arrays.asList((Writable) new IntWritable(0), new Text("aValue0"), new Text("bValue0"), + exp0 = Arrays.asList(new IntWritable(0), new Text("aValue0"), new Text("bValue0"), new Text("cxValue0")); assertEquals(exp0, rr.next()); - exp1 = Arrays.asList((Writable) new IntWritable(1), new Text("aValue1"), new Text("MISSING_B"), + exp1 = Arrays.asList(new IntWritable(1), new Text("aValue1"), new Text("MISSING_B"), new Text("cxValue1")); assertEquals(exp1, rr.next()); - exp2 = Arrays.asList((Writable) new IntWritable(2), new Text("aValue2"), new Text("bValue2"), + exp2 = Arrays.asList(new IntWritable(2), new Text("aValue2"), new Text("bValue2"), new Text("MISSING_CX")); assertEquals(exp2, rr.next()); } @@ -198,7 +198,7 @@ public class JacksonRecordReaderTest extends BaseND4JTest { @Test public void testAppendingLabelsMetaData() throws Exception { ClassPathResource cpr = new ClassPathResource("datavec-api/json/"); - File f = new File(FileUtils.getTempDirectoryPath()+File.separatorChar+ UUID.randomUUID().toString()); + File f = new File(FileUtils.getTempDirectoryPath()+File.separatorChar+ UUID.randomUUID()); FileUtils.forceMkdir(f); cpr.copyDirectory(f); String path = new File(f, "json_test_%d.txt").getAbsolutePath(); diff --git 
a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/RegexRecordReaderTest.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/RegexRecordReaderTest.java index a2d6622b3..481e3a8ba 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/RegexRecordReaderTest.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/RegexRecordReaderTest.java @@ -58,11 +58,11 @@ public class RegexRecordReaderTest extends BaseND4JTest { RecordReader rr = new RegexLineRecordReader(regex, 1); rr.initialize(new FileSplit(new ClassPathResource("datavec-api/logtestdata/logtestfile0.txt").getFile())); - List exp0 = Arrays.asList((Writable) new Text("2016-01-01 23:59:59.001"), new Text("1"), + List exp0 = Arrays.asList(new Text("2016-01-01 23:59:59.001"), new Text("1"), new Text("DEBUG"), new Text("First entry message!")); - List exp1 = Arrays.asList((Writable) new Text("2016-01-01 23:59:59.002"), new Text("2"), + List exp1 = Arrays.asList(new Text("2016-01-01 23:59:59.002"), new Text("2"), new Text("INFO"), new Text("Second entry message!")); - List exp2 = Arrays.asList((Writable) new Text("2016-01-01 23:59:59.003"), new Text("3"), + List exp2 = Arrays.asList(new Text("2016-01-01 23:59:59.003"), new Text("3"), new Text("WARN"), new Text("Third entry message!")); assertEquals(exp0, rr.next()); assertEquals(exp1, rr.next()); @@ -125,20 +125,20 @@ public class RegexRecordReaderTest extends BaseND4JTest { rr.initialize(is); List> exp0 = new ArrayList<>(); - exp0.add(Arrays.asList((Writable) new Text("2016-01-01 23:59:59.001"), new Text("1"), new Text("DEBUG"), + exp0.add(Arrays.asList(new Text("2016-01-01 23:59:59.001"), new Text("1"), new Text("DEBUG"), new Text("First entry message!"))); - exp0.add(Arrays.asList((Writable) new Text("2016-01-01 23:59:59.002"), new Text("2"), new Text("INFO"), + exp0.add(Arrays.asList(new Text("2016-01-01 23:59:59.002"), 
new Text("2"), new Text("INFO"), new Text("Second entry message!"))); - exp0.add(Arrays.asList((Writable) new Text("2016-01-01 23:59:59.003"), new Text("3"), new Text("WARN"), + exp0.add(Arrays.asList(new Text("2016-01-01 23:59:59.003"), new Text("3"), new Text("WARN"), new Text("Third entry message!"))); List> exp1 = new ArrayList<>(); - exp1.add(Arrays.asList((Writable) new Text("2016-01-01 23:59:59.011"), new Text("11"), new Text("DEBUG"), + exp1.add(Arrays.asList(new Text("2016-01-01 23:59:59.011"), new Text("11"), new Text("DEBUG"), new Text("First entry message!"))); - exp1.add(Arrays.asList((Writable) new Text("2016-01-01 23:59:59.012"), new Text("12"), new Text("INFO"), + exp1.add(Arrays.asList(new Text("2016-01-01 23:59:59.012"), new Text("12"), new Text("INFO"), new Text("Second entry message!"))); - exp1.add(Arrays.asList((Writable) new Text("2016-01-01 23:59:59.013"), new Text("13"), new Text("WARN"), + exp1.add(Arrays.asList(new Text("2016-01-01 23:59:59.013"), new Text("13"), new Text("WARN"), new Text("Third entry message!"))); assertEquals(exp0, rr.sequenceRecord()); diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/TestCollectionRecordReaders.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/TestCollectionRecordReaders.java index decbf0275..c3287ffa6 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/TestCollectionRecordReaders.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/TestCollectionRecordReaders.java @@ -43,13 +43,13 @@ public class TestCollectionRecordReaders extends BaseND4JTest { List>> listOfSequences = new ArrayList<>(); List> sequence1 = new ArrayList<>(); - sequence1.add(Arrays.asList((Writable) new IntWritable(0), new IntWritable(1))); - sequence1.add(Arrays.asList((Writable) new IntWritable(2), new IntWritable(3))); + sequence1.add(Arrays.asList(new 
IntWritable(0), new IntWritable(1))); + sequence1.add(Arrays.asList(new IntWritable(2), new IntWritable(3))); listOfSequences.add(sequence1); List> sequence2 = new ArrayList<>(); - sequence2.add(Arrays.asList((Writable) new IntWritable(4), new IntWritable(5))); - sequence2.add(Arrays.asList((Writable) new IntWritable(6), new IntWritable(7))); + sequence2.add(Arrays.asList(new IntWritable(4), new IntWritable(5))); + sequence2.add(Arrays.asList(new IntWritable(6), new IntWritable(7))); listOfSequences.add(sequence2); SequenceRecordReader seqRR = new CollectionSequenceRecordReader(listOfSequences); diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/transform/TransformProcessRecordReaderTests.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/transform/TransformProcessRecordReaderTests.java index ee2c9b091..3645c034a 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/transform/TransformProcessRecordReaderTests.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/reader/impl/transform/TransformProcessRecordReaderTests.java @@ -36,6 +36,7 @@ import org.nd4j.common.io.ClassPathResource; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -74,18 +75,18 @@ public class TransformProcessRecordReaderTests extends BaseND4JTest { public void simpleTransformTestSequence() { List> sequence = new ArrayList<>(); //First window: - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L), new IntWritable(0), + sequence.add(Arrays.asList(new LongWritable(1451606400000L), new IntWritable(0), new IntWritable(0))); - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 100L), new IntWritable(1), + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 100L), new IntWritable(1), new 
IntWritable(0))); - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 200L), new IntWritable(2), + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 200L), new IntWritable(2), new IntWritable(0))); Schema schema = new SequenceSchema.Builder().addColumnTime("timecolumn", DateTimeZone.UTC) .addColumnInteger("intcolumn").addColumnInteger("intcolumn2").build(); TransformProcess transformProcess = new TransformProcess.Builder(schema).removeColumns("intcolumn2").build(); InMemorySequenceRecordReader inMemorySequenceRecordReader = - new InMemorySequenceRecordReader(Arrays.asList(sequence)); + new InMemorySequenceRecordReader(Collections.singletonList(sequence)); TransformProcessSequenceRecordReader transformProcessSequenceRecordReader = new TransformProcessSequenceRecordReader(inMemorySequenceRecordReader, transformProcess); List> next = transformProcessSequenceRecordReader.sequenceRecord(); diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/writer/impl/LibSvmRecordWriterTest.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/writer/impl/LibSvmRecordWriterTest.java index 885a75ec0..f0013e516 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/writer/impl/LibSvmRecordWriterTest.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/writer/impl/LibSvmRecordWriterTest.java @@ -168,7 +168,7 @@ public class LibSvmRecordWriterTest extends BaseND4JTest { arr3.putScalar(0, 13); arr3.putScalar(1, 14); arr3.putScalar(2, 15); - List record = Arrays.asList((Writable) new DoubleWritable(1), + List record = Arrays.asList(new DoubleWritable(1), new NDArrayWritable(arr2), new IntWritable(2), new DoubleWritable(3), @@ -204,7 +204,7 @@ public class LibSvmRecordWriterTest extends BaseND4JTest { arr3.putScalar(0, 0); arr3.putScalar(1, 1); arr3.putScalar(2, 0); - List record = Arrays.asList((Writable) new DoubleWritable(1), + List record = 
Arrays.asList(new DoubleWritable(1), new NDArrayWritable(arr2), new IntWritable(2), new DoubleWritable(3), @@ -241,7 +241,7 @@ public class LibSvmRecordWriterTest extends BaseND4JTest { arr3.putScalar(0, 0); arr3.putScalar(1, 1); arr3.putScalar(2, 0); - List record = Arrays.asList((Writable) new DoubleWritable(1), + List record = Arrays.asList(new DoubleWritable(1), new NDArrayWritable(arr2), new IntWritable(2), new DoubleWritable(3), @@ -273,7 +273,7 @@ public class LibSvmRecordWriterTest extends BaseND4JTest { @Test public void testNonIntegerButValidMultilabel() throws Exception { - List record = Arrays.asList((Writable) new IntWritable(3), + List record = Arrays.asList(new IntWritable(3), new IntWritable(2), new DoubleWritable(1.0)); File tempFile = File.createTempFile("LibSvmRecordWriter", ".txt"); @@ -296,7 +296,7 @@ public class LibSvmRecordWriterTest extends BaseND4JTest { @Test public void nonIntegerMultilabel() throws Exception { Assertions.assertThrows(NumberFormatException.class, () -> { - List record = Arrays.asList((Writable) new IntWritable(3), + List record = Arrays.asList(new IntWritable(3), new IntWritable(2), new DoubleWritable(1.2)); File tempFile = File.createTempFile("LibSvmRecordWriter", ".txt"); @@ -319,7 +319,7 @@ public class LibSvmRecordWriterTest extends BaseND4JTest { @Test public void nonBinaryMultilabel() throws Exception { - List record = Arrays.asList((Writable) new IntWritable(0), + List record = Arrays.asList(new IntWritable(0), new IntWritable(1), new IntWritable(2)); File tempFile = File.createTempFile("LibSvmRecordWriter", ".txt"); diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/writer/impl/SVMLightRecordWriterTest.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/writer/impl/SVMLightRecordWriterTest.java index d38611cc4..48ee43c47 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/writer/impl/SVMLightRecordWriterTest.java +++ 
b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/records/writer/impl/SVMLightRecordWriterTest.java @@ -165,7 +165,7 @@ public class SVMLightRecordWriterTest extends BaseND4JTest { arr3.putScalar(0, 13); arr3.putScalar(1, 14); arr3.putScalar(2, 15); - List record = Arrays.asList((Writable) new DoubleWritable(1), + List record = Arrays.asList(new DoubleWritable(1), new NDArrayWritable(arr2), new IntWritable(2), new DoubleWritable(3), @@ -201,7 +201,7 @@ public class SVMLightRecordWriterTest extends BaseND4JTest { arr3.putScalar(0, 0); arr3.putScalar(1, 1); arr3.putScalar(2, 0); - List record = Arrays.asList((Writable) new DoubleWritable(1), + List record = Arrays.asList(new DoubleWritable(1), new NDArrayWritable(arr2), new IntWritable(2), new DoubleWritable(3), @@ -238,7 +238,7 @@ public class SVMLightRecordWriterTest extends BaseND4JTest { arr3.putScalar(0, 0); arr3.putScalar(1, 1); arr3.putScalar(2, 0); - List record = Arrays.asList((Writable) new DoubleWritable(1), + List record = Arrays.asList(new DoubleWritable(1), new NDArrayWritable(arr2), new IntWritable(2), new DoubleWritable(3), @@ -270,7 +270,7 @@ public class SVMLightRecordWriterTest extends BaseND4JTest { @Test public void testNonIntegerButValidMultilabel() throws Exception { - List record = Arrays.asList((Writable) new IntWritable(3), + List record = Arrays.asList(new IntWritable(3), new IntWritable(2), new DoubleWritable(1.0)); File tempFile = File.createTempFile("SVMLightRecordWriter", ".txt"); @@ -293,7 +293,7 @@ public class SVMLightRecordWriterTest extends BaseND4JTest { @Test public void nonIntegerMultilabel() throws Exception { Assertions.assertThrows(NumberFormatException.class, () -> { - List record = Arrays.asList((Writable) new IntWritable(3), + List record = Arrays.asList(new IntWritable(3), new IntWritable(2), new DoubleWritable(1.2)); File tempFile = File.createTempFile("SVMLightRecordWriter", ".txt"); @@ -317,7 +317,7 @@ public class SVMLightRecordWriterTest extends 
BaseND4JTest { @Test public void nonBinaryMultilabel() throws Exception { Assertions.assertThrows(NumberFormatException.class, () -> { - List record = Arrays.asList((Writable) new IntWritable(0), + List record = Arrays.asList(new IntWritable(0), new IntWritable(1), new IntWritable(2)); diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/split/InputSplitTests.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/split/InputSplitTests.java index 43d274151..7048e610b 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/split/InputSplitTests.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/split/InputSplitTests.java @@ -137,7 +137,7 @@ public class InputSplitTests extends BaseND4JTest { FileSplit boostrap = new FileSplit(tmpDir); Assertions.assertTrue(boostrap.needsBootstrapForWrite()); boostrap.bootStrapForWrite(); - Assertions.assertTrue(tmpDir.listFiles() != null); + Assertions.assertNotNull(tmpDir.listFiles()); } @Test @@ -156,6 +156,7 @@ public class InputSplitTests extends BaseND4JTest { for (int i = 0; i < paths2.length; i++) { if (!paths2[i].toString().startsWith("file:///label0/")) { notOnlyFirstLabel = true; + break; } } Assertions.assertTrue(notOnlyFirstLabel); diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/split/TestStreamInputSplit.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/split/TestStreamInputSplit.java index 09b01cf8d..c53099d0f 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/split/TestStreamInputSplit.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/split/TestStreamInputSplit.java @@ -36,10 +36,7 @@ import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Random; +import java.util.*; import static 
org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotEquals; @@ -67,9 +64,9 @@ public class TestStreamInputSplit extends BaseND4JTest { rr.initialize(is); List> exp = new ArrayList<>(); - exp.add(Arrays.asList(new Text("a"), new Text("b"), new Text("c"))); - exp.add(Arrays.asList(new Text("d"), new Text("e"), new Text("f"))); - exp.add(Arrays.asList(new Text("1"), new Text("2"), new Text("3"))); + exp.add(Arrays.asList(new Text("a"), new Text("b"), new Text("c"))); + exp.add(Arrays.asList(new Text("d"), new Text("e"), new Text("f"))); + exp.add(Arrays.asList(new Text("1"), new Text("2"), new Text("3"))); List> act = new ArrayList<>(); while(rr.hasNext()){ @@ -111,10 +108,10 @@ public class TestStreamInputSplit extends BaseND4JTest { List>> exp = new ArrayList<>(); exp.add(Arrays.asList( - Arrays.asList(new Text("a"), new Text("b"), new Text("c")), - Arrays.asList(new Text("d"), new Text("e"), new Text("f")))); - exp.add(Arrays.asList( - Arrays.asList(new Text("1"), new Text("2"), new Text("3")))); + Arrays.asList(new Text("a"), new Text("b"), new Text("c")), + Arrays.asList(new Text("d"), new Text("e"), new Text("f")))); + exp.add(Collections.singletonList( + Arrays.asList(new Text("1"), new Text("2"), new Text("3")))); List>> act = new ArrayList<>(); while (rr.hasNext()) { diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/TestTransformProcess.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/TestTransformProcess.java index 7a968ddfe..d478c934b 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/TestTransformProcess.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/TestTransformProcess.java @@ -51,8 +51,8 @@ public class TestTransformProcess extends BaseND4JTest { .doubleMathOp("col2", MathOp.Add, 1.0) .build(); - List in = Arrays.asList(new Text("Text"), new DoubleWritable(2.0)); - List 
exp = Arrays.asList(new Text("Text"), new DoubleWritable(3.0)); + List in = Arrays.asList(new Text("Text"), new DoubleWritable(2.0)); + List exp = Arrays.asList(new Text("Text"), new DoubleWritable(3.0)); List out = transformProcess.execute(in); assertEquals(exp, out); @@ -73,11 +73,11 @@ public class TestTransformProcess extends BaseND4JTest { .build(); String s = "in text"; - List input = Collections.singletonList(new Text(s)); + List input = Collections.singletonList(new Text(s)); List> expSeq = new ArrayList<>(s.length()); for( int i = 0; isingletonList(new IntWritable(m.get(s.charAt(i))))); + expSeq.add(Collections.singletonList(new IntWritable(m.get(s.charAt(i))))); } diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/condition/TestConditions.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/condition/TestConditions.java index f49e0c4d4..1f4af7292 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/condition/TestConditions.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/condition/TestConditions.java @@ -44,20 +44,20 @@ public class TestConditions extends BaseND4JTest { Condition condition = new IntegerColumnCondition("column", SequenceConditionMode.Or, ConditionOp.LessThan, 0); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new IntWritable(-1)))); - assertTrue(condition.condition(Collections.singletonList((Writable) new IntWritable(-2)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new IntWritable(0)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new IntWritable(1)))); + assertTrue(condition.condition(Collections.singletonList(new IntWritable(-1)))); + assertTrue(condition.condition(Collections.singletonList(new IntWritable(-2)))); + assertFalse(condition.condition(Collections.singletonList(new IntWritable(0)))); + 
assertFalse(condition.condition(Collections.singletonList(new IntWritable(1)))); Set set = new HashSet<>(); set.add(0); set.add(3); condition = new IntegerColumnCondition("column", SequenceConditionMode.Or, ConditionOp.InSet, set); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new IntWritable(0)))); - assertTrue(condition.condition(Collections.singletonList((Writable) new IntWritable(3)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new IntWritable(1)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new IntWritable(2)))); + assertTrue(condition.condition(Collections.singletonList(new IntWritable(0)))); + assertTrue(condition.condition(Collections.singletonList(new IntWritable(3)))); + assertFalse(condition.condition(Collections.singletonList(new IntWritable(1)))); + assertFalse(condition.condition(Collections.singletonList(new IntWritable(2)))); } @Test @@ -67,19 +67,19 @@ public class TestConditions extends BaseND4JTest { Condition condition = new LongColumnCondition("column", SequenceConditionMode.Or, ConditionOp.NotEqual, 5L); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new LongWritable(0)))); - assertTrue(condition.condition(Collections.singletonList((Writable) new LongWritable(1)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new LongWritable(5)))); + assertTrue(condition.condition(Collections.singletonList(new LongWritable(0)))); + assertTrue(condition.condition(Collections.singletonList(new LongWritable(1)))); + assertFalse(condition.condition(Collections.singletonList(new LongWritable(5)))); Set set = new HashSet<>(); set.add(0L); set.add(3L); condition = new LongColumnCondition("column", SequenceConditionMode.Or, ConditionOp.NotInSet, set); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new 
LongWritable(5)))); - assertTrue(condition.condition(Collections.singletonList((Writable) new LongWritable(10)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new LongWritable(0)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new LongWritable(3)))); + assertTrue(condition.condition(Collections.singletonList(new LongWritable(5)))); + assertTrue(condition.condition(Collections.singletonList(new LongWritable(10)))); + assertFalse(condition.condition(Collections.singletonList(new LongWritable(0)))); + assertFalse(condition.condition(Collections.singletonList(new LongWritable(3)))); } @Test @@ -90,20 +90,20 @@ public class TestConditions extends BaseND4JTest { new DoubleColumnCondition("column", SequenceConditionMode.Or, ConditionOp.GreaterOrEqual, 0); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new DoubleWritable(0.0)))); - assertTrue(condition.condition(Collections.singletonList((Writable) new DoubleWritable(0.5)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new DoubleWritable(-0.5)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new DoubleWritable(-1)))); + assertTrue(condition.condition(Collections.singletonList(new DoubleWritable(0.0)))); + assertTrue(condition.condition(Collections.singletonList(new DoubleWritable(0.5)))); + assertFalse(condition.condition(Collections.singletonList(new DoubleWritable(-0.5)))); + assertFalse(condition.condition(Collections.singletonList(new DoubleWritable(-1)))); Set set = new HashSet<>(); set.add(0.0); set.add(3.0); condition = new DoubleColumnCondition("column", SequenceConditionMode.Or, ConditionOp.InSet, set); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new DoubleWritable(0.0)))); - assertTrue(condition.condition(Collections.singletonList((Writable) new DoubleWritable(3.0)))); - 
assertFalse(condition.condition(Collections.singletonList((Writable) new DoubleWritable(1.0)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new DoubleWritable(2.0)))); + assertTrue(condition.condition(Collections.singletonList(new DoubleWritable(0.0)))); + assertTrue(condition.condition(Collections.singletonList(new DoubleWritable(3.0)))); + assertFalse(condition.condition(Collections.singletonList(new DoubleWritable(1.0)))); + assertFalse(condition.condition(Collections.singletonList(new DoubleWritable(2.0)))); } @@ -115,20 +115,20 @@ public class TestConditions extends BaseND4JTest { new FloatColumnCondition("column", SequenceConditionMode.Or, ConditionOp.GreaterOrEqual, 0); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new FloatWritable(0.0f)))); - assertTrue(condition.condition(Collections.singletonList((Writable) new FloatWritable(0.5f)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new FloatWritable(-0.5f)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new FloatWritable(-1f)))); + assertTrue(condition.condition(Collections.singletonList(new FloatWritable(0.0f)))); + assertTrue(condition.condition(Collections.singletonList(new FloatWritable(0.5f)))); + assertFalse(condition.condition(Collections.singletonList(new FloatWritable(-0.5f)))); + assertFalse(condition.condition(Collections.singletonList(new FloatWritable(-1f)))); Set set = new HashSet(); set.add(0.0f); set.add(3.0f); condition = new FloatColumnCondition("column", SequenceConditionMode.Or, ConditionOp.InSet, set); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new FloatWritable(0.0f)))); - assertTrue(condition.condition(Collections.singletonList((Writable) new FloatWritable(3.0f)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new FloatWritable(1.0f)))); - 
assertFalse(condition.condition(Collections.singletonList((Writable) new FloatWritable(2.0f)))); + assertTrue(condition.condition(Collections.singletonList(new FloatWritable(0.0f)))); + assertTrue(condition.condition(Collections.singletonList(new FloatWritable(3.0f)))); + assertFalse(condition.condition(Collections.singletonList(new FloatWritable(1.0f)))); + assertFalse(condition.condition(Collections.singletonList(new FloatWritable(2.0f)))); } @Test @@ -138,18 +138,18 @@ public class TestConditions extends BaseND4JTest { Condition condition = new StringColumnCondition("column", SequenceConditionMode.Or, ConditionOp.Equal, "value"); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new Text("value")))); - assertFalse(condition.condition(Collections.singletonList((Writable) new Text("not_value")))); + assertTrue(condition.condition(Collections.singletonList(new Text("value")))); + assertFalse(condition.condition(Collections.singletonList(new Text("not_value")))); Set set = new HashSet<>(); set.add("in set"); set.add("also in set"); condition = new StringColumnCondition("column", SequenceConditionMode.Or, ConditionOp.InSet, set); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new Text("in set")))); - assertTrue(condition.condition(Collections.singletonList((Writable) new Text("also in set")))); - assertFalse(condition.condition(Collections.singletonList((Writable) new Text("not in the set")))); - assertFalse(condition.condition(Collections.singletonList((Writable) new Text(":)")))); + assertTrue(condition.condition(Collections.singletonList(new Text("in set")))); + assertTrue(condition.condition(Collections.singletonList(new Text("also in set")))); + assertFalse(condition.condition(Collections.singletonList(new Text("not in the set")))); + assertFalse(condition.condition(Collections.singletonList(new Text(":)")))); } @Test @@ -160,18 +160,18 @@ public class 
TestConditions extends BaseND4JTest { new CategoricalColumnCondition("column", SequenceConditionMode.Or, ConditionOp.Equal, "alpha"); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new Text("alpha")))); - assertFalse(condition.condition(Collections.singletonList((Writable) new Text("beta")))); - assertFalse(condition.condition(Collections.singletonList((Writable) new Text("gamma")))); + assertTrue(condition.condition(Collections.singletonList(new Text("alpha")))); + assertFalse(condition.condition(Collections.singletonList(new Text("beta")))); + assertFalse(condition.condition(Collections.singletonList(new Text("gamma")))); Set set = new HashSet<>(); set.add("alpha"); set.add("beta"); condition = new StringColumnCondition("column", SequenceConditionMode.Or, ConditionOp.InSet, set); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new Text("alpha")))); - assertTrue(condition.condition(Collections.singletonList((Writable) new Text("beta")))); - assertFalse(condition.condition(Collections.singletonList((Writable) new Text("gamma")))); + assertTrue(condition.condition(Collections.singletonList(new Text("alpha")))); + assertTrue(condition.condition(Collections.singletonList(new Text("beta")))); + assertFalse(condition.condition(Collections.singletonList(new Text("gamma")))); } @Test @@ -183,18 +183,18 @@ public class TestConditions extends BaseND4JTest { 1451606400000L); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new LongWritable(1451606400000L)))); - assertTrue(condition.condition(Collections.singletonList((Writable) new LongWritable(1451606400000L - 1L)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new LongWritable(1451606400000L + 1L)))); + assertTrue(condition.condition(Collections.singletonList(new LongWritable(1451606400000L)))); + 
assertTrue(condition.condition(Collections.singletonList(new LongWritable(1451606400000L - 1L)))); + assertFalse(condition.condition(Collections.singletonList(new LongWritable(1451606400000L + 1L)))); assertFalse(condition - .condition(Collections.singletonList((Writable) new LongWritable(1451606400000L + 1000L)))); + .condition(Collections.singletonList(new LongWritable(1451606400000L + 1000L)))); Set set = new HashSet<>(); set.add(1451606400000L); condition = new TimeColumnCondition("column", SequenceConditionMode.Or, ConditionOp.InSet, set); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new LongWritable(1451606400000L)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new LongWritable(1451606400000L + 1L)))); + assertTrue(condition.condition(Collections.singletonList(new LongWritable(1451606400000L)))); + assertFalse(condition.condition(Collections.singletonList(new LongWritable(1451606400000L + 1L)))); } @Test @@ -206,22 +206,22 @@ public class TestConditions extends BaseND4JTest { Condition condition = new StringRegexColumnCondition("column", "abc.*"); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new Text("abc")))); - assertTrue(condition.condition(Collections.singletonList((Writable) new Text("abcdefghijk")))); - assertTrue(condition.condition(Collections.singletonList((Writable) new Text("abc more text \tetc")))); - assertFalse(condition.condition(Collections.singletonList((Writable) new Text("ab")))); - assertFalse(condition.condition(Collections.singletonList((Writable) new Text("also doesn't match")))); - assertFalse(condition.condition(Collections.singletonList((Writable) new Text(" abc")))); + assertTrue(condition.condition(Collections.singletonList(new Text("abc")))); + assertTrue(condition.condition(Collections.singletonList(new Text("abcdefghijk")))); + 
assertTrue(condition.condition(Collections.singletonList(new Text("abc more text \tetc")))); + assertFalse(condition.condition(Collections.singletonList(new Text("ab")))); + assertFalse(condition.condition(Collections.singletonList(new Text("also doesn't match")))); + assertFalse(condition.condition(Collections.singletonList(new Text(" abc")))); //Check application on non-String columns schema = TestTransforms.getSchema(ColumnType.Integer); condition = new StringRegexColumnCondition("column", "123\\d*"); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new IntWritable(123)))); - assertTrue(condition.condition(Collections.singletonList((Writable) new IntWritable(123456)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new IntWritable(-123)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new IntWritable(456789)))); + assertTrue(condition.condition(Collections.singletonList(new IntWritable(123)))); + assertTrue(condition.condition(Collections.singletonList(new IntWritable(123456)))); + assertFalse(condition.condition(Collections.singletonList(new IntWritable(-123)))); + assertFalse(condition.condition(Collections.singletonList(new IntWritable(456789)))); } @Test @@ -231,10 +231,10 @@ public class TestConditions extends BaseND4JTest { Condition condition = new NullWritableColumnCondition("column"); condition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) NullWritable.INSTANCE))); - assertTrue(condition.condition(Collections.singletonList((Writable) new NullWritable()))); - assertFalse(condition.condition(Collections.singletonList((Writable) new IntWritable(0)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new Text("1")))); + assertTrue(condition.condition(Collections.singletonList(NullWritable.INSTANCE))); + assertTrue(condition.condition(Collections.singletonList(new NullWritable()))); + 
assertFalse(condition.condition(Collections.singletonList(new IntWritable(0)))); + assertFalse(condition.condition(Collections.singletonList(new Text("1")))); } @Test @@ -248,16 +248,16 @@ public class TestConditions extends BaseND4JTest { Condition notCondition = BooleanCondition.NOT(condition); notCondition.setInputSchema(schema); - assertTrue(condition.condition(Collections.singletonList((Writable) new IntWritable(-1)))); - assertTrue(condition.condition(Collections.singletonList((Writable) new IntWritable(-2)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new IntWritable(0)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new IntWritable(1)))); + assertTrue(condition.condition(Collections.singletonList(new IntWritable(-1)))); + assertTrue(condition.condition(Collections.singletonList(new IntWritable(-2)))); + assertFalse(condition.condition(Collections.singletonList(new IntWritable(0)))); + assertFalse(condition.condition(Collections.singletonList(new IntWritable(1)))); //Expect opposite for not condition: - assertFalse(notCondition.condition(Collections.singletonList((Writable) new IntWritable(-1)))); - assertFalse(notCondition.condition(Collections.singletonList((Writable) new IntWritable(-2)))); - assertTrue(notCondition.condition(Collections.singletonList((Writable) new IntWritable(0)))); - assertTrue(notCondition.condition(Collections.singletonList((Writable) new IntWritable(1)))); + assertFalse(notCondition.condition(Collections.singletonList(new IntWritable(-1)))); + assertFalse(notCondition.condition(Collections.singletonList(new IntWritable(-2)))); + assertTrue(notCondition.condition(Collections.singletonList(new IntWritable(0)))); + assertTrue(notCondition.condition(Collections.singletonList(new IntWritable(1)))); } @Test @@ -274,10 +274,10 @@ public class TestConditions extends BaseND4JTest { Condition andCondition = BooleanCondition.AND(condition1, condition2); 
andCondition.setInputSchema(schema); - assertFalse(andCondition.condition(Collections.singletonList((Writable) new IntWritable(-1)))); - assertTrue(andCondition.condition(Collections.singletonList((Writable) new IntWritable(-2)))); - assertFalse(andCondition.condition(Collections.singletonList((Writable) new IntWritable(0)))); - assertFalse(andCondition.condition(Collections.singletonList((Writable) new IntWritable(1)))); + assertFalse(andCondition.condition(Collections.singletonList(new IntWritable(-1)))); + assertTrue(andCondition.condition(Collections.singletonList(new IntWritable(-2)))); + assertFalse(andCondition.condition(Collections.singletonList(new IntWritable(0)))); + assertFalse(andCondition.condition(Collections.singletonList(new IntWritable(1)))); } @@ -288,15 +288,15 @@ public class TestConditions extends BaseND4JTest { Condition condition = new InvalidValueColumnCondition("column"); condition.setInputSchema(schema); - assertFalse(condition.condition(Collections.singletonList((Writable) new IntWritable(-1)))); //Not invalid -> condition does not apply - assertFalse(condition.condition(Collections.singletonList((Writable) new IntWritable(-2)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new LongWritable(1000)))); - assertFalse(condition.condition(Collections.singletonList((Writable) new Text("1000")))); - assertTrue(condition.condition(Collections.singletonList((Writable) new Text("text")))); - assertTrue(condition.condition(Collections.singletonList((Writable) new Text("NaN")))); + assertFalse(condition.condition(Collections.singletonList(new IntWritable(-1)))); //Not invalid -> condition does not apply + assertFalse(condition.condition(Collections.singletonList(new IntWritable(-2)))); + assertFalse(condition.condition(Collections.singletonList(new LongWritable(1000)))); + assertFalse(condition.condition(Collections.singletonList(new Text("1000")))); + assertTrue(condition.condition(Collections.singletonList(new 
Text("text")))); + assertTrue(condition.condition(Collections.singletonList(new Text("NaN")))); assertTrue(condition.condition( - Collections.singletonList((Writable) new LongWritable(1L + (long) Integer.MAX_VALUE)))); - assertTrue(condition.condition(Collections.singletonList((Writable) new DoubleWritable(3.14159)))); + Collections.singletonList(new LongWritable(1L + (long) Integer.MAX_VALUE)))); + assertTrue(condition.condition(Collections.singletonList(new DoubleWritable(3.14159)))); } @Test @@ -304,14 +304,14 @@ public class TestConditions extends BaseND4JTest { Condition c = new SequenceLengthCondition(ConditionOp.LessThan, 2); - List> l1 = Arrays.asList(Collections.singletonList(NullWritable.INSTANCE)); + List> l1 = Collections.singletonList(Collections.singletonList(NullWritable.INSTANCE)); - List> l2 = Arrays.asList(Collections.singletonList(NullWritable.INSTANCE), - Collections.singletonList(NullWritable.INSTANCE)); + List> l2 = Arrays.asList(Collections.singletonList(NullWritable.INSTANCE), + Collections.singletonList(NullWritable.INSTANCE)); - List> l3 = Arrays.asList(Collections.singletonList(NullWritable.INSTANCE), - Collections.singletonList(NullWritable.INSTANCE), - Collections.singletonList(NullWritable.INSTANCE)); + List> l3 = Arrays.asList(Collections.singletonList(NullWritable.INSTANCE), + Collections.singletonList(NullWritable.INSTANCE), + Collections.singletonList(NullWritable.INSTANCE)); assertTrue(c.conditionSequence(l1)); assertFalse(c.conditionSequence(l2)); diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/filter/TestFilters.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/filter/TestFilters.java index 0b339bffa..1f937609e 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/filter/TestFilters.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/filter/TestFilters.java @@ -45,9 +45,9 @@ public class TestFilters 
extends BaseND4JTest { @Test public void testFilterNumColumns() { List> list = new ArrayList<>(); - list.add(Collections.singletonList((Writable) new IntWritable(-1))); - list.add(Collections.singletonList((Writable) new IntWritable(0))); - list.add(Collections.singletonList((Writable) new IntWritable(2))); + list.add(Collections.singletonList(new IntWritable(-1))); + list.add(Collections.singletonList(new IntWritable(0))); + list.add(Collections.singletonList(new IntWritable(2))); Schema schema = new Schema.Builder().addColumnInteger("intCol", 0, 10) //Only values in the range 0 to 10 are ok .addColumnDouble("doubleCol", -100.0, 100.0) //-100 to 100 only; no NaN or infinite @@ -56,7 +56,7 @@ public class TestFilters extends BaseND4JTest { for (int i = 0; i < list.size(); i++) assertTrue(numColumns.removeExample(list.get(i))); - List correct = Arrays.asList(new IntWritable(0), new DoubleWritable(2)); + List correct = Arrays.asList(new IntWritable(0), new DoubleWritable(2)); assertFalse(numColumns.removeExample(correct)); } @@ -65,9 +65,9 @@ public class TestFilters extends BaseND4JTest { public void testFilterInvalidValues() { List> list = new ArrayList<>(); - list.add(Collections.singletonList((Writable) new IntWritable(-1))); - list.add(Collections.singletonList((Writable) new IntWritable(0))); - list.add(Collections.singletonList((Writable) new IntWritable(2))); + list.add(Collections.singletonList(new IntWritable(-1))); + list.add(Collections.singletonList(new IntWritable(0))); + list.add(Collections.singletonList(new IntWritable(2))); Schema schema = new Schema.Builder().addColumnInteger("intCol", 0, 10) //Only values in the range 0 to 10 are ok .addColumnDouble("doubleCol", -100.0, 100.0) //-100 to 100 only; no NaN or infinite @@ -77,16 +77,16 @@ public class TestFilters extends BaseND4JTest { filter.setInputSchema(schema); //Test valid examples: - assertFalse(filter.removeExample(asList((Writable) new IntWritable(0), new DoubleWritable(0)))); - 
assertFalse(filter.removeExample(asList((Writable) new IntWritable(10), new DoubleWritable(0)))); - assertFalse(filter.removeExample(asList((Writable) new IntWritable(0), new DoubleWritable(-100)))); - assertFalse(filter.removeExample(asList((Writable) new IntWritable(0), new DoubleWritable(100)))); + assertFalse(filter.removeExample(asList(new IntWritable(0), new DoubleWritable(0)))); + assertFalse(filter.removeExample(asList(new IntWritable(10), new DoubleWritable(0)))); + assertFalse(filter.removeExample(asList(new IntWritable(0), new DoubleWritable(-100)))); + assertFalse(filter.removeExample(asList(new IntWritable(0), new DoubleWritable(100)))); //Test invalid: - assertTrue(filter.removeExample(asList((Writable) new IntWritable(-1), new DoubleWritable(0)))); - assertTrue(filter.removeExample(asList((Writable) new IntWritable(11), new DoubleWritable(0)))); - assertTrue(filter.removeExample(asList((Writable) new IntWritable(0), new DoubleWritable(-101)))); - assertTrue(filter.removeExample(asList((Writable) new IntWritable(0), new DoubleWritable(101)))); + assertTrue(filter.removeExample(asList(new IntWritable(-1), new DoubleWritable(0)))); + assertTrue(filter.removeExample(asList(new IntWritable(11), new DoubleWritable(0)))); + assertTrue(filter.removeExample(asList(new IntWritable(0), new DoubleWritable(-101)))); + assertTrue(filter.removeExample(asList(new IntWritable(0), new DoubleWritable(101)))); } @Test @@ -98,11 +98,11 @@ public class TestFilters extends BaseND4JTest { Filter filter = new ConditionFilter(condition); - assertFalse(filter.removeExample(Collections.singletonList((Writable) new IntWritable(10)))); - assertFalse(filter.removeExample(Collections.singletonList((Writable) new IntWritable(1)))); - assertFalse(filter.removeExample(Collections.singletonList((Writable) new IntWritable(0)))); - assertTrue(filter.removeExample(Collections.singletonList((Writable) new IntWritable(-1)))); - 
assertTrue(filter.removeExample(Collections.singletonList((Writable) new IntWritable(-10)))); + assertFalse(filter.removeExample(Collections.singletonList(new IntWritable(10)))); + assertFalse(filter.removeExample(Collections.singletonList(new IntWritable(1)))); + assertFalse(filter.removeExample(Collections.singletonList(new IntWritable(0)))); + assertTrue(filter.removeExample(Collections.singletonList(new IntWritable(-1)))); + assertTrue(filter.removeExample(Collections.singletonList(new IntWritable(-10)))); } } diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/join/TestJoin.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/join/TestJoin.java index c41ebb165..ad056ccef 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/join/TestJoin.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/join/TestJoin.java @@ -47,20 +47,20 @@ public class TestJoin extends BaseND4JTest { Schema secondSchema = new Schema.Builder().addColumnString("keyColumn").addColumnsInteger("second0").build(); List> first = new ArrayList<>(); - first.add(Arrays.asList((Writable) new Text("key0"), new IntWritable(0), new IntWritable(1))); - first.add(Arrays.asList((Writable) new Text("key1"), new IntWritable(10), new IntWritable(11))); + first.add(Arrays.asList(new Text("key0"), new IntWritable(0), new IntWritable(1))); + first.add(Arrays.asList(new Text("key1"), new IntWritable(10), new IntWritable(11))); List> second = new ArrayList<>(); - second.add(Arrays.asList((Writable) new Text("key0"), new IntWritable(100))); - second.add(Arrays.asList((Writable) new Text("key1"), new IntWritable(110))); + second.add(Arrays.asList(new Text("key0"), new IntWritable(100))); + second.add(Arrays.asList(new Text("key1"), new IntWritable(110))); Join join = new Join.Builder(Join.JoinType.Inner).setJoinColumns("keyColumn") .setSchemas(firstSchema, secondSchema).build(); List> expected = 
new ArrayList<>(); - expected.add(Arrays.asList((Writable) new Text("key0"), new IntWritable(0), new IntWritable(1), + expected.add(Arrays.asList(new Text("key0"), new IntWritable(0), new IntWritable(1), new IntWritable(100))); - expected.add(Arrays.asList((Writable) new Text("key1"), new IntWritable(10), new IntWritable(11), + expected.add(Arrays.asList(new Text("key1"), new IntWritable(10), new IntWritable(11), new IntWritable(110))); @@ -74,9 +74,9 @@ public class TestJoin extends BaseND4JTest { //Check joining with null values: expected = new ArrayList<>(); - expected.add(Arrays.asList((Writable) new Text("key0"), new IntWritable(0), new IntWritable(1), + expected.add(Arrays.asList(new Text("key0"), new IntWritable(0), new IntWritable(1), NullWritable.INSTANCE)); - expected.add(Arrays.asList((Writable) new Text("key1"), new IntWritable(10), new IntWritable(11), + expected.add(Arrays.asList(new Text("key1"), new IntWritable(10), new IntWritable(11), NullWritable.INSTANCE)); for (int i = 0; i < first.size(); i++) { List out = join.joinExamples(first.get(i), null); @@ -84,9 +84,9 @@ public class TestJoin extends BaseND4JTest { } expected = new ArrayList<>(); - expected.add(Arrays.asList((Writable) new Text("key0"), NullWritable.INSTANCE, NullWritable.INSTANCE, + expected.add(Arrays.asList(new Text("key0"), NullWritable.INSTANCE, NullWritable.INSTANCE, new IntWritable(100))); - expected.add(Arrays.asList((Writable) new Text("key1"), NullWritable.INSTANCE, NullWritable.INSTANCE, + expected.add(Arrays.asList(new Text("key1"), NullWritable.INSTANCE, NullWritable.INSTANCE, new IntWritable(110))); for (int i = 0; i < first.size(); i++) { List out = join.joinExamples(null, second.get(i)); diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/ops/AggregableMultiOpTest.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/ops/AggregableMultiOpTest.java index caadceb15..6106e37a3 100644 --- 
a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/ops/AggregableMultiOpTest.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/ops/AggregableMultiOpTest.java @@ -26,11 +26,12 @@ import org.nd4j.common.tests.BaseND4JTest; import java.util.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; public class AggregableMultiOpTest extends BaseND4JTest { - private List intList = new ArrayList<>(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9)); + private final List intList = new ArrayList<>(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9)); @Test public void testMulti() throws Exception { @@ -38,18 +39,18 @@ public class AggregableMultiOpTest extends BaseND4JTest { AggregatorImpls.AggregableSum as = new AggregatorImpls.AggregableSum<>(); AggregableMultiOp multi = new AggregableMultiOp<>(Arrays.asList(af, as)); - assertTrue(multi.getOperations().size() == 2); + assertEquals(2, multi.getOperations().size()); for (int i = 0; i < intList.size(); i++) { multi.accept(intList.get(i)); } // mutablility - assertTrue(as.get().toDouble() == 45D); - assertTrue(af.get().toInt() == 1); + assertEquals(45D, as.get().toDouble()); + assertEquals(1, af.get().toInt()); List res = multi.get(); - assertTrue(res.get(1).toDouble() == 45D); - assertTrue(res.get(0).toInt() == 1); + assertEquals(45D, res.get(1).toDouble()); + assertEquals(1, res.get(0).toInt()); AggregatorImpls.AggregableFirst rf = new AggregatorImpls.AggregableFirst<>(); AggregatorImpls.AggregableSum rs = new AggregatorImpls.AggregableSum<>(); @@ -60,12 +61,12 @@ public class AggregableMultiOpTest extends BaseND4JTest { } List revRes = reverse.get(); - assertTrue(revRes.get(1).toDouble() == 45D); - assertTrue(revRes.get(0).toInt() == 9); + assertEquals(45D, revRes.get(1).toDouble()); + assertEquals(9, revRes.get(0).toInt()); multi.combine(reverse); List combinedRes = multi.get(); - assertTrue(combinedRes.get(1).toDouble() 
== 90D); - assertTrue(combinedRes.get(0).toInt() == 1); + assertEquals(90D, combinedRes.get(1).toDouble()); + assertEquals(1, combinedRes.get(0).toInt()); } } diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/ops/AggregatorImplsTest.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/ops/AggregatorImplsTest.java index 8cfd5e979..0cad8f9f0 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/ops/AggregatorImplsTest.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/ops/AggregatorImplsTest.java @@ -33,8 +33,8 @@ import static org.junit.jupiter.api.Assertions.assertTrue; public class AggregatorImplsTest extends BaseND4JTest { - private List intList = new ArrayList<>(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9)); - private List stringList = new ArrayList<>(Arrays.asList("arakoa", "abracadabra", "blast", "acceptance")); + private final List intList = new ArrayList<>(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9)); + private final List stringList = new ArrayList<>(Arrays.asList("arakoa", "abracadabra", "blast", "acceptance")); @Test public void aggregableFirstTest() { @@ -48,7 +48,7 @@ public class AggregatorImplsTest extends BaseND4JTest { for (int i = 0; i < stringList.size(); i++) { firstS.accept(stringList.get(i)); } - assertTrue(firstS.get().toString().equals("arakoa")); + assertEquals("arakoa", firstS.get().toString()); AggregatorImpls.AggregableFirst reverse = new AggregatorImpls.AggregableFirst<>(); @@ -72,7 +72,7 @@ public class AggregatorImplsTest extends BaseND4JTest { for (int i = 0; i < stringList.size(); i++) { lastS.accept(stringList.get(i)); } - assertTrue(lastS.get().toString().equals("acceptance")); + assertEquals("acceptance", lastS.get().toString()); AggregatorImpls.AggregableLast reverse = new AggregatorImpls.AggregableLast<>(); @@ -182,7 +182,7 @@ public class AggregatorImplsTest extends BaseND4JTest { for (int i = 0; i < 
intList.size(); i++) { mn.accept(intList.get(i)); } - assertEquals(9l, (long) mn.getCount()); + assertEquals(9L, (long) mn.getCount()); assertEquals(5D, mn.get().toDouble(), 0.001); @@ -191,7 +191,7 @@ public class AggregatorImplsTest extends BaseND4JTest { reverse.accept(intList.get(intList.size() - i - 1)); } mn.combine(reverse); - assertEquals(18l, (long) mn.getCount()); + assertEquals(18L, (long) mn.getCount()); assertEquals(5D, mn.get().toDouble(), 0.001); } diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/ops/DispatchOpTest.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/ops/DispatchOpTest.java index a04d6f57a..a8c9aace5 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/ops/DispatchOpTest.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/ops/DispatchOpTest.java @@ -29,35 +29,36 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; public class DispatchOpTest extends BaseND4JTest { - private List intList = new ArrayList<>(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9)); - private List stringList = new ArrayList<>(Arrays.asList("arakoa", "abracadabra", "blast", "acceptance")); + private final List intList = new ArrayList<>(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9)); + private final List stringList = new ArrayList<>(Arrays.asList("arakoa", "abracadabra", "blast", "acceptance")); @Test public void testDispatchSimple() { AggregatorImpls.AggregableFirst af = new AggregatorImpls.AggregableFirst<>(); AggregatorImpls.AggregableSum as = new AggregatorImpls.AggregableSum<>(); AggregableMultiOp multiaf = - new AggregableMultiOp<>(Collections.>singletonList(af)); + new AggregableMultiOp<>(Collections.singletonList(af)); AggregableMultiOp multias = - new 
AggregableMultiOp<>(Collections.>singletonList(as)); + new AggregableMultiOp<>(Collections.singletonList(as)); DispatchOp parallel = - new DispatchOp<>(Arrays.>>asList(multiaf, multias)); + new DispatchOp<>(Arrays.asList(multiaf, multias)); - assertTrue(multiaf.getOperations().size() == 1); - assertTrue(multias.getOperations().size() == 1); - assertTrue(parallel.getOperations().size() == 2); + assertEquals(1, multiaf.getOperations().size()); + assertEquals(1, multias.getOperations().size()); + assertEquals(2, parallel.getOperations().size()); for (int i = 0; i < intList.size(); i++) { parallel.accept(Arrays.asList(intList.get(i), intList.get(i))); } List res = parallel.get(); - assertTrue(res.get(1).toDouble() == 45D); - assertTrue(res.get(0).toInt() == 1); + assertEquals(45D, res.get(1).toDouble()); + assertEquals(1, res.get(0).toInt()); } @@ -73,20 +74,20 @@ public class DispatchOpTest extends BaseND4JTest { DispatchOp parallel = new DispatchOp<>( - Arrays.>>asList(multi, otherMulti)); + Arrays.asList(multi, otherMulti)); - assertTrue(multi.getOperations().size() == 2); - assertTrue(otherMulti.getOperations().size() == 2); - assertTrue(parallel.getOperations().size() == 2); + assertEquals(2, multi.getOperations().size()); + assertEquals(2, otherMulti.getOperations().size()); + assertEquals(2, parallel.getOperations().size()); for (int i = 0; i < intList.size(); i++) { parallel.accept(Arrays.asList(intList.get(i), intList.get(i))); } List res = parallel.get(); - assertTrue(res.get(1).toDouble() == 45D); - assertTrue(res.get(0).toInt() == 1); - assertTrue(res.get(3).toDouble() == 9); - assertTrue(res.get(2).toInt() == 9); + assertEquals(45D, res.get(1).toDouble()); + assertEquals(1, res.get(0).toInt()); + assertEquals(9, res.get(3).toDouble()); + assertEquals(9, res.get(2).toInt()); } diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/reduce/TestMultiOpReduce.java 
b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/reduce/TestMultiOpReduce.java index 80d7d7eee..b42f75f6f 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/reduce/TestMultiOpReduce.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/reduce/TestMultiOpReduce.java @@ -46,10 +46,10 @@ public class TestMultiOpReduce extends BaseND4JTest { public void testMultiOpReducerDouble() { List> inputs = new ArrayList<>(); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new DoubleWritable(0))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new DoubleWritable(1))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new DoubleWritable(2))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new DoubleWritable(2))); + inputs.add(Arrays.asList(new Text("someKey"), new DoubleWritable(0))); + inputs.add(Arrays.asList(new Text("someKey"), new DoubleWritable(1))); + inputs.add(Arrays.asList(new Text("someKey"), new DoubleWritable(2))); + inputs.add(Arrays.asList(new Text("someKey"), new DoubleWritable(2))); Map exp = new LinkedHashMap<>(); exp.put(ReduceOp.Min, 0.0); @@ -90,10 +90,10 @@ public class TestMultiOpReduce extends BaseND4JTest { public void testReducerInteger() { List> inputs = new ArrayList<>(); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(0))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(1))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(2))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(2))); + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(0))); + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(1))); + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(2))); + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(2))); Map exp = new LinkedHashMap<>(); exp.put(ReduceOp.Min, 0.0); 
@@ -135,10 +135,10 @@ public class TestMultiOpReduce extends BaseND4JTest { public void testReduceString() { List> inputs = new ArrayList<>(); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new Text("1"))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new Text("2"))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new Text("3"))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new Text("4"))); + inputs.add(Arrays.asList(new Text("someKey"), new Text("1"))); + inputs.add(Arrays.asList(new Text("someKey"), new Text("2"))); + inputs.add(Arrays.asList(new Text("someKey"), new Text("3"))); + inputs.add(Arrays.asList(new Text("someKey"), new Text("4"))); Map exp = new LinkedHashMap<>(); exp.put(ReduceOp.Append, "1234"); @@ -171,12 +171,12 @@ public class TestMultiOpReduce extends BaseND4JTest { public void testReduceIntegerIgnoreInvalidValues() { List> inputs = new ArrayList<>(); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new Text("0"))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new Text("1"))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(2))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new Text("ignore me"))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new Text("also ignore me"))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new Text("2"))); + inputs.add(Arrays.asList(new Text("someKey"), new Text("0"))); + inputs.add(Arrays.asList(new Text("someKey"), new Text("1"))); + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(2))); + inputs.add(Arrays.asList(new Text("someKey"), new Text("ignore me"))); + inputs.add(Arrays.asList(new Text("someKey"), new Text("also ignore me"))); + inputs.add(Arrays.asList(new Text("someKey"), new Text("2"))); Map exp = new LinkedHashMap<>(); @@ -238,16 +238,16 @@ public class TestMultiOpReduce extends BaseND4JTest { public void testCustomReductions() { List> inputs = new 
ArrayList<>(); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(1), new Text("zero"), + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(1), new Text("zero"), new DoubleWritable(0))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(2), new Text("one"), + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(2), new Text("one"), new DoubleWritable(1))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(3), new Text("two"), + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(3), new Text("two"), new DoubleWritable(2))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(4), new Text("three"), + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(4), new Text("three"), new DoubleWritable(3))); - List expected = Arrays.asList((Writable) new Text("someKey"), new IntWritable(10), new Text("one"), + List expected = Arrays.asList(new Text("someKey"), new IntWritable(10), new Text("one"), new DoubleWritable(1)); @@ -288,16 +288,16 @@ public class TestMultiOpReduce extends BaseND4JTest { public void testCustomReductionsWithCondition() { List> inputs = new ArrayList<>(); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(1), new Text("zero"), + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(1), new Text("zero"), new DoubleWritable(0))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(2), new Text("one"), + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(2), new Text("one"), new DoubleWritable(1))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(3), new Text("two"), + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(3), new Text("two"), new DoubleWritable(2))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(4), new Text("three"), + inputs.add(Arrays.asList(new Text("someKey"), new 
IntWritable(4), new Text("three"), new DoubleWritable(3))); - List expected = Arrays.asList((Writable) new Text("someKey"), new IntWritable(10), new IntWritable(3), + List expected = Arrays.asList(new Text("someKey"), new IntWritable(10), new IntWritable(3), new DoubleWritable(1)); @@ -341,7 +341,7 @@ public class TestMultiOpReduce extends BaseND4JTest { public IAggregableReduceOp> reduceOp() { //For testing: let's take the second value return new AggregableMultiOp<>(Collections - .>singletonList(new AggregableSecond())); + .singletonList(new AggregableSecond())); } @Override @@ -483,12 +483,12 @@ public class TestMultiOpReduce extends BaseND4JTest { .addColumnString("filterCol").addColumnString("textCol").build(); List> inputs = new ArrayList<>(); - inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(1), new Text("a"), new Text("zero"))); - inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(2), new Text("b"), new Text("one"))); - inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(3), new Text("a"), new Text("two"))); - inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(4), new Text("b"), new Text("three"))); - inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(5), new Text("a"), new Text("three"))); - inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(6), new Text("b"), new Text("three"))); + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(1), new Text("a"), new Text("zero"))); + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(2), new Text("b"), new Text("one"))); + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(3), new Text("a"), new Text("two"))); + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(4), new Text("b"), new Text("three"))); + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(5), new Text("a"), new Text("three"))); + inputs.add(Arrays.asList(new Text("someKey"), new IntWritable(6), new Text("b"), new Text("three"))); 
Condition condition = new StringColumnCondition("filterCol", ConditionOp.Equal, "a"); @@ -504,7 +504,7 @@ public class TestMultiOpReduce extends BaseND4JTest { accumulator.accept(inputs.get(i)); } List out = accumulator.get(); - List expected = Arrays.asList(new Text("someKey"), new IntWritable(1 + 3 + 5), + List expected = Arrays.asList(new Text("someKey"), new IntWritable(1 + 3 + 5), new LongWritable(2), new LongWritable(4)); assertEquals(4, out.size()); diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/reduce/TestReductions.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/reduce/TestReductions.java index f7aa89170..c5867e5dd 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/reduce/TestReductions.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/reduce/TestReductions.java @@ -43,7 +43,7 @@ public class TestReductions extends BaseND4JTest { Text t2 = new Text("41.8781136,-87.6297982"); Text t3 = new Text("33.7489954,-84.3879824"); - List list = Arrays.asList(t1, t1, t1, t2, t2, t3); + List list = Arrays.asList(t1, t1, t1, t2, t2, t3); GeographicMidpointReduction reduction = new GeographicMidpointReduction(","); @@ -68,8 +68,8 @@ public class TestReductions extends BaseND4JTest { //Test multiple reductions - list = Arrays.asList(t1, t1, t2); - List list2 = Arrays.asList(t1, t2, t3); + list = Arrays.asList(t1, t1, t2); + List list2 = Arrays.asList(t1, t2, t3); reduceOp = reduction.reduceOp(); for(Writable w : list){ reduceOp.accept(w); diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/sequence/TestReduceSequenceByWindowFunction.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/sequence/TestReduceSequenceByWindowFunction.java index 1bb9ae62a..6add32050 100644 --- 
a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/sequence/TestReduceSequenceByWindowFunction.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/sequence/TestReduceSequenceByWindowFunction.java @@ -52,15 +52,15 @@ public class TestReduceSequenceByWindowFunction extends BaseND4JTest { //Create some data. List> sequence = new ArrayList<>(); //First window: - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L), new IntWritable(0))); - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 100L), new IntWritable(1))); - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 200L), new IntWritable(2))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L), new IntWritable(0))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 100L), new IntWritable(1))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 200L), new IntWritable(2))); //Second window: - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 1000L), new IntWritable(3))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 1000L), new IntWritable(3))); //Third window: empty //Fourth window: - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 3000L), new IntWritable(4))); - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 3100L), new IntWritable(5))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 3000L), new IntWritable(4))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 3100L), new IntWritable(5))); Schema schema = new SequenceSchema.Builder().addColumnTime("timecolumn", DateTimeZone.UTC) .addColumnInteger("intcolumn").build(); @@ -79,17 +79,17 @@ public class TestReduceSequenceByWindowFunction extends BaseND4JTest { assertEquals(4, postApply.size()); - List exp0 = Arrays.asList((Writable) new LongWritable(1451606400000L), new IntWritable(0 + 1 + 2)); + List 
exp0 = Arrays.asList(new LongWritable(1451606400000L), new IntWritable(1 + 2)); assertEquals(exp0, postApply.get(0)); - List exp1 = Arrays.asList((Writable) new LongWritable(1451606400000L + 1000L), new IntWritable(3)); + List exp1 = Arrays.asList(new LongWritable(1451606400000L + 1000L), new IntWritable(3)); assertEquals(exp1, postApply.get(1)); // here, takefirst of an empty window -> nullwritable makes more sense - List exp2 = Arrays.asList((Writable) NullWritable.INSTANCE, NullWritable.INSTANCE); + List exp2 = Arrays.asList(NullWritable.INSTANCE, NullWritable.INSTANCE); assertEquals(exp2, postApply.get(2)); - List exp3 = Arrays.asList((Writable) new LongWritable(1451606400000L + 3000L), new IntWritable(9)); + List exp3 = Arrays.asList(new LongWritable(1451606400000L + 3000L), new IntWritable(9)); assertEquals(exp3, postApply.get(3)); } diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/sequence/TestSequenceSplit.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/sequence/TestSequenceSplit.java index c26eaec61..cae0795c4 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/sequence/TestSequenceSplit.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/sequence/TestSequenceSplit.java @@ -46,13 +46,13 @@ public class TestSequenceSplit extends BaseND4JTest { .build(); List> inputSequence = new ArrayList<>(); - inputSequence.add(Arrays.asList((Writable) new LongWritable(0), new Text("t0"))); - inputSequence.add(Arrays.asList((Writable) new LongWritable(1000), new Text("t1"))); + inputSequence.add(Arrays.asList(new LongWritable(0), new Text("t0"))); + inputSequence.add(Arrays.asList(new LongWritable(1000), new Text("t1"))); //Second split: 74 seconds later - inputSequence.add(Arrays.asList((Writable) new LongWritable(75000), new Text("t2"))); - inputSequence.add(Arrays.asList((Writable) new LongWritable(100000), new Text("t3"))); + 
inputSequence.add(Arrays.asList(new LongWritable(75000), new Text("t2"))); + inputSequence.add(Arrays.asList(new LongWritable(100000), new Text("t3"))); //Third split: 1 minute and 1 milliseconds later - inputSequence.add(Arrays.asList((Writable) new LongWritable(160001), new Text("t4"))); + inputSequence.add(Arrays.asList(new LongWritable(160001), new Text("t4"))); SequenceSplit seqSplit = new SequenceSplitTimeSeparation("time", 1, TimeUnit.MINUTES); seqSplit.setInputSchema(schema); @@ -61,13 +61,13 @@ public class TestSequenceSplit extends BaseND4JTest { assertEquals(3, splits.size()); List> exp0 = new ArrayList<>(); - exp0.add(Arrays.asList((Writable) new LongWritable(0), new Text("t0"))); - exp0.add(Arrays.asList((Writable) new LongWritable(1000), new Text("t1"))); + exp0.add(Arrays.asList(new LongWritable(0), new Text("t0"))); + exp0.add(Arrays.asList(new LongWritable(1000), new Text("t1"))); List> exp1 = new ArrayList<>(); - exp1.add(Arrays.asList((Writable) new LongWritable(75000), new Text("t2"))); - exp1.add(Arrays.asList((Writable) new LongWritable(100000), new Text("t3"))); + exp1.add(Arrays.asList(new LongWritable(75000), new Text("t2"))); + exp1.add(Arrays.asList(new LongWritable(100000), new Text("t3"))); List> exp2 = new ArrayList<>(); - exp2.add(Arrays.asList((Writable) new LongWritable(160001), new Text("t4"))); + exp2.add(Arrays.asList(new LongWritable(160001), new Text("t4"))); assertEquals(exp0, splits.get(0)); assertEquals(exp1, splits.get(1)); diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/sequence/TestWindowFunctions.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/sequence/TestWindowFunctions.java index ff45a3f3e..f1becef3c 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/sequence/TestWindowFunctions.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/sequence/TestWindowFunctions.java @@ -49,15 +49,15 @@ 
public class TestWindowFunctions extends BaseND4JTest { //Create some data. List> sequence = new ArrayList<>(); //First window: - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L), new IntWritable(0))); - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 100L), new IntWritable(1))); - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 200L), new IntWritable(2))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L), new IntWritable(0))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 100L), new IntWritable(1))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 200L), new IntWritable(2))); //Second window: - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 1000L), new IntWritable(3))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 1000L), new IntWritable(3))); //Third window: empty //Fourth window: - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 3000L), new IntWritable(4))); - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 3100L), new IntWritable(5))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 3000L), new IntWritable(4))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 3100L), new IntWritable(5))); Schema schema = new SequenceSchema.Builder().addColumnTime("timecolumn", DateTimeZone.UTC) .addColumnInteger("intcolumn").build(); @@ -100,15 +100,15 @@ public class TestWindowFunctions extends BaseND4JTest { //Create some data. 
List> sequence = new ArrayList<>(); //First window: - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L), new IntWritable(0))); - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 100L), new IntWritable(1))); - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 200L), new IntWritable(2))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L), new IntWritable(0))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 100L), new IntWritable(1))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 200L), new IntWritable(2))); //Second window: - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 1000L), new IntWritable(3))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 1000L), new IntWritable(3))); //Third window: empty //Fourth window: - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 3000L), new IntWritable(4))); - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 3100L), new IntWritable(5))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 3000L), new IntWritable(4))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 3100L), new IntWritable(5))); Schema schema = new SequenceSchema.Builder().addColumnTime("timecolumn", DateTimeZone.UTC) .addColumnInteger("intcolumn").build(); @@ -150,15 +150,15 @@ public class TestWindowFunctions extends BaseND4JTest { //Create some data. 
List> sequence = new ArrayList<>(); //First window: - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L), new IntWritable(0))); - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 100L), new IntWritable(1))); - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 200L), new IntWritable(2))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L), new IntWritable(0))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 100L), new IntWritable(1))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 200L), new IntWritable(2))); //Second window: - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 1000L), new IntWritable(3))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 1000L), new IntWritable(3))); //Third window: empty //Fourth window: - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 3000L), new IntWritable(4))); - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 3100L), new IntWritable(5))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 3000L), new IntWritable(4))); + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 3100L), new IntWritable(5))); Schema schema = new SequenceSchema.Builder().addColumnTime("timecolumn", DateTimeZone.UTC) .addColumnInteger("intcolumn").build(); @@ -188,13 +188,13 @@ public class TestWindowFunctions extends BaseND4JTest { //Create some data. 
List> sequence = new ArrayList<>(); //First window: - sequence.add(Arrays.asList((Writable) new LongWritable(0), new IntWritable(0))); - sequence.add(Arrays.asList((Writable) new LongWritable(100), new IntWritable(1))); - sequence.add(Arrays.asList((Writable) new LongWritable(200), new IntWritable(2))); - sequence.add(Arrays.asList((Writable) new LongWritable(1000), new IntWritable(3))); - sequence.add(Arrays.asList((Writable) new LongWritable(1500), new IntWritable(4))); - sequence.add(Arrays.asList((Writable) new LongWritable(2000), new IntWritable(5))); - sequence.add(Arrays.asList((Writable) new LongWritable(5000), new IntWritable(7))); + sequence.add(Arrays.asList(new LongWritable(0), new IntWritable(0))); + sequence.add(Arrays.asList(new LongWritable(100), new IntWritable(1))); + sequence.add(Arrays.asList(new LongWritable(200), new IntWritable(2))); + sequence.add(Arrays.asList(new LongWritable(1000), new IntWritable(3))); + sequence.add(Arrays.asList(new LongWritable(1500), new IntWritable(4))); + sequence.add(Arrays.asList(new LongWritable(2000), new IntWritable(5))); + sequence.add(Arrays.asList(new LongWritable(5000), new IntWritable(7))); Schema schema = new SequenceSchema.Builder().addColumnTime("timecolumn", DateTimeZone.UTC) @@ -207,32 +207,32 @@ public class TestWindowFunctions extends BaseND4JTest { //First window: -1000 to 1000 List> exp0 = new ArrayList<>(); - exp0.add(Arrays.asList((Writable) new LongWritable(0), new IntWritable(0))); - exp0.add(Arrays.asList((Writable) new LongWritable(100), new IntWritable(1))); - exp0.add(Arrays.asList((Writable) new LongWritable(200), new IntWritable(2))); + exp0.add(Arrays.asList(new LongWritable(0), new IntWritable(0))); + exp0.add(Arrays.asList(new LongWritable(100), new IntWritable(1))); + exp0.add(Arrays.asList(new LongWritable(200), new IntWritable(2))); //Second window: 0 to 2000 List> exp1 = new ArrayList<>(); - exp1.add(Arrays.asList((Writable) new LongWritable(0), new IntWritable(0))); - 
exp1.add(Arrays.asList((Writable) new LongWritable(100), new IntWritable(1))); - exp1.add(Arrays.asList((Writable) new LongWritable(200), new IntWritable(2))); - exp1.add(Arrays.asList((Writable) new LongWritable(1000), new IntWritable(3))); - exp1.add(Arrays.asList((Writable) new LongWritable(1500), new IntWritable(4))); + exp1.add(Arrays.asList(new LongWritable(0), new IntWritable(0))); + exp1.add(Arrays.asList(new LongWritable(100), new IntWritable(1))); + exp1.add(Arrays.asList(new LongWritable(200), new IntWritable(2))); + exp1.add(Arrays.asList(new LongWritable(1000), new IntWritable(3))); + exp1.add(Arrays.asList(new LongWritable(1500), new IntWritable(4))); //Third window: 1000 to 3000 List> exp2 = new ArrayList<>(); - exp2.add(Arrays.asList((Writable) new LongWritable(1000), new IntWritable(3))); - exp2.add(Arrays.asList((Writable) new LongWritable(1500), new IntWritable(4))); - exp2.add(Arrays.asList((Writable) new LongWritable(2000), new IntWritable(5))); + exp2.add(Arrays.asList(new LongWritable(1000), new IntWritable(3))); + exp2.add(Arrays.asList(new LongWritable(1500), new IntWritable(4))); + exp2.add(Arrays.asList(new LongWritable(2000), new IntWritable(5))); //Fourth window: 2000 to 4000 List> exp3 = new ArrayList<>(); - exp3.add(Arrays.asList((Writable) new LongWritable(2000), new IntWritable(5))); + exp3.add(Arrays.asList(new LongWritable(2000), new IntWritable(5))); //Fifth window: 3000 to 5000 List> exp4 = new ArrayList<>(); //Sixth window: 4000 to 6000 List> exp5 = new ArrayList<>(); - exp5.add(Arrays.asList((Writable) new LongWritable(5000), new IntWritable(7))); + exp5.add(Arrays.asList(new LongWritable(5000), new IntWritable(7))); //Seventh window: 5000 to 7000 List> exp6 = new ArrayList<>(); - exp6.add(Arrays.asList((Writable) new LongWritable(5000), new IntWritable(7))); + exp6.add(Arrays.asList(new LongWritable(5000), new IntWritable(7))); List>> windowsExp = Arrays.asList(exp0, exp1, exp2, exp3, exp4, exp5, exp6); @@ -250,13 +250,13 @@ 
public class TestWindowFunctions extends BaseND4JTest { //Create some data. List> sequence = new ArrayList<>(); //First window: - sequence.add(Arrays.asList((Writable) new LongWritable(0), new IntWritable(0))); - sequence.add(Arrays.asList((Writable) new LongWritable(100), new IntWritable(1))); - sequence.add(Arrays.asList((Writable) new LongWritable(200), new IntWritable(2))); - sequence.add(Arrays.asList((Writable) new LongWritable(1000), new IntWritable(3))); - sequence.add(Arrays.asList((Writable) new LongWritable(1500), new IntWritable(4))); - sequence.add(Arrays.asList((Writable) new LongWritable(2000), new IntWritable(5))); - sequence.add(Arrays.asList((Writable) new LongWritable(5000), new IntWritable(7))); + sequence.add(Arrays.asList(new LongWritable(0), new IntWritable(0))); + sequence.add(Arrays.asList(new LongWritable(100), new IntWritable(1))); + sequence.add(Arrays.asList(new LongWritable(200), new IntWritable(2))); + sequence.add(Arrays.asList(new LongWritable(1000), new IntWritable(3))); + sequence.add(Arrays.asList(new LongWritable(1500), new IntWritable(4))); + sequence.add(Arrays.asList(new LongWritable(2000), new IntWritable(5))); + sequence.add(Arrays.asList(new LongWritable(5000), new IntWritable(7))); Schema schema = new SequenceSchema.Builder().addColumnTime("timecolumn", DateTimeZone.UTC) @@ -272,31 +272,31 @@ public class TestWindowFunctions extends BaseND4JTest { //First window: -1000 to 1000 List> exp0 = new ArrayList<>(); - exp0.add(Arrays.asList((Writable) new LongWritable(0), new IntWritable(0))); - exp0.add(Arrays.asList((Writable) new LongWritable(100), new IntWritable(1))); - exp0.add(Arrays.asList((Writable) new LongWritable(200), new IntWritable(2))); + exp0.add(Arrays.asList(new LongWritable(0), new IntWritable(0))); + exp0.add(Arrays.asList(new LongWritable(100), new IntWritable(1))); + exp0.add(Arrays.asList(new LongWritable(200), new IntWritable(2))); //Second window: 0 to 2000 List> exp1 = new ArrayList<>(); - 
exp1.add(Arrays.asList((Writable) new LongWritable(0), new IntWritable(0))); - exp1.add(Arrays.asList((Writable) new LongWritable(100), new IntWritable(1))); - exp1.add(Arrays.asList((Writable) new LongWritable(200), new IntWritable(2))); - exp1.add(Arrays.asList((Writable) new LongWritable(1000), new IntWritable(3))); - exp1.add(Arrays.asList((Writable) new LongWritable(1500), new IntWritable(4))); + exp1.add(Arrays.asList(new LongWritable(0), new IntWritable(0))); + exp1.add(Arrays.asList(new LongWritable(100), new IntWritable(1))); + exp1.add(Arrays.asList(new LongWritable(200), new IntWritable(2))); + exp1.add(Arrays.asList(new LongWritable(1000), new IntWritable(3))); + exp1.add(Arrays.asList(new LongWritable(1500), new IntWritable(4))); //Third window: 1000 to 3000 List> exp2 = new ArrayList<>(); - exp2.add(Arrays.asList((Writable) new LongWritable(1000), new IntWritable(3))); - exp2.add(Arrays.asList((Writable) new LongWritable(1500), new IntWritable(4))); - exp2.add(Arrays.asList((Writable) new LongWritable(2000), new IntWritable(5))); + exp2.add(Arrays.asList(new LongWritable(1000), new IntWritable(3))); + exp2.add(Arrays.asList(new LongWritable(1500), new IntWritable(4))); + exp2.add(Arrays.asList(new LongWritable(2000), new IntWritable(5))); //Fourth window: 2000 to 4000 List> exp3 = new ArrayList<>(); - exp3.add(Arrays.asList((Writable) new LongWritable(2000), new IntWritable(5))); + exp3.add(Arrays.asList(new LongWritable(2000), new IntWritable(5))); //Fifth window: 3000 to 5000 -> Empty: excluded //Sixth window: 4000 to 6000 List> exp5 = new ArrayList<>(); - exp5.add(Arrays.asList((Writable) new LongWritable(5000), new IntWritable(7))); + exp5.add(Arrays.asList(new LongWritable(5000), new IntWritable(7))); //Seventh window: 5000 to 7000 List> exp6 = new ArrayList<>(); - exp6.add(Arrays.asList((Writable) new LongWritable(5000), new IntWritable(7))); + exp6.add(Arrays.asList(new LongWritable(5000), new IntWritable(7))); List>> windowsExp = 
Arrays.asList(exp0, exp1, exp2, exp3, exp5, exp6); diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/stringreduce/TestReduce.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/stringreduce/TestReduce.java index f7eaa85ad..13730f588 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/stringreduce/TestReduce.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/stringreduce/TestReduce.java @@ -37,9 +37,9 @@ public class TestReduce extends BaseND4JTest { public void testReducerDouble() { List> inputs = new ArrayList<>(); - inputs.add(Arrays.asList((Writable) new Text("1"), new Text("2"))); - inputs.add(Arrays.asList((Writable) new Text("1"), new Text("2"))); - inputs.add(Arrays.asList((Writable) new Text("1"), new Text("2"))); + inputs.add(Arrays.asList(new Text("1"), new Text("2"))); + inputs.add(Arrays.asList(new Text("1"), new Text("2"))); + inputs.add(Arrays.asList(new Text("1"), new Text("2"))); Map exp = new LinkedHashMap<>(); exp.put(StringReduceOp.MERGE, "12"); diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/transform/TestTransforms.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/transform/TestTransforms.java index c0468b916..9a6f8b1d3 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/transform/TestTransforms.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/transform/TestTransforms.java @@ -122,9 +122,9 @@ public class TestTransforms extends BaseND4JTest { assertNotNull(meta.getMaxAllowedValue()); assertEquals(2, (int) meta.getMaxAllowedValue()); - assertEquals(0, transform.map(Collections.singletonList((Writable) new Text("zero"))).get(0).toInt()); - assertEquals(1, transform.map(Collections.singletonList((Writable) new Text("one"))).get(0).toInt()); - assertEquals(2, 
transform.map(Collections.singletonList((Writable) new Text("two"))).get(0).toInt()); + assertEquals(0, transform.map(Collections.singletonList(new Text("zero"))).get(0).toInt()); + assertEquals(1, transform.map(Collections.singletonList(new Text("one"))).get(0).toInt()); + assertEquals(2, transform.map(Collections.singletonList(new Text("two"))).get(0).toInt()); } @Test @@ -147,11 +147,11 @@ public class TestTransforms extends BaseND4JTest { } assertEquals(Arrays.asList(new IntWritable(1), new IntWritable(0), new IntWritable(0)), - transform.map(Collections.singletonList((Writable) new Text("zero")))); + transform.map(Collections.singletonList(new Text("zero")))); assertEquals(Arrays.asList(new IntWritable(0), new IntWritable(1), new IntWritable(0)), - transform.map(Collections.singletonList((Writable) new Text("one")))); + transform.map(Collections.singletonList(new Text("one")))); assertEquals(Arrays.asList(new IntWritable(0), new IntWritable(0), new IntWritable(1)), - transform.map(Collections.singletonList((Writable) new Text("two")))); + transform.map(Collections.singletonList(new Text("two")))); } @Test @@ -177,16 +177,16 @@ public class TestTransforms extends BaseND4JTest { assertEquals(columnTypesExp, out.getColumnTypes()); //Expand (second,100) into (0,100,0). Leave the remaining columns as is - List e1 = Arrays.asList(new DoubleWritable(1), new DoubleWritable(0), new DoubleWritable(100), + List e1 = Arrays.asList(new DoubleWritable(1), new DoubleWritable(0), new DoubleWritable(100), new DoubleWritable(0), new DoubleWritable(-1)); - List a1 = t.map(Arrays.asList(new DoubleWritable(1), new Text("second"), new DoubleWritable(100), + List a1 = t.map(Arrays.asList(new DoubleWritable(1), new Text("second"), new DoubleWritable(100), new DoubleWritable(-1))); assertEquals(e1,a1); //Expand (third,200) into (0,0,200). 
Leave the remaining columns as is - List e2 = Arrays.asList(new DoubleWritable(1), new DoubleWritable(0), new DoubleWritable(0), + List e2 = Arrays.asList(new DoubleWritable(1), new DoubleWritable(0), new DoubleWritable(0), new DoubleWritable(200), new DoubleWritable(-1)); - List a2 = t.map(Arrays.asList(new DoubleWritable(1), new Text("third"), new DoubleWritable(200), + List a2 = t.map(Arrays.asList(new DoubleWritable(1), new Text("third"), new DoubleWritable(200), new DoubleWritable(-1))); assertEquals(e2,a2); } @@ -205,11 +205,11 @@ public class TestTransforms extends BaseND4JTest { assertEquals(Arrays.asList("zero", "one", "two"), meta.getStateNames()); assertEquals(Collections.singletonList((Writable) new Text("zero")), - transform.map(Collections.singletonList((Writable) new IntWritable(0)))); + transform.map(Collections.singletonList(new IntWritable(0)))); assertEquals(Collections.singletonList((Writable) new Text("one")), - transform.map(Collections.singletonList((Writable) new IntWritable(1)))); + transform.map(Collections.singletonList(new IntWritable(1)))); assertEquals(Collections.singletonList((Writable) new Text("two")), - transform.map(Collections.singletonList((Writable) new IntWritable(2)))); + transform.map(Collections.singletonList(new IntWritable(2)))); } @Test @@ -228,11 +228,11 @@ public class TestTransforms extends BaseND4JTest { assertEquals(Arrays.asList("column[3]", "column[4]", "column[5]"), out.getColumnNames()); assertEquals(Arrays.asList(new IntWritable(1), new IntWritable(0), new IntWritable(0)), - transform.map(Collections.singletonList((Writable) new IntWritable(3)))); + transform.map(Collections.singletonList(new IntWritable(3)))); assertEquals(Arrays.asList(new IntWritable(0), new IntWritable(1), new IntWritable(0)), - transform.map(Collections.singletonList((Writable) new IntWritable(4)))); + transform.map(Collections.singletonList(new IntWritable(4)))); assertEquals(Arrays.asList(new IntWritable(0), new IntWritable(0), new 
IntWritable(1)), - transform.map(Collections.singletonList((Writable) new IntWritable(5)))); + transform.map(Collections.singletonList(new IntWritable(5)))); } @Test @@ -249,11 +249,11 @@ public class TestTransforms extends BaseND4JTest { assertEquals(Arrays.asList("zero", "one", "two"), meta.getStateNames()); assertEquals(Collections.singletonList((Writable) new Text("zero")), - transform.map(Collections.singletonList((Writable) new Text("zero")))); + transform.map(Collections.singletonList(new Text("zero")))); assertEquals(Collections.singletonList((Writable) new Text("one")), - transform.map(Collections.singletonList((Writable) new Text("one")))); + transform.map(Collections.singletonList(new Text("one")))); assertEquals(Collections.singletonList((Writable) new Text("two")), - transform.map(Collections.singletonList((Writable) new Text("two")))); + transform.map(Collections.singletonList(new Text("two")))); } @Test @@ -350,7 +350,7 @@ public class TestTransforms extends BaseND4JTest { assertEquals(ColumnType.Integer, out.getMetaData(1).getColumnType()); assertEquals(Arrays.asList(new Text("one"), new IntWritable(1)), - transform.map(Arrays.asList((Writable) new DoubleWritable(1.0), new Text("one"), + transform.map(Arrays.asList(new DoubleWritable(1.0), new Text("one"), new IntWritable(1), new LongWritable(1L)))); } @@ -369,7 +369,7 @@ public class TestTransforms extends BaseND4JTest { assertEquals(ColumnType.Integer, out.getMetaData(1).getColumnType()); assertEquals(Arrays.asList(new Text("one"), new IntWritable(1)), - transform.map(Arrays.asList((Writable) new DoubleWritable(1.0), new Text("one"), + transform.map(Arrays.asList(new DoubleWritable(1.0), new Text("one"), new IntWritable(1), new LongWritable(1L)))); } @@ -386,11 +386,11 @@ public class TestTransforms extends BaseND4JTest { assertEquals(ColumnType.Integer, out.getMetaData(0).getColumnType()); assertEquals(Collections.singletonList((Writable) new IntWritable(0)), - 
transform.map(Collections.singletonList((Writable) new IntWritable(0)))); + transform.map(Collections.singletonList(new IntWritable(0)))); assertEquals(Collections.singletonList((Writable) new IntWritable(1)), - transform.map(Collections.singletonList((Writable) new IntWritable(1)))); + transform.map(Collections.singletonList(new IntWritable(1)))); assertEquals(Collections.singletonList((Writable) new IntWritable(1000)), - transform.map(Collections.singletonList((Writable) new Text("")))); + transform.map(Collections.singletonList(new Text("")))); } @Test @@ -405,11 +405,11 @@ public class TestTransforms extends BaseND4JTest { assertEquals(ColumnType.Integer, out.getMetaData(0).getColumnType()); assertEquals(Collections.singletonList((Writable) new IntWritable(0)), - transform.map(Collections.singletonList((Writable) new IntWritable(0)))); + transform.map(Collections.singletonList(new IntWritable(0)))); assertEquals(Collections.singletonList((Writable) new IntWritable(1)), - transform.map(Collections.singletonList((Writable) new IntWritable(1)))); + transform.map(Collections.singletonList(new IntWritable(1)))); assertEquals(Collections.singletonList((Writable) new IntWritable(1000)), - transform.map(Collections.singletonList((Writable) new Text("")))); + transform.map(Collections.singletonList(new Text("")))); } @Test @@ -434,13 +434,13 @@ public class TestTransforms extends BaseND4JTest { double loge2 = Math.log(2); assertEquals(0.0, - transform.map(Collections.singletonList((Writable) new DoubleWritable(min))).get(0).toDouble(), + transform.map(Collections.singletonList(new DoubleWritable(min))).get(0).toDouble(), 1e-6); double d = scale * Math.log((10 - min) / (mu - min) + 1) / loge2; - assertEquals(d, transform.map(Collections.singletonList((Writable) new DoubleWritable(10))).get(0).toDouble(), + assertEquals(d, transform.map(Collections.singletonList(new DoubleWritable(10))).get(0).toDouble(), 1e-6); d = scale * Math.log((3 - min) / (mu - min) + 1) / loge2; - 
assertEquals(d, transform.map(Collections.singletonList((Writable) new DoubleWritable(3))).get(0).toDouble(), + assertEquals(d, transform.map(Collections.singletonList(new DoubleWritable(3))).get(0).toDouble(), 1e-6); } @@ -466,22 +466,22 @@ public class TestTransforms extends BaseND4JTest { assertEquals(1, meta2.getMaxAllowedValue(), 1e-6); - assertEquals(0.0, transform.map(Collections.singletonList((Writable) new DoubleWritable(0))).get(0).toDouble(), + assertEquals(0.0, transform.map(Collections.singletonList(new DoubleWritable(0))).get(0).toDouble(), 1e-6); assertEquals(1.0, - transform.map(Collections.singletonList((Writable) new DoubleWritable(100))).get(0).toDouble(), + transform.map(Collections.singletonList(new DoubleWritable(100))).get(0).toDouble(), 1e-6); - assertEquals(0.5, transform.map(Collections.singletonList((Writable) new DoubleWritable(50))).get(0).toDouble(), + assertEquals(0.5, transform.map(Collections.singletonList(new DoubleWritable(50))).get(0).toDouble(), 1e-6); assertEquals(-1.0, - transform2.map(Collections.singletonList((Writable) new DoubleWritable(0))).get(0).toDouble(), + transform2.map(Collections.singletonList(new DoubleWritable(0))).get(0).toDouble(), 1e-6); assertEquals(1.0, - transform2.map(Collections.singletonList((Writable) new DoubleWritable(100))).get(0).toDouble(), + transform2.map(Collections.singletonList(new DoubleWritable(100))).get(0).toDouble(), 1e-6); assertEquals(0.0, - transform2.map(Collections.singletonList((Writable) new DoubleWritable(50))).get(0).toDouble(), + transform2.map(Collections.singletonList(new DoubleWritable(50))).get(0).toDouble(), 1e-6); } @@ -504,13 +504,13 @@ public class TestTransforms extends BaseND4JTest { assertNull(meta.getMaxAllowedValue()); - assertEquals(0.0, transform.map(Collections.singletonList((Writable) new DoubleWritable(mu))).get(0).toDouble(), + assertEquals(0.0, transform.map(Collections.singletonList(new DoubleWritable(mu))).get(0).toDouble(), 1e-6); double d = (10 - mu) / 
sigma; - assertEquals(d, transform.map(Collections.singletonList((Writable) new DoubleWritable(10))).get(0).toDouble(), + assertEquals(d, transform.map(Collections.singletonList(new DoubleWritable(10))).get(0).toDouble(), 1e-6); d = (-2 - mu) / sigma; - assertEquals(d, transform.map(Collections.singletonList((Writable) new DoubleWritable(-2))).get(0).toDouble(), + assertEquals(d, transform.map(Collections.singletonList(new DoubleWritable(-2))).get(0).toDouble(), 1e-6); } @@ -532,10 +532,10 @@ public class TestTransforms extends BaseND4JTest { assertNull(meta.getMaxAllowedValue()); - assertEquals(0.0, transform.map(Collections.singletonList((Writable) new DoubleWritable(mu))).get(0).toDouble(), + assertEquals(0.0, transform.map(Collections.singletonList(new DoubleWritable(mu))).get(0).toDouble(), 1e-6); assertEquals(10 - mu, - transform.map(Collections.singletonList((Writable) new DoubleWritable(10))).get(0).toDouble(), + transform.map(Collections.singletonList(new DoubleWritable(10))).get(0).toDouble(), 1e-6); } @@ -552,11 +552,11 @@ public class TestTransforms extends BaseND4JTest { assertEquals(ColumnType.String, out.getMetaData(0).getColumnType()); assertEquals(Collections.singletonList((Writable) new Text("one")), - transform.map(Collections.singletonList((Writable) new Text("one")))); + transform.map(Collections.singletonList(new Text("one")))); assertEquals(Collections.singletonList((Writable) new Text("two")), - transform.map(Collections.singletonList((Writable) new Text("two")))); + transform.map(Collections.singletonList(new Text("two")))); assertEquals(Collections.singletonList((Writable) new Text("replacement")), - transform.map(Collections.singletonList((Writable) new Text("this should be replaced")))); + transform.map(Collections.singletonList(new Text("this should be replaced")))); } @Test @@ -571,13 +571,13 @@ public class TestTransforms extends BaseND4JTest { assertEquals(ColumnType.String, out.getMetaData(0).getColumnType()); 
assertEquals(Collections.singletonList((Writable) new Text("one")), - transform.map(Collections.singletonList((Writable) new Text("one ")))); + transform.map(Collections.singletonList(new Text("one ")))); assertEquals(Collections.singletonList((Writable) new Text("two")), - transform.map(Collections.singletonList((Writable) new Text("two\t")))); + transform.map(Collections.singletonList(new Text("two\t")))); assertEquals(Collections.singletonList((Writable) new Text("three")), - transform.map(Collections.singletonList((Writable) new Text("three\n")))); + transform.map(Collections.singletonList(new Text("three\n")))); assertEquals(Collections.singletonList((Writable) new Text("one")), - transform.map(Collections.singletonList((Writable) new Text(" o n e\t")))); + transform.map(Collections.singletonList(new Text(" o n e\t")))); } @Test @@ -592,11 +592,11 @@ public class TestTransforms extends BaseND4JTest { assertEquals(ColumnType.String, out.getMetaData(0).getColumnType()); assertEquals(Collections.singletonList((Writable) new Text("one")), - transform.map(Collections.singletonList((Writable) new Text("one")))); + transform.map(Collections.singletonList(new Text("one")))); assertEquals(Collections.singletonList((Writable) new Text("newvalue")), - transform.map(Collections.singletonList((Writable) new Text("")))); + transform.map(Collections.singletonList(new Text("")))); assertEquals(Collections.singletonList((Writable) new Text("three")), - transform.map(Collections.singletonList((Writable) new Text("three")))); + transform.map(Collections.singletonList(new Text("three")))); } @Test @@ -611,11 +611,11 @@ public class TestTransforms extends BaseND4JTest { assertEquals(ColumnType.String, out.getMetaData(0).getColumnType()); assertEquals(Collections.singletonList((Writable) new Text("one_AppendThis")), - transform.map(Collections.singletonList((Writable) new Text("one")))); + transform.map(Collections.singletonList(new Text("one")))); 
assertEquals(Collections.singletonList((Writable) new Text("two_AppendThis")), - transform.map(Collections.singletonList((Writable) new Text("two")))); + transform.map(Collections.singletonList(new Text("two")))); assertEquals(Collections.singletonList((Writable) new Text("three_AppendThis")), - transform.map(Collections.singletonList((Writable) new Text("three")))); + transform.map(Collections.singletonList(new Text("three")))); } @Test @@ -637,17 +637,17 @@ public class TestTransforms extends BaseND4JTest { } assertEquals(Arrays.asList(new Text("false"), new Text("false"), new Text("false")), - transform.map(Collections.singletonList((Writable) new Text("")))); + transform.map(Collections.singletonList(new Text("")))); assertEquals(Arrays.asList(new Text("true"), new Text("false"), new Text("false")), - transform.map(Collections.singletonList((Writable) new Text("a")))); + transform.map(Collections.singletonList(new Text("a")))); assertEquals(Arrays.asList(new Text("false"), new Text("true"), new Text("false")), - transform.map(Collections.singletonList((Writable) new Text("b")))); + transform.map(Collections.singletonList(new Text("b")))); assertEquals(Arrays.asList(new Text("false"), new Text("false"), new Text("true")), - transform.map(Collections.singletonList((Writable) new Text("c")))); + transform.map(Collections.singletonList(new Text("c")))); assertEquals(Arrays.asList(new Text("true"), new Text("false"), new Text("true")), - transform.map(Collections.singletonList((Writable) new Text("a,c")))); + transform.map(Collections.singletonList(new Text("a,c")))); assertEquals(Arrays.asList(new Text("true"), new Text("true"), new Text("true")), - transform.map(Collections.singletonList((Writable) new Text("a,b,c")))); + transform.map(Collections.singletonList(new Text("a,b,c")))); } @Test @@ -665,11 +665,11 @@ public class TestTransforms extends BaseND4JTest { assertEquals(ColumnType.String, out.getMetaData(0).getColumnType()); 
assertEquals(Collections.singletonList((Writable) new Text("ONE")), - transform.map(Collections.singletonList((Writable) new Text("one")))); + transform.map(Collections.singletonList(new Text("one")))); assertEquals(Collections.singletonList((Writable) new Text("TWO")), - transform.map(Collections.singletonList((Writable) new Text("two")))); + transform.map(Collections.singletonList(new Text("two")))); assertEquals(Collections.singletonList((Writable) new Text("three")), - transform.map(Collections.singletonList((Writable) new Text("three")))); + transform.map(Collections.singletonList(new Text("three")))); } @@ -721,9 +721,9 @@ public class TestTransforms extends BaseND4JTest { long out2 = 1435708799000L; assertEquals(Collections.singletonList((Writable) new LongWritable(out1)), - transform.map(Collections.singletonList((Writable) new Text(in1)))); + transform.map(Collections.singletonList(new Text(in1)))); assertEquals(Collections.singletonList((Writable) new LongWritable(out2)), - transform.map(Collections.singletonList((Writable) new Text(in2)))); + transform.map(Collections.singletonList(new Text(in2)))); //Check serialization: things like DateTimeFormatter etc aren't serializable, hence we need custom serialization :/ ByteArrayOutputStream baos = new ByteArrayOutputStream(); @@ -737,9 +737,9 @@ public class TestTransforms extends BaseND4JTest { Transform deserialized = (Transform) ois.readObject(); assertEquals(Collections.singletonList((Writable) new LongWritable(out1)), - deserialized.map(Collections.singletonList((Writable) new Text(in1)))); + deserialized.map(Collections.singletonList(new Text(in1)))); assertEquals(Collections.singletonList((Writable) new LongWritable(out2)), - deserialized.map(Collections.singletonList((Writable) new Text(in2)))); + deserialized.map(Collections.singletonList(new Text(in2)))); } @@ -792,9 +792,9 @@ public class TestTransforms extends BaseND4JTest { out2.add(new Text("2015-06-30 23:59:59")); assertEquals(out1, - 
transform.map(Arrays.asList((Writable) new LongWritable(in1), new Text("otherColumnValue")))); + transform.map(Arrays.asList(new LongWritable(in1), new Text("otherColumnValue")))); assertEquals(out2, - transform.map(Arrays.asList((Writable) new LongWritable(in2), new Text("otherColumnValue")))); + transform.map(Arrays.asList(new LongWritable(in2), new Text("otherColumnValue")))); @@ -810,9 +810,9 @@ public class TestTransforms extends BaseND4JTest { Transform deserialized = (Transform) ois.readObject(); assertEquals(out1, deserialized - .map(Arrays.asList((Writable) new LongWritable(in1), new Text("otherColumnValue")))); + .map(Arrays.asList(new LongWritable(in1), new Text("otherColumnValue")))); assertEquals(out2, deserialized - .map(Arrays.asList((Writable) new LongWritable(in2), new Text("otherColumnValue")))); + .map(Arrays.asList(new LongWritable(in2), new Text("otherColumnValue")))); } @@ -839,8 +839,8 @@ public class TestTransforms extends BaseND4JTest { assertEquals(expOutTypes.get(i), out.getType(i)); } - List inList = Arrays.asList((Writable) new Text("one"), new IntWritable(2), new LongWritable(3L)); - List outList = Arrays.asList((Writable) new Text("one"), new IntWritable(2), new IntWritable(2), + List inList = Arrays.asList(new Text("one"), new IntWritable(2), new LongWritable(3L)); + List outList = Arrays.asList(new Text("one"), new IntWritable(2), new IntWritable(2), new LongWritable(3L), new LongWritable(3L)); assertEquals(outList, transform.map(inList)); @@ -861,11 +861,11 @@ public class TestTransforms extends BaseND4JTest { assertEquals(5, (int) meta.getMaxAllowedValue()); assertEquals(Collections.singletonList((Writable) new IntWritable(-5)), - transform.map(Collections.singletonList((Writable) new IntWritable(-1)))); + transform.map(Collections.singletonList(new IntWritable(-1)))); assertEquals(Collections.singletonList((Writable) new IntWritable(0)), - transform.map(Collections.singletonList((Writable) new IntWritable(0)))); + 
transform.map(Collections.singletonList(new IntWritable(0)))); assertEquals(Collections.singletonList((Writable) new IntWritable(5)), - transform.map(Collections.singletonList((Writable) new IntWritable(1)))); + transform.map(Collections.singletonList(new IntWritable(1)))); } @Test @@ -885,11 +885,11 @@ public class TestTransforms extends BaseND4JTest { assertEquals(Arrays.asList((Writable) new IntWritable(1), new Text("something"), new IntWritable(2), new IntWritable(3)), - transform.map(Arrays.asList((Writable) new IntWritable(1), new Text("something"), + transform.map(Arrays.asList(new IntWritable(1), new Text("something"), new IntWritable(2)))); assertEquals(Arrays.asList((Writable) new IntWritable(100), new Text("something2"), new IntWritable(21), new IntWritable(121)), - transform.map(Arrays.asList((Writable) new IntWritable(100), new Text("something2"), + transform.map(Arrays.asList(new IntWritable(100), new Text("something2"), new IntWritable(21)))); } @@ -908,11 +908,11 @@ public class TestTransforms extends BaseND4JTest { assertEquals(5, (long) meta.getMaxAllowedValue()); assertEquals(Collections.singletonList((Writable) new LongWritable(-5)), - transform.map(Collections.singletonList((Writable) new LongWritable(-1)))); + transform.map(Collections.singletonList(new LongWritable(-1)))); assertEquals(Collections.singletonList((Writable) new LongWritable(0)), - transform.map(Collections.singletonList((Writable) new LongWritable(0)))); + transform.map(Collections.singletonList(new LongWritable(0)))); assertEquals(Collections.singletonList((Writable) new LongWritable(5)), - transform.map(Collections.singletonList((Writable) new LongWritable(1)))); + transform.map(Collections.singletonList(new LongWritable(1)))); } @Test @@ -932,11 +932,11 @@ public class TestTransforms extends BaseND4JTest { assertEquals(Arrays.asList((Writable) new LongWritable(1), new Text("something"), new LongWritable(2), new LongWritable(3)), - transform.map(Arrays.asList((Writable) new 
LongWritable(1), new Text("something"), + transform.map(Arrays.asList(new LongWritable(1), new Text("something"), new LongWritable(2)))); assertEquals(Arrays.asList((Writable) new LongWritable(100), new Text("something2"), new LongWritable(21), new LongWritable(121)), - transform.map(Arrays.asList((Writable) new LongWritable(100), new Text("something2"), + transform.map(Arrays.asList(new LongWritable(100), new Text("something2"), new LongWritable(21)))); } @@ -952,9 +952,9 @@ public class TestTransforms extends BaseND4JTest { assertEquals(ColumnType.Time, out.getType(0)); assertEquals(Collections.singletonList((Writable) new LongWritable(1000 + 43200000)), - transform.map(Collections.singletonList((Writable) new LongWritable(1000)))); + transform.map(Collections.singletonList(new LongWritable(1000)))); assertEquals(Collections.singletonList((Writable) new LongWritable(1452441600000L + 43200000)), - transform.map(Collections.singletonList((Writable) new LongWritable(1452441600000L)))); + transform.map(Collections.singletonList(new LongWritable(1452441600000L)))); } @Test @@ -972,11 +972,11 @@ public class TestTransforms extends BaseND4JTest { assertEquals(5.0, meta.getMaxAllowedValue(), 1e-6); assertEquals(Collections.singletonList((Writable) new DoubleWritable(-5)), - transform.map(Collections.singletonList((Writable) new DoubleWritable(-1)))); + transform.map(Collections.singletonList(new DoubleWritable(-1)))); assertEquals(Collections.singletonList((Writable) new DoubleWritable(0)), - transform.map(Collections.singletonList((Writable) new DoubleWritable(0)))); + transform.map(Collections.singletonList(new DoubleWritable(0)))); assertEquals(Collections.singletonList((Writable) new DoubleWritable(5)), - transform.map(Collections.singletonList((Writable) new DoubleWritable(1)))); + transform.map(Collections.singletonList(new DoubleWritable(1)))); } @Test @@ -992,11 +992,11 @@ public class TestTransforms extends BaseND4JTest { assertEquals(ColumnType.String, 
out.getType(1)); assertEquals(Arrays.asList(new DoubleWritable(Math.sin(1)), new Text("0")), - transform.map(Arrays.asList(new DoubleWritable(1), new Text("0")))); + transform.map(Arrays.asList(new DoubleWritable(1), new Text("0")))); assertEquals(Arrays.asList(new DoubleWritable(Math.sin(2)), new Text("1")), - transform.map(Arrays.asList(new DoubleWritable(2), new Text("1")))); + transform.map(Arrays.asList(new DoubleWritable(2), new Text("1")))); assertEquals(Arrays.asList(new DoubleWritable(Math.sin(3)), new Text("2")), - transform.map(Arrays.asList(new DoubleWritable(3), new Text("2")))); + transform.map(Arrays.asList(new DoubleWritable(3), new Text("2")))); } @Test @@ -1016,11 +1016,11 @@ public class TestTransforms extends BaseND4JTest { assertEquals(Arrays.asList((Writable) new Text("something"), new DoubleWritable(1.0), new DoubleWritable(2.1), new DoubleWritable(3.1)), - transform.map(Arrays.asList((Writable) new Text("something"), new DoubleWritable(1.0), + transform.map(Arrays.asList(new Text("something"), new DoubleWritable(1.0), new DoubleWritable(2.1)))); assertEquals(Arrays.asList((Writable) new Text("something2"), new DoubleWritable(100.0), new DoubleWritable(21.1), new DoubleWritable(121.1)), - transform.map(Arrays.asList((Writable) new Text("something2"), new DoubleWritable(100.0), + transform.map(Arrays.asList(new Text("something2"), new DoubleWritable(100.0), new DoubleWritable(21.1)))); } @@ -1061,10 +1061,10 @@ public class TestTransforms extends BaseND4JTest { assertEquals(Arrays.asList(ColumnType.Integer, ColumnType.String, ColumnType.Double), out.getColumnTypes()); assertEquals(Arrays.asList((Writable) new IntWritable(1), new Text("one"), new DoubleWritable(1.1)), transform - .map(Arrays.asList((Writable) new DoubleWritable(1.1), new Text("one"), new IntWritable(1)))); + .map(Arrays.asList(new DoubleWritable(1.1), new Text("one"), new IntWritable(1)))); assertEquals(Arrays.asList((Writable) new IntWritable(2), new Text("two"), new 
DoubleWritable(200.2)), transform - .map(Arrays.asList((Writable) new DoubleWritable(200.2), new Text("two"), new IntWritable(2)))); + .map(Arrays.asList(new DoubleWritable(200.2), new Text("two"), new IntWritable(2)))); } @Test @@ -1078,15 +1078,15 @@ public class TestTransforms extends BaseND4JTest { transform.setInputSchema(schema); assertEquals(Collections.singletonList((Writable) new IntWritable(10)), - transform.map(Collections.singletonList((Writable) new IntWritable(10)))); + transform.map(Collections.singletonList(new IntWritable(10)))); assertEquals(Collections.singletonList((Writable) new IntWritable(1)), - transform.map(Collections.singletonList((Writable) new IntWritable(1)))); + transform.map(Collections.singletonList(new IntWritable(1)))); assertEquals(Collections.singletonList((Writable) new IntWritable(0)), - transform.map(Collections.singletonList((Writable) new IntWritable(0)))); + transform.map(Collections.singletonList(new IntWritable(0)))); assertEquals(Collections.singletonList((Writable) new IntWritable(0)), - transform.map(Collections.singletonList((Writable) new IntWritable(-1)))); + transform.map(Collections.singletonList(new IntWritable(-1)))); assertEquals(Collections.singletonList((Writable) new IntWritable(0)), - transform.map(Collections.singletonList((Writable) new IntWritable(-10)))); + transform.map(Collections.singletonList(new IntWritable(-10)))); } @Test @@ -1100,15 +1100,15 @@ public class TestTransforms extends BaseND4JTest { transform.setInputSchema(schema); assertEquals(Collections.singletonList((Writable) new IntWritable(1)), - transform.map(Collections.singletonList((Writable) new IntWritable(10)))); + transform.map(Collections.singletonList(new IntWritable(10)))); assertEquals(Collections.singletonList((Writable) new IntWritable(1)), - transform.map(Collections.singletonList((Writable) new IntWritable(1)))); + transform.map(Collections.singletonList(new IntWritable(1)))); assertEquals(Collections.singletonList((Writable) 
new IntWritable(1)), - transform.map(Collections.singletonList((Writable) new IntWritable(0)))); + transform.map(Collections.singletonList(new IntWritable(0)))); assertEquals(Collections.singletonList((Writable) new IntWritable(0)), - transform.map(Collections.singletonList((Writable) new IntWritable(-1)))); + transform.map(Collections.singletonList(new IntWritable(-1)))); assertEquals(Collections.singletonList((Writable) new IntWritable(0)), - transform.map(Collections.singletonList((Writable) new IntWritable(-10)))); + transform.map(Collections.singletonList(new IntWritable(-10)))); } @Test @@ -1119,11 +1119,11 @@ public class TestTransforms extends BaseND4JTest { Transform transform = new ConditionalCopyValueTransform("third", "second", condition); transform.setInputSchema(schema); - List list = Arrays.asList((Writable) new Text("first"), new Text("second"), new Text("third")); + List list = Arrays.asList(new Text("first"), new Text("second"), new Text("third")); assertEquals(list, transform.map(list)); - list = Arrays.asList((Writable) new Text("first"), new Text("second"), new Text("")); - List exp = Arrays.asList((Writable) new Text("first"), new Text("second"), new Text("second")); + list = Arrays.asList(new Text("first"), new Text("second"), new Text("")); + List exp = Arrays.asList(new Text("first"), new Text("second"), new Text("second")); assertEquals(exp, transform.map(list)); } @@ -1133,10 +1133,10 @@ public class TestTransforms extends BaseND4JTest { .addColumnDouble("thirdCol").build(); List> sequence = new ArrayList<>(); - sequence.add(Arrays.asList(new Text("val0"), new IntWritable(10), new DoubleWritable(10))); - sequence.add(Arrays.asList(new Text("val1"), new IntWritable(15), new DoubleWritable(15))); - sequence.add(Arrays.asList(new Text("val2"), new IntWritable(25), new DoubleWritable(25))); - sequence.add(Arrays.asList(new Text("val3"), new IntWritable(40), new DoubleWritable(40))); + sequence.add(Arrays.asList(new Text("val0"), new 
IntWritable(10), new DoubleWritable(10))); + sequence.add(Arrays.asList(new Text("val1"), new IntWritable(15), new DoubleWritable(15))); + sequence.add(Arrays.asList(new Text("val2"), new IntWritable(25), new DoubleWritable(25))); + sequence.add(Arrays.asList(new Text("val3"), new IntWritable(40), new DoubleWritable(40))); Transform t = new SequenceDifferenceTransform("secondCol"); t.setInputSchema(schema); @@ -1144,10 +1144,10 @@ public class TestTransforms extends BaseND4JTest { List> out = t.mapSequence(sequence); List> expected = new ArrayList<>(); - expected.add(Arrays.asList(new Text("val0"), new IntWritable(0), new DoubleWritable(10))); - expected.add(Arrays.asList(new Text("val1"), new IntWritable(15 - 10), new DoubleWritable(15))); - expected.add(Arrays.asList(new Text("val2"), new IntWritable(25 - 15), new DoubleWritable(25))); - expected.add(Arrays.asList(new Text("val3"), new IntWritable(40 - 25), new DoubleWritable(40))); + expected.add(Arrays.asList(new Text("val0"), new IntWritable(0), new DoubleWritable(10))); + expected.add(Arrays.asList(new Text("val1"), new IntWritable(15 - 10), new DoubleWritable(15))); + expected.add(Arrays.asList(new Text("val2"), new IntWritable(25 - 15), new DoubleWritable(25))); + expected.add(Arrays.asList(new Text("val3"), new IntWritable(40 - 25), new DoubleWritable(40))); assertEquals(expected, out); @@ -1160,10 +1160,10 @@ public class TestTransforms extends BaseND4JTest { assertEquals(outputSchema.getColumnNames(), Arrays.asList("firstCol", "secondCol", "newThirdColName")); expected = new ArrayList<>(); - expected.add(Arrays.asList(new Text("val0"), new IntWritable(10), NullWritable.INSTANCE)); - expected.add(Arrays.asList(new Text("val1"), new IntWritable(15), NullWritable.INSTANCE)); - expected.add(Arrays.asList(new Text("val2"), new IntWritable(25), new DoubleWritable(25 - 10))); - expected.add(Arrays.asList(new Text("val3"), new IntWritable(40), new DoubleWritable(40 - 15))); + expected.add(Arrays.asList(new 
Text("val0"), new IntWritable(10), NullWritable.INSTANCE)); + expected.add(Arrays.asList(new Text("val1"), new IntWritable(15), NullWritable.INSTANCE)); + expected.add(Arrays.asList(new Text("val2"), new IntWritable(25), new DoubleWritable(25 - 10))); + expected.add(Arrays.asList(new Text("val3"), new IntWritable(40), new DoubleWritable(40 - 15))); } @@ -1181,9 +1181,9 @@ public class TestTransforms extends BaseND4JTest { assertEquals(Arrays.asList((Writable) new Text("something"), new DoubleWritable(1.0), new IntWritable(10)), - transform.map(Arrays.asList((Writable) new Text("something"), new DoubleWritable(1.0)))); + transform.map(Arrays.asList(new Text("something"), new DoubleWritable(1.0)))); assertEquals(Arrays.asList((Writable) new Text("something2"), new DoubleWritable(100.0), new IntWritable(10)), - transform.map(Arrays.asList((Writable) new Text("something2"), new DoubleWritable(100.0)))); + transform.map(Arrays.asList(new Text("something2"), new DoubleWritable(100.0)))); } @Test @@ -1202,7 +1202,7 @@ public class TestTransforms extends BaseND4JTest { assertEquals(ColumnType.String, out.getMetaData(0).getColumnType()); assertEquals(Collections.singletonList((Writable) new Text("BoneConeTone")), - transform.map(Collections.singletonList((Writable) new Text("B1midT3")))); + transform.map(Collections.singletonList(new Text("B1midT3")))); // No link map = new HashMap<>(); @@ -1215,7 +1215,7 @@ public class TestTransforms extends BaseND4JTest { assertEquals(ColumnType.String, out.getMetaData(0).getColumnType()); assertEquals(Collections.singletonList((Writable) new Text("4.25")), - transform.map(Collections.singletonList((Writable) new Text(" 4.25 ")))); + transform.map(Collections.singletonList(new Text(" 4.25 ")))); } @Test @@ -1234,12 +1234,12 @@ public class TestTransforms extends BaseND4JTest { t.setInputSchema(schema); List> seq = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), - Arrays.asList(new 
DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), - Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), + Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8))); List> exp = Collections.singletonList( - Arrays.asList(new DoubleWritable(3), new LongWritable(3L), new DoubleWritable(8))); + Arrays.asList(new DoubleWritable(3), new LongWritable(3L), new DoubleWritable(8))); List> act = t.mapSequence(seq); assertEquals(exp, act); @@ -1255,22 +1255,22 @@ public class TestTransforms extends BaseND4JTest { @Test public void testSequenceMovingWindowReduceTransform(){ List> seq = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), - Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8)), - Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), + Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8)), + Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11))); List> exp1 = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2), new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5), new DoubleWritable((2+5)/2.0)), - Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8), new DoubleWritable((2+5+8)/3.0)), - Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11), new 
DoubleWritable((5+8+11)/3.0))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2), new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5), new DoubleWritable((2+5)/2.0)), + Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8), new DoubleWritable((2+5+8)/3.0)), + Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11), new DoubleWritable((5+8+11)/3.0))); List> exp2 = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2), NullWritable.INSTANCE), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5), NullWritable.INSTANCE), - Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8), new DoubleWritable((2+5+8)/3.0)), - Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11), new DoubleWritable((5+8+11)/3.0))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2), NullWritable.INSTANCE), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5), NullWritable.INSTANCE), + Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8), new DoubleWritable((2+5+8)/3.0)), + Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11), new DoubleWritable((5+8+11)/3.0))); Schema schema = new SequenceSchema.Builder().addColumnsDouble("col%d",0,2).build(); Schema expOutSchema1 = new SequenceSchema.Builder().addColumnsDouble("col%d",0,2).addColumnDouble("mean(3,col2)").build(); @@ -1296,18 +1296,18 @@ public class TestTransforms extends BaseND4JTest { @Test public void testTrimSequenceTransform(){ List> seq = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), - Arrays.asList(new 
DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8)), - Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), + Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8)), + Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11))); List> expTrimFirst = Arrays.asList( - Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8)), - Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11))); + Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8)), + Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11))); List> expTrimLast = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5))); SequenceTrimTransform tFirst = new SequenceTrimTransform(2, true); SequenceTrimTransform tLast = new SequenceTrimTransform(2, false); @@ -1323,15 +1323,15 @@ public class TestTransforms extends BaseND4JTest { @Test public void testSequenceTrimToLengthTransform(){ List> seq = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), - Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8)), - Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), + 
Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), + Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8)), + Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11))); List> expTrimLength3 = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), - Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), + Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8))); Schema s = new Schema.Builder() .addColumnsDouble("first", "second", "third") @@ -1346,8 +1346,8 @@ public class TestTransforms extends BaseND4JTest { List> seq2 = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5))); out = p.executeSequence(seq2); assertEquals(seq2, out); @@ -1361,28 +1361,28 @@ public class TestTransforms extends BaseND4JTest { @Test public void testSequenceTrimToLengthTransformTrimOrPad(){ List> seq = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), - Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8)), - Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11)), - Arrays.asList(new DoubleWritable(12), new DoubleWritable(13), new DoubleWritable(14))); + 
Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), + Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8)), + Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11)), + Arrays.asList(new DoubleWritable(12), new DoubleWritable(13), new DoubleWritable(14))); List> seq2 = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5))); List> expTrimLength4 = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), - Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8)), - Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), + Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8)), + Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11))); Schema s = new Schema.Builder() .addColumnsDouble("first", "second", "third") .build(); TransformProcess p = new TransformProcess.Builder(s) - .trimOrPadSequenceToLength(4, Arrays.asList(new DoubleWritable(900), new DoubleWritable(901), new DoubleWritable(902))) + .trimOrPadSequenceToLength(4, Arrays.asList(new DoubleWritable(900), new DoubleWritable(901), new DoubleWritable(902))) .build(); List> out = p.executeSequence(seq); @@ -1390,10 +1390,10 @@ 
public class TestTransforms extends BaseND4JTest { List> exp2 = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), - Arrays.asList(new DoubleWritable(900), new DoubleWritable(901), new DoubleWritable(902)), - Arrays.asList(new DoubleWritable(900), new DoubleWritable(901), new DoubleWritable(902))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), + Arrays.asList(new DoubleWritable(900), new DoubleWritable(901), new DoubleWritable(902)), + Arrays.asList(new DoubleWritable(900), new DoubleWritable(901), new DoubleWritable(902))); out = p.executeSequence(seq2); assertEquals(exp2, out); @@ -1410,21 +1410,21 @@ public class TestTransforms extends BaseND4JTest { public void testSequenceOffsetTransform(){ List> seq = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), - Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8)), - Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(5)), + Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(8)), + Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(11))); Schema schema = new SequenceSchema.Builder().addColumnsDouble("col%d",0,2).build(); //First: test InPlace List> exp1 = Arrays.asList( - Arrays.asList(new DoubleWritable(6), new DoubleWritable(1), new DoubleWritable(8)), - Arrays.asList(new DoubleWritable(9), new DoubleWritable(4), new DoubleWritable(11))); + 
Arrays.asList(new DoubleWritable(6), new DoubleWritable(1), new DoubleWritable(8)), + Arrays.asList(new DoubleWritable(9), new DoubleWritable(4), new DoubleWritable(11))); List> exp2 = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(7), new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(10), new DoubleWritable(5))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(7), new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(10), new DoubleWritable(5))); //In-place + trim SequenceOffsetTransform t_inplace_trim_p2 = new SequenceOffsetTransform(Collections.singletonList("col1"), @@ -1447,15 +1447,15 @@ public class TestTransforms extends BaseND4JTest { t_inplace_specified_m2.setInputSchema(schema); List> exp3 = Arrays.asList( - Arrays.asList(new DoubleWritable(0), NullWritable.INSTANCE, new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), NullWritable.INSTANCE, new DoubleWritable(5)), - Arrays.asList(new DoubleWritable(6), new DoubleWritable(1), new DoubleWritable(8)), - Arrays.asList(new DoubleWritable(9), new DoubleWritable(4), new DoubleWritable(11))); + Arrays.asList(new DoubleWritable(0), NullWritable.INSTANCE, new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), NullWritable.INSTANCE, new DoubleWritable(5)), + Arrays.asList(new DoubleWritable(6), new DoubleWritable(1), new DoubleWritable(8)), + Arrays.asList(new DoubleWritable(9), new DoubleWritable(4), new DoubleWritable(11))); List> exp4 = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(7), new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(10), new DoubleWritable(5)), - Arrays.asList(new DoubleWritable(6), NullWritable.INSTANCE, new DoubleWritable(8)), - Arrays.asList(new DoubleWritable(9), NullWritable.INSTANCE, new DoubleWritable(11))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(7), new DoubleWritable(2)), + Arrays.asList(new 
DoubleWritable(3), new DoubleWritable(10), new DoubleWritable(5)), + Arrays.asList(new DoubleWritable(6), NullWritable.INSTANCE, new DoubleWritable(8)), + Arrays.asList(new DoubleWritable(9), NullWritable.INSTANCE, new DoubleWritable(11))); assertEquals(exp3, t_inplace_specified_p2.mapSequence(seq)); assertEquals(exp4, t_inplace_specified_m2.mapSequence(seq)); @@ -1465,12 +1465,12 @@ public class TestTransforms extends BaseND4JTest { //Second: test NewColumn List> exp1a = Arrays.asList( - Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(1), new DoubleWritable(8)), - Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(4), new DoubleWritable(11))); + Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(1), new DoubleWritable(8)), + Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(4), new DoubleWritable(11))); List> exp2a = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(7), new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(10), new DoubleWritable(5))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(7), new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(10), new DoubleWritable(5))); SequenceOffsetTransform t_newcol_trim_p2 = new SequenceOffsetTransform(Collections.singletonList("col1"), 2, SequenceOffsetTransform.OperationType.NewColumn, SequenceOffsetTransform.EdgeHandling.TrimSequence, null); SequenceOffsetTransform t_newcol_trim_m2 = new SequenceOffsetTransform(Collections.singletonList("col1"), @@ -1482,15 +1482,15 @@ public class TestTransforms extends BaseND4JTest { assertEquals(exp2a, t_newcol_trim_m2.mapSequence(seq)); List> exp3a = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), NullWritable.INSTANCE, new DoubleWritable(2)), - 
Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), NullWritable.INSTANCE, new DoubleWritable(5)), - Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(1), new DoubleWritable(8)), - Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(4), new DoubleWritable(11))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), NullWritable.INSTANCE, new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), NullWritable.INSTANCE, new DoubleWritable(5)), + Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), new DoubleWritable(1), new DoubleWritable(8)), + Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), new DoubleWritable(4), new DoubleWritable(11))); List> exp4a = Arrays.asList( - Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(7), new DoubleWritable(2)), - Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(10), new DoubleWritable(5)), - Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), NullWritable.INSTANCE, new DoubleWritable(8)), - Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), NullWritable.INSTANCE, new DoubleWritable(11))); + Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), new DoubleWritable(7), new DoubleWritable(2)), + Arrays.asList(new DoubleWritable(3), new DoubleWritable(4), new DoubleWritable(10), new DoubleWritable(5)), + Arrays.asList(new DoubleWritable(6), new DoubleWritable(7), NullWritable.INSTANCE, new DoubleWritable(8)), + Arrays.asList(new DoubleWritable(9), new DoubleWritable(10), NullWritable.INSTANCE, new DoubleWritable(11))); SequenceOffsetTransform t_newcol_specified_p2 = new SequenceOffsetTransform(Collections.singletonList("col1"), 2, SequenceOffsetTransform.OperationType.NewColumn, SequenceOffsetTransform.EdgeHandling.SpecifiedValue, NullWritable.INSTANCE); @@ -1519,7 +1519,7 @@ public class TestTransforms extends BaseND4JTest { Schema s = 
new Schema.Builder().addColumnString("inCol").build(); t.setInputSchema(s); - List l = Collections.singletonList(new Text("cat,cat,dog,dog,dog,unknown")); + List l = Collections.singletonList(new Text("cat,cat,dog,dog,dog,unknown")); List out = t.map(l); @@ -1541,7 +1541,7 @@ public class TestTransforms extends BaseND4JTest { Schema s = new Schema.Builder().addColumnString("inCol").build(); t.setInputSchema(s); - List l = Collections.singletonList(new Text("cat,dog,dog,dog,unknown")); + List l = Collections.singletonList(new Text("cat,dog,dog,dog,unknown")); List out = t.map(l); @@ -1559,8 +1559,8 @@ public class TestTransforms extends BaseND4JTest { Schema s = new Schema.Builder().addColumnString("col").addColumnDouble("d").build(); List> inSeq = Arrays.asList( - Arrays.asList(new Text("text"), new DoubleWritable(1.0)), - Arrays.asList(new Text("ab"), new DoubleWritable(2.0))); + Arrays.asList(new Text("text"), new DoubleWritable(1.0)), + Arrays.asList(new Text("ab"), new DoubleWritable(2.0))); Map map = new HashMap<>(); map.put('a', 0); @@ -1570,12 +1570,12 @@ public class TestTransforms extends BaseND4JTest { map.put('x', 4); List> exp = Arrays.asList( - Arrays.asList(new IntWritable(3), new DoubleWritable(1.0)), - Arrays.asList(new IntWritable(2), new DoubleWritable(1.0)), - Arrays.asList(new IntWritable(4), new DoubleWritable(1.0)), - Arrays.asList(new IntWritable(3), new DoubleWritable(1.0)), - Arrays.asList(new IntWritable(0), new DoubleWritable(2.0)), - Arrays.asList(new IntWritable(1), new DoubleWritable(2.0))); + Arrays.asList(new IntWritable(3), new DoubleWritable(1.0)), + Arrays.asList(new IntWritable(2), new DoubleWritable(1.0)), + Arrays.asList(new IntWritable(4), new DoubleWritable(1.0)), + Arrays.asList(new IntWritable(3), new DoubleWritable(1.0)), + Arrays.asList(new IntWritable(0), new DoubleWritable(2.0)), + Arrays.asList(new IntWritable(1), new DoubleWritable(2.0))); Transform t = new TextToCharacterIndexTransform("col", "newName", map, false); 
t.setInputSchema(s); @@ -1603,8 +1603,8 @@ public class TestTransforms extends BaseND4JTest { .build(); List vocab = Arrays.asList("zero", "one", "two", "three"); List> inSeq = Arrays.asList( - Arrays.asList(new Text("a"), new Text("zero four two"), new DoubleWritable(4.2)), - Arrays.asList(new Text("b"), new Text("six one two four three five"), new DoubleWritable(87.9))); + Arrays.asList(new Text("a"), new Text("zero four two"), new DoubleWritable(4.2)), + Arrays.asList(new Text("b"), new Text("six one two four three five"), new DoubleWritable(87.9))); Schema expSchema = new Schema.Builder() .addColumnString("ID") @@ -1612,11 +1612,11 @@ public class TestTransforms extends BaseND4JTest { .addColumnDouble("FEATURE") .build(); List> exp = Arrays.asList( - Arrays.asList(new Text("a"), new IntWritable(0), new DoubleWritable(4.2)), - Arrays.asList(new Text("a"), new IntWritable(2), new DoubleWritable(4.2)), - Arrays.asList(new Text("b"), new IntWritable(1), new DoubleWritable(87.9)), - Arrays.asList(new Text("b"), new IntWritable(2), new DoubleWritable(87.9)), - Arrays.asList(new Text("b"), new IntWritable(3), new DoubleWritable(87.9))); + Arrays.asList(new Text("a"), new IntWritable(0), new DoubleWritable(4.2)), + Arrays.asList(new Text("a"), new IntWritable(2), new DoubleWritable(4.2)), + Arrays.asList(new Text("b"), new IntWritable(1), new DoubleWritable(87.9)), + Arrays.asList(new Text("b"), new IntWritable(2), new DoubleWritable(87.9)), + Arrays.asList(new Text("b"), new IntWritable(3), new DoubleWritable(87.9))); Transform t = new TextToTermIndexSequenceTransform("TEXT", "INDEXSEQ", vocab, " ", false); t.setInputSchema(schema); @@ -1664,16 +1664,16 @@ public class TestTransforms extends BaseND4JTest { assertEquals(Arrays.asList(ColumnType.String, ColumnType.Double, ColumnType.Categorical, ColumnType.Categorical), s2.getColumnTypes()); List> in = Arrays.asList( - Arrays.asList(new Text("a"), new DoubleWritable(3.14159), new Text("8e-4")), - Arrays.asList(new 
Text("b"), new DoubleWritable(2.71828), new Text("7e2")), - Arrays.asList(new Text("c"), new DoubleWritable(1.61803), new Text("6e8")), - Arrays.asList(new Text("c"), new DoubleWritable(-2), new Text("non numerical"))); + Arrays.asList(new Text("a"), new DoubleWritable(3.14159), new Text("8e-4")), + Arrays.asList(new Text("b"), new DoubleWritable(2.71828), new Text("7e2")), + Arrays.asList(new Text("c"), new DoubleWritable(1.61803), new Text("6e8")), + Arrays.asList(new Text("c"), new DoubleWritable(-2), new Text("non numerical"))); List> expected = Arrays.asList( - Arrays.asList(new Text("a"), new DoubleWritable(3.14159), new Text("3"), new Text("8")), - Arrays.asList(new Text("b"), new DoubleWritable(2.71828), new Text("2"), new Text("7")), - Arrays.asList(new Text("c"), new DoubleWritable(1.61803), new Text("1"), new Text("6")), - Arrays.asList(new Text("c"), new DoubleWritable(-2), new Text("2"), new Text("Other"))); + Arrays.asList(new Text("a"), new DoubleWritable(3.14159), new Text("3"), new Text("8")), + Arrays.asList(new Text("b"), new DoubleWritable(2.71828), new Text("2"), new Text("7")), + Arrays.asList(new Text("c"), new DoubleWritable(1.61803), new Text("1"), new Text("6")), + Arrays.asList(new Text("c"), new DoubleWritable(-2), new Text("2"), new Text("Other"))); List> out = new ArrayList<>(); for(List i : in){ diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/transform/ndarray/TestNDArrayWritableTransforms.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/transform/ndarray/TestNDArrayWritableTransforms.java index 8c4a44687..06c75574f 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/transform/ndarray/TestNDArrayWritableTransforms.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/transform/ndarray/TestNDArrayWritableTransforms.java @@ -54,11 +54,11 @@ public class TestNDArrayWritableTransforms extends BaseND4JTest { 
TransformProcess tp = new TransformProcess.Builder(s).ndArrayScalarOpTransform("col1", MathOp.Add, 100).build(); - List in = Arrays.asList(new DoubleWritable(0), new NDArrayWritable(Nd4j.linspace(0, 9, 10)), + List in = Arrays.asList(new DoubleWritable(0), new NDArrayWritable(Nd4j.linspace(0, 9, 10)), new Text("str0")); List out = tp.execute(in); - List exp = Arrays.asList(new DoubleWritable(0), + List exp = Arrays.asList(new DoubleWritable(0), new NDArrayWritable(Nd4j.linspace(0, 9, 10).addi(100)), new Text("str0")); assertEquals(exp, out); @@ -81,12 +81,12 @@ public class TestNDArrayWritableTransforms extends BaseND4JTest { assertEquals(expColNames, tp.getFinalSchema().getColumnNames()); - List in = Arrays.asList(new DoubleWritable(0), new NDArrayWritable(Nd4j.linspace(DataType.DOUBLE,0, 10, 1).reshape(1,10)), + List in = Arrays.asList(new DoubleWritable(0), new NDArrayWritable(Nd4j.linspace(DataType.DOUBLE,0, 10, 1).reshape(1,10)), new NDArrayWritable(Nd4j.valueArrayOf(1, 10, 2.0).castTo(DataType.DOUBLE))); List out = tp.execute(in); List exp = - Arrays.asList(new DoubleWritable(0), new NDArrayWritable(Nd4j.linspace(DataType.DOUBLE,0, 10, 1).reshape(1,10)), + Arrays.asList(new DoubleWritable(0), new NDArrayWritable(Nd4j.linspace(DataType.DOUBLE,0, 10, 1).reshape(1,10)), new NDArrayWritable(Nd4j.valueArrayOf(1, 10, 2.0).castTo(DataType.DOUBLE)), new NDArrayWritable(Nd4j.linspace(DataType.DOUBLE, 0, 10, 1).addi(2.0).reshape(1,10))); @@ -111,11 +111,11 @@ public class TestNDArrayWritableTransforms extends BaseND4JTest { assertEquals(expColNames, tp.getFinalSchema().getColumnNames()); - List in = Arrays.asList(new DoubleWritable(0), new NDArrayWritable(Nd4j.linspace(0, 9, 10)), + List in = Arrays.asList(new DoubleWritable(0), new NDArrayWritable(Nd4j.linspace(0, 9, 10)), new NDArrayWritable(Nd4j.valueArrayOf(1, 10, 2.0))); List out = tp.execute(in); - List exp = Arrays.asList(new DoubleWritable(0), + List exp = Arrays.asList(new DoubleWritable(0), new 
NDArrayWritable(Transforms.sin(Nd4j.linspace(0, 9, 10))), new NDArrayWritable(Transforms.sqrt(Nd4j.valueArrayOf(1, 10, 2.0)))); @@ -145,11 +145,11 @@ public class TestNDArrayWritableTransforms extends BaseND4JTest { INDArray arr2 = Nd4j.rand(1, 10); double cosine = Transforms.cosineSim(arr1, arr2); - List in = Arrays.asList(new DoubleWritable(0), new NDArrayWritable(arr1.dup()), + List in = Arrays.asList(new DoubleWritable(0), new NDArrayWritable(arr1.dup()), new NDArrayWritable(arr2.dup())); List out = tp.execute(in); - List exp = Arrays.asList(new DoubleWritable(0), new NDArrayWritable(arr1), + List exp = Arrays.asList(new DoubleWritable(0), new NDArrayWritable(arr1), new NDArrayWritable(arr2), new DoubleWritable(cosine)); assertEquals(exp, out); diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/transform/parse/ParseDoubleTransformTest.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/transform/parse/ParseDoubleTransformTest.java index e531d040f..f6f5ab4b0 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/transform/parse/ParseDoubleTransformTest.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/transform/parse/ParseDoubleTransformTest.java @@ -28,6 +28,7 @@ import org.nd4j.common.tests.BaseND4JTest; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -37,7 +38,7 @@ public class ParseDoubleTransformTest extends BaseND4JTest { public void testDoubleTransform() { List record = new ArrayList<>(); record.add(new Text("0.0")); - List transformed = Arrays.asList(new DoubleWritable(0.0)); + List transformed = Collections.singletonList(new DoubleWritable(0.0)); assertEquals(transformed, new ParseDoubleTransform().map(record)); } diff --git 
a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/ui/TestUI.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/ui/TestUI.java index 7b13b03d7..c774bc7cb 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/ui/TestUI.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/transform/ui/TestUI.java @@ -151,7 +151,7 @@ public class TestUI extends BaseND4JTest { List> sequence = new ArrayList<>(nSteps); for (int i = 0; i < nSteps; i++) { String c = "s" + i % 3; - sequence.add(Arrays.asList(new DoubleWritable(Math.sin(i / 10.0)), new Text(c), + sequence.add(Arrays.asList(new DoubleWritable(Math.sin(i / 10.0)), new Text(c), new Text(String.valueOf(i)))); } diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/writable/RecordConverterTest.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/writable/RecordConverterTest.java index ed9c01793..40d74cfcf 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/writable/RecordConverterTest.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/writable/RecordConverterTest.java @@ -85,7 +85,7 @@ public class RecordConverterTest extends BaseND4JTest { @Test public void testNDArrayWritableConcat() { - List l = Arrays.asList(new DoubleWritable(1), + List l = Arrays.asList(new DoubleWritable(1), new NDArrayWritable(Nd4j.create(new double[]{2, 3, 4}, new long[]{1, 3}, DataType.FLOAT)), new DoubleWritable(5), new NDArrayWritable(Nd4j.create(new double[]{6, 7, 8}, new long[]{1, 3}, DataType.FLOAT)), new IntWritable(9), new IntWritable(1)); @@ -99,8 +99,8 @@ public class RecordConverterTest extends BaseND4JTest { @Test public void testNDArrayWritableConcatToMatrix(){ - List l1 = Arrays.asList(new DoubleWritable(1), new NDArrayWritable(Nd4j.create(new double[]{2, 3, 4}, new long[]{1,3}, DataType.FLOAT)), new DoubleWritable(5)); - List l2 = Arrays.asList(new 
DoubleWritable(6), new NDArrayWritable(Nd4j.create(new double[]{7, 8, 9}, new long[]{1,3}, DataType.FLOAT)), new DoubleWritable(10)); + List l1 = Arrays.asList(new DoubleWritable(1), new NDArrayWritable(Nd4j.create(new double[]{2, 3, 4}, new long[]{1,3}, DataType.FLOAT)), new DoubleWritable(5)); + List l2 = Arrays.asList(new DoubleWritable(6), new NDArrayWritable(Nd4j.create(new double[]{7, 8, 9}, new long[]{1,3}, DataType.FLOAT)), new DoubleWritable(10)); INDArray exp = Nd4j.create(new double[][]{ {1,2,3,4,5}, @@ -113,7 +113,7 @@ public class RecordConverterTest extends BaseND4JTest { @Test public void testToRecordWithListOfObject(){ - final List list = Arrays.asList((Object)3, 7.0f, "Foo", "Bar", 1.0, 3f, 3L, 7, 0L); + final List list = Arrays.asList(3, 7.0f, "Foo", "Bar", 1.0, 3f, 3L, 7, 0L); final Schema schema = new Schema.Builder() .addColumnInteger("a") .addColumnFloat("b") diff --git a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/writable/WritableTest.java b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/writable/WritableTest.java index 767742e4a..0d5acbbb4 100644 --- a/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/writable/WritableTest.java +++ b/cavis-datavec/cavis-datavec-api/src/test/java/org/datavec/api/writable/WritableTest.java @@ -47,7 +47,7 @@ public class WritableTest extends BaseND4JTest { assertEquals(new FloatWritable(1), new FloatWritable(1)); assertEquals(new Text("Hello"), new Text("Hello")); assertEquals(new BytesWritable("Hello".getBytes()),new BytesWritable("Hello".getBytes())); - INDArray ndArray = Nd4j.rand(new int[]{1, 100}); + INDArray ndArray = Nd4j.rand(1, 100); assertEquals(new NDArrayWritable(ndArray), new NDArrayWritable(ndArray)); assertEquals(new NullWritable(), new NullWritable()); @@ -61,7 +61,7 @@ public class WritableTest extends BaseND4JTest { public void testBytesWritableIndexing() { byte[] doubleWrite = new byte[16]; ByteBuffer wrapped = ByteBuffer.wrap(doubleWrite); - 
Buffer buffer = (Buffer) wrapped; + Buffer buffer = wrapped; wrapped.putDouble(1.0); wrapped.putDouble(2.0); buffer.rewind(); @@ -88,8 +88,8 @@ public class WritableTest extends BaseND4JTest { @Test public void testIntLongWritable() { - assertEquals(new IntWritable(1), new LongWritable(1l)); - assertEquals(new LongWritable(2l), new IntWritable(2)); + assertEquals(new IntWritable(1), new LongWritable(1L)); + assertEquals(new LongWritable(2L), new IntWritable(2)); long l = 1L << 34; // those would cast to the same Int @@ -134,8 +134,8 @@ public class WritableTest extends BaseND4JTest { for( int i=0; i<5; i++ ){ orig.get(0).add(Nd4j.rand(1,10)); - orig.get(1).add(Nd4j.rand(new int[]{1,5,6})); - orig.get(2).add(Nd4j.rand(new int[]{1,3,4,5})); + orig.get(1).add(Nd4j.rand(1,5,6)); + orig.get(2).add(Nd4j.rand(1,3,4,5)); } List> origByExample = new ArrayList<>(); //Outer list over examples, inner list over writables diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-arrow/src/main/java/org/datavec/arrow/ArrowConverter.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-arrow/src/main/java/org/datavec/arrow/ArrowConverter.java index 0bf5637a8..9d88cdb1c 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-arrow/src/main/java/org/datavec/arrow/ArrowConverter.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-arrow/src/main/java/org/datavec/arrow/ArrowConverter.java @@ -187,7 +187,7 @@ public class ArrowConverter { break; } - return Nd4j.create(buffer,new int[] {cols,1}); + return Nd4j.create(buffer, cols,1); } @@ -658,7 +658,7 @@ public class ArrowConverter { * @return the created vectors */ public static List toArrowColumnsStringSingle(final BufferAllocator bufferAllocator, final Schema schema, List dataVecRecord) { - return toArrowColumnsString(bufferAllocator,schema, Arrays.asList(dataVecRecord)); + return toArrowColumnsString(bufferAllocator,schema, Collections.singletonList(dataVecRecord)); } diff --git 
a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-arrow/src/main/java/org/datavec/arrow/recordreader/ArrowRecordWriter.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-arrow/src/main/java/org/datavec/arrow/recordreader/ArrowRecordWriter.java index 32472e582..322de25a5 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-arrow/src/main/java/org/datavec/arrow/recordreader/ArrowRecordWriter.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-arrow/src/main/java/org/datavec/arrow/recordreader/ArrowRecordWriter.java @@ -31,12 +31,13 @@ import org.datavec.arrow.ArrowConverter; import java.io.IOException; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class ArrowRecordWriter implements RecordWriter { private Configuration configuration; - private Schema schema; + private final Schema schema; private Partitioner partitioner; public ArrowRecordWriter(Schema schema) { @@ -63,7 +64,7 @@ public class ArrowRecordWriter implements RecordWriter { @Override public PartitionMetaData write(List record) throws IOException { - return writeBatch(Arrays.asList(record)); + return writeBatch(Collections.singletonList(record)); } @Override diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-arrow/src/test/java/org/datavec/arrow/ArrowConverterTest.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-arrow/src/test/java/org/datavec/arrow/ArrowConverterTest.java index a2c1902d3..3c52adc80 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-arrow/src/test/java/org/datavec/arrow/ArrowConverterTest.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-arrow/src/test/java/org/datavec/arrow/ArrowConverterTest.java @@ -65,7 +65,7 @@ import static org.junit.jupiter.api.Assertions.assertFalse; @Slf4j public class ArrowConverterTest extends BaseND4JTest { - private static BufferAllocator bufferAllocator = new RootAllocator(Long.MAX_VALUE); + private static final BufferAllocator 
bufferAllocator = new RootAllocator(Long.MAX_VALUE); @TempDir public File testDir; @@ -80,7 +80,7 @@ public class ArrowConverterTest extends BaseND4JTest { int numRows = 4; List> ret = new ArrayList<>(numRows); for(int i = 0; i < numRows; i++) { - ret.add(Arrays.asList(new NDArrayWritable(Nd4j.linspace(1,4,4).reshape(1, 4)))); + ret.add(Collections.singletonList(new NDArrayWritable(Nd4j.linspace(1, 4, 4).reshape(1, 4)))); } List fieldVectors = ArrowConverter.toArrowColumns(bufferAllocator, schema, ret); @@ -144,7 +144,7 @@ public class ArrowConverterTest extends BaseND4JTest { List fieldVectors = ArrowConverter.toArrowColumnsStringSingle(bufferAllocator, schema.build(), single); List> records = ArrowConverter.toArrowWritables(fieldVectors, schema.build()); List> assertion = new ArrayList<>(); - assertion.add(Arrays.asList(new IntWritable(0),new IntWritable(1))); + assertion.add(Arrays.asList(new IntWritable(0),new IntWritable(1))); assertEquals(assertion,records); List> batch = new ArrayList<>(); @@ -156,8 +156,8 @@ public class ArrowConverterTest extends BaseND4JTest { List> batchRecords = ArrowConverter.toArrowWritables(fieldVectorsBatch, schema.build()); List> assertionBatch = new ArrayList<>(); - assertionBatch.add(Arrays.asList(new IntWritable(0),new IntWritable(0))); - assertionBatch.add(Arrays.asList(new IntWritable(1),new IntWritable(1))); + assertionBatch.add(Arrays.asList(new IntWritable(0),new IntWritable(0))); + assertionBatch.add(Arrays.asList(new IntWritable(1),new IntWritable(1))); assertEquals(assertionBatch,batchRecords); @@ -175,14 +175,14 @@ public class ArrowConverterTest extends BaseND4JTest { } List> input = Arrays.asList( - Arrays.asList(new LongWritable(0),new LongWritable(1)), - Arrays.asList(new LongWritable(2),new LongWritable(3)) + Arrays.asList(new LongWritable(0),new LongWritable(1)), + Arrays.asList(new LongWritable(2),new LongWritable(3)) ); List fieldVector = ArrowConverter.toArrowColumns(bufferAllocator,schema.build(),input); 
ArrowWritableRecordBatch writableRecordBatch = new ArrowWritableRecordBatch(fieldVector,schema.build()); - List assertion = Arrays.asList(new LongWritable(4), new LongWritable(5)); - writableRecordBatch.set(1, Arrays.asList(new LongWritable(4),new LongWritable(5))); + List assertion = Arrays.asList(new LongWritable(4), new LongWritable(5)); + writableRecordBatch.set(1, Arrays.asList(new LongWritable(4),new LongWritable(5))); List recordTest = writableRecordBatch.get(1); assertEquals(assertion,recordTest); } @@ -197,14 +197,14 @@ public class ArrowConverterTest extends BaseND4JTest { } List> input = Arrays.asList( - Arrays.asList(new IntWritable(0),new IntWritable(1)), - Arrays.asList(new IntWritable(2),new IntWritable(3)) + Arrays.asList(new IntWritable(0),new IntWritable(1)), + Arrays.asList(new IntWritable(2),new IntWritable(3)) ); List fieldVector = ArrowConverter.toArrowColumns(bufferAllocator,schema.build(),input); ArrowWritableRecordBatch writableRecordBatch = new ArrowWritableRecordBatch(fieldVector,schema.build()); - List assertion = Arrays.asList(new IntWritable(4), new IntWritable(5)); - writableRecordBatch.set(1, Arrays.asList(new IntWritable(4),new IntWritable(5))); + List assertion = Arrays.asList(new IntWritable(4), new IntWritable(5)); + writableRecordBatch.set(1, Arrays.asList(new IntWritable(4),new IntWritable(5))); List recordTest = writableRecordBatch.get(1); assertEquals(assertion,recordTest); } @@ -218,7 +218,7 @@ public class ArrowConverterTest extends BaseND4JTest { } for(int i = 0; i < 5; i++) { - List> arr = Arrays.asList(Arrays.asList(String.valueOf(i), String.valueOf(i), String.valueOf(i))); + List> arr = Collections.singletonList(Arrays.asList(String.valueOf(i), String.valueOf(i), String.valueOf(i))); entries.add(arr); } @@ -249,7 +249,7 @@ public class ArrowConverterTest extends BaseND4JTest { } for(int i = 0; i < 5; i++) { - List> arr = Arrays.asList(Arrays.asList(String.valueOf(i), String.valueOf(i), String.valueOf(i))); + List> arr = 
Collections.singletonList(Arrays.asList(String.valueOf(i), String.valueOf(i), String.valueOf(i))); entries.add(arr); } @@ -266,7 +266,7 @@ public class ArrowConverterTest extends BaseND4JTest { File f = testDir; - File tmpFile = new File(f, "tmp-arrow-file-" + UUID.randomUUID().toString() + ".arrorw"); + File tmpFile = new File(f, "tmp-arrow-file-" + UUID.randomUUID() + ".arrorw"); FileOutputStream outputStream = new FileOutputStream(tmpFile); tmpFile.deleteOnExit(); ArrowConverter.writeRecordBatchTo(recordsToWrite.getRight(),recordsToWrite.getFirst(),outputStream); @@ -302,7 +302,7 @@ public class ArrowConverterTest extends BaseND4JTest { assertEquals(matrix.rows(),vectors.size()); INDArray vector = Nd4j.linspace(1,4,4); - val vectors2 = ArrowConverter.convertToArrowVector(vector,Arrays.asList("test"), ColumnType.Double,bufferAllocator); + val vectors2 = ArrowConverter.convertToArrowVector(vector, Collections.singletonList("test"), ColumnType.Double,bufferAllocator); assertEquals(1,vectors2.size()); assertEquals(matrix.length(),vectors2.get(0).getValueCount()); @@ -440,7 +440,7 @@ public class ArrowConverterTest extends BaseND4JTest { File tmp = tmpDataFile(recordsToWrite); RecordReader recordReader = new ArrowRecordReader(); RecordMetaDataIndex recordMetaDataIndex = new RecordMetaDataIndex(0,tmp.toURI(),ArrowRecordReader.class); - recordReader.loadFromMetaData(Arrays.asList(recordMetaDataIndex)); + recordReader.loadFromMetaData(Collections.singletonList(recordMetaDataIndex)); Record record = recordReader.nextRecord(); assertEquals(2,record.getRecord().size()); @@ -474,7 +474,7 @@ public class ArrowConverterTest extends BaseND4JTest { File f = testDir; //send file - File tmp = new File(f,"tmp-file-" + UUID.randomUUID().toString()); + File tmp = new File(f,"tmp-file-" + UUID.randomUUID()); tmp.mkdirs(); File tmpFile = new File(tmp,"data.arrow"); tmpFile.deleteOnExit(); @@ -487,8 +487,8 @@ public class ArrowConverterTest extends BaseND4JTest { private Pair>> 
recordToWrite() { List> records = new ArrayList<>(); - records.add(Arrays.asList(new DoubleWritable(0.0),new DoubleWritable(0.0))); - records.add(Arrays.asList(new DoubleWritable(0.0),new DoubleWritable(0.0))); + records.add(Arrays.asList(new DoubleWritable(0.0),new DoubleWritable(0.0))); + records.add(Arrays.asList(new DoubleWritable(0.0),new DoubleWritable(0.0))); Schema.Builder schemaBuilder = new Schema.Builder(); for(int i = 0; i < 2; i++) { schemaBuilder.addColumnFloat("col-" + i); diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-arrow/src/test/java/org/datavec/arrow/recordreader/ArrowWritableRecordTimeSeriesBatchTests.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-arrow/src/test/java/org/datavec/arrow/recordreader/ArrowWritableRecordTimeSeriesBatchTests.java index b39b88013..e3c1471fe 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-arrow/src/test/java/org/datavec/arrow/recordreader/ArrowWritableRecordTimeSeriesBatchTests.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-arrow/src/test/java/org/datavec/arrow/recordreader/ArrowWritableRecordTimeSeriesBatchTests.java @@ -42,7 +42,7 @@ import static org.junit.jupiter.api.Assertions.assertFalse; public class ArrowWritableRecordTimeSeriesBatchTests extends BaseND4JTest { - private static BufferAllocator bufferAllocator = new RootAllocator(Long.MAX_VALUE); + private static final BufferAllocator bufferAllocator = new RootAllocator(Long.MAX_VALUE); @Test @@ -54,9 +54,9 @@ public class ArrowWritableRecordTimeSeriesBatchTests extends BaseND4JTest { List> timeStep = Arrays.asList( - Arrays.asList(new IntWritable(0),new IntWritable(1),new IntWritable(2)), - Arrays.asList(new IntWritable(1),new IntWritable(2),new IntWritable(3)), - Arrays.asList(new IntWritable(4),new IntWritable(5),new IntWritable(6)) + Arrays.asList(new IntWritable(0),new IntWritable(1),new IntWritable(2)), + Arrays.asList(new IntWritable(1),new IntWritable(2),new IntWritable(3)), + 
Arrays.asList(new IntWritable(4),new IntWritable(5),new IntWritable(6)) ); int numTimeSteps = 5; @@ -87,13 +87,13 @@ public class ArrowWritableRecordTimeSeriesBatchTests extends BaseND4JTest { .addColumnDouble("dbl"); List> firstSeq = Arrays.asList( - Arrays.asList(new Text("00"),new IntWritable(0),new DoubleWritable(2.0)), - Arrays.asList(new Text("01"),new IntWritable(1),new DoubleWritable(2.1)), - Arrays.asList(new Text("02"),new IntWritable(2),new DoubleWritable(2.2))); + Arrays.asList(new Text("00"),new IntWritable(0),new DoubleWritable(2.0)), + Arrays.asList(new Text("01"),new IntWritable(1),new DoubleWritable(2.1)), + Arrays.asList(new Text("02"),new IntWritable(2),new DoubleWritable(2.2))); List> secondSeq = Arrays.asList( - Arrays.asList(new Text("10"),new IntWritable(10),new DoubleWritable(12.0)), - Arrays.asList(new Text("11"),new IntWritable(11),new DoubleWritable(12.1))); + Arrays.asList(new Text("10"),new IntWritable(10),new DoubleWritable(12.0)), + Arrays.asList(new Text("11"),new IntWritable(11),new DoubleWritable(12.1))); List>> sequences = Arrays.asList(firstSeq, secondSeq); diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/Wave.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/Wave.java index db75a546f..909b57601 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/Wave.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/Wave.java @@ -61,7 +61,7 @@ public class Wave implements Serializable { initWaveWithInputStream(inputStream); inputStream.close(); } catch (IOException e) { - System.out.println(e.toString()); + System.out.println(e); } } @@ -96,7 +96,7 @@ public class Wave implements Serializable { data = new byte[inputStream.available()]; inputStream.read(data); } catch (IOException e) { - System.err.println(e.toString()); + System.err.println(e); } 
// end load data } else { @@ -132,7 +132,7 @@ public class Wave implements Serializable { waveHeader.setSubChunk2Size(subChunk2Size); byte[] trimmedData = new byte[(int) subChunk2Size]; - System.arraycopy(data, (int) leftTrimNumberOfSample, trimmedData, 0, (int) subChunk2Size); + System.arraycopy(data, leftTrimNumberOfSample, trimmedData, 0, (int) subChunk2Size); data = trimmedData; } else { System.err.println("Trim error: Negative length"); @@ -303,10 +303,9 @@ public class Wave implements Serializable { } public String toString() { - StringBuilder sb = new StringBuilder(waveHeader.toString()); - sb.append("\n"); - sb.append("length: " + timestamp()); - return sb.toString(); + String sb = waveHeader.toString() + "\n" + + "length: " + timestamp(); + return sb; } public double[] getNormalizedAmplitudes() { diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/WaveHeader.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/WaveHeader.java index 3f7af014a..0c5e462a7 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/WaveHeader.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/WaveHeader.java @@ -36,7 +36,7 @@ public class WaveHeader { public static final String DATA_HEADER = "data"; public static final int HEADER_BYTE_LENGTH = 44; // 44 bytes for header - private boolean valid; + private final boolean valid; private String chunkId; // 4 bytes private long chunkSize; // unsigned 4 bytes, little endian private String format; // 4 bytes @@ -82,7 +82,7 @@ public class WaveHeader { // little endian chunkSize = (long) (headerBuffer[pointer++] & 0xff) | (long) (headerBuffer[pointer++] & 0xff) << 8 | (long) (headerBuffer[pointer++] & 0xff) << 16 - | (long) (headerBuffer[pointer++] & 0xff << 24); + | (long) (headerBuffer[pointer++] & 0xffL << 24); format = new String(new byte[] 
{headerBuffer[pointer++], headerBuffer[pointer++], headerBuffer[pointer++], headerBuffer[pointer++]}); subChunk1Id = new String(new byte[] {headerBuffer[pointer++], headerBuffer[pointer++], @@ -90,16 +90,16 @@ public class WaveHeader { subChunk1Size = (long) (headerBuffer[pointer++] & 0xff) | (long) (headerBuffer[pointer++] & 0xff) << 8 | (long) (headerBuffer[pointer++] & 0xff) << 16 | (long) (headerBuffer[pointer++] & 0xff) << 24; - audioFormat = (int) ((headerBuffer[pointer++] & 0xff) | (headerBuffer[pointer++] & 0xff) << 8); - channels = (int) ((headerBuffer[pointer++] & 0xff) | (headerBuffer[pointer++] & 0xff) << 8); + audioFormat = (headerBuffer[pointer++] & 0xff) | (headerBuffer[pointer++] & 0xff) << 8; + channels = (headerBuffer[pointer++] & 0xff) | (headerBuffer[pointer++] & 0xff) << 8; sampleRate = (long) (headerBuffer[pointer++] & 0xff) | (long) (headerBuffer[pointer++] & 0xff) << 8 | (long) (headerBuffer[pointer++] & 0xff) << 16 | (long) (headerBuffer[pointer++] & 0xff) << 24; byteRate = (long) (headerBuffer[pointer++] & 0xff) | (long) (headerBuffer[pointer++] & 0xff) << 8 | (long) (headerBuffer[pointer++] & 0xff) << 16 | (long) (headerBuffer[pointer++] & 0xff) << 24; - blockAlign = (int) ((headerBuffer[pointer++] & 0xff) | (headerBuffer[pointer++] & 0xff) << 8); - bitsPerSample = (int) ((headerBuffer[pointer++] & 0xff) | (headerBuffer[pointer++] & 0xff) << 8); + blockAlign = (headerBuffer[pointer++] & 0xff) | (headerBuffer[pointer++] & 0xff) << 8; + bitsPerSample = (headerBuffer[pointer++] & 0xff) | (headerBuffer[pointer++] & 0xff) << 8; subChunk2Id = new String(new byte[] {headerBuffer[pointer++], headerBuffer[pointer++], headerBuffer[pointer++], headerBuffer[pointer++]}); subChunk2Size = (long) (headerBuffer[pointer++] & 0xff) | (long) (headerBuffer[pointer++] & 0xff) << 8 @@ -122,7 +122,7 @@ public class WaveHeader { } // check the format is support - if (chunkId.toUpperCase().equals(RIFF_HEADER) && format.toUpperCase().equals(WAVE_HEADER) && 
audioFormat == 1) { + if (chunkId.equalsIgnoreCase(RIFF_HEADER) && format.equalsIgnoreCase(WAVE_HEADER) && audioFormat == 1) { return true; } else { System.err.println("WaveHeader: Unsupported header format"); @@ -197,7 +197,7 @@ public class WaveHeader { } this.sampleRate = sampleRate; - this.byteRate = sampleRate * bitsPerSample / 8; + this.byteRate = (long) sampleRate * bitsPerSample / 8; this.chunkSize = newSubChunk2Size + 36; this.subChunk2Size = newSubChunk2Size; } @@ -252,32 +252,31 @@ public class WaveHeader { public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("chunkId: " + chunkId); - sb.append("\n"); - sb.append("chunkSize: " + chunkSize); - sb.append("\n"); - sb.append("format: " + format); - sb.append("\n"); - sb.append("subChunk1Id: " + subChunk1Id); - sb.append("\n"); - sb.append("subChunk1Size: " + subChunk1Size); - sb.append("\n"); - sb.append("audioFormat: " + audioFormat); - sb.append("\n"); - sb.append("channels: " + channels); - sb.append("\n"); - sb.append("sampleRate: " + sampleRate); - sb.append("\n"); - sb.append("byteRate: " + byteRate); - sb.append("\n"); - sb.append("blockAlign: " + blockAlign); - sb.append("\n"); - sb.append("bitsPerSample: " + bitsPerSample); - sb.append("\n"); - sb.append("subChunk2Id: " + subChunk2Id); - sb.append("\n"); - sb.append("subChunk2Size: " + subChunk2Size); - return sb.toString(); + String sb = "chunkId: " + chunkId + + "\n" + + "chunkSize: " + chunkSize + + "\n" + + "format: " + format + + "\n" + + "subChunk1Id: " + subChunk1Id + + "\n" + + "subChunk1Size: " + subChunk1Size + + "\n" + + "audioFormat: " + audioFormat + + "\n" + + "channels: " + channels + + "\n" + + "sampleRate: " + sampleRate + + "\n" + + "byteRate: " + byteRate + + "\n" + + "blockAlign: " + blockAlign + + "\n" + + "bitsPerSample: " + bitsPerSample + + "\n" + + "subChunk2Id: " + subChunk2Id + + "\n" + + "subChunk2Size: " + subChunk2Size; + return sb; } } diff --git 
a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/dsp/WindowFunction.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/dsp/WindowFunction.java index c0d7b6253..6e859aab1 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/dsp/WindowFunction.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/dsp/WindowFunction.java @@ -39,15 +39,15 @@ public class WindowFunction { } public void setWindowType(String w) { - if (w.toUpperCase().equals("RECTANGULAR")) + if (w.equalsIgnoreCase("RECTANGULAR")) windowType = RECTANGULAR; - if (w.toUpperCase().equals("BARTLETT")) + if (w.equalsIgnoreCase("BARTLETT")) windowType = BARTLETT; - if (w.toUpperCase().equals("HANNING")) + if (w.equalsIgnoreCase("HANNING")) windowType = HANNING; - if (w.toUpperCase().equals("HAMMING")) + if (w.equalsIgnoreCase("HAMMING")) windowType = HAMMING; - if (w.toUpperCase().equals("BLACKMAN")) + if (w.equalsIgnoreCase("BLACKMAN")) windowType = BLACKMAN; } diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/extension/NormalizedSampleAmplitudes.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/extension/NormalizedSampleAmplitudes.java index 9a8eaba58..76381efa9 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/extension/NormalizedSampleAmplitudes.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/extension/NormalizedSampleAmplitudes.java @@ -26,7 +26,7 @@ import org.datavec.audio.Wave; */ public class NormalizedSampleAmplitudes { - private Wave wave; + private final Wave wave; private double[] normalizedAmplitudes; // normalizedAmplitudes[sampleNumber]=normalizedAmplitudeInTheFrame public NormalizedSampleAmplitudes(Wave wave) 
{ @@ -43,12 +43,9 @@ public class NormalizedSampleAmplitudes { if (normalizedAmplitudes == null) { - boolean signed = true; + boolean signed = wave.getWaveHeader().getBitsPerSample() != 8; // usually 8bit is unsigned - if (wave.getWaveHeader().getBitsPerSample() == 8) { - signed = false; - } short[] amplitudes = wave.getSampleAmplitudes(); int numSamples = amplitudes.length; diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/extension/Spectrogram.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/extension/Spectrogram.java index fdc680e1d..9d91f9b66 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/extension/Spectrogram.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/extension/Spectrogram.java @@ -31,11 +31,11 @@ public class Spectrogram { public static final int SPECTROGRAM_DEFAULT_FFT_SAMPLE_SIZE = 1024; public static final int SPECTROGRAM_DEFAULT_OVERLAP_FACTOR = 0; // 0 for no overlapping - private Wave wave; + private final Wave wave; private double[][] spectrogram; // relative spectrogram private double[][] absoluteSpectrogram; // absolute spectrogram - private int fftSampleSize; // number of sample in fft, the value needed to be a number to power of 2 - private int overlapFactor; // 1/overlapFactor overlapping, e.g. 1/4=25% overlapping + private final int fftSampleSize; // number of sample in fft, the value needed to be a number to power of 2 + private final int overlapFactor; // 1/overlapFactor overlapping, e.g. 
1/4=25% overlapping private int numFrames; // number of frames of the spectrogram private int framesPerSecond; // frame per second of the spectrogram private int numFrequencyUnit; // number of y-axis unit diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/FingerprintManager.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/FingerprintManager.java index efa481a91..38435166d 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/FingerprintManager.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/FingerprintManager.java @@ -42,11 +42,11 @@ import java.util.List; @Slf4j public class FingerprintManager { - private FingerprintProperties fingerprintProperties = FingerprintProperties.getInstance(); - private int sampleSizePerFrame = fingerprintProperties.getSampleSizePerFrame(); - private int overlapFactor = fingerprintProperties.getOverlapFactor(); - private int numRobustPointsPerFrame = fingerprintProperties.getNumRobustPointsPerFrame(); - private int numFilterBanks = fingerprintProperties.getNumFilterBanks(); + private final FingerprintProperties fingerprintProperties = FingerprintProperties.getInstance(); + private final int sampleSizePerFrame = fingerprintProperties.getSampleSizePerFrame(); + private final int overlapFactor = fingerprintProperties.getOverlapFactor(); + private final int numRobustPointsPerFrame = fingerprintProperties.getNumRobustPointsPerFrame(); + private final int numFilterBanks = fingerprintProperties.getNumFilterBanks(); /** * Constructor diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/FingerprintSimilarity.java 
b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/FingerprintSimilarity.java index c76756310..6ca4335ec 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/FingerprintSimilarity.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/FingerprintSimilarity.java @@ -27,7 +27,7 @@ import org.datavec.audio.properties.FingerprintProperties; */ public class FingerprintSimilarity { - private FingerprintProperties fingerprintProperties = FingerprintProperties.getInstance(); + private final FingerprintProperties fingerprintProperties = FingerprintProperties.getInstance(); private int mostSimilarFramePosition; private float score; private float similarity; diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/FingerprintSimilarityComputer.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/FingerprintSimilarityComputer.java index 222bb5e67..3f832a884 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/FingerprintSimilarityComputer.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/FingerprintSimilarityComputer.java @@ -27,7 +27,7 @@ import java.util.List; */ public class FingerprintSimilarityComputer { - private FingerprintSimilarity fingerprintSimilarity; + private final FingerprintSimilarity fingerprintSimilarity; byte[] fingerprint1, fingerprint2; /** diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/MapRank.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/MapRank.java index 3378f5c09..9d31ccde9 100644 --- 
a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/MapRank.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/MapRank.java @@ -19,5 +19,5 @@ package org.datavec.audio.fingerprint; import java.util.List; public interface MapRank { - public List getOrderedKeyList(int numKeys, boolean sharpLimit); + List getOrderedKeyList(int numKeys, boolean sharpLimit); } diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/MapRankDouble.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/MapRankDouble.java index a24ba0959..376e6f361 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/MapRankDouble.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/MapRankDouble.java @@ -21,7 +21,7 @@ import java.util.Map.Entry; public class MapRankDouble implements MapRank { - private Map map; + private final Map map; private boolean acsending = true; public MapRankDouble(Map map, boolean acsending) { @@ -95,7 +95,7 @@ public class MapRankDouble implements MapRank { } while (true) { - double targetValue = (Double) listArr[index]; + double targetValue = listArr[index]; Iterator passedMapIterator = passedMap.entrySet().iterator(); while (passedMapIterator.hasNext()) { Entry entry = passedMapIterator.next(); diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/MapRankInteger.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/MapRankInteger.java index befbcbe00..aa218d6e9 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/MapRankInteger.java +++ 
b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/MapRankInteger.java @@ -21,7 +21,7 @@ import java.util.Map.Entry; public class MapRankInteger implements MapRank { - private Map map; + private final Map map; private boolean acsending = true; public MapRankInteger(Map map, boolean acsending) { @@ -95,7 +95,7 @@ public class MapRankInteger implements MapRank { } while (true) { - int targetValue = (Integer) listArr[index]; + int targetValue = listArr[index]; Iterator passedMapIterator = passedMap.entrySet().iterator(); while (passedMapIterator.hasNext()) { Entry entry = passedMapIterator.next(); diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/PairManager.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/PairManager.java index ff18c34c9..e072c7266 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/PairManager.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/PairManager.java @@ -33,16 +33,16 @@ import java.util.List; public class PairManager { FingerprintProperties fingerprintProperties = FingerprintProperties.getInstance(); - private int numFilterBanks = fingerprintProperties.getNumFilterBanks(); - private int bandwidthPerBank = fingerprintProperties.getNumFrequencyUnits() / numFilterBanks; - private int anchorPointsIntervalLength = fingerprintProperties.getAnchorPointsIntervalLength(); - private int numAnchorPointsPerInterval = fingerprintProperties.getNumAnchorPointsPerInterval(); - private int maxTargetZoneDistance = fingerprintProperties.getMaxTargetZoneDistance(); - private int numFrequencyUnits = fingerprintProperties.getNumFrequencyUnits(); + private final int numFilterBanks = fingerprintProperties.getNumFilterBanks(); + private final int bandwidthPerBank = 
fingerprintProperties.getNumFrequencyUnits() / numFilterBanks; + private final int anchorPointsIntervalLength = fingerprintProperties.getAnchorPointsIntervalLength(); + private final int numAnchorPointsPerInterval = fingerprintProperties.getNumAnchorPointsPerInterval(); + private final int maxTargetZoneDistance = fingerprintProperties.getMaxTargetZoneDistance(); + private final int numFrequencyUnits = fingerprintProperties.getNumFrequencyUnits(); - private int maxPairs; - private boolean isReferencePairing; - private HashMap stopPairTable = new HashMap<>(); + private final int maxPairs; + private final boolean isReferencePairing; + private final HashMap stopPairTable = new HashMap<>(); /** * Constructor diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/QuickSortDouble.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/QuickSortDouble.java index 258cbc888..0c127a484 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/QuickSortDouble.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/QuickSortDouble.java @@ -18,8 +18,8 @@ package org.datavec.audio.fingerprint; public class QuickSortDouble extends QuickSort { - private int[] indexes; - private double[] array; + private final int[] indexes; + private final double[] array; public QuickSortDouble(double[] array) { this.array = array; diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/QuickSortIndexPreserved.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/QuickSortIndexPreserved.java index 61e391d71..77aa155ca 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/QuickSortIndexPreserved.java +++ 
b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/QuickSortIndexPreserved.java @@ -18,7 +18,7 @@ package org.datavec.audio.fingerprint; public class QuickSortIndexPreserved { - private QuickSort quickSort; + private final QuickSort quickSort; public QuickSortIndexPreserved(int[] array) { quickSort = new QuickSortInteger(array); diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/QuickSortInteger.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/QuickSortInteger.java index 178553865..63db39ac3 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/QuickSortInteger.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/QuickSortInteger.java @@ -18,8 +18,8 @@ package org.datavec.audio.fingerprint; public class QuickSortInteger extends QuickSort { - private int[] indexes; - private int[] array; + private final int[] indexes; + private final int[] array; public QuickSortInteger(int[] array) { this.array = array; diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/QuickSortShort.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/QuickSortShort.java index 8b4324b7e..91c275215 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/QuickSortShort.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/fingerprint/QuickSortShort.java @@ -18,8 +18,8 @@ package org.datavec.audio.fingerprint; public class QuickSortShort extends QuickSort { - private int[] indexes; - private short[] array; + private final int[] indexes; + private final short[] array; public 
QuickSortShort(short[] array) { this.array = array; diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/processor/IntensityProcessor.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/processor/IntensityProcessor.java index 083ac4765..18521fd6f 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/processor/IntensityProcessor.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/processor/IntensityProcessor.java @@ -18,7 +18,7 @@ package org.datavec.audio.processor; public interface IntensityProcessor { - public void execute(); + void execute(); - public double[][] getIntensities(); + double[][] getIntensities(); } diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/processor/RobustIntensityProcessor.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/processor/RobustIntensityProcessor.java index 1d884855c..667f8194a 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/processor/RobustIntensityProcessor.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/processor/RobustIntensityProcessor.java @@ -20,7 +20,7 @@ package org.datavec.audio.processor; public class RobustIntensityProcessor implements IntensityProcessor { private double[][] intensities; - private int numPointsPerFrame; + private final int numPointsPerFrame; public RobustIntensityProcessor(double[][] intensities, int numPointsPerFrame) { this.intensities = intensities; diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/properties/FingerprintProperties.java 
b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/properties/FingerprintProperties.java index db69a0b36..2109d60eb 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/properties/FingerprintProperties.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-audio/src/main/java/org/datavec/audio/properties/FingerprintProperties.java @@ -20,24 +20,24 @@ public class FingerprintProperties { protected static FingerprintProperties instance = null; - private int numRobustPointsPerFrame = 4; // number of points in each frame, i.e. top 4 intensities in fingerprint - private int sampleSizePerFrame = 2048; // number of audio samples in a frame, it is suggested to be the FFT Size - private int overlapFactor = 4; // 8 means each move 1/8 nSample length. 1 means no overlap, better 1,2,4,8 ... 32 - private int numFilterBanks = 4; + private final int numRobustPointsPerFrame = 4; // number of points in each frame, i.e. top 4 intensities in fingerprint + private final int sampleSizePerFrame = 2048; // number of audio samples in a frame, it is suggested to be the FFT Size + private final int overlapFactor = 4; // 8 means each move 1/8 nSample length. 1 means no overlap, better 1,2,4,8 ... 
32 + private final int numFilterBanks = 4; - private int upperBoundedFrequency = 1500; // low pass - private int lowerBoundedFrequency = 400; // high pass - private int fps = 5; // in order to have 5fps with 2048 sampleSizePerFrame, wave's sample rate need to be 10240 (sampleSizePerFrame*fps) - private int sampleRate = sampleSizePerFrame * fps; // the audio's sample rate needed to resample to this in order to fit the sampleSizePerFrame and fps - private int numFramesInOneSecond = overlapFactor * fps; // since the overlap factor affects the actual number of fps, so this value is used to evaluate how many frames in one second eventually + private final int upperBoundedFrequency = 1500; // low pass + private final int lowerBoundedFrequency = 400; // high pass + private final int fps = 5; // in order to have 5fps with 2048 sampleSizePerFrame, wave's sample rate need to be 10240 (sampleSizePerFrame*fps) + private final int sampleRate = sampleSizePerFrame * fps; // the audio's sample rate needed to resample to this in order to fit the sampleSizePerFrame and fps + private final int numFramesInOneSecond = overlapFactor * fps; // since the overlap factor affects the actual number of fps, so this value is used to evaluate how many frames in one second eventually - private int refMaxActivePairs = 1; // max. active pairs per anchor point for reference songs - private int sampleMaxActivePairs = 10; // max. active pairs per anchor point for sample clip - private int numAnchorPointsPerInterval = 10; - private int anchorPointsIntervalLength = 4; // in frames (5fps,4 overlap per second) - private int maxTargetZoneDistance = 4; // in frame (5fps,4 overlap per second) + private final int refMaxActivePairs = 1; // max. active pairs per anchor point for reference songs + private final int sampleMaxActivePairs = 10; // max. 
active pairs per anchor point for sample clip + private final int numAnchorPointsPerInterval = 10; + private final int anchorPointsIntervalLength = 4; // in frames (5fps,4 overlap per second) + private final int maxTargetZoneDistance = 4; // in frame (5fps,4 overlap per second) - private int numFrequencyUnits = (upperBoundedFrequency - lowerBoundedFrequency + 1) / fps + 1; // num frequency units + private final int numFrequencyUnits = (upperBoundedFrequency - lowerBoundedFrequency + 1) / fps + 1; // num frequency units public static FingerprintProperties getInstance() { if (instance == null) { diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-codec/src/main/java/org/datavec/codec/reader/CodecRecordReader.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-codec/src/main/java/org/datavec/codec/reader/CodecRecordReader.java index 9e32b9bc0..475fe932b 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-codec/src/main/java/org/datavec/codec/reader/CodecRecordReader.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-codec/src/main/java/org/datavec/codec/reader/CodecRecordReader.java @@ -126,7 +126,7 @@ public class CodecRecordReader extends BaseCodecRecordReader { /** Ugly workaround to a bug in JCodec: https://github.com/jcodec/jcodec/issues/24 */ private static class FixedByteBufferSeekableByteChannel extends ByteBufferSeekableByteChannel { - private ByteBuffer backing; + private final ByteBuffer backing; public FixedByteBufferSeekableByteChannel(ByteBuffer backing) { super(backing); diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-excel/src/main/java/org/datavec/poi/excel/ExcelRecordReader.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-excel/src/main/java/org/datavec/poi/excel/ExcelRecordReader.java index 6925bc47d..2ec31f426 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-excel/src/main/java/org/datavec/poi/excel/ExcelRecordReader.java +++ 
b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-excel/src/main/java/org/datavec/poi/excel/ExcelRecordReader.java @@ -47,7 +47,7 @@ public class ExcelRecordReader extends FileRecordReader { private Iterator sheetIterator; private Iterator rows; // Create a DataFormatter to format and get each cell's value as String - private DataFormatter dataFormatter = new DataFormatter(); + private final DataFormatter dataFormatter = new DataFormatter(); private Workbook currWorkBook; //we should ensure that the number of columns is consistent across all worksheets private int numColumns = -1; diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-geo/src/main/java/org/datavec/api/transform/reduce/geo/CoordinatesReduction.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-geo/src/main/java/org/datavec/api/transform/reduce/geo/CoordinatesReduction.java index d5e9e3439..44d6409c2 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-geo/src/main/java/org/datavec/api/transform/reduce/geo/CoordinatesReduction.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-geo/src/main/java/org/datavec/api/transform/reduce/geo/CoordinatesReduction.java @@ -121,7 +121,7 @@ public class CoordinatesReduction implements AggregableColumnReduction { throw new UnsupportedOperationException(); } - private IAggregableReduceOp> reducer; + private final IAggregableReduceOp> reducer; @Override public IAggregableReduceOp> reduceOp() { @@ -132,11 +132,11 @@ public class CoordinatesReduction implements AggregableColumnReduction { public static class CoordinateAggregableReduceOp implements IAggregableReduceOp> { - private int nOps; - private Supplier>> initialOpValue; + private final int nOps; + private final Supplier>> initialOpValue; @Getter - private ArrayList>> perCoordinateOps; // of size coords() - private String delimiter; + private final ArrayList>> perCoordinateOps; // of size coords() + private final String delimiter; public CoordinateAggregableReduceOp(int 
n, Supplier>> initialOp, String delim) { diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-geo/src/test/java/org/datavec/api/transform/reduce/TestGeoReduction.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-geo/src/test/java/org/datavec/api/transform/reduce/TestGeoReduction.java index 55fd5855a..14d89576e 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-geo/src/test/java/org/datavec/api/transform/reduce/TestGeoReduction.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-geo/src/test/java/org/datavec/api/transform/reduce/TestGeoReduction.java @@ -40,12 +40,12 @@ public class TestGeoReduction { public void testCustomReductions() { List> inputs = new ArrayList<>(); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new Text("1#5"))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new Text("2#6"))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new Text("3#7"))); - inputs.add(Arrays.asList((Writable) new Text("someKey"), new Text("4#8"))); + inputs.add(Arrays.asList(new Text("someKey"), new Text("1#5"))); + inputs.add(Arrays.asList(new Text("someKey"), new Text("2#6"))); + inputs.add(Arrays.asList(new Text("someKey"), new Text("3#7"))); + inputs.add(Arrays.asList(new Text("someKey"), new Text("4#8"))); - List expected = Arrays.asList((Writable) new Text("someKey"), new Text("10.0#26.0")); + List expected = Arrays.asList(new Text("someKey"), new Text("10.0#26.0")); Schema schema = new Schema.Builder().addColumnString("key").addColumnString("coord").build(); diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-geo/src/test/java/org/datavec/api/transform/transform/TestGeoTransforms.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-geo/src/test/java/org/datavec/api/transform/transform/TestGeoTransforms.java index d91d34b95..d6249b756 100644 --- 
a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-geo/src/test/java/org/datavec/api/transform/transform/TestGeoTransforms.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-geo/src/test/java/org/datavec/api/transform/transform/TestGeoTransforms.java @@ -71,10 +71,10 @@ public class TestGeoTransforms { out.getColumnTypes()); assertEquals(Arrays.asList((Writable) new Text("-30"), new Text("20"), new Text("10"), new DoubleWritable(5.0)), - transform.map(Arrays.asList((Writable) new Text("-30"), new Text("20"), new Text("10")))); + transform.map(Arrays.asList(new Text("-30"), new Text("20"), new Text("10")))); assertEquals(Arrays.asList((Writable) new Text("50|40"), new Text("10|-20"), new Text("10|5"), new DoubleWritable(Math.sqrt(160))), - transform.map(Arrays.asList((Writable) new Text("50|40"), new Text("10|-20"), + transform.map(Arrays.asList(new Text("50|40"), new Text("10|-20"), new Text("10|5")))); } @@ -94,7 +94,7 @@ public class TestGeoTransforms { double latitude = 51.5142; double longitude = -0.0931; - List writables = transform.map(Collections.singletonList((Writable) new Text(in))); + List writables = transform.map(Collections.singletonList(new Text(in))); assertEquals(1, writables.size()); String[] coordinates = writables.get(0).toString().split("CUSTOM_DELIMITER"); assertEquals(2, coordinates.length); @@ -112,7 +112,7 @@ public class TestGeoTransforms { ObjectInputStream ois = new ObjectInputStream(bais); Transform deserialized = (Transform) ois.readObject(); - writables = deserialized.map(Collections.singletonList((Writable) new Text(in))); + writables = deserialized.map(Collections.singletonList(new Text(in))); assertEquals(1, writables.size()); coordinates = writables.get(0).toString().split("CUSTOM_DELIMITER"); //System.out.println(Arrays.toString(coordinates)); @@ -141,7 +141,7 @@ public class TestGeoTransforms { assertEquals(1, out.getColumnMetaData().size()); assertEquals(ColumnType.String, out.getMetaData(0).getColumnType()); - 
List writables = transform.map(Collections.singletonList((Writable) new Text(in))); + List writables = transform.map(Collections.singletonList(new Text(in))); assertEquals(1, writables.size()); assertEquals(location, writables.get(0).toString()); //System.out.println(location); diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/main/java/org/datavec/hadoop/conf/ConfigurationUtil.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/main/java/org/datavec/hadoop/conf/ConfigurationUtil.java index 01d5b2f84..66bb5742f 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/main/java/org/datavec/hadoop/conf/ConfigurationUtil.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/main/java/org/datavec/hadoop/conf/ConfigurationUtil.java @@ -40,7 +40,7 @@ public class ConfigurationUtil { String baseConfPathTrimmed = baseConfPath.trim(); - if (false == "/".equals(baseConfPathTrimmed.endsWith("/"))) { + if (!"/".equals(baseConfPathTrimmed.endsWith("/"))) { baseConfPathTrimmed += "/"; diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/main/java/org/datavec/hadoop/records/reader/mapfile/MapFileReader.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/main/java/org/datavec/hadoop/records/reader/mapfile/MapFileReader.java index f5b28847e..3bc0e7111 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/main/java/org/datavec/hadoop/records/reader/mapfile/MapFileReader.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/main/java/org/datavec/hadoop/records/reader/mapfile/MapFileReader.java @@ -42,10 +42,10 @@ import java.util.List; */ public class MapFileReader implements Closeable { - private MapFile.Reader[] readers; - private IndexToKey indexToKey; - private Class recordClass; - private List> recordIndexesEachReader; + private final MapFile.Reader[] readers; + private final IndexToKey indexToKey; + private final 
Class recordClass; + private final List> recordIndexesEachReader; private Long numRecords; diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/main/java/org/datavec/hadoop/records/reader/mapfile/MapFileRecordReader.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/main/java/org/datavec/hadoop/records/reader/mapfile/MapFileRecordReader.java index df649f8e4..23909e2bc 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/main/java/org/datavec/hadoop/records/reader/mapfile/MapFileRecordReader.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/main/java/org/datavec/hadoop/records/reader/mapfile/MapFileRecordReader.java @@ -66,7 +66,7 @@ public class MapFileRecordReader implements RecordReader { private long numRecords; private long position; - private Random rng; + private final Random rng; private int[] order; /** diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/main/java/org/datavec/hadoop/records/reader/mapfile/MapFileSequenceRecordReader.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/main/java/org/datavec/hadoop/records/reader/mapfile/MapFileSequenceRecordReader.java index 3a0513132..03f071eae 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/main/java/org/datavec/hadoop/records/reader/mapfile/MapFileSequenceRecordReader.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/main/java/org/datavec/hadoop/records/reader/mapfile/MapFileSequenceRecordReader.java @@ -68,7 +68,7 @@ public class MapFileSequenceRecordReader implements SequenceRecordReader { private long numSequences; private long position; - private Random rng; + private final Random rng; private int[] order; /** diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/test/java/org/datavec/hadoop/records/reader/TestMapFileRecordReader.java 
b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/test/java/org/datavec/hadoop/records/reader/TestMapFileRecordReader.java index d5595e53d..fa159c36c 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/test/java/org/datavec/hadoop/records/reader/TestMapFileRecordReader.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/test/java/org/datavec/hadoop/records/reader/TestMapFileRecordReader.java @@ -77,27 +77,27 @@ public class TestMapFileRecordReader { seqMap = new HashMap<>(); seqMap.put(new LongWritable(0), new SequenceRecordWritable(Arrays.asList( - Arrays.asList(new Text("zero"), new IntWritable(0), + Arrays.asList(new Text("zero"), new IntWritable(0), new DoubleWritable(0), new NDArrayWritable(Nd4j.valueArrayOf(10, 0.0))), - Arrays.asList(new Text("one"), new IntWritable(1), + Arrays.asList(new Text("one"), new IntWritable(1), new DoubleWritable(1.0), new NDArrayWritable(Nd4j.valueArrayOf(10, 1.0))), - Arrays.asList(new Text("two"), new IntWritable(2), + Arrays.asList(new Text("two"), new IntWritable(2), new DoubleWritable(2.0), new NDArrayWritable(Nd4j.valueArrayOf(10, 2.0)))))); seqMap.put(new LongWritable(1), new SequenceRecordWritable(Arrays.asList( - Arrays.asList(new Text("Bzero"), new IntWritable(10), + Arrays.asList(new Text("Bzero"), new IntWritable(10), new DoubleWritable(10), new NDArrayWritable(Nd4j.valueArrayOf(10, 10.0))), - Arrays.asList(new Text("Bone"), new IntWritable(11), + Arrays.asList(new Text("Bone"), new IntWritable(11), new DoubleWritable(11.0), new NDArrayWritable(Nd4j.valueArrayOf(10, 11.0))), - Arrays.asList(new Text("Btwo"), new IntWritable(12), + Arrays.asList(new Text("Btwo"), new IntWritable(12), new DoubleWritable(12.0), new NDArrayWritable(Nd4j.valueArrayOf(10, 12.0)))))); seqMap.put(new LongWritable(2), new SequenceRecordWritable(Arrays.asList( - Arrays.asList(new Text("Czero"), new IntWritable(20), + Arrays.asList(new Text("Czero"), new IntWritable(20), new 
DoubleWritable(20), new NDArrayWritable(Nd4j.valueArrayOf(10, 20.0))), - Arrays.asList(new Text("Cone"), new IntWritable(21), + Arrays.asList(new Text("Cone"), new IntWritable(21), new DoubleWritable(21.0), new NDArrayWritable(Nd4j.valueArrayOf(10, 21.0))), - Arrays.asList(new Text("Ctwo"), new IntWritable(22), + Arrays.asList(new Text("Ctwo"), new IntWritable(22), new DoubleWritable(22.0), new NDArrayWritable(Nd4j.valueArrayOf(10, 22.0)))))); @@ -125,17 +125,17 @@ public class TestMapFileRecordReader { recordMap = new HashMap<>(); recordMap.put(new LongWritable(0), - new RecordWritable(Arrays.asList(new Text("zero"), + new RecordWritable(Arrays.asList(new Text("zero"), new IntWritable(0), new DoubleWritable(0), new NDArrayWritable(Nd4j.valueArrayOf(10, 0.0))))); recordMap.put(new LongWritable(1), - new RecordWritable(Arrays.asList(new Text("one"), + new RecordWritable(Arrays.asList(new Text("one"), new IntWritable(11), new DoubleWritable(11.0), new NDArrayWritable(Nd4j.valueArrayOf(10, 11.0))))); recordMap.put(new LongWritable(2), - new RecordWritable(Arrays.asList(new Text("two"), + new RecordWritable(Arrays.asList(new Text("two"), new IntWritable(22), new DoubleWritable(22.0), new NDArrayWritable(Nd4j.valueArrayOf(10, 22.0))))); diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/test/java/org/datavec/hadoop/records/reader/TestMapFileRecordReaderMultipleParts.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/test/java/org/datavec/hadoop/records/reader/TestMapFileRecordReaderMultipleParts.java index 7b50373c8..81be8ce7c 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/test/java/org/datavec/hadoop/records/reader/TestMapFileRecordReaderMultipleParts.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/test/java/org/datavec/hadoop/records/reader/TestMapFileRecordReaderMultipleParts.java @@ -96,11 +96,11 @@ public class TestMapFileRecordReaderMultipleParts { for (int i = 0; i 
< 9; i++) { seqMap.put(new LongWritable(i), new SequenceRecordWritable(Arrays.asList( - Arrays.asList(new Text(i + "-0"), new IntWritable(3 * i), + Arrays.asList(new Text(i + "-0"), new IntWritable(3 * i), new DoubleWritable(3 * i)), - Arrays.asList(new Text(i + "-1"), + Arrays.asList(new Text(i + "-1"), new IntWritable(3 * i + 1), new DoubleWritable(3 * i + 1.0)), - Arrays.asList(new Text(i + "-2"), + Arrays.asList(new Text(i + "-2"), new IntWritable(3 * i + 2), new DoubleWritable(3 * i + 2.0))))); } @@ -141,7 +141,7 @@ public class TestMapFileRecordReaderMultipleParts { recordMap = new HashMap<>(); for (int i = 0; i < 9; i++) { - recordMap.put(new LongWritable(i), new RecordWritable(Arrays.asList( + recordMap.put(new LongWritable(i), new RecordWritable(Arrays.asList( new Text(String.valueOf(i)), new IntWritable(i), new DoubleWritable(i)))); } diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/test/java/org/datavec/hadoop/records/reader/TestMapFileRecordReaderMultiplePartsSomeEmpty.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/test/java/org/datavec/hadoop/records/reader/TestMapFileRecordReaderMultiplePartsSomeEmpty.java index ff420241b..1a3999e05 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/test/java/org/datavec/hadoop/records/reader/TestMapFileRecordReaderMultiplePartsSomeEmpty.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-hadoop/src/test/java/org/datavec/hadoop/records/reader/TestMapFileRecordReaderMultiplePartsSomeEmpty.java @@ -96,11 +96,11 @@ public class TestMapFileRecordReaderMultiplePartsSomeEmpty { for (int i = 0; i < 6; i++) { seqMap.put(new LongWritable(i), new SequenceRecordWritable(Arrays.asList( - Arrays.asList(new Text(i + "-0"), new IntWritable(3 * i), + Arrays.asList(new Text(i + "-0"), new IntWritable(3 * i), new DoubleWritable(3 * i)), - Arrays.asList(new Text(i + "-1"), + Arrays.asList(new Text(i + "-1"), new IntWritable(3 * i + 1), new 
DoubleWritable(3 * i + 1.0)), - Arrays.asList(new Text(i + "-2"), + Arrays.asList(new Text(i + "-2"), new IntWritable(3 * i + 2), new DoubleWritable(3 * i + 2.0))))); } @@ -146,7 +146,7 @@ public class TestMapFileRecordReaderMultiplePartsSomeEmpty { recordMap = new HashMap<>(); for (int i = 0; i < 6; i++) { - recordMap.put(new LongWritable(i), new RecordWritable(Arrays.asList( + recordMap.put(new LongWritable(i), new RecordWritable(Arrays.asList( new Text(String.valueOf(i)), new IntWritable(i), new DoubleWritable(i)))); } diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/loader/CifarLoader.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/loader/CifarLoader.java index 0a677f063..662b54148 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/loader/CifarLoader.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/loader/CifarLoader.java @@ -248,14 +248,11 @@ public class CifarLoader extends NativeImageLoader implements Serializable { File f; if (train) { f = new File(trainFilesSerialized + 1 + ".ser"); - if (!f.exists()) - return false; + return f.exists(); } else { f = new File(testFilesSerialized); - if (!f.exists()) - return false; + return f.exists(); } - return true; } /** @@ -315,9 +312,9 @@ public class CifarLoader extends NativeImageLoader implements Serializable { } for (int i = 0; i < result.numExamples(); i++) { INDArray newFeatures = result.get(i).getFeatures(); - newFeatures.tensorAlongDimension(0, new int[] {0, 2, 3}).divi(255); - newFeatures.tensorAlongDimension(1, new int[] {0, 2, 3}).subi(uMean).divi(uStd); - newFeatures.tensorAlongDimension(2, new int[] {0, 2, 3}).subi(vMean).divi(vStd); + newFeatures.tensorAlongDimension(0, 0, 2, 3).divi(255); + newFeatures.tensorAlongDimension(1, 0, 2, 3).subi(uMean).divi(uStd); + newFeatures.tensorAlongDimension(2, 0, 
2, 3).subi(vMean).divi(vStd); result.get(i).setFeatures(newFeatures); } result.save(fileName); @@ -372,8 +369,8 @@ public class CifarLoader extends NativeImageLoader implements Serializable { for (DataSet data : result) { try { if (useSpecialPreProcessCifar) { - INDArray uChannel = data.getFeatures().tensorAlongDimension(1, new int[] {0, 2, 3}); - INDArray vChannel = data.getFeatures().tensorAlongDimension(2, new int[] {0, 2, 3}); + INDArray uChannel = data.getFeatures().tensorAlongDimension(1, 0, 2, 3); + INDArray vChannel = data.getFeatures().tensorAlongDimension(2, 0, 2, 3); uTempMean = uChannel.meanNumber().doubleValue(); // TODO INDArray.var result is incorrect based on dimensions passed in thus using manual uStd += varManual(uChannel, uTempMean); diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/loader/LFWLoader.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/loader/LFWLoader.java index dc75e7e1c..700978ff6 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/loader/LFWLoader.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/loader/LFWLoader.java @@ -158,17 +158,14 @@ public class LFWLoader extends BaseImageLoader implements Serializable { public boolean imageFilesExist() { if (useSubset) { File f = new File(BASE_DIR, lfwSubsetData.get("filesFilenameUnzipped")); - if (!f.exists()) - return false; + return f.exists(); } else { File f = new File(BASE_DIR, lfwData.get("filesFilenameUnzipped")); if (!f.exists()) return false; f = new File(BASE_DIR, lfwLabel.get("filesFilenameUnzipped")); - if (!f.exists()) - return false; + return f.exists(); } - return true; } diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/loader/NativeImageLoader.java 
b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/loader/NativeImageLoader.java index bda972a86..3cf702a94 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/loader/NativeImageLoader.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/loader/NativeImageLoader.java @@ -59,8 +59,8 @@ import static org.bytedeco.opencv.global.opencv_imgproc.*; */ public class NativeImageLoader extends BaseImageLoader { private static final int MIN_BUFFER_STEP_SIZE = 64 * 1024; - private byte[] buffer = null; - private Mat bufferMat = null; + private final byte[] buffer = null; + private final Mat bufferMat = null; @Getter public static final String[] ALLOWED_FORMATS = {"bmp", "gif", "jpg", "jpeg", "jp2", "pbm", "pgm", "ppm", "pnm", @@ -239,7 +239,7 @@ public class NativeImageLoader extends BaseImageLoader { tempPix = pix = pix2; int channels = pix.d() / 8; dtype = CV_8UC(channels); - Mat mat = new Mat(height, width, dtype, pix.data(), 4 * pix.wpl()); + Mat mat = new Mat(height, width, dtype, pix.data(), 4L * pix.wpl()); mat2 = new Mat(height, width, CV_8UC(channels)); // swap bytes if needed int[] swap = {0, channels - 1, 1, channels - 2, 2, channels - 3, 3, channels - 4}, @@ -408,7 +408,7 @@ public class NativeImageLoader extends BaseImageLoader { ret.data().offset() * Nd4j.sizeOfDataType(ret.data().dataType())); if (pointer instanceof FloatPointer) { - FloatIndexer retidx = FloatIndexer.create((FloatPointer) pagedPointer.asFloatPointer(), + FloatIndexer retidx = FloatIndexer.create(pagedPointer.asFloatPointer(), new long[] {channels, rows, cols}, new long[] {stride[0], stride[1], stride[2]}, direct); if (idx instanceof UByteIndexer) { UByteIndexer ubyteidx = (UByteIndexer) idx; @@ -453,7 +453,7 @@ public class NativeImageLoader extends BaseImageLoader { } retidx.release(); } else if (pointer instanceof DoublePointer) { - DoubleIndexer retidx = 
DoubleIndexer.create((DoublePointer) pagedPointer.asDoublePointer(), + DoubleIndexer retidx = DoubleIndexer.create(pagedPointer.asDoublePointer(), new long[] {channels, rows, cols}, new long[] {stride[0], stride[1], stride[2]}, direct); if (idx instanceof UByteIndexer) { UByteIndexer ubyteidx = (UByteIndexer) idx; @@ -871,14 +871,13 @@ public class NativeImageLoader extends BaseImageLoader { PIX pix = pixa.pix(i); currentD = asMatrix(convert(pix)); pixDestroy(pix); - switch (this.multiPageMode) { - case MINIBATCH: - index = new INDArrayIndex[]{NDArrayIndex.point(i),NDArrayIndex.all(), NDArrayIndex.all(),NDArrayIndex.all(),NDArrayIndex.all()}; - break; -// case CHANNELS: + if (this.multiPageMode == MultiPageMode.MINIBATCH) { + index = new INDArrayIndex[]{NDArrayIndex.point(i), NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all()}; + // case CHANNELS: // index = new INDArrayIndex[]{NDArrayIndex.all(), NDArrayIndex.point(i), NDArrayIndex.all(), NDArrayIndex.all(),NDArrayIndex.all()}; // break; - default: throw new UnsupportedOperationException("Unsupported MultiPageMode: " + multiPageMode); + } else { + throw new UnsupportedOperationException("Unsupported MultiPageMode: " + multiPageMode); } data.put(index , currentD.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all(),NDArrayIndex.all())); } diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/mnist/MnistDbFile.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/mnist/MnistDbFile.java index f39ea4ef6..03a347cb3 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/mnist/MnistDbFile.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/mnist/MnistDbFile.java @@ -25,7 +25,7 @@ import java.io.IOException; import java.io.RandomAccessFile; public abstract class MnistDbFile extends RandomAccessFile { 
- private int count; + private final int count; /** diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/mnist/MnistFetcher.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/mnist/MnistFetcher.java index df6cede7c..75202afea 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/mnist/MnistFetcher.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/mnist/MnistFetcher.java @@ -34,7 +34,7 @@ import java.net.URL; public class MnistFetcher { private File fileDir; - private static Logger log = LoggerFactory.getLogger(MnistFetcher.class); + private static final Logger log = LoggerFactory.getLogger(MnistFetcher.class); private static final String trainingFilesURL = "http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz"; private static final String trainingFilesFilename = "images-idx1-ubyte.gz"; diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/mnist/MnistImageFile.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/mnist/MnistImageFile.java index f13fd9ca9..c5d10183f 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/mnist/MnistImageFile.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/mnist/MnistImageFile.java @@ -25,8 +25,8 @@ import java.io.IOException; public class MnistImageFile extends MnistDbFile { - private int rows; - private int cols; + private final int rows; + private final int cols; /** * Creates new MNIST database image file ready for reading. 
diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/mnist/draw/DrawReconstruction.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/mnist/draw/DrawReconstruction.java index d58a279f1..beae5c881 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/mnist/draw/DrawReconstruction.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/mnist/draw/DrawReconstruction.java @@ -35,7 +35,7 @@ public class DrawReconstruction { public JFrame frame; BufferedImage img; - private INDArray data; + private final INDArray data; private int width = 28; private int height = 28; public String title = "TEST"; diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/recordreader/BaseImageRecordReader.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/recordreader/BaseImageRecordReader.java index 48502f95d..4a2426ac4 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/recordreader/BaseImageRecordReader.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/recordreader/BaseImageRecordReader.java @@ -122,7 +122,7 @@ public abstract class BaseImageRecordReader extends BaseRecordReader { } protected boolean containsFormat(String format) { - for (String format2 : imageLoader.getALLOWED_FORMATS()) + for (String format2 : BaseImageLoader.getALLOWED_FORMATS()) if (format.endsWith("." 
+ format2)) return true; return false; @@ -235,7 +235,7 @@ public abstract class BaseImageRecordReader extends BaseRecordReader { try { NDArrayWritable ndArrayWritable = new NDArrayWritable(imageLoader.asMatrix(inputStreamInputSplit.getIs())); finishedInputStreamSplit = true; - return Arrays.asList(ndArrayWritable); + return Collections.singletonList(ndArrayWritable); } catch (IOException e) { log.error("",e); } diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/recordreader/objdetect/ObjectDetectionRecordReader.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/recordreader/objdetect/ObjectDetectionRecordReader.java index a8f25d876..c7e8657cb 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/recordreader/objdetect/ObjectDetectionRecordReader.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/recordreader/objdetect/ObjectDetectionRecordReader.java @@ -139,9 +139,7 @@ public class ObjectDetectionRecordReader extends BaseImageRecordReader { List imageObjects = labelProvider.getImageObjectsForPath(location); for (ImageObject io : imageObjects) { String name = io.getLabel(); - if (!labelSet.contains(name)) { - labelSet.add(name); - } + labelSet.add(name); } } iter = new FileFromPathIterator(inputSplit.locationsPathIterator()); //This handles randomization internally if necessary diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/recordreader/objdetect/impl/SvhnLabelProvider.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/recordreader/objdetect/impl/SvhnLabelProvider.java index 9f29e0b17..c098105b6 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/recordreader/objdetect/impl/SvhnLabelProvider.java +++ 
b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/recordreader/objdetect/impl/SvhnLabelProvider.java @@ -41,11 +41,11 @@ import java.util.Map; public class SvhnLabelProvider implements ImageObjectLabelProvider { - private static DataType refType = new DataType(PredType.STD_REF_OBJ()); - private static DataType charType = new DataType(PredType.NATIVE_CHAR()); - private static DataType intType = new DataType(PredType.NATIVE_INT()); + private static final DataType refType = new DataType(PredType.STD_REF_OBJ()); + private static final DataType charType = new DataType(PredType.NATIVE_CHAR()); + private static final DataType intType = new DataType(PredType.NATIVE_INT()); - private Map> labelMap; + private final Map> labelMap; public SvhnLabelProvider(File dir) throws IOException { labelMap = new HashMap>(); @@ -74,11 +74,11 @@ public class SvhnLabelProvider implements ImageObjectLabelProvider { PointerPointer labelPtr = new PointerPointer(256); IntPointer intPtr = new IntPointer(256); for (int i = 0; i < n; i++) { - DataSet nameRef = new DataSet(file, namePtr.position(i * ptrSize)); + DataSet nameRef = new DataSet(file, namePtr.position((long) i * ptrSize)); nameRef.read(bytePtr, charType); String filename = bytePtr.getString(); - Group bboxGroup = new Group(file, bboxPtr.position(i * ptrSize)); + Group bboxGroup = new Group(file, bboxPtr.position((long) i * ptrSize)); DataSet topDataset = bboxGroup.openDataSet("top"); DataSet leftDataset = bboxGroup.openDataSet("left"); DataSet heightDataset = bboxGroup.openDataSet("height"); @@ -101,23 +101,23 @@ public class SvhnLabelProvider implements ImageObjectLabelProvider { assert !isFloat || m == 1; for (int j = 0; j < m; j++) { - DataSet topSet = isFloat ? topDataset : new DataSet(file, topPtr.position(j * ptrSize)); + DataSet topSet = isFloat ? 
topDataset : new DataSet(file, topPtr.position((long) j * ptrSize)); topSet.read(intPtr, intType); int top = intPtr.get(); - DataSet leftSet = isFloat ? leftDataset : new DataSet(file, leftPtr.position(j * ptrSize)); + DataSet leftSet = isFloat ? leftDataset : new DataSet(file, leftPtr.position((long) j * ptrSize)); leftSet.read(intPtr, intType); int left = intPtr.get(); - DataSet heightSet = isFloat ? heightDataset : new DataSet(file, heightPtr.position(j * ptrSize)); + DataSet heightSet = isFloat ? heightDataset : new DataSet(file, heightPtr.position((long) j * ptrSize)); heightSet.read(intPtr, intType); int height = intPtr.get(); - DataSet widthSet = isFloat ? widthDataset : new DataSet(file, widthPtr.position(j * ptrSize)); + DataSet widthSet = isFloat ? widthDataset : new DataSet(file, widthPtr.position((long) j * ptrSize)); widthSet.read(intPtr, intType); int width = intPtr.get(); - DataSet labelSet = isFloat ? labelDataset : new DataSet(file, labelPtr.position(j * ptrSize)); + DataSet labelSet = isFloat ? 
labelDataset : new DataSet(file, labelPtr.position((long) j * ptrSize)); labelSet.read(intPtr, intType); int label = intPtr.get(); if (label == 10) { diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/recordreader/objdetect/impl/VocLabelProvider.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/recordreader/objdetect/impl/VocLabelProvider.java index 192d06fe2..22a300e40 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/recordreader/objdetect/impl/VocLabelProvider.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/recordreader/objdetect/impl/VocLabelProvider.java @@ -42,7 +42,7 @@ public class VocLabelProvider implements ImageObjectLabelProvider { private static final String XMAX_TAG = ""; private static final String YMAX_TAG = ""; - private String annotationsDir; + private final String annotationsDir; public VocLabelProvider(@NonNull String baseDirectory){ this.annotationsDir = FilenameUtils.concat(baseDirectory, "Annotations"); diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/transform/ImageTransformProcess.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/transform/ImageTransformProcess.java index 788a26581..43f90a502 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/transform/ImageTransformProcess.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/main/java/org/datavec/image/transform/ImageTransformProcess.java @@ -162,7 +162,7 @@ public class ImageTransformProcess { */ public static class Builder { - private List transformList; + private final List transformList; private int seed = 0; public Builder() { diff --git 
a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/loader/LoaderTests.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/loader/LoaderTests.java index 2ca05dd55..427878e63 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/loader/LoaderTests.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/loader/LoaderTests.java @@ -70,7 +70,7 @@ public class LoaderTests { File dir = new File(FilenameUtils.concat(System.getProperty("user.home"), "cifar/cifar-10-batches-bin")); CifarLoader cifar = new CifarLoader(false, dir); assertTrue(dir.exists()); - assertTrue(cifar.getLabels() != null); + assertNotNull(cifar.getLabels()); } @Test @@ -171,7 +171,7 @@ public class LoaderTests { CifarLoader loader = new CifarLoader(row, col, channels, train, preProcessCifar); DataSet data = loader.next(numExamples); - long shape[] = data.getFeatures().shape(); + long[] shape = data.getFeatures().shape(); assertEquals(shape.length, 4); assertEquals(shape[0], numExamples); assertEquals(shape[1], channels); diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/loader/TestImageLoader.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/loader/TestImageLoader.java index 1273b8b31..433f2d25b 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/loader/TestImageLoader.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/loader/TestImageLoader.java @@ -37,8 +37,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; public class TestImageLoader { - private static long seed = 10; - private static Random rng = new Random(seed); + private static final long seed = 10; + private static final Random rng = new Random(seed); 
@Test public void testToIntArrayArray() throws Exception { diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/recordreader/TestImageRecordReader.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/recordreader/TestImageRecordReader.java index e075e8c5d..44f4a31ee 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/recordreader/TestImageRecordReader.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/recordreader/TestImageRecordReader.java @@ -207,7 +207,7 @@ public class TestImageRecordReader { List expLabels = new ArrayList<>(); for(URI u : arr){ String path = u.getPath(); - expLabels.add(testLabel(path.substring(path.length()-5, path.length()))); + expLabels.add(testLabel(path.substring(path.length()-5))); } int count = 0; @@ -280,7 +280,7 @@ public class TestImageRecordReader { @Override public Writable getLabelForPath(String path) { - String filename = path.substring(path.length()-5, path.length()); + String filename = path.substring(path.length()-5); return testLabel(filename); } @@ -336,7 +336,7 @@ public class TestImageRecordReader { List> expLabels = new ArrayList<>(); for(URI u : arr){ String path = u.getPath(); - expLabels.add(testMultiLabel(path.substring(path.length()-5, path.length()))); + expLabels.add(testMultiLabel(path.substring(path.length()-5))); } int count = 0; @@ -411,22 +411,22 @@ public class TestImageRecordReader { private static List testMultiLabel(String filename){ switch(filename){ case "0.jpg": - return Arrays.asList(new NDArrayWritable(Nd4j.create(new double[]{1,0}, new long[]{1,2}, DataType.FLOAT)), + return Arrays.asList(new NDArrayWritable(Nd4j.create(new double[]{1,0}, new long[]{1,2}, DataType.FLOAT)), new NDArrayWritable(Nd4j.create(new double[]{1,0,0}, new long[]{1,3}, DataType.FLOAT)), new DoubleWritable(0.0)); case "1.png": - return 
Arrays.asList(new NDArrayWritable(Nd4j.create(new double[]{1,0}, new long[]{1,2}, DataType.FLOAT)), + return Arrays.asList(new NDArrayWritable(Nd4j.create(new double[]{1,0}, new long[]{1,2}, DataType.FLOAT)), new NDArrayWritable(Nd4j.create(new double[]{0,1,0}, new long[]{1,3}, DataType.FLOAT)), new DoubleWritable(1.0)); case "2.jpg": - return Arrays.asList(new NDArrayWritable(Nd4j.create(new double[]{1,0}, new long[]{1,2}, DataType.FLOAT)), + return Arrays.asList(new NDArrayWritable(Nd4j.create(new double[]{1,0}, new long[]{1,2}, DataType.FLOAT)), new NDArrayWritable(Nd4j.create(new double[]{0,0,1}, new long[]{1,3}, DataType.FLOAT)), new DoubleWritable(2.0)); case "A.jpg": - return Arrays.asList(new NDArrayWritable(Nd4j.create(new double[]{0,1}, new long[]{1,2}, DataType.FLOAT)), + return Arrays.asList(new NDArrayWritable(Nd4j.create(new double[]{0,1}, new long[]{1,2}, DataType.FLOAT)), new NDArrayWritable(Nd4j.create(new double[]{1,0,0}, new long[]{1,3}, DataType.FLOAT)), new DoubleWritable(3.0)); case "B.png": - return Arrays.asList(new NDArrayWritable(Nd4j.create(new double[]{0,1}, new long[]{1,2}, DataType.FLOAT)), + return Arrays.asList(new NDArrayWritable(Nd4j.create(new double[]{0,1}, new long[]{1,2}, DataType.FLOAT)), new NDArrayWritable(Nd4j.create(new double[]{0,1,0}, new long[]{1,3}, DataType.FLOAT)), new DoubleWritable(4.0)); case "C.jpg": - return Arrays.asList(new NDArrayWritable(Nd4j.create(new double[]{0,1}, new long[]{1,2}, DataType.FLOAT)), + return Arrays.asList(new NDArrayWritable(Nd4j.create(new double[]{0,1}, new long[]{1,2}, DataType.FLOAT)), new NDArrayWritable(Nd4j.create(new double[]{0,0,1}, new long[]{1,3}, DataType.FLOAT)), new DoubleWritable(5.0)); default: throw new RuntimeException(filename); @@ -435,7 +435,7 @@ public class TestImageRecordReader { private static class CountingListener implements RecordListener { - private RecordListener listener; + private final RecordListener listener; private int count = 0; public 
CountingListener(RecordListener listener) { diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/transform/JsonYamlTest.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/transform/JsonYamlTest.java index f1d194769..2d27491f9 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/transform/JsonYamlTest.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/transform/JsonYamlTest.java @@ -95,11 +95,11 @@ public class JsonYamlTest { imgYaml = itYaml.transform(imgYaml); if (it instanceof RandomCropTransform) { - assertTrue(img.getFrame().imageHeight == imgJson.getFrame().imageHeight); - assertTrue(img.getFrame().imageWidth == imgJson.getFrame().imageWidth); + assertEquals(img.getFrame().imageHeight, imgJson.getFrame().imageHeight); + assertEquals(img.getFrame().imageWidth, imgJson.getFrame().imageWidth); - assertTrue(img.getFrame().imageHeight == imgYaml.getFrame().imageHeight); - assertTrue(img.getFrame().imageWidth == imgYaml.getFrame().imageWidth); + assertEquals(img.getFrame().imageHeight, imgYaml.getFrame().imageHeight); + assertEquals(img.getFrame().imageWidth, imgYaml.getFrame().imageWidth); } else if (it instanceof FilterImageTransform) { assertEquals(img.getFrame().imageHeight, imgJson.getFrame().imageHeight); assertEquals(img.getFrame().imageWidth, imgJson.getFrame().imageWidth); diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/transform/TestImageTransform.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/transform/TestImageTransform.java index ee713e091..593589eab 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/transform/TestImageTransform.java +++ 
b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-image/src/test/java/org/datavec/image/transform/TestImageTransform.java @@ -76,7 +76,7 @@ public class TestImageTransform { assertEquals( - x, transformed[4], 0); assertEquals( - y, transformed[5], 0); } - assertEquals(null, transform.transform(null)); + assertNull(transform.transform(null)); } @Test @@ -93,7 +93,7 @@ public class TestImageTransform { assertTrue(f.imageWidth <= frame.imageWidth); assertEquals(f.imageChannels, frame.imageChannels); } - assertEquals(null, transform.transform(null)); + assertNull(transform.transform(null)); transform = new CropImageTransform(1, 2, 3, 4); writable = transform.transform(writable); @@ -118,29 +118,29 @@ public class TestImageTransform { assertEquals(f.imageWidth, frame.imageWidth); assertEquals(f.imageChannels, frame.imageChannels); } - assertEquals(null, transform.transform(null)); + assertNull(transform.transform(null)); transform = new FlipImageTransform(-2); writable = transform.transform(writable); - float[] transformed = transform.query(new float[] {10, 20}); + float[] transformed = transform.query(10, 20); assertEquals(10, transformed[0], 0); assertEquals(20, transformed[1], 0); transform = new FlipImageTransform(0); writable = transform.transform(writable); - transformed = transform.query(new float[] {30, 40}); + transformed = transform.query(30, 40); assertEquals(30, transformed[0], 0); assertEquals(frame.imageHeight - 40 - 1, transformed[1], 0); transform = new FlipImageTransform(1); writable = transform.transform(writable); - transformed = transform.query(new float[] {50, 60}); + transformed = transform.query(50, 60); assertEquals(frame.imageWidth - 50 - 1, transformed[0], 0); assertEquals(60, transformed[1], 0); transform = new FlipImageTransform(-1); writable = transform.transform(writable); - transformed = transform.query(new float[] {70, 80}); + transformed = transform.query(70, 80); assertEquals(frame.imageWidth - 70 - 1, transformed[0], 0); 
assertEquals(frame.imageHeight - 80 - 1, transformed[1], 0); } @@ -160,7 +160,7 @@ public class TestImageTransform { assertTrue(f.imageWidth <= 3 * frame.imageWidth / 2); assertEquals(f.imageChannels, frame.imageChannels); } - assertEquals(null, transform.transform(null)); + assertNull(transform.transform(null)); transform = new ScaleImageTransform(frame.imageWidth, 2 * frame.imageHeight); writable = transform.transform(writable); @@ -186,7 +186,7 @@ public class TestImageTransform { assertEquals(f.imageWidth, frame.imageWidth); assertEquals(f.imageChannels, frame.imageChannels); } - assertEquals(null, transform.transform(null)); + assertNull(transform.transform(null)); transform = new RotateImageTransform(0, 0, -90, 0); writable = transform.transform(writable); @@ -212,7 +212,7 @@ public class TestImageTransform { assertEquals(f.imageWidth, frame.imageWidth); assertEquals(f.imageChannels, frame.imageChannels); } - assertEquals(null, transform.transform(null)); + assertNull(transform.transform(null)); transform = new WarpImageTransform(1, 2, 3, 4, 5, 6, 7, 8); writable = transform.transform(writable); @@ -245,11 +245,11 @@ public class TestImageTransform { assertTrue(f.imageWidth <= frame.imageWidth + 20); assertEquals(f.imageChannels, frame.imageChannels); } - assertEquals(null, transform.transform(null)); + assertNull(transform.transform(null)); transform = new MultiImageTransform(new ColorConversionTransform(COLOR_BGR2RGB)); writable = transform.transform(writable); - float[] transformed = transform.query(new float[] {11, 22}); + float[] transformed = transform.query(11, 22); assertEquals(11, transformed[0], 0); assertEquals(22, transformed[1], 0); } @@ -269,7 +269,7 @@ public class TestImageTransform { assertEquals(f.imageWidth, frame.imageWidth); assertEquals(f.imageChannels, frame.imageChannels); } - assertEquals(null, transform.transform(null)); + assertNull(transform.transform(null)); } @Test @@ -284,9 +284,9 @@ public class TestImageTransform { 
assertEquals(w, writable); } - assertEquals(null, transform.transform(null)); + assertNull(transform.transform(null)); - float[] transformed = transform.query(new float[] {33, 44}); + float[] transformed = transform.query(33, 44); assertEquals(33, transformed[0], 0); assertEquals(44, transformed[1], 0); } @@ -312,9 +312,9 @@ public class TestImageTransform { Frame newframe = w.getFrame(); assertNotEquals(frame, newframe); - assertEquals(null, transform.transform(null)); + assertNull(transform.transform(null)); - float[] transformed = transform.query(new float[] {55, 66}); + float[] transformed = transform.query(55, 66); assertEquals(55, transformed[0], 0); assertEquals(66, transformed[1], 0); } @@ -336,9 +336,9 @@ public class TestImageTransform { showTrans.transform(writable); Frame newframe = w.getFrame(); assertNotEquals(frame, newframe); - assertEquals(null, transform.transform(null)); + assertNull(transform.transform(null)); - float[] transformed = transform.query(new float[] {66, 77}); + float[] transformed = transform.query(66, 77); assertEquals(66, transformed[0], 0); assertEquals(77, transformed[1], 0); } @@ -352,10 +352,10 @@ public class TestImageTransform { for (int i = 0; i < 100; i++) { ImageWritable w = transform.transform(writable); Frame f = w.getFrame(); - assertTrue(f.imageHeight == frame.imageHeight / 2); - assertTrue(f.imageWidth == frame.imageWidth / 2); + assertEquals(f.imageHeight, frame.imageHeight / 2); + assertEquals(f.imageWidth, frame.imageWidth / 2); } - assertEquals(null, transform.transform(null)); + assertNull(transform.transform(null)); transform = new RandomCropTransform(frame.imageHeight, frame.imageWidth); writable = transform.transform(writable); @@ -382,15 +382,15 @@ public class TestImageTransform { for (int i = 0; i < 100; i++) { ImageWritable w = transform.transform(writable); Frame f = w.getFrame(); - assertTrue(f.imageHeight == frame.imageHeight / 2); - assertTrue(f.imageWidth == frame.imageWidth / 2); + 
assertEquals(f.imageHeight, frame.imageHeight / 2); + assertEquals(f.imageWidth, frame.imageWidth / 2); assertEquals(f.imageChannels, frame.imageChannels); } - assertEquals(null, transform.transform(null)); + assertNull(transform.transform(null)); transform = new PipelineImageTransform(new EqualizeHistTransform()); writable = transform.transform(writable); - float[] transformed = transform.query(new float[] {88, 99}); + float[] transformed = transform.query(88, 99); assertEquals(88, transformed[0], 0); assertEquals(99, transformed[1], 0); } @@ -426,7 +426,7 @@ public class TestImageTransform { assertEquals(newFrame.imageHeight, 74); assertEquals(newFrame.imageWidth, 61); - float[] transformed = transform.query(new float[] {88, 32}); + float[] transformed = transform.query(88, 32); assertEquals(0, transformed[0], 0); assertEquals(0, transformed[1], 0); } diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/annotator/PoStagger.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/annotator/PoStagger.java index d071a42a4..6ad2d8a1e 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/annotator/PoStagger.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/annotator/PoStagger.java @@ -161,7 +161,7 @@ public class PoStagger extends CasAnnotator_ImplBase { final List posTags = this.posTagger.tag(sentenceTokenList); - double posProbabilities[] = null; + double[] posProbabilities = null; if (this.probabilityFeature != null) { posProbabilities = this.posTagger.probs(); diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/metadata/DefaultVocabCache.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/metadata/DefaultVocabCache.java index 16dff8e5f..9b07d54ed 100644 --- 
a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/metadata/DefaultVocabCache.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/metadata/DefaultVocabCache.java @@ -30,10 +30,10 @@ import org.nd4j.common.util.Index; */ public class DefaultVocabCache implements VocabCache { - private Counter wordFrequencies = new Counter<>(); - private Counter docFrequencies = new Counter<>(); + private final Counter wordFrequencies = new Counter<>(); + private final Counter docFrequencies = new Counter<>(); private int minWordFrequency; - private Index vocabWords = new Index(); + private final Index vocabWords = new Index(); private double numDocs = 0; /** diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/movingwindow/ContextLabelRetriever.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/movingwindow/ContextLabelRetriever.java index 76b0244bd..b0b3da4a4 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/movingwindow/ContextLabelRetriever.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/movingwindow/ContextLabelRetriever.java @@ -35,8 +35,8 @@ import java.util.List; public class ContextLabelRetriever { - private static String BEGIN_LABEL = "<([A-Za-z]+|\\d+)>"; - private static String END_LABEL = ""; + private static final String BEGIN_LABEL = "<([A-Za-z]+|\\d+)>"; + private static final String END_LABEL = ""; private ContextLabelRetriever() {} @@ -66,7 +66,7 @@ public class ContextLabelRetriever { //no labels; add these as NONE and begin the new label if (!currTokens.isEmpty()) { - tokensWithSameLabel.add(new Pair<>("NONE", (List) new ArrayList<>(currTokens))); + tokensWithSameLabel.add(new Pair<>("NONE", new ArrayList<>(currTokens))); currTokens.clear(); } @@ -86,7 +86,7 @@ public class ContextLabelRetriever { 
Preconditions.checkState(currLabel.equals(endLabel), "Current label begin and end did not match for the parse. Was: %s ending with %s", currLabel, endLabel); - tokensWithSameLabel.add(new Pair<>(currLabel, (List) new ArrayList<>(currTokens))); + tokensWithSameLabel.add(new Pair<>(currLabel, new ArrayList<>(currTokens))); currTokens.clear(); @@ -100,7 +100,7 @@ public class ContextLabelRetriever { //no labels; add these as NONE and begin the new label if (!currTokens.isEmpty()) { - tokensWithSameLabel.add(new Pair<>("none", (List) new ArrayList<>(currTokens))); + tokensWithSameLabel.add(new Pair<>("none", new ArrayList<>(currTokens))); currTokens.clear(); } diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/movingwindow/Window.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/movingwindow/Window.java index a75b37fd0..be7b46563 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/movingwindow/Window.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/movingwindow/Window.java @@ -40,8 +40,8 @@ public class Window implements Serializable { private boolean beginLabel; private boolean endLabel; private int median; - private static String BEGIN_LABEL = "<([A-Z]+|\\d+)>"; - private static String END_LABEL = ""; + private static final String BEGIN_LABEL = "<([A-Z]+|\\d+)>"; + private static final String END_LABEL = ""; private int begin, end; /** diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/ConcurrentTokenizer.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/ConcurrentTokenizer.java index d46e68790..4f7980435 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/ConcurrentTokenizer.java 
+++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/ConcurrentTokenizer.java @@ -121,7 +121,7 @@ public class ConcurrentTokenizer extends AbstractTokenizer { protected void postProcessAnnotations(Span[] tokens, AnnotationFS[] tokenAnnotations) { // if interest if (probabilityFeature != null) { - double tokenProbabilties[] = tokenizer.getTokenProbabilities(); + double[] tokenProbabilties = tokenizer.getTokenProbabilities(); for (int i = 0; i < tokenAnnotations.length; i++) { tokenAnnotations[i].setDoubleValue(probabilityFeature, tokenProbabilties[i]); diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/DefaultStreamTokenizer.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/DefaultStreamTokenizer.java index 9216ed24a..f6872a768 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/DefaultStreamTokenizer.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/DefaultStreamTokenizer.java @@ -28,7 +28,7 @@ import java.util.List; */ public class DefaultStreamTokenizer implements Tokenizer { - private StreamTokenizer streamTokenizer; + private final StreamTokenizer streamTokenizer; private TokenPreProcess tokenPreProcess; diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/DefaultTokenizer.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/DefaultTokenizer.java index 4b393c0d5..c2972a606 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/DefaultTokenizer.java +++ 
b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/DefaultTokenizer.java @@ -30,7 +30,7 @@ public class DefaultTokenizer implements Tokenizer { tokenizer = new StringTokenizer(tokens); } - private StringTokenizer tokenizer; + private final StringTokenizer tokenizer; private TokenPreProcess tokenPreProcess; @Override diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/PosUimaTokenizer.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/PosUimaTokenizer.java index e9e94bb99..a094a1f3e 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/PosUimaTokenizer.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/PosUimaTokenizer.java @@ -41,8 +41,8 @@ import java.util.List; public class PosUimaTokenizer implements Tokenizer { private static AnalysisEngine engine; - private List tokens; - private Collection allowedPosTags; + private final List tokens; + private final Collection allowedPosTags; private int index; private static CAS cas; @@ -85,9 +85,7 @@ public class PosUimaTokenizer implements Tokenizer { String check = token.getCoveredText(); if (check.matches("<[A-Z]+>") || check.matches("")) return false; - else if (token.getPos() != null && !this.allowedPosTags.contains(token.getPos())) - return false; - return true; + else return token.getPos() == null || this.allowedPosTags.contains(token.getPos()); } diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/UimaTokenizer.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/UimaTokenizer.java index eb14fdabd..18d942005 100644 --- 
a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/UimaTokenizer.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizer/UimaTokenizer.java @@ -34,10 +34,10 @@ import java.util.List; */ public class UimaTokenizer implements Tokenizer { - private List tokens; + private final List tokens; private int index; - private static Logger log = LoggerFactory.getLogger(UimaTokenizer.class); - private boolean checkForLabel; + private static final Logger log = LoggerFactory.getLogger(UimaTokenizer.class); + private final boolean checkForLabel; private TokenPreProcess tokenPreProcessor; @@ -73,9 +73,7 @@ public class UimaTokenizer implements Tokenizer { } private boolean valid(String check) { - if (check.matches("<[A-Z]+>") || check.matches("")) - return false; - return true; + return !check.matches("<[A-Z]+>") && !check.matches(""); } diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizerfactory/PosUimaTokenizerFactory.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizerfactory/PosUimaTokenizerFactory.java index 5419bf9ef..3a7368b02 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizerfactory/PosUimaTokenizerFactory.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizerfactory/PosUimaTokenizerFactory.java @@ -41,8 +41,8 @@ import static org.apache.uima.fit.factory.AnalysisEngineFactory.createEngineDesc */ public class PosUimaTokenizerFactory implements TokenizerFactory { - private AnalysisEngine tokenizer; - private Collection allowedPoSTags; + private final AnalysisEngine tokenizer; + private final Collection allowedPoSTags; private TokenPreProcess tokenPreProcess; diff --git 
a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizerfactory/UimaTokenizerFactory.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizerfactory/UimaTokenizerFactory.java index 7b24244ac..b91ae9a74 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizerfactory/UimaTokenizerFactory.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/tokenization/tokenizerfactory/UimaTokenizerFactory.java @@ -40,8 +40,8 @@ import java.io.InputStream; public class UimaTokenizerFactory implements TokenizerFactory { - private UimaResource uimaResource; - private boolean checkForLabel; + private final UimaResource uimaResource; + private final boolean checkForLabel; private static AnalysisEngine defaultAnalysisEngine; private TokenPreProcess preProcess; diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/transforms/GazeteerTransform.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/transforms/GazeteerTransform.java index 1c774e904..d99f629dc 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/transforms/GazeteerTransform.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/transforms/GazeteerTransform.java @@ -98,7 +98,7 @@ public class GazeteerTransform extends BaseColumnTransform implements BagOfWords @Override public List> mapSequence(List> sequence) { INDArray arr = (INDArray) mapSequence((Object) sequence); - return Collections.singletonList(Collections.singletonList(new NDArrayWritable(arr))); + return Collections.singletonList(Collections.singletonList(new NDArrayWritable(arr))); } @Override diff --git 
a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/transforms/MultiNlpTransform.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/transforms/MultiNlpTransform.java index b702422e8..8f8c0deb0 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/transforms/MultiNlpTransform.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/main/java/org/datavec/nlp/transforms/MultiNlpTransform.java @@ -40,9 +40,9 @@ import java.util.List; */ public class MultiNlpTransform extends BaseColumnTransform implements BagOfWordsTransform { - private BagOfWordsTransform[] transforms; - private String newColumnName; - private List vocabWords; + private final BagOfWordsTransform[] transforms; + private final String newColumnName; + private final List vocabWords; /** * @@ -80,7 +80,7 @@ public class MultiNlpTransform extends BaseColumnTransform implements BagOfWords @Override public List> mapSequence(List> sequence) { - return Collections.singletonList(Collections.singletonList(new NDArrayWritable(transformFrom(sequence)))); + return Collections.singletonList(Collections.singletonList(new NDArrayWritable(transformFrom(sequence)))); } @Override diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/test/java/org/datavec/nlp/transforms/TestGazeteerTransform.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/test/java/org/datavec/nlp/transforms/TestGazeteerTransform.java index c63ff14c7..7bfbe4eb0 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/test/java/org/datavec/nlp/transforms/TestGazeteerTransform.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/test/java/org/datavec/nlp/transforms/TestGazeteerTransform.java @@ -60,7 +60,7 @@ public class TestGazeteerTransform { String[] split = s.split(" "); List> seq = new ArrayList<>(); for(String s2 : split){ - 
seq.add(Collections.singletonList(new Text(s2))); + seq.add(Collections.singletonList(new Text(s2))); } input.add(seq); } diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/test/java/org/datavec/nlp/transforms/TestMultiNLPTransform.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/test/java/org/datavec/nlp/transforms/TestMultiNLPTransform.java index b0642f2a9..ebb5c52c7 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/test/java/org/datavec/nlp/transforms/TestMultiNLPTransform.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/test/java/org/datavec/nlp/transforms/TestMultiNLPTransform.java @@ -52,7 +52,7 @@ public class TestMultiNLPTransform { String[] split = s.split(" "); List> seq = new ArrayList<>(); for(String s2 : split){ - seq.add(Collections.singletonList(new Text(s2))); + seq.add(Collections.singletonList(new Text(s2))); } input.add(seq); } diff --git a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/test/java/org/datavec/nlp/transforms/TokenizerBagOfWordsTermSequenceIndexTransformTest.java b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/test/java/org/datavec/nlp/transforms/TokenizerBagOfWordsTermSequenceIndexTransformTest.java index dded0cc06..dfa2e228a 100644 --- a/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/test/java/org/datavec/nlp/transforms/TokenizerBagOfWordsTermSequenceIndexTransformTest.java +++ b/cavis-datavec/cavis-datavec-data/cavis-datavec-data-nlp/src/test/java/org/datavec/nlp/transforms/TokenizerBagOfWordsTermSequenceIndexTransformTest.java @@ -89,7 +89,7 @@ public class TokenizerBagOfWordsTermSequenceIndexTransformTest { */ List>> input = new ArrayList<>(); - input.add(Arrays.asList(Arrays.asList(new Text(corpus[0])),Arrays.asList(new Text(corpus[1])))); + input.add(Arrays.asList(Collections.singletonList(new Text(corpus[0])), Collections.singletonList(new Text(corpus[1])))); // First: Check TfidfVectorizer vs. 
scikit: @@ -313,7 +313,7 @@ public class TokenizerBagOfWordsTermSequenceIndexTransformTest { //input.add(Arrays.asList(Arrays.asList(new Text(corpus[0])),Arrays.asList(new Text(corpus[1])))); List> seq = new ArrayList<>(); for(String s : corpus){ - seq.add(Collections.singletonList(new Text(s))); + seq.add(Collections.singletonList(new Text(s))); } input.add(seq); diff --git a/cavis-datavec/cavis-datavec-local/src/main/java/org/datavec/local/transforms/LocalTransformExecutor.java b/cavis-datavec/cavis-datavec-local/src/main/java/org/datavec/local/transforms/LocalTransformExecutor.java index 1e2bc42ed..2f7328954 100644 --- a/cavis-datavec/cavis-datavec-local/src/main/java/org/datavec/local/transforms/LocalTransformExecutor.java +++ b/cavis-datavec/cavis-datavec-local/src/main/java/org/datavec/local/transforms/LocalTransformExecutor.java @@ -68,7 +68,7 @@ public class LocalTransformExecutor { //returning empty records public final static String LOG_ERROR_PROPERTY = "org.datavec.spark.transform.logerrors"; - private static BufferAllocator bufferAllocator = new RootAllocator(Long.MAX_VALUE); + private static final BufferAllocator bufferAllocator = new RootAllocator(Long.MAX_VALUE); /** * Execute the specified TransformProcess with the given input data
@@ -98,7 +98,7 @@ public class LocalTransformExecutor { * Execute the specified TransformProcess with the given input data
* Note: this method can only be used if the TransformProcess * starts with non-sequential data, - * but returns sequence + * but returns sequence * data (after grouping or converting to a sequence as one of the steps) * * @param inputWritables Input data to process diff --git a/cavis-datavec/cavis-datavec-local/src/main/java/org/datavec/local/transforms/LocalTransformProcessSequenceRecordReader.java b/cavis-datavec/cavis-datavec-local/src/main/java/org/datavec/local/transforms/LocalTransformProcessSequenceRecordReader.java index e2427b409..8fc5fccae 100644 --- a/cavis-datavec/cavis-datavec-local/src/main/java/org/datavec/local/transforms/LocalTransformProcessSequenceRecordReader.java +++ b/cavis-datavec/cavis-datavec-local/src/main/java/org/datavec/local/transforms/LocalTransformProcessSequenceRecordReader.java @@ -26,6 +26,7 @@ import org.datavec.api.transform.TransformProcess; import org.datavec.api.writable.Writable; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class LocalTransformProcessSequenceRecordReader extends TransformProcessSequenceRecordReader { @@ -36,7 +37,7 @@ public class LocalTransformProcessSequenceRecordReader extends TransformProcessS @Override public List> sequenceRecord() { - return LocalTransformExecutor.executeSequenceToSequence(Arrays.asList(sequenceRecordReader.nextSequence().getSequenceRecord()),transformProcess + return LocalTransformExecutor.executeSequenceToSequence(Collections.singletonList(sequenceRecordReader.nextSequence().getSequenceRecord()),transformProcess ).get(0); } diff --git a/cavis-datavec/cavis-datavec-local/src/main/java/org/datavec/local/transforms/misc/SequenceMergeFunction.java b/cavis-datavec/cavis-datavec-local/src/main/java/org/datavec/local/transforms/misc/SequenceMergeFunction.java index 0e0c6afef..e0fdf697e 100644 --- a/cavis-datavec/cavis-datavec-local/src/main/java/org/datavec/local/transforms/misc/SequenceMergeFunction.java +++ 
b/cavis-datavec/cavis-datavec-local/src/main/java/org/datavec/local/transforms/misc/SequenceMergeFunction.java @@ -31,7 +31,7 @@ import java.util.List; public class SequenceMergeFunction implements Function>>>, List>> { - private SequenceMerge sequenceMerge; + private final SequenceMerge sequenceMerge; public SequenceMergeFunction(SequenceMerge sequenceMerge) { this.sequenceMerge = sequenceMerge; diff --git a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/LocalTransformProcessRecordReaderTests.java b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/LocalTransformProcessRecordReaderTests.java index 2ec96607e..25dc7b738 100644 --- a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/LocalTransformProcessRecordReaderTests.java +++ b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/LocalTransformProcessRecordReaderTests.java @@ -41,6 +41,7 @@ import org.nd4j.common.io.ClassPathResource; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -64,18 +65,18 @@ public class LocalTransformProcessRecordReaderTests { public void simpleTransformTestSequence() { List> sequence = new ArrayList<>(); //First window: - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L), new IntWritable(0), + sequence.add(Arrays.asList(new LongWritable(1451606400000L), new IntWritable(0), new IntWritable(0))); - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 100L), new IntWritable(1), + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 100L), new IntWritable(1), new IntWritable(0))); - sequence.add(Arrays.asList((Writable) new LongWritable(1451606400000L + 200L), new IntWritable(2), + sequence.add(Arrays.asList(new LongWritable(1451606400000L + 200L), new IntWritable(2), new IntWritable(0))); Schema schema = new 
SequenceSchema.Builder().addColumnTime("timecolumn", DateTimeZone.UTC) .addColumnInteger("intcolumn").addColumnInteger("intcolumn2").build(); TransformProcess transformProcess = new TransformProcess.Builder(schema).removeColumns("intcolumn2").build(); InMemorySequenceRecordReader inMemorySequenceRecordReader = - new InMemorySequenceRecordReader(Arrays.asList(sequence)); + new InMemorySequenceRecordReader(Collections.singletonList(sequence)); LocalTransformProcessSequenceRecordReader transformProcessSequenceRecordReader = new LocalTransformProcessSequenceRecordReader(inMemorySequenceRecordReader, transformProcess); List> next = transformProcessSequenceRecordReader.sequenceRecord(); diff --git a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/functions/TestNDArrayToWritablesFunction.java b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/functions/TestNDArrayToWritablesFunction.java index 37a86a2f3..95b6ebfab 100644 --- a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/functions/TestNDArrayToWritablesFunction.java +++ b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/functions/TestNDArrayToWritablesFunction.java @@ -31,6 +31,7 @@ import org.nd4j.linalg.factory.Nd4j; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -50,7 +51,7 @@ public class TestNDArrayToWritablesFunction { @Test public void testNDArrayToWritablesArray() throws Exception { INDArray arr = Nd4j.arange(5); - List expected = Arrays.asList((Writable) new NDArrayWritable(arr)); + List expected = Collections.singletonList(new NDArrayWritable(arr)); List actual = new NDArrayToWritablesFunction(true).apply(arr); assertEquals(expected, actual); } diff --git 
a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/functions/TestWritablesToStringFunctions.java b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/functions/TestWritablesToStringFunctions.java index fca45adb1..1086866f2 100644 --- a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/functions/TestWritablesToStringFunctions.java +++ b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/functions/TestWritablesToStringFunctions.java @@ -44,7 +44,7 @@ public class TestWritablesToStringFunctions { @Test public void testWritablesToString() throws Exception { - List l = Arrays.asList(new DoubleWritable(1.5), new Text("someValue")); + List l = Arrays.asList(new DoubleWritable(1.5), new Text("someValue")); String expected = l.get(0).toString() + "," + l.get(1).toString(); assertEquals(expected, new WritablesToStringFunction(",").apply(l)); @@ -53,8 +53,8 @@ public class TestWritablesToStringFunctions { @Test public void testSequenceWritablesToString() throws Exception { - List> l = Arrays.asList(Arrays.asList(new DoubleWritable(1.5), new Text("someValue")), - Arrays.asList(new DoubleWritable(2.5), new Text("otherValue"))); + List> l = Arrays.asList(Arrays.asList(new DoubleWritable(1.5), new Text("someValue")), + Arrays.asList(new DoubleWritable(2.5), new Text("otherValue"))); String expected = l.get(0).get(0).toString() + "," + l.get(0).get(1).toString() + "\n" + l.get(1).get(0).toString() + "," + l.get(1).get(1).toString(); diff --git a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/ExecutionTest.java b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/ExecutionTest.java index e265136f8..94aa8eeaf 100644 --- a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/ExecutionTest.java +++ 
b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/ExecutionTest.java @@ -87,9 +87,9 @@ public class ExecutionTest { .doubleMathOp("col2", MathOp.Add, 10.0).floatMathOp("col3", MathOp.Add, 5f).build(); List> inputData = new ArrayList<>(); - inputData.add(Arrays.asList(new IntWritable(0), new Text("state2"), new DoubleWritable(0.1), new FloatWritable(0.3f))); - inputData.add(Arrays.asList(new IntWritable(1), new Text("state1"), new DoubleWritable(1.1), new FloatWritable(1.7f))); - inputData.add(Arrays.asList(new IntWritable(2), new Text("state0"), new DoubleWritable(2.1), new FloatWritable(3.6f))); + inputData.add(Arrays.asList(new IntWritable(0), new Text("state2"), new DoubleWritable(0.1), new FloatWritable(0.3f))); + inputData.add(Arrays.asList(new IntWritable(1), new Text("state1"), new DoubleWritable(1.1), new FloatWritable(1.7f))); + inputData.add(Arrays.asList(new IntWritable(2), new Text("state0"), new DoubleWritable(2.1), new FloatWritable(3.6f))); List> rdd = (inputData); @@ -103,9 +103,9 @@ public class ExecutionTest { }); List> expected = new ArrayList<>(); - expected.add(Arrays.asList(new IntWritable(0), new IntWritable(2), new DoubleWritable(10.1), new FloatWritable(5.3f))); - expected.add(Arrays.asList(new IntWritable(1), new IntWritable(1), new DoubleWritable(11.1), new FloatWritable(6.7f))); - expected.add(Arrays.asList(new IntWritable(2), new IntWritable(0), new DoubleWritable(12.1), new FloatWritable(8.6f))); + expected.add(Arrays.asList(new IntWritable(0), new IntWritable(2), new DoubleWritable(10.1), new FloatWritable(5.3f))); + expected.add(Arrays.asList(new IntWritable(1), new IntWritable(1), new DoubleWritable(11.1), new FloatWritable(6.7f))); + expected.add(Arrays.asList(new IntWritable(2), new IntWritable(0), new DoubleWritable(12.1), new FloatWritable(8.6f))); assertEquals(expected, out); } @@ -116,9 +116,9 @@ public class ExecutionTest { .addColumnDouble("col1").addColumnDouble("col2") 
.addColumnDouble("col3").build(); List> inputData = new ArrayList<>(); - inputData.add(Arrays.asList(new IntWritable(0), new DoubleWritable(1), new DoubleWritable(0.1))); - inputData.add(Arrays.asList(new IntWritable(1), new DoubleWritable(3), new DoubleWritable(1.1))); - inputData.add(Arrays.asList(new IntWritable(2), new DoubleWritable(3), new DoubleWritable(2.1))); + inputData.add(Arrays.asList(new IntWritable(0), new DoubleWritable(1), new DoubleWritable(0.1))); + inputData.add(Arrays.asList(new IntWritable(1), new DoubleWritable(3), new DoubleWritable(1.1))); + inputData.add(Arrays.asList(new IntWritable(2), new DoubleWritable(3), new DoubleWritable(2.1))); TransformProcess transformProcess = new TransformProcess.Builder(filterSchema) .filter(new DoubleColumnCondition("col1",ConditionOp.LessThan,1)).build(); List> execute = LocalTransformExecutor.execute(inputData, transformProcess); @@ -136,12 +136,12 @@ public class ExecutionTest { List>> inputSequences = new ArrayList<>(); List> seq1 = new ArrayList<>(); - seq1.add(Arrays.asList(new IntWritable(0), new Text("state2"), new DoubleWritable(0.1))); - seq1.add(Arrays.asList(new IntWritable(1), new Text("state1"), new DoubleWritable(1.1))); - seq1.add(Arrays.asList(new IntWritable(2), new Text("state0"), new DoubleWritable(2.1))); + seq1.add(Arrays.asList(new IntWritable(0), new Text("state2"), new DoubleWritable(0.1))); + seq1.add(Arrays.asList(new IntWritable(1), new Text("state1"), new DoubleWritable(1.1))); + seq1.add(Arrays.asList(new IntWritable(2), new Text("state0"), new DoubleWritable(2.1))); List> seq2 = new ArrayList<>(); - seq2.add(Arrays.asList(new IntWritable(3), new Text("state0"), new DoubleWritable(3.1))); - seq2.add(Arrays.asList(new IntWritable(4), new Text("state1"), new DoubleWritable(4.1))); + seq2.add(Arrays.asList(new IntWritable(3), new Text("state0"), new DoubleWritable(3.1))); + seq2.add(Arrays.asList(new IntWritable(4), new Text("state1"), new DoubleWritable(4.1))); 
inputSequences.add(seq1); inputSequences.add(seq2); @@ -159,12 +159,12 @@ public class ExecutionTest { List>> expectedSequence = new ArrayList<>(); List> seq1e = new ArrayList<>(); - seq1e.add(Arrays.asList(new IntWritable(0), new IntWritable(2), new DoubleWritable(10.1))); - seq1e.add(Arrays.asList(new IntWritable(1), new IntWritable(1), new DoubleWritable(11.1))); - seq1e.add(Arrays.asList(new IntWritable(2), new IntWritable(0), new DoubleWritable(12.1))); + seq1e.add(Arrays.asList(new IntWritable(0), new IntWritable(2), new DoubleWritable(10.1))); + seq1e.add(Arrays.asList(new IntWritable(1), new IntWritable(1), new DoubleWritable(11.1))); + seq1e.add(Arrays.asList(new IntWritable(2), new IntWritable(0), new DoubleWritable(12.1))); List> seq2e = new ArrayList<>(); - seq2e.add(Arrays.asList(new IntWritable(3), new IntWritable(0), new DoubleWritable(13.1))); - seq2e.add(Arrays.asList(new IntWritable(4), new IntWritable(1), new DoubleWritable(14.1))); + seq2e.add(Arrays.asList(new IntWritable(3), new IntWritable(0), new DoubleWritable(13.1))); + seq2e.add(Arrays.asList(new IntWritable(4), new IntWritable(1), new DoubleWritable(14.1))); expectedSequence.add(seq1e); expectedSequence.add(seq2e); @@ -177,8 +177,8 @@ public class ExecutionTest { public void testReductionGlobal() { List> in = Arrays.asList( - Arrays.asList(new Text("first"), new DoubleWritable(3.0)), - Arrays.asList(new Text("second"), new DoubleWritable(5.0)) + Arrays.asList(new Text("first"), new DoubleWritable(3.0)), + Arrays.asList(new Text("second"), new DoubleWritable(5.0)) ); List> inData = in; @@ -198,7 +198,7 @@ public class ExecutionTest { List> out = outRdd; - List> expOut = Collections.singletonList(Arrays.asList(new Text("first"), new DoubleWritable(4.0))); + List> expOut = Collections.singletonList(Arrays.asList(new Text("first"), new DoubleWritable(4.0))); assertEquals(expOut, out); } @@ -207,10 +207,10 @@ public class ExecutionTest { public void testReductionByKey(){ List> in = 
Arrays.asList( - Arrays.asList(new IntWritable(0), new Text("first"), new DoubleWritable(3.0)), - Arrays.asList(new IntWritable(0), new Text("second"), new DoubleWritable(5.0)), - Arrays.asList(new IntWritable(1), new Text("f"), new DoubleWritable(30.0)), - Arrays.asList(new IntWritable(1), new Text("s"), new DoubleWritable(50.0)) + Arrays.asList(new IntWritable(0), new Text("first"), new DoubleWritable(3.0)), + Arrays.asList(new IntWritable(0), new Text("second"), new DoubleWritable(5.0)), + Arrays.asList(new IntWritable(1), new Text("f"), new DoubleWritable(30.0)), + Arrays.asList(new IntWritable(1), new Text("s"), new DoubleWritable(50.0)) ); List> inData = in; @@ -233,8 +233,8 @@ public class ExecutionTest { List> out = outRdd; List> expOut = Arrays.asList( - Arrays.asList(new IntWritable(0), new Text("first"), new DoubleWritable(4.0)), - Arrays.asList(new IntWritable(1), new Text("f"), new DoubleWritable(40.0))); + Arrays.asList(new IntWritable(0), new Text("first"), new DoubleWritable(4.0)), + Arrays.asList(new IntWritable(1), new Text("f"), new DoubleWritable(40.0))); out = new ArrayList<>(out); Collections.sort( diff --git a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/TestGeoTransforms.java b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/TestGeoTransforms.java index d0e431678..3cca330af 100644 --- a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/TestGeoTransforms.java +++ b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/TestGeoTransforms.java @@ -75,10 +75,10 @@ public class TestGeoTransforms { out.getColumnTypes()); assertEquals(Arrays.asList((Writable) new Text("-30"), new Text("20"), new Text("10"), new DoubleWritable(5.0)), - transform.map(Arrays.asList((Writable) new Text("-30"), new Text("20"), new Text("10")))); + transform.map(Arrays.asList(new Text("-30"), new Text("20"), new 
Text("10")))); assertEquals(Arrays.asList((Writable) new Text("50|40"), new Text("10|-20"), new Text("10|5"), new DoubleWritable(Math.sqrt(160))), - transform.map(Arrays.asList((Writable) new Text("50|40"), new Text("10|-20"), + transform.map(Arrays.asList(new Text("50|40"), new Text("10|-20"), new Text("10|5")))); } @@ -98,7 +98,7 @@ public class TestGeoTransforms { double latitude = 51.5142; double longitude = -0.0931; - List writables = transform.map(Collections.singletonList((Writable) new Text(in))); + List writables = transform.map(Collections.singletonList(new Text(in))); assertEquals(1, writables.size()); String[] coordinates = writables.get(0).toString().split("CUSTOM_DELIMITER"); assertEquals(2, coordinates.length); @@ -116,7 +116,7 @@ public class TestGeoTransforms { ObjectInputStream ois = new ObjectInputStream(bais); Transform deserialized = (Transform) ois.readObject(); - writables = deserialized.map(Collections.singletonList((Writable) new Text(in))); + writables = deserialized.map(Collections.singletonList(new Text(in))); assertEquals(1, writables.size()); coordinates = writables.get(0).toString().split("CUSTOM_DELIMITER"); //System.out.println(Arrays.toString(coordinates)); @@ -145,7 +145,7 @@ public class TestGeoTransforms { assertEquals(1, out.getColumnMetaData().size()); assertEquals(ColumnType.String, out.getMetaData(0).getColumnType()); - List writables = transform.map(Collections.singletonList((Writable) new Text(in))); + List writables = transform.map(Collections.singletonList(new Text(in))); assertEquals(1, writables.size()); assertEquals(location, writables.get(0).toString()); //System.out.println(location); diff --git a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/TestPythonTransformProcess.java b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/TestPythonTransformProcess.java index 21ae33b3f..1dd62a88e 100644 --- 
a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/TestPythonTransformProcess.java +++ b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/TestPythonTransformProcess.java @@ -69,7 +69,7 @@ public class TestPythonTransformProcess { .build() ).build(); - List inputs = Arrays.asList((Writable)new Text("Hello "), new Text("World!")); + List inputs = Arrays.asList(new Text("Hello "), new Text("World!")); List outputs = tp.execute(inputs); assertEquals((outputs.get(0)).toString(), "Hello "); @@ -100,7 +100,7 @@ public class TestPythonTransformProcess { .inputSchema(initialSchema) .build() ).build(); - List inputs = Arrays.asList((Writable)new IntWritable(10), + List inputs = Arrays.asList(new IntWritable(10), new FloatWritable(3.5f), new Text("5"), new DoubleWritable(2.0) @@ -134,7 +134,6 @@ public class TestPythonTransformProcess { .build() ).build(); List inputs = Arrays.asList( - (Writable) new NDArrayWritable(arr1), new NDArrayWritable(arr2) ); @@ -170,7 +169,6 @@ public class TestPythonTransformProcess { .build() ).build(); List inputs = Arrays.asList( - (Writable) new NDArrayWritable(arr1), new NDArrayWritable(arr2) ); @@ -206,7 +204,6 @@ public class TestPythonTransformProcess { ).build(); List inputs = Arrays.asList( - (Writable) new NDArrayWritable(arr1), new NDArrayWritable(arr2) ); @@ -271,7 +268,6 @@ public class TestPythonTransformProcess { List> inputs = new ArrayList<>(); inputs.add( Arrays.asList( - (Writable) new IntWritable(5), new FloatWritable(3.0f), new Text("abcd"), @@ -279,7 +275,6 @@ public class TestPythonTransformProcess { ); inputs.add( Arrays.asList( - (Writable) new IntWritable(-3), new FloatWritable(3.0f), new Text("abcd"), @@ -287,7 +282,6 @@ public class TestPythonTransformProcess { ); inputs.add( Arrays.asList( - (Writable) new IntWritable(5), new FloatWritable(11.2f), new Text("abcd"), @@ -305,7 +299,7 @@ public class TestPythonTransformProcess { 
.returnAllInputs(true) .build(); List> inputs = new ArrayList<>(); - inputs.add(Arrays.asList((Writable)new IntWritable(1))); + inputs.add(Collections.singletonList(new IntWritable(1))); Schema inputSchema = new Builder() .addColumnInteger("a") .build(); @@ -327,7 +321,7 @@ public class TestPythonTransformProcess { .build(); List> inputs = new ArrayList<>(); - inputs.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.scalar(1).reshape(1,1)))); + inputs.add(Collections.singletonList(new NDArrayWritable(Nd4j.scalar(1).reshape(1, 1)))); Schema inputSchema = new Builder() .addColumnNDArray("a",new long[]{1,1}) .build(); @@ -360,7 +354,7 @@ public class TestPythonTransformProcess { .build(); List> inputs = new ArrayList<>(); - inputs.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.scalar(1).reshape(1,1)), + inputs.add(Arrays.asList(new NDArrayWritable(Nd4j.scalar(1).reshape(1,1)), new NDArrayWritable(Nd4j.scalar(2).reshape(1,1)))); Schema inputSchema = new Builder() .addColumnNDArray("a",new long[]{1,1}) diff --git a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/join/TestJoin.java b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/join/TestJoin.java index adb511603..b7fc564c7 100644 --- a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/join/TestJoin.java +++ b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/join/TestJoin.java @@ -46,27 +46,27 @@ public class TestJoin { .addColumnDouble("amount").build(); List> infoList = new ArrayList<>(); - infoList.add(Arrays.asList(new LongWritable(12345), new Text("Customer12345"))); - infoList.add(Arrays.asList(new LongWritable(98765), new Text("Customer98765"))); - infoList.add(Arrays.asList(new LongWritable(50000), new Text("Customer50000"))); + infoList.add(Arrays.asList(new LongWritable(12345), new Text("Customer12345"))); + infoList.add(Arrays.asList(new 
LongWritable(98765), new Text("Customer98765"))); + infoList.add(Arrays.asList(new LongWritable(50000), new Text("Customer50000"))); List> purchaseList = new ArrayList<>(); - purchaseList.add(Arrays.asList(new LongWritable(1000000), new LongWritable(12345), + purchaseList.add(Arrays.asList(new LongWritable(1000000), new LongWritable(12345), new DoubleWritable(10.00))); - purchaseList.add(Arrays.asList(new LongWritable(1000001), new LongWritable(12345), + purchaseList.add(Arrays.asList(new LongWritable(1000001), new LongWritable(12345), new DoubleWritable(20.00))); - purchaseList.add(Arrays.asList(new LongWritable(1000002), new LongWritable(98765), + purchaseList.add(Arrays.asList(new LongWritable(1000002), new LongWritable(98765), new DoubleWritable(30.00))); Join join = new Join.Builder(Join.JoinType.RightOuter).setJoinColumns("customerID") .setSchemas(customerInfoSchema, purchasesSchema).build(); List> expected = new ArrayList<>(); - expected.add(Arrays.asList(new LongWritable(12345), new Text("Customer12345"), + expected.add(Arrays.asList(new LongWritable(12345), new Text("Customer12345"), new LongWritable(1000000), new DoubleWritable(10.00))); - expected.add(Arrays.asList(new LongWritable(12345), new Text("Customer12345"), + expected.add(Arrays.asList(new LongWritable(12345), new Text("Customer12345"), new LongWritable(1000001), new DoubleWritable(20.00))); - expected.add(Arrays.asList(new LongWritable(98765), new Text("Customer98765"), + expected.add(Arrays.asList(new LongWritable(98765), new Text("Customer98765"), new LongWritable(1000002), new DoubleWritable(30.00))); @@ -100,11 +100,11 @@ public class TestJoin { .setSchemas(purchasesSchema, customerInfoSchema).build(); List> expectedManyToOne = new ArrayList<>(); - expectedManyToOne.add(Arrays.asList(new LongWritable(1000000), new LongWritable(12345), + expectedManyToOne.add(Arrays.asList(new LongWritable(1000000), new LongWritable(12345), new DoubleWritable(10.00), new Text("Customer12345"))); - 
expectedManyToOne.add(Arrays.asList(new LongWritable(1000001), new LongWritable(12345), + expectedManyToOne.add(Arrays.asList(new LongWritable(1000001), new LongWritable(12345), new DoubleWritable(20.00), new Text("Customer12345"))); - expectedManyToOne.add(Arrays.asList(new LongWritable(1000002), new LongWritable(98765), + expectedManyToOne.add(Arrays.asList(new LongWritable(1000002), new LongWritable(98765), new DoubleWritable(30.00), new Text("Customer98765"))); List> joined2 = LocalTransformExecutor.executeJoin(join2, purchases, info); @@ -138,45 +138,45 @@ public class TestJoin { .addColumnCategorical("otherCategory", Arrays.asList("cat0", "cat1", "cat2")).build(); List> first = new ArrayList<>(); - first.add(Arrays.asList(new LongWritable(0), new Text("cat0"))); - first.add(Arrays.asList(new LongWritable(1), new Text("cat0"))); - first.add(Arrays.asList(new LongWritable(2), new Text("cat1"))); + first.add(Arrays.asList(new LongWritable(0), new Text("cat0"))); + first.add(Arrays.asList(new LongWritable(1), new Text("cat0"))); + first.add(Arrays.asList(new LongWritable(2), new Text("cat1"))); List> second = new ArrayList<>(); - second.add(Arrays.asList(new LongWritable(100), new Text("cat0"))); - second.add(Arrays.asList(new LongWritable(101), new Text("cat0"))); - second.add(Arrays.asList(new LongWritable(102), new Text("cat2"))); + second.add(Arrays.asList(new LongWritable(100), new Text("cat0"))); + second.add(Arrays.asList(new LongWritable(101), new Text("cat0"))); + second.add(Arrays.asList(new LongWritable(102), new Text("cat2"))); List> expOuterJoin = new ArrayList<>(); - expOuterJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(100))); - expOuterJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(101))); - expOuterJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(100))); - expOuterJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(101))); - 
expOuterJoin.add(Arrays.asList(new LongWritable(2), new Text("cat1"), new NullWritable())); - expOuterJoin.add(Arrays.asList(new NullWritable(), new Text("cat2"), new LongWritable(102))); + expOuterJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(100))); + expOuterJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(101))); + expOuterJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(100))); + expOuterJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(101))); + expOuterJoin.add(Arrays.asList(new LongWritable(2), new Text("cat1"), new NullWritable())); + expOuterJoin.add(Arrays.asList(new NullWritable(), new Text("cat2"), new LongWritable(102))); List> expLeftJoin = new ArrayList<>(); - expLeftJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(100))); - expLeftJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(101))); - expLeftJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(100))); - expLeftJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(101))); - expLeftJoin.add(Arrays.asList(new LongWritable(2), new Text("cat1"), new NullWritable())); + expLeftJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(100))); + expLeftJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(101))); + expLeftJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(100))); + expLeftJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(101))); + expLeftJoin.add(Arrays.asList(new LongWritable(2), new Text("cat1"), new NullWritable())); List> expRightJoin = new ArrayList<>(); - expRightJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(100))); - expRightJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(101))); - 
expRightJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(100))); - expRightJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(101))); - expRightJoin.add(Arrays.asList(new NullWritable(), new Text("cat2"), new LongWritable(102))); + expRightJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(100))); + expRightJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(101))); + expRightJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(100))); + expRightJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(101))); + expRightJoin.add(Arrays.asList(new NullWritable(), new Text("cat2"), new LongWritable(102))); List> expInnerJoin = new ArrayList<>(); - expInnerJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(100))); - expInnerJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(101))); - expInnerJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(100))); - expInnerJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(101))); + expInnerJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(100))); + expInnerJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(101))); + expInnerJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(100))); + expInnerJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(101))); List> firstRDD = (first); List> secondRDD = (second); diff --git a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/rank/TestCalculateSortedRank.java b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/rank/TestCalculateSortedRank.java index 39f3405a9..a14c5f468 100644 --- 
a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/rank/TestCalculateSortedRank.java +++ b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/rank/TestCalculateSortedRank.java @@ -45,10 +45,10 @@ public class TestCalculateSortedRank { public void testCalculateSortedRank() { List> data = new ArrayList<>(); - data.add(Arrays.asList((Writable) new Text("0"), new DoubleWritable(0.0))); - data.add(Arrays.asList((Writable) new Text("3"), new DoubleWritable(0.3))); - data.add(Arrays.asList((Writable) new Text("2"), new DoubleWritable(0.2))); - data.add(Arrays.asList((Writable) new Text("1"), new DoubleWritable(0.1))); + data.add(Arrays.asList(new Text("0"), new DoubleWritable(0.0))); + data.add(Arrays.asList(new Text("3"), new DoubleWritable(0.3))); + data.add(Arrays.asList(new Text("2"), new DoubleWritable(0.2))); + data.add(Arrays.asList(new Text("1"), new DoubleWritable(0.1))); List> rdd = (data); diff --git a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/sequence/TestConvertToSequence.java b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/sequence/TestConvertToSequence.java index 04a4a5c47..bd3ace8c8 100644 --- a/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/sequence/TestConvertToSequence.java +++ b/cavis-datavec/cavis-datavec-local/src/test/java/org/datavec/local/transforms/transform/sequence/TestConvertToSequence.java @@ -48,12 +48,12 @@ public class TestConvertToSequence { Schema s = new Schema.Builder().addColumnsString("key1", "key2").addColumnLong("time").build(); List> allExamples = - Arrays.asList(Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(10)), - Arrays.asList(new Text("k1b"), new Text("k2b"), new LongWritable(10)), - Arrays.asList(new Text("k1a"), new Text("k2a"), + Arrays.asList(Arrays.asList(new Text("k1a"), new Text("k2a"), new 
LongWritable(10)), + Arrays.asList(new Text("k1b"), new Text("k2b"), new LongWritable(10)), + Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(-10)), - Arrays.asList(new Text("k1b"), new Text("k2b"), new LongWritable(5)), - Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(0))); + Arrays.asList(new Text("k1b"), new Text("k2b"), new LongWritable(5)), + Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(0))); TransformProcess tp = new TransformProcess.Builder(s) .convertToSequence(Arrays.asList("key1", "key2"), new NumericalColumnComparator("time")) @@ -75,13 +75,13 @@ public class TestConvertToSequence { } List> expSeq0 = Arrays.asList( - Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(-10)), - Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(0)), - Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(10))); + Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(-10)), + Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(0)), + Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(10))); List> expSeq1 = Arrays.asList( - Arrays.asList(new Text("k1b"), new Text("k2b"), new LongWritable(5)), - Arrays.asList(new Text("k1b"), new Text("k2b"), new LongWritable(10))); + Arrays.asList(new Text("k1b"), new Text("k2b"), new LongWritable(5)), + Arrays.asList(new Text("k1b"), new Text("k2b"), new LongWritable(10))); assertEquals(expSeq0, seq0); assertEquals(expSeq1, seq1); @@ -96,9 +96,9 @@ public class TestConvertToSequence { .build(); List> allExamples = Arrays.asList( - Arrays.asList(new Text("a"), new LongWritable(0)), - Arrays.asList(new Text("b"), new LongWritable(1)), - Arrays.asList(new Text("c"), new LongWritable(2))); + Arrays.asList(new Text("a"), new LongWritable(0)), + Arrays.asList(new Text("b"), new LongWritable(1)), + Arrays.asList(new Text("c"), new LongWritable(2))); TransformProcess tp = new TransformProcess.Builder(s) 
.convertToSequence() diff --git a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/NumpyArray.java b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/NumpyArray.java index 708184de7..ca597c60b 100644 --- a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/NumpyArray.java +++ b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/NumpyArray.java @@ -46,8 +46,8 @@ import static org.nd4j.linalg.api.buffer.DataType.FLOAT; @NoArgsConstructor public class NumpyArray { - private static NativeOps nativeOps; - private static Map arrayCache; // Avoids re-allocation of device buffer + private static final NativeOps nativeOps; + private static final Map arrayCache; // Avoids re-allocation of device buffer private long address; private long[] shape; private long[] strides; @@ -62,7 +62,7 @@ public class NumpyArray { } @Builder - public NumpyArray(long address, long[] shape, long strides[], DataType dtype, boolean copy) { + public NumpyArray(long address, long[] shape, long[] strides, DataType dtype, boolean copy) { this.address = address; this.shape = shape; this.strides = strides; @@ -81,11 +81,11 @@ public class NumpyArray { return new NumpyArray(nd4jArray.dup()); } - public NumpyArray(long address, long[] shape, long strides[]) { + public NumpyArray(long address, long[] shape, long[] strides) { this(address, shape, strides, FLOAT, false); } - public NumpyArray(long address, long[] shape, long strides[], DataType dtype) { + public NumpyArray(long address, long[] shape, long[] strides, DataType dtype) { this(address, shape, strides, dtype, false); } diff --git a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonCondition.java b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonCondition.java index e94e5a171..62370246f 100644 --- a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonCondition.java +++ 
b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonCondition.java @@ -37,7 +37,7 @@ public class PythonCondition implements Condition { private Schema inputSchema; private PythonVariables pyInputs; private PythonTransform pythonTransform; - private String code; + private final String code; public PythonCondition(String pythonCode) { diff --git a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonContextManager.java b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonContextManager.java index c3563bfc2..b46610918 100644 --- a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonContextManager.java +++ b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonContextManager.java @@ -36,8 +36,8 @@ import java.util.concurrent.atomic.AtomicBoolean; public class PythonContextManager { - private static Set contexts = new HashSet<>(); - private static AtomicBoolean init = new AtomicBoolean(false); + private static final Set contexts = new HashSet<>(); + private static final AtomicBoolean init = new AtomicBoolean(false); private static String currentContext; private static final String MAIN_CONTEXT = "main"; static { diff --git a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonExecutioner.java b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonExecutioner.java index dd48cb104..f0e6f4eed 100644 --- a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonExecutioner.java +++ b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonExecutioner.java @@ -89,7 +89,7 @@ import static org.datavec.python.Python.*; public class PythonExecutioner { - private static AtomicBoolean init = new AtomicBoolean(false); + private static final AtomicBoolean init = new AtomicBoolean(false); public final static String DEFAULT_PYTHON_PATH_PROPERTY = "org.datavec.python.path"; public final static 
String JAVACPP_PYTHON_APPEND_TYPE = "org.datavec.python.javacpp.path.append"; public final static String DEFAULT_APPEND_TYPE = "before"; @@ -139,7 +139,7 @@ public class PythonExecutioner { * and b is the variable value. * @param varName Name of the python variable being set. Should be a valid python identifier string * @param pythonObject Value for the python variable - * @throws Exception + * @throws PythonException */ public static void setVariable(String varName, PythonObject pythonObject) throws PythonException{ if (!validateVariableName(varName)){ @@ -345,10 +345,8 @@ public class PythonExecutioner { //// TODO: fix in javacpp File sitePackagesWindows = new File(python.cachePackage(), "site-packages"); File[] packages2 = new File[packages.length + 1]; - for (int i = 0;i < packages.length; i++){ - //System.out.println(packages[i].getAbsolutePath()); - packages2[i] = packages[i]; - } + //System.out.println(packages[i].getAbsolutePath()); + System.arraycopy(packages, 0, packages2, 0, packages.length); packages2[packages.length] = sitePackagesWindows; //System.out.println(sitePackagesWindows.getAbsolutePath()); packages = packages2; @@ -369,7 +367,7 @@ public class PythonExecutioner { sb.append(path); - log.info("Prepending javacpp python path: {}", sb.toString()); + log.info("Prepending javacpp python path: {}", sb); break; case AFTER: sb.append(path); @@ -379,7 +377,7 @@ public class PythonExecutioner { sb.append(java.io.File.pathSeparator); } - log.info("Appending javacpp python path " + sb.toString()); + log.info("Appending javacpp python path " + sb); break; case NONE: log.info("Not appending javacpp path"); @@ -388,7 +386,7 @@ public class PythonExecutioner { } //prepend the javacpp packages - log.info("Final python path: {}", sb.toString()); + log.info("Final python path: {}", sb); Py_SetPath(sb.toString()); } diff --git a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonJob.java 
b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonJob.java index c50c9bb9e..81894d101 100644 --- a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonJob.java +++ b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonJob.java @@ -121,7 +121,7 @@ public class PythonJob { PythonObject arg = argsList.get(i); PythonObject val = Python.globals().get(arg); if (val.isNone()) { - throw new PythonException("Input value not received for run() argument: " + arg.toString()); + throw new PythonException("Input value not received for run() argument: " + arg); } runargs.set(arg, val); } @@ -153,7 +153,7 @@ public class PythonJob { PythonObject arg = argsList.get(i); PythonObject val = Python.globals().get(arg); if (val.isNone()) { - throw new PythonException("Input value not received for run() argument: " + arg.toString()); + throw new PythonException("Input value not received for run() argument: " + arg); } runargs.set(arg, val); } diff --git a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonObject.java b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonObject.java index 4a6a617d5..b9d809aab 100644 --- a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonObject.java +++ b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonObject.java @@ -158,7 +158,7 @@ public class PythonObject { } public PythonObject(int data) { - nativePythonObject = PyLong_FromLong((long) data); + nativePythonObject = PyLong_FromLong(data); } public PythonObject(long data) { @@ -208,7 +208,7 @@ public class PythonObject { } public PythonObject(Object[] data) { - PyObject pyList = PyList_New((long) data.length); + PyObject pyList = PyList_New(data.length); for (int i = 0; i < data.length; i++) { PyList_SetItem(pyList, i, j2pyObject(data[i]).nativePythonObject); } @@ -216,7 +216,7 @@ public class PythonObject { } public PythonObject(List data) { 
- PyObject pyList = PyList_New((long) data.size()); + PyObject pyList = PyList_New(data.size()); for (int i = 0; i < data.size(); i++) { PyList_SetItem(pyList, i, j2pyObject(data.get(i)).nativePythonObject); } @@ -384,9 +384,7 @@ public class PythonObject { public PythonObject call(Object... args) { if (args.length > 0 && args[args.length - 1] instanceof Map) { List args2 = new ArrayList<>(); - for (int i = 0; i < args.length - 1; i++) { - args2.add(args[i]); - } + args2.addAll(Arrays.asList(args).subList(0, args.length - 1)); return call(args2, (Map) args[args.length - 1]); } if (args.length == 0) { @@ -444,7 +442,7 @@ public class PythonObject { } public PythonObject get(int key) { - return get(PyLong_FromLong((long) key)); + return get(PyLong_FromLong(key)); } public PythonObject get(long key) { diff --git a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonProcess.java b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonProcess.java index a8ee56510..8c86cc69a 100644 --- a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonProcess.java +++ b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonProcess.java @@ -27,12 +27,10 @@ import java.util.Arrays; @Slf4j public class PythonProcess { - private static String pythonExecutable = Loader.load(org.bytedeco.cpython.python.class); + private static final String pythonExecutable = Loader.load(org.bytedeco.cpython.python.class); public static String runAndReturn(String... 
arguments)throws IOException, InterruptedException{ String[] allArgs = new String[arguments.length + 1]; - for (int i = 0; i < arguments.length; i++){ - allArgs[i + 1] = arguments[i]; - } + System.arraycopy(arguments, 0, allArgs, 1, arguments.length); allArgs[0] = pythonExecutable; log.info("Executing command: " + Arrays.toString(allArgs)); ProcessBuilder pb = new ProcessBuilder(allArgs); @@ -45,9 +43,7 @@ public class PythonProcess { public static void run(String... arguments)throws IOException, InterruptedException{ String[] allArgs = new String[arguments.length + 1]; - for (int i = 0; i < arguments.length; i++){ - allArgs[i + 1] = arguments[i]; - } + System.arraycopy(arguments, 0, allArgs, 1, arguments.length); allArgs[0] = pythonExecutable; log.info("Executing command: " + Arrays.toString(allArgs)); ProcessBuilder pb = new ProcessBuilder(allArgs); diff --git a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonType.java b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonType.java index d0a3f488f..4ac4bce2a 100644 --- a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonType.java +++ b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonType.java @@ -55,7 +55,7 @@ public abstract class PythonType { } public static PythonType valueOf(String typeName) throws PythonException{ try{ - typeName.valueOf(typeName); + String.valueOf(typeName); } catch (IllegalArgumentException iae){ throw new PythonException("Invalid python type: " + typeName, iae); } diff --git a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonUtils.java b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonUtils.java index d3e991b35..7d0ea15cd 100644 --- a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonUtils.java +++ b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/PythonUtils.java @@ -132,7 +132,7 @@ public class 
PythonUtils { pyVars.addBool(colName); break; default: - throw new Exception("Unsupported python input type: " + colType.toString()); + throw new Exception("Unsupported python input type: " + colType); } } @@ -220,7 +220,7 @@ public class PythonUtils { public static Map toMap(JSONObject jsonobj) { Map map = new HashMap<>(); - String[] keys = (String[]) jsonobj.keySet().toArray(new String[jsonobj.keySet().size()]); + String[] keys = jsonobj.keySet().toArray(new String[jsonobj.keySet().size()]); for (String key : keys) { Object value = jsonobj.get(key); if (value instanceof JSONArray) { diff --git a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/keras/Model.java b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/keras/Model.java index d8a9b0651..04d2cafd6 100644 --- a/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/keras/Model.java +++ b/cavis-datavec/cavis-datavec-python/src/main/java/org/datavec/python/keras/Model.java @@ -8,7 +8,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; public class Model { - private PythonObject pyModel; + private final PythonObject pyModel; private static PythonObject installAndImportTF() throws PythonException{ diff --git a/cavis-datavec/cavis-datavec-python/src/test/java/org/datavec/python/PythonNumpyTest.java b/cavis-datavec/cavis-datavec-python/src/test/java/org/datavec/python/PythonNumpyTest.java index 1c0721c32..83a8636e6 100644 --- a/cavis-datavec/cavis-datavec-python/src/test/java/org/datavec/python/PythonNumpyTest.java +++ b/cavis-datavec/cavis-datavec-python/src/test/java/org/datavec/python/PythonNumpyTest.java @@ -45,7 +45,7 @@ public class PythonNumpyTest { }; } - private DataType dataType; + private final DataType dataType; public PythonNumpyTest(DataType dataType) { this.dataType = dataType; diff --git a/cavis-datavec/cavis-datavec-python/src/test/java/org/datavec/python/ScalarAndArrayTest.java 
b/cavis-datavec/cavis-datavec-python/src/test/java/org/datavec/python/ScalarAndArrayTest.java index f6f39d68c..6d536655b 100644 --- a/cavis-datavec/cavis-datavec-python/src/test/java/org/datavec/python/ScalarAndArrayTest.java +++ b/cavis-datavec/cavis-datavec-python/src/test/java/org/datavec/python/ScalarAndArrayTest.java @@ -35,7 +35,7 @@ public class ScalarAndArrayTest { }; } - private INDArray indArray; + private final INDArray indArray; public ScalarAndArrayTest(INDArray indArray) { this.indArray = indArray; diff --git a/cavis-datavec/cavis-datavec-python/src/test/java/org/datavec/python/TestPythonList.java b/cavis-datavec/cavis-datavec-python/src/test/java/org/datavec/python/TestPythonList.java index 259431cba..a52c58fd9 100644 --- a/cavis-datavec/cavis-datavec-python/src/test/java/org/datavec/python/TestPythonList.java +++ b/cavis-datavec/cavis-datavec-python/src/test/java/org/datavec/python/TestPythonList.java @@ -92,7 +92,7 @@ public class TestPythonList { ), map }; PythonObject pyList = new PythonObject(objs); - System.out.println(pyList.toString()); + System.out.println(pyList); String expectedStr = "[1, 2, 'a', 3.0, 4, 5.0, [10" + ", 20, 'b', 30.0, 40, 50.0, {'arr': array([1.," + " 2., 3., 4.], dtype=float32), 1: 'a', 'a': [" + diff --git a/cavis-datavec/cavis-datavec-python/src/test/java/org/datavec/python/TestPythonVariables.java b/cavis-datavec/cavis-datavec-python/src/test/java/org/datavec/python/TestPythonVariables.java index b709e1608..668f820a9 100644 --- a/cavis-datavec/cavis-datavec-python/src/test/java/org/datavec/python/TestPythonVariables.java +++ b/cavis-datavec/cavis-datavec-python/src/test/java/org/datavec/python/TestPythonVariables.java @@ -60,12 +60,12 @@ public class TestPythonVariables { BytePointer bp = new BytePointer(arr.data().pointer()); Object[] values = { 1L,1.0,"1",true, Collections.singletonMap("1",1), - new Object[]{1}, Arrays.asList(1), arr, bp + new Object[]{1}, Collections.singletonList(1), arr, bp }; Object[] 
expectedValues = { 1L,1.0,"1",true, Collections.singletonMap("1",1), - Arrays.asList(1), Arrays.asList(1), arr, bp + Collections.singletonList(1), Collections.singletonList(1), arr, bp }; for(int i = 0; i < types.length; i++) { diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/functions/pairdata/PathToKeyFunction.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/functions/pairdata/PathToKeyFunction.java index 5c43c969f..39187a785 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/functions/pairdata/PathToKeyFunction.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/functions/pairdata/PathToKeyFunction.java @@ -28,8 +28,8 @@ import scala.Tuple3; public class PathToKeyFunction implements PairFunction, String, Tuple3> { - private PathToKeyConverter converter; - private int index; + private final PathToKeyConverter converter; + private final int index; public PathToKeyFunction(int index, PathToKeyConverter converter) { this.index = index; diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/DataFrames.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/DataFrames.java index 1e41cc3c1..2a9fdb9fb 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/DataFrames.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/DataFrames.java @@ -43,6 +43,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import static org.apache.spark.sql.functions.avg; @@ -380,8 +381,7 @@ public class DataFrames { */ public static List 
toList(String[] input) { List ret = new ArrayList<>(); - for (int i = 0; i < input.length; i++) - ret.add(input[i]); + Collections.addAll(ret, input); return ret; } diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/Normalization.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/Normalization.java index f4d513017..de5511017 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/Normalization.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/Normalization.java @@ -42,7 +42,7 @@ public class Normalization { * rdd */ public static Dataset zeromeanUnitVariance(Dataset frame) { - return zeromeanUnitVariance(frame, Collections.emptyList()); + return zeromeanUnitVariance(frame, Collections.emptyList()); } /** @@ -55,7 +55,7 @@ public class Normalization { * rdd */ public static JavaRDD> zeromeanUnitVariance(Schema schema, JavaRDD> data) { - return zeromeanUnitVariance(schema, data, Collections.emptyList()); + return zeromeanUnitVariance(schema, data, Collections.emptyList()); } /** @@ -67,7 +67,7 @@ public class Normalization { * @return the normalized dataframe per column */ public static Dataset normalize(Dataset dataFrame, double min, double max) { - return normalize(dataFrame, min, max, Collections.emptyList()); + return normalize(dataFrame, min, max, Collections.emptyList()); } /** @@ -82,7 +82,7 @@ public class Normalization { public static JavaRDD> normalize(Schema schema, JavaRDD> data, double min, double max) { Dataset frame = DataFrames.toDataFrame(schema, data); - return DataFrames.toRecords(normalize(frame, min, max, Collections.emptyList())).getSecond(); + return DataFrames.toRecords(normalize(frame, min, max, Collections.emptyList())).getSecond(); } @@ -93,7 +93,7 @@ public class Normalization { * @return the normalized 
dataframe per column */ public static Dataset normalize(Dataset dataFrame) { - return normalize(dataFrame, 0, 1, Collections.emptyList()); + return normalize(dataFrame, 0, 1, Collections.emptyList()); } /** @@ -104,7 +104,7 @@ public class Normalization { * @return the normalized ata */ public static JavaRDD> normalize(Schema schema, JavaRDD> data) { - return normalize(schema, data, 0, 1, Collections.emptyList()); + return normalize(schema, data, 0, 1, Collections.emptyList()); } diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/misc/SequenceMergeFunction.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/misc/SequenceMergeFunction.java index 03f3efebd..933fb6d4a 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/misc/SequenceMergeFunction.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/misc/SequenceMergeFunction.java @@ -31,7 +31,7 @@ import java.util.List; public class SequenceMergeFunction implements Function>>>, List>> { - private SequenceMerge sequenceMerge; + private final SequenceMerge sequenceMerge; public SequenceMergeFunction(SequenceMerge sequenceMerge) { this.sequenceMerge = sequenceMerge; diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/sparkfunction/SequenceToRows.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/sparkfunction/SequenceToRows.java index 7868cb3c7..faac3ac13 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/sparkfunction/SequenceToRows.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/sparkfunction/SequenceToRows.java @@ -32,8 +32,8 @@ import 
java.util.*; public class SequenceToRows implements FlatMapFunction>, Row> { - private Schema schema; - private StructType structType; + private final Schema schema; + private final StructType structType; public SequenceToRows(Schema schema) { this.schema = schema; diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/sparkfunction/ToRow.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/sparkfunction/ToRow.java index bac0740d8..6128901ee 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/sparkfunction/ToRow.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/sparkfunction/ToRow.java @@ -34,8 +34,8 @@ import java.util.List; public class ToRow implements Function, Row> { - private Schema schema; - private StructType structType; + private final Schema schema; + private final StructType structType; public ToRow(Schema schema) { this.schema = schema; diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/utils/SparkExport.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/utils/SparkExport.java index a9ed6f13a..549ff7e13 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/utils/SparkExport.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/utils/SparkExport.java @@ -165,7 +165,7 @@ public class SparkExport { throws Exception { baseDir.mkdirs(); if (!baseDir.isDirectory()) - throw new IllegalArgumentException("File is not a directory: " + baseDir.toString()); + throw new IllegalArgumentException("File is not a directory: " + baseDir); String baseDirStr = baseDir.toString(); List fileContents = 
sequences.map(new SequenceToStringFunction(",")).collect(); @@ -192,7 +192,7 @@ public class SparkExport { String delimiter, String filePrefix, String fileExtension) throws Exception { baseDir.mkdirs(); if (!baseDir.isDirectory()) - throw new IllegalArgumentException("File is not a directory: " + baseDir.toString()); + throw new IllegalArgumentException("File is not a directory: " + baseDir); String baseDirStr = baseDir.toString(); List fileContents = sequences.map(new SequenceToStringFunction(delimiter)).collect(); diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/utils/SparkUtils.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/utils/SparkUtils.java index 73ff3617a..e1d038495 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/utils/SparkUtils.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/transform/utils/SparkUtils.java @@ -274,7 +274,7 @@ public class SparkUtils { * Register the DataVec writable classes for Kryo */ public static void registerKryoClasses(SparkConf conf) { - List> classes = Arrays.>asList(BooleanWritable.class, ByteWritable.class, + List> classes = Arrays.asList(BooleanWritable.class, ByteWritable.class, DoubleWritable.class, FloatWritable.class, IntWritable.class, LongWritable.class, NullWritable.class, Text.class); diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/util/SerializableHadoopConfig.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/util/SerializableHadoopConfig.java index 237e62b5b..280339237 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/util/SerializableHadoopConfig.java +++ 
b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/main/java/org/datavec/spark/util/SerializableHadoopConfig.java @@ -30,7 +30,7 @@ import java.util.Map; public class SerializableHadoopConfig implements Serializable { - private Map content; + private final Map content; private transient Configuration configuration; public SerializableHadoopConfig(@NonNull Configuration configuration){ diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/TestKryoSerialization.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/TestKryoSerialization.java index a684fb61d..781de0e5d 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/TestKryoSerialization.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/TestKryoSerialization.java @@ -63,6 +63,6 @@ public class TestKryoSerialization extends BaseSparkTest { private T serDe(T in, SerializerInstance si){ ByteBuffer bb = si.serialize(in, null); - return (T)si.deserialize(bb, null); + return si.deserialize(bb, null); } } diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestNDArrayToWritablesFunction.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestNDArrayToWritablesFunction.java index 4990cfe03..5143b01eb 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestNDArrayToWritablesFunction.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestNDArrayToWritablesFunction.java @@ -30,6 +30,7 @@ import org.nd4j.linalg.factory.Nd4j; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import static 
org.junit.jupiter.api.Assertions.assertEquals; @@ -49,7 +50,7 @@ public class TestNDArrayToWritablesFunction { @Test public void testNDArrayToWritablesArray() throws Exception { INDArray arr = Nd4j.arange(5); - List expected = Arrays.asList((Writable) new NDArrayWritable(arr)); + List expected = Collections.singletonList(new NDArrayWritable(arr)); List actual = new NDArrayToWritablesFunction(true).call(arr); assertEquals(expected, actual); } diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestPairSequenceRecordReaderBytesFunction.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestPairSequenceRecordReaderBytesFunction.java index b96041e3f..0e4df00cc 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestPairSequenceRecordReaderBytesFunction.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestPairSequenceRecordReaderBytesFunction.java @@ -78,7 +78,7 @@ public class TestPairSequenceRecordReaderBytesFunction extends BaseSparkTest { Path p = Files.createTempDirectory("dl4j_rrbytesPairOut"); p.toFile().deleteOnExit(); - String outPath = p.toString() + "/out"; + String outPath = p + "/out"; new File(outPath).deleteOnExit(); toWrite.saveAsNewAPIHadoopFile(outPath, Text.class, BytesPairWritable.class, SequenceFileOutputFormat.class); diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestRecordReaderBytesFunction.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestRecordReaderBytesFunction.java index aa6e5f76d..9afc645ad 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestRecordReaderBytesFunction.java +++ 
b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestRecordReaderBytesFunction.java @@ -77,7 +77,7 @@ public class TestRecordReaderBytesFunction extends BaseSparkTest { //Write the sequence file: Path p = Files.createTempDirectory("dl4j_rrbytesTest"); p.toFile().deleteOnExit(); - String outPath = p.toString() + "/out"; + String outPath = p + "/out"; filesAsBytes.saveAsNewAPIHadoopFile(outPath, Text.class, BytesWritable.class, SequenceFileOutputFormat.class); //Load data from sequence file, parse via RecordReader: diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestSequenceRecordReaderBytesFunction.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestSequenceRecordReaderBytesFunction.java index 2f9bc4410..212b9bb64 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestSequenceRecordReaderBytesFunction.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestSequenceRecordReaderBytesFunction.java @@ -73,7 +73,7 @@ public class TestSequenceRecordReaderBytesFunction extends BaseSparkTest { //Write the sequence file: Path p = Files.createTempDirectory("dl4j_rrbytesTest"); p.toFile().deleteOnExit(); - String outPath = p.toString() + "/out"; + String outPath = p + "/out"; filesAsBytes.saveAsNewAPIHadoopFile(outPath, Text.class, BytesWritable.class, SequenceFileOutputFormat.class); //Load data from sequence file, parse via SequenceRecordReader: diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestWritablesToStringFunctions.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestWritablesToStringFunctions.java index 070bda4ed..19847cec0 100644 --- 
a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestWritablesToStringFunctions.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/functions/TestWritablesToStringFunctions.java @@ -77,7 +77,7 @@ public class TestWritablesToStringFunctions extends BaseSparkTest { @Test public void testWritablesToString() throws Exception { - List l = Arrays.asList(new DoubleWritable(1.5), new Text("someValue")); + List l = Arrays.asList(new DoubleWritable(1.5), new Text("someValue")); String expected = l.get(0).toString() + "," + l.get(1).toString(); assertEquals(expected, new WritablesToStringFunction(",").call(l)); @@ -86,8 +86,8 @@ public class TestWritablesToStringFunctions extends BaseSparkTest { @Test public void testSequenceWritablesToString() throws Exception { - List> l = Arrays.asList(Arrays.asList(new DoubleWritable(1.5), new Text("someValue")), - Arrays.asList(new DoubleWritable(2.5), new Text("otherValue"))); + List> l = Arrays.asList(Arrays.asList(new DoubleWritable(1.5), new Text("someValue")), + Arrays.asList(new DoubleWritable(2.5), new Text("otherValue"))); String expected = l.get(0).get(0).toString() + "," + l.get(0).get(1).toString() + "\n" + l.get(1).get(0).toString() + "," + l.get(1).get(1).toString(); diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/storage/TestSparkStorageUtils.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/storage/TestSparkStorageUtils.java index a0a10e876..eaafa1d14 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/storage/TestSparkStorageUtils.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/storage/TestSparkStorageUtils.java @@ -46,11 +46,11 @@ public class TestSparkStorageUtils extends BaseSparkTest { return; } List> l = new 
ArrayList<>(); - l.add(Arrays.asList(new Text("zero"), new IntWritable(0), + l.add(Arrays.asList(new Text("zero"), new IntWritable(0), new DoubleWritable(0), new NDArrayWritable(Nd4j.valueArrayOf(10, 0.0)))); - l.add(Arrays.asList(new Text("one"), new IntWritable(11), + l.add(Arrays.asList(new Text("one"), new IntWritable(11), new DoubleWritable(11.0), new NDArrayWritable(Nd4j.valueArrayOf(10, 11.0)))); - l.add(Arrays.asList(new Text("two"), new IntWritable(22), + l.add(Arrays.asList(new Text("two"), new IntWritable(22), new DoubleWritable(22.0), new NDArrayWritable(Nd4j.valueArrayOf(10, 22.0)))); JavaRDD> rdd = sc.parallelize(l); @@ -92,27 +92,27 @@ public class TestSparkStorageUtils extends BaseSparkTest { } List>> l = new ArrayList<>(); l.add(Arrays.asList( - Arrays.asList(new Text("zero"), new IntWritable(0), + Arrays.asList(new Text("zero"), new IntWritable(0), new DoubleWritable(0), new NDArrayWritable(Nd4j.valueArrayOf(10, 0.0))), - Arrays.asList(new Text("one"), new IntWritable(1), + Arrays.asList(new Text("one"), new IntWritable(1), new DoubleWritable(1.0), new NDArrayWritable(Nd4j.valueArrayOf(10, 1.0))), - Arrays.asList(new Text("two"), new IntWritable(2), + Arrays.asList(new Text("two"), new IntWritable(2), new DoubleWritable(2.0), new NDArrayWritable(Nd4j.valueArrayOf(10, 2.0))))); l.add(Arrays.asList( - Arrays.asList(new Text("Bzero"), new IntWritable(10), + Arrays.asList(new Text("Bzero"), new IntWritable(10), new DoubleWritable(10), new NDArrayWritable(Nd4j.valueArrayOf(10, 10.0))), - Arrays.asList(new Text("Bone"), new IntWritable(11), + Arrays.asList(new Text("Bone"), new IntWritable(11), new DoubleWritable(11.0), new NDArrayWritable(Nd4j.valueArrayOf(10, 11.0))), - Arrays.asList(new Text("Btwo"), new IntWritable(12), + Arrays.asList(new Text("Btwo"), new IntWritable(12), new DoubleWritable(12.0), new NDArrayWritable(Nd4j.valueArrayOf(10, 12.0))))); l.add(Arrays.asList( - Arrays.asList(new Text("Czero"), new IntWritable(20), + Arrays.asList(new 
Text("Czero"), new IntWritable(20), new DoubleWritable(20), new NDArrayWritable(Nd4j.valueArrayOf(10, 20.0))), - Arrays.asList(new Text("Cone"), new IntWritable(21), + Arrays.asList(new Text("Cone"), new IntWritable(21), new DoubleWritable(21.0), new NDArrayWritable(Nd4j.valueArrayOf(10, 21.0))), - Arrays.asList(new Text("Ctwo"), new IntWritable(22), + Arrays.asList(new Text("Ctwo"), new IntWritable(22), new DoubleWritable(22.0), new NDArrayWritable(Nd4j.valueArrayOf(10, 22.0))))); JavaRDD>> rdd = sc.parallelize(l); diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/DataFramesTests.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/DataFramesTests.java index 62237f0b4..4a6bca8dc 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/DataFramesTests.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/DataFramesTests.java @@ -110,15 +110,15 @@ public class DataFramesTests extends BaseSparkTest { public void testNormalize() { List> data = new ArrayList<>(); - data.add(Arrays.asList(new DoubleWritable(1), new DoubleWritable(10))); - data.add(Arrays.asList(new DoubleWritable(2), new DoubleWritable(20))); - data.add(Arrays.asList(new DoubleWritable(3), new DoubleWritable(30))); + data.add(Arrays.asList(new DoubleWritable(1), new DoubleWritable(10))); + data.add(Arrays.asList(new DoubleWritable(2), new DoubleWritable(20))); + data.add(Arrays.asList(new DoubleWritable(3), new DoubleWritable(30))); List> expMinMax = new ArrayList<>(); - expMinMax.add(Arrays.asList(new DoubleWritable(0.0), new DoubleWritable(0.0))); - expMinMax.add(Arrays.asList(new DoubleWritable(0.5), new DoubleWritable(0.5))); - expMinMax.add(Arrays.asList(new DoubleWritable(1.0), new DoubleWritable(1.0))); + expMinMax.add(Arrays.asList(new DoubleWritable(0.0), new 
DoubleWritable(0.0))); + expMinMax.add(Arrays.asList(new DoubleWritable(0.5), new DoubleWritable(0.5))); + expMinMax.add(Arrays.asList(new DoubleWritable(1.0), new DoubleWritable(1.0))); double m1 = (1 + 2 + 3) / 3.0; double s1 = new StandardDeviation().evaluate(new double[] {1, 2, 3}); @@ -127,11 +127,11 @@ public class DataFramesTests extends BaseSparkTest { List> expStandardize = new ArrayList<>(); expStandardize.add( - Arrays.asList(new DoubleWritable((1 - m1) / s1), new DoubleWritable((10 - m2) / s2))); + Arrays.asList(new DoubleWritable((1 - m1) / s1), new DoubleWritable((10 - m2) / s2))); expStandardize.add( - Arrays.asList(new DoubleWritable((2 - m1) / s1), new DoubleWritable((20 - m2) / s2))); + Arrays.asList(new DoubleWritable((2 - m1) / s1), new DoubleWritable((20 - m2) / s2))); expStandardize.add( - Arrays.asList(new DoubleWritable((3 - m1) / s1), new DoubleWritable((30 - m2) / s2))); + Arrays.asList(new DoubleWritable((3 - m1) / s1), new DoubleWritable((30 - m2) / s2))); JavaRDD> rdd = sc.parallelize(data); @@ -178,13 +178,13 @@ public class DataFramesTests extends BaseSparkTest { List>> sequences = new ArrayList<>(); List> seq1 = new ArrayList<>(); - seq1.add(Arrays.asList(new DoubleWritable(1), new DoubleWritable(10), new DoubleWritable(100))); - seq1.add(Arrays.asList(new DoubleWritable(2), new DoubleWritable(20), new DoubleWritable(200))); - seq1.add(Arrays.asList(new DoubleWritable(3), new DoubleWritable(30), new DoubleWritable(300))); + seq1.add(Arrays.asList(new DoubleWritable(1), new DoubleWritable(10), new DoubleWritable(100))); + seq1.add(Arrays.asList(new DoubleWritable(2), new DoubleWritable(20), new DoubleWritable(200))); + seq1.add(Arrays.asList(new DoubleWritable(3), new DoubleWritable(30), new DoubleWritable(300))); List> seq2 = new ArrayList<>(); - seq2.add(Arrays.asList(new DoubleWritable(4), new DoubleWritable(40), new DoubleWritable(400))); - seq2.add(Arrays.asList(new DoubleWritable(5), new DoubleWritable(50), new 
DoubleWritable(500))); + seq2.add(Arrays.asList(new DoubleWritable(4), new DoubleWritable(40), new DoubleWritable(400))); + seq2.add(Arrays.asList(new DoubleWritable(5), new DoubleWritable(50), new DoubleWritable(500))); sequences.add(seq1); sequences.add(seq2); @@ -199,21 +199,21 @@ public class DataFramesTests extends BaseSparkTest { //Min/max normalization: List> expSeq1MinMax = new ArrayList<>(); - expSeq1MinMax.add(Arrays.asList(new DoubleWritable((1 - 1.0) / (5.0 - 1.0)), + expSeq1MinMax.add(Arrays.asList(new DoubleWritable((1 - 1.0) / (5.0 - 1.0)), new DoubleWritable((10 - 10.0) / (50.0 - 10.0)), new DoubleWritable((100 - 100.0) / (500.0 - 100.0)))); - expSeq1MinMax.add(Arrays.asList(new DoubleWritable((2 - 1.0) / (5.0 - 1.0)), + expSeq1MinMax.add(Arrays.asList(new DoubleWritable((2 - 1.0) / (5.0 - 1.0)), new DoubleWritable((20 - 10.0) / (50.0 - 10.0)), new DoubleWritable((200 - 100.0) / (500.0 - 100.0)))); - expSeq1MinMax.add(Arrays.asList(new DoubleWritable((3 - 1.0) / (5.0 - 1.0)), + expSeq1MinMax.add(Arrays.asList(new DoubleWritable((3 - 1.0) / (5.0 - 1.0)), new DoubleWritable((30 - 10.0) / (50.0 - 10.0)), new DoubleWritable((300 - 100.0) / (500.0 - 100.0)))); List> expSeq2MinMax = new ArrayList<>(); - expSeq2MinMax.add(Arrays.asList(new DoubleWritable((4 - 1.0) / (5.0 - 1.0)), + expSeq2MinMax.add(Arrays.asList(new DoubleWritable((4 - 1.0) / (5.0 - 1.0)), new DoubleWritable((40 - 10.0) / (50.0 - 10.0)), new DoubleWritable((400 - 100.0) / (500.0 - 100.0)))); - expSeq2MinMax.add(Arrays.asList(new DoubleWritable((5 - 1.0) / (5.0 - 1.0)), + expSeq2MinMax.add(Arrays.asList(new DoubleWritable((5 - 1.0) / (5.0 - 1.0)), new DoubleWritable((50 - 10.0) / (50.0 - 10.0)), new DoubleWritable((500 - 100.0) / (500.0 - 100.0)))); @@ -246,17 +246,17 @@ public class DataFramesTests extends BaseSparkTest { double s3 = new StandardDeviation().evaluate(new double[] {100, 200, 300, 400, 500}); List> expSeq1Std = new ArrayList<>(); - expSeq1Std.add(Arrays.asList(new 
DoubleWritable((1 - m1) / s1), new DoubleWritable((10 - m2) / s2), + expSeq1Std.add(Arrays.asList(new DoubleWritable((1 - m1) / s1), new DoubleWritable((10 - m2) / s2), new DoubleWritable((100 - m3) / s3))); - expSeq1Std.add(Arrays.asList(new DoubleWritable((2 - m1) / s1), new DoubleWritable((20 - m2) / s2), + expSeq1Std.add(Arrays.asList(new DoubleWritable((2 - m1) / s1), new DoubleWritable((20 - m2) / s2), new DoubleWritable((200 - m3) / s3))); - expSeq1Std.add(Arrays.asList(new DoubleWritable((3 - m1) / s1), new DoubleWritable((30 - m2) / s2), + expSeq1Std.add(Arrays.asList(new DoubleWritable((3 - m1) / s1), new DoubleWritable((30 - m2) / s2), new DoubleWritable((300 - m3) / s3))); List> expSeq2Std = new ArrayList<>(); - expSeq2Std.add(Arrays.asList(new DoubleWritable((4 - m1) / s1), new DoubleWritable((40 - m2) / s2), + expSeq2Std.add(Arrays.asList(new DoubleWritable((4 - m1) / s1), new DoubleWritable((40 - m2) / s2), new DoubleWritable((400 - m3) / s3))); - expSeq2Std.add(Arrays.asList(new DoubleWritable((5 - m1) / s1), new DoubleWritable((50 - m2) / s2), + expSeq2Std.add(Arrays.asList(new DoubleWritable((5 - m1) / s1), new DoubleWritable((50 - m2) / s2), new DoubleWritable((500 - m3) / s3))); diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/ExecutionTest.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/ExecutionTest.java index c863af460..8da6f146b 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/ExecutionTest.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/ExecutionTest.java @@ -57,9 +57,9 @@ public class ExecutionTest extends BaseSparkTest { .doubleMathOp("col2", MathOp.Add, 10.0).build(); List> inputData = new ArrayList<>(); - inputData.add(Arrays.asList(new IntWritable(0), new Text("state2"), new 
DoubleWritable(0.1))); - inputData.add(Arrays.asList(new IntWritable(1), new Text("state1"), new DoubleWritable(1.1))); - inputData.add(Arrays.asList(new IntWritable(2), new Text("state0"), new DoubleWritable(2.1))); + inputData.add(Arrays.asList(new IntWritable(0), new Text("state2"), new DoubleWritable(0.1))); + inputData.add(Arrays.asList(new IntWritable(1), new Text("state1"), new DoubleWritable(1.1))); + inputData.add(Arrays.asList(new IntWritable(2), new Text("state0"), new DoubleWritable(2.1))); JavaRDD> rdd = sc.parallelize(inputData); @@ -73,9 +73,9 @@ public class ExecutionTest extends BaseSparkTest { }); List> expected = new ArrayList<>(); - expected.add(Arrays.asList(new IntWritable(0), new IntWritable(2), new DoubleWritable(10.1))); - expected.add(Arrays.asList(new IntWritable(1), new IntWritable(1), new DoubleWritable(11.1))); - expected.add(Arrays.asList(new IntWritable(2), new IntWritable(0), new DoubleWritable(12.1))); + expected.add(Arrays.asList(new IntWritable(0), new IntWritable(2), new DoubleWritable(10.1))); + expected.add(Arrays.asList(new IntWritable(1), new IntWritable(1), new DoubleWritable(11.1))); + expected.add(Arrays.asList(new IntWritable(2), new IntWritable(0), new DoubleWritable(12.1))); assertEquals(expected, out); } @@ -91,12 +91,12 @@ public class ExecutionTest extends BaseSparkTest { List>> inputSequences = new ArrayList<>(); List> seq1 = new ArrayList<>(); - seq1.add(Arrays.asList(new IntWritable(0), new Text("state2"), new DoubleWritable(0.1))); - seq1.add(Arrays.asList(new IntWritable(1), new Text("state1"), new DoubleWritable(1.1))); - seq1.add(Arrays.asList(new IntWritable(2), new Text("state0"), new DoubleWritable(2.1))); + seq1.add(Arrays.asList(new IntWritable(0), new Text("state2"), new DoubleWritable(0.1))); + seq1.add(Arrays.asList(new IntWritable(1), new Text("state1"), new DoubleWritable(1.1))); + seq1.add(Arrays.asList(new IntWritable(2), new Text("state0"), new DoubleWritable(2.1))); List> seq2 = new 
ArrayList<>(); - seq2.add(Arrays.asList(new IntWritable(3), new Text("state0"), new DoubleWritable(3.1))); - seq2.add(Arrays.asList(new IntWritable(4), new Text("state1"), new DoubleWritable(4.1))); + seq2.add(Arrays.asList(new IntWritable(3), new Text("state0"), new DoubleWritable(3.1))); + seq2.add(Arrays.asList(new IntWritable(4), new Text("state1"), new DoubleWritable(4.1))); inputSequences.add(seq1); inputSequences.add(seq2); @@ -115,12 +115,12 @@ public class ExecutionTest extends BaseSparkTest { List>> expectedSequence = new ArrayList<>(); List> seq1e = new ArrayList<>(); - seq1e.add(Arrays.asList(new IntWritable(0), new IntWritable(2), new DoubleWritable(10.1))); - seq1e.add(Arrays.asList(new IntWritable(1), new IntWritable(1), new DoubleWritable(11.1))); - seq1e.add(Arrays.asList(new IntWritable(2), new IntWritable(0), new DoubleWritable(12.1))); + seq1e.add(Arrays.asList(new IntWritable(0), new IntWritable(2), new DoubleWritable(10.1))); + seq1e.add(Arrays.asList(new IntWritable(1), new IntWritable(1), new DoubleWritable(11.1))); + seq1e.add(Arrays.asList(new IntWritable(2), new IntWritable(0), new DoubleWritable(12.1))); List> seq2e = new ArrayList<>(); - seq2e.add(Arrays.asList(new IntWritable(3), new IntWritable(0), new DoubleWritable(13.1))); - seq2e.add(Arrays.asList(new IntWritable(4), new IntWritable(1), new DoubleWritable(14.1))); + seq2e.add(Arrays.asList(new IntWritable(3), new IntWritable(0), new DoubleWritable(13.1))); + seq2e.add(Arrays.asList(new IntWritable(4), new IntWritable(1), new DoubleWritable(14.1))); expectedSequence.add(seq1e); expectedSequence.add(seq2e); @@ -133,8 +133,8 @@ public class ExecutionTest extends BaseSparkTest { public void testReductionGlobal() { List> in = Arrays.asList( - Arrays.asList(new Text("first"), new DoubleWritable(3.0)), - Arrays.asList(new Text("second"), new DoubleWritable(5.0)) + Arrays.asList(new Text("first"), new DoubleWritable(3.0)), + Arrays.asList(new Text("second"), new DoubleWritable(5.0)) ); 
JavaRDD> inData = sc.parallelize(in); @@ -154,7 +154,7 @@ public class ExecutionTest extends BaseSparkTest { List> out = outRdd.collect(); - List> expOut = Collections.singletonList(Arrays.asList(new Text("first"), new DoubleWritable(4.0))); + List> expOut = Collections.singletonList(Arrays.asList(new Text("first"), new DoubleWritable(4.0))); assertEquals(expOut, out); } @@ -163,10 +163,10 @@ public class ExecutionTest extends BaseSparkTest { public void testReductionByKey(){ List> in = Arrays.asList( - Arrays.asList(new IntWritable(0), new Text("first"), new DoubleWritable(3.0)), - Arrays.asList(new IntWritable(0), new Text("second"), new DoubleWritable(5.0)), - Arrays.asList(new IntWritable(1), new Text("f"), new DoubleWritable(30.0)), - Arrays.asList(new IntWritable(1), new Text("s"), new DoubleWritable(50.0)) + Arrays.asList(new IntWritable(0), new Text("first"), new DoubleWritable(3.0)), + Arrays.asList(new IntWritable(0), new Text("second"), new DoubleWritable(5.0)), + Arrays.asList(new IntWritable(1), new Text("f"), new DoubleWritable(30.0)), + Arrays.asList(new IntWritable(1), new Text("s"), new DoubleWritable(50.0)) ); JavaRDD> inData = sc.parallelize(in); @@ -189,8 +189,8 @@ public class ExecutionTest extends BaseSparkTest { List> out = outRdd.collect(); List> expOut = Arrays.asList( - Arrays.asList(new IntWritable(0), new Text("first"), new DoubleWritable(4.0)), - Arrays.asList(new IntWritable(1), new Text("f"), new DoubleWritable(40.0))); + Arrays.asList(new IntWritable(0), new Text("first"), new DoubleWritable(4.0)), + Arrays.asList(new IntWritable(1), new Text("f"), new DoubleWritable(40.0))); out = new ArrayList<>(out); Collections.sort( @@ -215,15 +215,15 @@ public class ExecutionTest extends BaseSparkTest { .addColumnDouble("col2").build(); List> inputData = new ArrayList<>(); - inputData.add(Arrays.asList(new IntWritable(0), new Text("state2"), new DoubleWritable(0.1))); - inputData.add(Arrays.asList(new IntWritable(1), new Text("state1"), new 
DoubleWritable(1.1))); - inputData.add(Arrays.asList(new IntWritable(2), new Text("state0"), new DoubleWritable(2.1))); - inputData.add(Arrays.asList(new IntWritable(0), new Text("state2"), new DoubleWritable(0.1))); - inputData.add(Arrays.asList(new IntWritable(1), new Text("state1"), new DoubleWritable(1.1))); - inputData.add(Arrays.asList(new IntWritable(2), new Text("state0"), new DoubleWritable(2.1))); - inputData.add(Arrays.asList(new IntWritable(0), new Text("state2"), new DoubleWritable(0.1))); - inputData.add(Arrays.asList(new IntWritable(1), new Text("state1"), new DoubleWritable(1.1))); - inputData.add(Arrays.asList(new IntWritable(2), new Text("state0"), new DoubleWritable(2.1))); + inputData.add(Arrays.asList(new IntWritable(0), new Text("state2"), new DoubleWritable(0.1))); + inputData.add(Arrays.asList(new IntWritable(1), new Text("state1"), new DoubleWritable(1.1))); + inputData.add(Arrays.asList(new IntWritable(2), new Text("state0"), new DoubleWritable(2.1))); + inputData.add(Arrays.asList(new IntWritable(0), new Text("state2"), new DoubleWritable(0.1))); + inputData.add(Arrays.asList(new IntWritable(1), new Text("state1"), new DoubleWritable(1.1))); + inputData.add(Arrays.asList(new IntWritable(2), new Text("state0"), new DoubleWritable(2.1))); + inputData.add(Arrays.asList(new IntWritable(0), new Text("state2"), new DoubleWritable(0.1))); + inputData.add(Arrays.asList(new IntWritable(1), new Text("state1"), new DoubleWritable(1.1))); + inputData.add(Arrays.asList(new IntWritable(2), new Text("state0"), new DoubleWritable(2.1))); JavaRDD> rdd = sc.parallelize(inputData); @@ -254,9 +254,9 @@ public class ExecutionTest extends BaseSparkTest { .outputSchema(finalSchema).build() ).build(); List> inputData = new ArrayList<>(); - inputData.add(Arrays.asList(new IntWritable(0), new Text("state2"), new DoubleWritable(0.1))); - inputData.add(Arrays.asList(new IntWritable(1), new Text("state1"), new DoubleWritable(1.1))); - inputData.add(Arrays.asList(new 
IntWritable(2), new Text("state0"), new DoubleWritable(2.1))); + inputData.add(Arrays.asList(new IntWritable(0), new Text("state2"), new DoubleWritable(0.1))); + inputData.add(Arrays.asList(new IntWritable(1), new Text("state1"), new DoubleWritable(1.1))); + inputData.add(Arrays.asList(new IntWritable(2), new Text("state0"), new DoubleWritable(2.1))); JavaRDD> rdd = sc.parallelize(inputData); @@ -270,9 +270,9 @@ public class ExecutionTest extends BaseSparkTest { }); List> expected = new ArrayList<>(); - expected.add(Arrays.asList(new IntWritable(0), new IntWritable(2), new DoubleWritable(10.1))); - expected.add(Arrays.asList(new IntWritable(1), new IntWritable(1), new DoubleWritable(11.1))); - expected.add(Arrays.asList(new IntWritable(2), new IntWritable(0), new DoubleWritable(12.1))); + expected.add(Arrays.asList(new IntWritable(0), new IntWritable(2), new DoubleWritable(10.1))); + expected.add(Arrays.asList(new IntWritable(1), new IntWritable(1), new DoubleWritable(11.1))); + expected.add(Arrays.asList(new IntWritable(2), new IntWritable(0), new DoubleWritable(12.1))); assertEquals(expected, out); } @@ -299,9 +299,9 @@ public class ExecutionTest extends BaseSparkTest { INDArray twos = ones.add(ones); List> inputData = new ArrayList<>(); - inputData.add(Arrays.asList(new IntWritable(0), new NDArrayWritable(zeros), new NDArrayWritable(zeros))); - inputData.add(Arrays.asList(new IntWritable(1), new NDArrayWritable(zeros), new NDArrayWritable(ones))); - inputData.add(Arrays.asList(new IntWritable(2), new NDArrayWritable(ones), new NDArrayWritable(ones))); + inputData.add(Arrays.asList(new IntWritable(0), new NDArrayWritable(zeros), new NDArrayWritable(zeros))); + inputData.add(Arrays.asList(new IntWritable(1), new NDArrayWritable(zeros), new NDArrayWritable(ones))); + inputData.add(Arrays.asList(new IntWritable(2), new NDArrayWritable(ones), new NDArrayWritable(ones))); JavaRDD> rdd = sc.parallelize(inputData); @@ -315,9 +315,9 @@ public class ExecutionTest extends 
BaseSparkTest { }); List> expected = new ArrayList<>(); - expected.add(Arrays.asList(new IntWritable(0), new NDArrayWritable(zeros), new NDArrayWritable(zeros), new NDArrayWritable(zeros))); - expected.add(Arrays.asList(new IntWritable(1), new NDArrayWritable(zeros), new NDArrayWritable(ones), new NDArrayWritable(ones))); - expected.add(Arrays.asList(new IntWritable(2), new NDArrayWritable(ones), new NDArrayWritable(ones), new NDArrayWritable(twos))); + expected.add(Arrays.asList(new IntWritable(0), new NDArrayWritable(zeros), new NDArrayWritable(zeros), new NDArrayWritable(zeros))); + expected.add(Arrays.asList(new IntWritable(1), new NDArrayWritable(zeros), new NDArrayWritable(ones), new NDArrayWritable(ones))); + expected.add(Arrays.asList(new IntWritable(2), new NDArrayWritable(ones), new NDArrayWritable(ones), new NDArrayWritable(twos))); } @Test @@ -329,14 +329,14 @@ public class ExecutionTest extends BaseSparkTest { .build(); List> in = Arrays.asList( - Arrays.asList(new Text("a"), new DoubleWritable(3.14159), new Text("8e-4")), - Arrays.asList(new Text("a2"), new DoubleWritable(3.14159), new Text("7e-4")), - Arrays.asList(new Text("b"), new DoubleWritable(2.71828), new Text("7e2")), - Arrays.asList(new Text("c"), new DoubleWritable(1.61803), new Text("6e8")), - Arrays.asList(new Text("c"), new DoubleWritable(1.61803), new Text("2.0")), - Arrays.asList(new Text("c"), new DoubleWritable(1.61803), new Text("2.1")), - Arrays.asList(new Text("c"), new DoubleWritable(1.61803), new Text("2.2")), - Arrays.asList(new Text("c"), new DoubleWritable(-2), new Text("non numerical"))); + Arrays.asList(new Text("a"), new DoubleWritable(3.14159), new Text("8e-4")), + Arrays.asList(new Text("a2"), new DoubleWritable(3.14159), new Text("7e-4")), + Arrays.asList(new Text("b"), new DoubleWritable(2.71828), new Text("7e2")), + Arrays.asList(new Text("c"), new DoubleWritable(1.61803), new Text("6e8")), + Arrays.asList(new Text("c"), new DoubleWritable(1.61803), new Text("2.0")), 
+ Arrays.asList(new Text("c"), new DoubleWritable(1.61803), new Text("2.1")), + Arrays.asList(new Text("c"), new DoubleWritable(1.61803), new Text("2.2")), + Arrays.asList(new Text("c"), new DoubleWritable(-2), new Text("non numerical"))); //Test Benfords law use case: TransformProcess tp = new TransformProcess.Builder(s) @@ -354,7 +354,7 @@ public class ExecutionTest extends BaseSparkTest { assertEquals(1, out.size()); List l = out.get(0); - List exp = Arrays.asList( + List exp = Arrays.asList( new IntWritable(0), //0 new IntWritable(0), //1 new IntWritable(3), //2 diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/analysis/TestAnalysis.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/analysis/TestAnalysis.java index 4fc4f3323..8ed68b55e 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/analysis/TestAnalysis.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/analysis/TestAnalysis.java @@ -59,13 +59,13 @@ public class TestAnalysis extends BaseSparkTest { .addColumnNDArray("ndarray", new long[] {1, 10}).build(); List> data = new ArrayList<>(); - data.add(Arrays.asList((Writable) new IntWritable(0), new DoubleWritable(1.0), new LongWritable(1000), + data.add(Arrays.asList(new IntWritable(0), new DoubleWritable(1.0), new LongWritable(1000), new Text("A"), new NDArrayWritable(Nd4j.valueArrayOf(1, 10, 100.0)))); - data.add(Arrays.asList((Writable) new IntWritable(5), new DoubleWritable(0.0), new LongWritable(2000), + data.add(Arrays.asList(new IntWritable(5), new DoubleWritable(0.0), new LongWritable(2000), new Text("A"), new NDArrayWritable(Nd4j.valueArrayOf(1, 10, 200.0)))); - data.add(Arrays.asList((Writable) new IntWritable(3), new DoubleWritable(10.0), new LongWritable(3000), + data.add(Arrays.asList(new IntWritable(3), 
new DoubleWritable(10.0), new LongWritable(3000), new Text("A"), new NDArrayWritable(Nd4j.valueArrayOf(1, 10, 300.0)))); - data.add(Arrays.asList((Writable) new IntWritable(-1), new DoubleWritable(-1.0), new LongWritable(20000), + data.add(Arrays.asList(new IntWritable(-1), new DoubleWritable(-1.0), new LongWritable(20000), new Text("B"), new NDArrayWritable(Nd4j.valueArrayOf(1, 10, 400.0)))); JavaRDD> rdd = sc.parallelize(data); @@ -253,21 +253,21 @@ public class TestAnalysis extends BaseSparkTest { public void testSampleMostFrequent() { List> toParallelize = new ArrayList<>(); - toParallelize.add(Arrays.asList(new Text("a"), new Text("MostCommon"))); - toParallelize.add(Arrays.asList(new Text("b"), new Text("SecondMostCommon"))); - toParallelize.add(Arrays.asList(new Text("c"), new Text("SecondMostCommon"))); - toParallelize.add(Arrays.asList(new Text("d"), new Text("0"))); - toParallelize.add(Arrays.asList(new Text("e"), new Text("MostCommon"))); - toParallelize.add(Arrays.asList(new Text("f"), new Text("ThirdMostCommon"))); - toParallelize.add(Arrays.asList(new Text("c"), new Text("MostCommon"))); - toParallelize.add(Arrays.asList(new Text("h"), new Text("1"))); - toParallelize.add(Arrays.asList(new Text("i"), new Text("SecondMostCommon"))); - toParallelize.add(Arrays.asList(new Text("j"), new Text("2"))); - toParallelize.add(Arrays.asList(new Text("k"), new Text("ThirdMostCommon"))); - toParallelize.add(Arrays.asList(new Text("l"), new Text("MostCommon"))); - toParallelize.add(Arrays.asList(new Text("m"), new Text("3"))); - toParallelize.add(Arrays.asList(new Text("n"), new Text("4"))); - toParallelize.add(Arrays.asList(new Text("o"), new Text("5"))); + toParallelize.add(Arrays.asList(new Text("a"), new Text("MostCommon"))); + toParallelize.add(Arrays.asList(new Text("b"), new Text("SecondMostCommon"))); + toParallelize.add(Arrays.asList(new Text("c"), new Text("SecondMostCommon"))); + toParallelize.add(Arrays.asList(new Text("d"), new Text("0"))); + 
toParallelize.add(Arrays.asList(new Text("e"), new Text("MostCommon"))); + toParallelize.add(Arrays.asList(new Text("f"), new Text("ThirdMostCommon"))); + toParallelize.add(Arrays.asList(new Text("c"), new Text("MostCommon"))); + toParallelize.add(Arrays.asList(new Text("h"), new Text("1"))); + toParallelize.add(Arrays.asList(new Text("i"), new Text("SecondMostCommon"))); + toParallelize.add(Arrays.asList(new Text("j"), new Text("2"))); + toParallelize.add(Arrays.asList(new Text("k"), new Text("ThirdMostCommon"))); + toParallelize.add(Arrays.asList(new Text("l"), new Text("MostCommon"))); + toParallelize.add(Arrays.asList(new Text("m"), new Text("3"))); + toParallelize.add(Arrays.asList(new Text("n"), new Text("4"))); + toParallelize.add(Arrays.asList(new Text("o"), new Text("5"))); JavaRDD> rdd = sc.parallelize(toParallelize); diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/join/TestJoin.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/join/TestJoin.java index 29da7a0a4..853800a03 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/join/TestJoin.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/join/TestJoin.java @@ -45,27 +45,27 @@ public class TestJoin extends BaseSparkTest { .addColumnDouble("amount").build(); List> infoList = new ArrayList<>(); - infoList.add(Arrays.asList(new LongWritable(12345), new Text("Customer12345"))); - infoList.add(Arrays.asList(new LongWritable(98765), new Text("Customer98765"))); - infoList.add(Arrays.asList(new LongWritable(50000), new Text("Customer50000"))); + infoList.add(Arrays.asList(new LongWritable(12345), new Text("Customer12345"))); + infoList.add(Arrays.asList(new LongWritable(98765), new Text("Customer98765"))); + infoList.add(Arrays.asList(new LongWritable(50000), new 
Text("Customer50000"))); List> purchaseList = new ArrayList<>(); - purchaseList.add(Arrays.asList(new LongWritable(1000000), new LongWritable(12345), + purchaseList.add(Arrays.asList(new LongWritable(1000000), new LongWritable(12345), new DoubleWritable(10.00))); - purchaseList.add(Arrays.asList(new LongWritable(1000001), new LongWritable(12345), + purchaseList.add(Arrays.asList(new LongWritable(1000001), new LongWritable(12345), new DoubleWritable(20.00))); - purchaseList.add(Arrays.asList(new LongWritable(1000002), new LongWritable(98765), + purchaseList.add(Arrays.asList(new LongWritable(1000002), new LongWritable(98765), new DoubleWritable(30.00))); Join join = new Join.Builder(Join.JoinType.RightOuter).setJoinColumns("customerID") .setSchemas(customerInfoSchema, purchasesSchema).build(); List> expected = new ArrayList<>(); - expected.add(Arrays.asList(new LongWritable(12345), new Text("Customer12345"), + expected.add(Arrays.asList(new LongWritable(12345), new Text("Customer12345"), new LongWritable(1000000), new DoubleWritable(10.00))); - expected.add(Arrays.asList(new LongWritable(12345), new Text("Customer12345"), + expected.add(Arrays.asList(new LongWritable(12345), new Text("Customer12345"), new LongWritable(1000001), new DoubleWritable(20.00))); - expected.add(Arrays.asList(new LongWritable(98765), new Text("Customer98765"), + expected.add(Arrays.asList(new LongWritable(98765), new Text("Customer98765"), new LongWritable(1000002), new DoubleWritable(30.00))); @@ -99,11 +99,11 @@ public class TestJoin extends BaseSparkTest { .setSchemas(purchasesSchema, customerInfoSchema).build(); List> expectedManyToOne = new ArrayList<>(); - expectedManyToOne.add(Arrays.asList(new LongWritable(1000000), new LongWritable(12345), + expectedManyToOne.add(Arrays.asList(new LongWritable(1000000), new LongWritable(12345), new DoubleWritable(10.00), new Text("Customer12345"))); - expectedManyToOne.add(Arrays.asList(new LongWritable(1000001), new LongWritable(12345), + 
expectedManyToOne.add(Arrays.asList(new LongWritable(1000001), new LongWritable(12345), new DoubleWritable(20.00), new Text("Customer12345"))); - expectedManyToOne.add(Arrays.asList(new LongWritable(1000002), new LongWritable(98765), + expectedManyToOne.add(Arrays.asList(new LongWritable(1000002), new LongWritable(98765), new DoubleWritable(30.00), new Text("Customer98765"))); JavaRDD> joined2 = SparkTransformExecutor.executeJoin(join2, purchases, info); @@ -137,45 +137,45 @@ public class TestJoin extends BaseSparkTest { .addColumnCategorical("otherCategory", Arrays.asList("cat0", "cat1", "cat2")).build(); List> first = new ArrayList<>(); - first.add(Arrays.asList(new LongWritable(0), new Text("cat0"))); - first.add(Arrays.asList(new LongWritable(1), new Text("cat0"))); - first.add(Arrays.asList(new LongWritable(2), new Text("cat1"))); + first.add(Arrays.asList(new LongWritable(0), new Text("cat0"))); + first.add(Arrays.asList(new LongWritable(1), new Text("cat0"))); + first.add(Arrays.asList(new LongWritable(2), new Text("cat1"))); List> second = new ArrayList<>(); - second.add(Arrays.asList(new LongWritable(100), new Text("cat0"))); - second.add(Arrays.asList(new LongWritable(101), new Text("cat0"))); - second.add(Arrays.asList(new LongWritable(102), new Text("cat2"))); + second.add(Arrays.asList(new LongWritable(100), new Text("cat0"))); + second.add(Arrays.asList(new LongWritable(101), new Text("cat0"))); + second.add(Arrays.asList(new LongWritable(102), new Text("cat2"))); List> expOuterJoin = new ArrayList<>(); - expOuterJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(100))); - expOuterJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(101))); - expOuterJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(100))); - expOuterJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(101))); - expOuterJoin.add(Arrays.asList(new LongWritable(2), new 
Text("cat1"), new NullWritable())); - expOuterJoin.add(Arrays.asList(new NullWritable(), new Text("cat2"), new LongWritable(102))); + expOuterJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(100))); + expOuterJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(101))); + expOuterJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(100))); + expOuterJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(101))); + expOuterJoin.add(Arrays.asList(new LongWritable(2), new Text("cat1"), new NullWritable())); + expOuterJoin.add(Arrays.asList(new NullWritable(), new Text("cat2"), new LongWritable(102))); List> expLeftJoin = new ArrayList<>(); - expLeftJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(100))); - expLeftJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(101))); - expLeftJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(100))); - expLeftJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(101))); - expLeftJoin.add(Arrays.asList(new LongWritable(2), new Text("cat1"), new NullWritable())); + expLeftJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(100))); + expLeftJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(101))); + expLeftJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(100))); + expLeftJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(101))); + expLeftJoin.add(Arrays.asList(new LongWritable(2), new Text("cat1"), new NullWritable())); List> expRightJoin = new ArrayList<>(); - expRightJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(100))); - expRightJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(101))); - expRightJoin.add(Arrays.asList(new LongWritable(1), new 
Text("cat0"), new LongWritable(100))); - expRightJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(101))); - expRightJoin.add(Arrays.asList(new NullWritable(), new Text("cat2"), new LongWritable(102))); + expRightJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(100))); + expRightJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(101))); + expRightJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(100))); + expRightJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(101))); + expRightJoin.add(Arrays.asList(new NullWritable(), new Text("cat2"), new LongWritable(102))); List> expInnerJoin = new ArrayList<>(); - expInnerJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(100))); - expInnerJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(101))); - expInnerJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(100))); - expInnerJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(101))); + expInnerJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(100))); + expInnerJoin.add(Arrays.asList(new LongWritable(0), new Text("cat0"), new LongWritable(101))); + expInnerJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(100))); + expInnerJoin.add(Arrays.asList(new LongWritable(1), new Text("cat0"), new LongWritable(101))); JavaRDD> firstRDD = sc.parallelize(first); JavaRDD> secondRDD = sc.parallelize(second); diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/rank/TestCalculateSortedRank.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/rank/TestCalculateSortedRank.java index 6ff564418..daf2794f2 100644 --- 
a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/rank/TestCalculateSortedRank.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/rank/TestCalculateSortedRank.java @@ -44,10 +44,10 @@ public class TestCalculateSortedRank extends BaseSparkTest { public void testCalculateSortedRank() { List> data = new ArrayList<>(); - data.add(Arrays.asList((Writable) new Text("0"), new DoubleWritable(0.0))); - data.add(Arrays.asList((Writable) new Text("3"), new DoubleWritable(0.3))); - data.add(Arrays.asList((Writable) new Text("2"), new DoubleWritable(0.2))); - data.add(Arrays.asList((Writable) new Text("1"), new DoubleWritable(0.1))); + data.add(Arrays.asList(new Text("0"), new DoubleWritable(0.0))); + data.add(Arrays.asList(new Text("3"), new DoubleWritable(0.3))); + data.add(Arrays.asList(new Text("2"), new DoubleWritable(0.2))); + data.add(Arrays.asList(new Text("1"), new DoubleWritable(0.1))); JavaRDD> rdd = sc.parallelize(data); diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/sequence/TestConvertToSequence.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/sequence/TestConvertToSequence.java index 7faca7235..ad545172c 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/sequence/TestConvertToSequence.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/transform/sequence/TestConvertToSequence.java @@ -46,12 +46,12 @@ public class TestConvertToSequence extends BaseSparkTest { Schema s = new Schema.Builder().addColumnsString("key1", "key2").addColumnLong("time").build(); List> allExamples = - Arrays.asList(Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(10)), - Arrays.asList(new Text("k1b"), new Text("k2b"), new 
LongWritable(10)), - Arrays.asList(new Text("k1a"), new Text("k2a"), + Arrays.asList(Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(10)), + Arrays.asList(new Text("k1b"), new Text("k2b"), new LongWritable(10)), + Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(-10)), - Arrays.asList(new Text("k1b"), new Text("k2b"), new LongWritable(5)), - Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(0))); + Arrays.asList(new Text("k1b"), new Text("k2b"), new LongWritable(5)), + Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(0))); TransformProcess tp = new TransformProcess.Builder(s) .convertToSequence(Arrays.asList("key1", "key2"), new NumericalColumnComparator("time")) @@ -73,13 +73,13 @@ public class TestConvertToSequence extends BaseSparkTest { } List> expSeq0 = Arrays.asList( - Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(-10)), - Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(0)), - Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(10))); + Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(-10)), + Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(0)), + Arrays.asList(new Text("k1a"), new Text("k2a"), new LongWritable(10))); List> expSeq1 = Arrays.asList( - Arrays.asList(new Text("k1b"), new Text("k2b"), new LongWritable(5)), - Arrays.asList(new Text("k1b"), new Text("k2b"), new LongWritable(10))); + Arrays.asList(new Text("k1b"), new Text("k2b"), new LongWritable(5)), + Arrays.asList(new Text("k1b"), new Text("k2b"), new LongWritable(10))); assertEquals(expSeq0, seq0); assertEquals(expSeq1, seq1); @@ -94,9 +94,9 @@ public class TestConvertToSequence extends BaseSparkTest { .build(); List> allExamples = Arrays.asList( - Arrays.asList(new Text("a"), new LongWritable(0)), - Arrays.asList(new Text("b"), new LongWritable(1)), - Arrays.asList(new Text("c"), new LongWritable(2))); + Arrays.asList(new Text("a"), new 
LongWritable(0)), + Arrays.asList(new Text("b"), new LongWritable(1)), + Arrays.asList(new Text("c"), new LongWritable(2))); TransformProcess tp = new TransformProcess.Builder(s) .convertToSequence() diff --git a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/util/TestSparkUtil.java b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/util/TestSparkUtil.java index a2dd04ce0..1ed67934b 100644 --- a/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/util/TestSparkUtil.java +++ b/cavis-datavec/cavis-datavec-spark/cavis-datavec-spark-core/src/test/java/org/datavec/spark/util/TestSparkUtil.java @@ -46,8 +46,8 @@ public class TestSparkUtil extends BaseSparkTest { return; } List> l = new ArrayList<>(); - l.add(Arrays.asList(new Text("abc"), new DoubleWritable(2.0), new IntWritable(-1))); - l.add(Arrays.asList(new Text("def"), new DoubleWritable(4.0), new IntWritable(-2))); + l.add(Arrays.asList(new Text("abc"), new DoubleWritable(2.0), new IntWritable(-1))); + l.add(Arrays.asList(new Text("def"), new DoubleWritable(4.0), new IntWritable(-2))); File f = File.createTempFile("testSparkUtil", "txt"); f.deleteOnExit(); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/execution/input/Operands.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/execution/input/Operands.java index 0bf569f6d..2ea351b38 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/execution/input/Operands.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/execution/input/Operands.java @@ -27,7 +27,7 @@ import org.nd4j.common.primitives.Pair; import java.util.*; public class Operands { - private Map map = new LinkedHashMap<>(); + private final Map map = new LinkedHashMap<>(); /** * This method allows to pass array to the node identified by its name diff --git 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/functions/DifferentialFunction.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/functions/DifferentialFunction.java index c7920422f..8a642e7a2 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/functions/DifferentialFunction.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/functions/DifferentialFunction.java @@ -734,7 +734,7 @@ public abstract class DifferentialFunction { * @return The data types of the outputs */ public List calculateOutputDataTypes(List dataTypes){ - throw new UnsupportedOperationException("Op type of " + getClass().getName() + " and name " + this.toString() + " did not override calculateOutputDataTypes()! This function has not been implemented for " + getClass().getName()); + throw new UnsupportedOperationException("Op type of " + getClass().getName() + " and name " + this + " did not override calculateOutputDataTypes()! This function has not been implemented for " + getClass().getName()); } @@ -746,9 +746,9 @@ public abstract class DifferentialFunction { DifferentialFunction that = (DifferentialFunction) o; if (inPlace != that.inPlace) return false; - if (scalarValue != null ? !scalarValue.equals(that.scalarValue) : that.scalarValue != null) return false; + if (!Objects.equals(scalarValue, that.scalarValue)) return false; if (!Arrays.equals(dimensions, that.dimensions)) return false; - return ownName != null ? 
ownName.equals(that.ownName) : that.ownName == null; + return Objects.equals(ownName, that.ownName); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/ListenerResponse.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/ListenerResponse.java index 7ab5a262a..81dcee826 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/ListenerResponse.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/ListenerResponse.java @@ -21,5 +21,5 @@ package org.nd4j.autodiff.listeners; public enum ListenerResponse { - CONTINUE, STOP; + CONTINUE, STOP } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/Loss.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/Loss.java index 43bf09b4d..4461f290d 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/Loss.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/Loss.java @@ -96,7 +96,7 @@ public class Loss { public static Loss sum(List losses) { if (losses.isEmpty()) - return new Loss(Collections.emptyList(), new double[0]); + return new Loss(Collections.emptyList(), new double[0]); double[] lossValues = new double[losses.get(0).losses.length]; List lossNames = new ArrayList<>(losses.get(0).lossNames); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/checkpoint/CheckpointListener.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/checkpoint/CheckpointListener.java index 7bc8f044e..8c4b12823 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/checkpoint/CheckpointListener.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/checkpoint/CheckpointListener.java @@ -44,27 +44,27 @@ import java.util.concurrent.TimeUnit; @Slf4j public class CheckpointListener extends BaseListener implements Serializable { - private enum KeepMode {ALL, LAST, 
LAST_AND_EVERY}; + private enum KeepMode {ALL, LAST, LAST_AND_EVERY} - private File rootDir; - private String fileNamePrefix; - private KeepMode keepMode; - private int keepLast; - private int keepEvery; - private boolean logSaving; - private boolean deleteExisting; - private boolean saveUpdaterState; + private final File rootDir; + private final String fileNamePrefix; + private final KeepMode keepMode; + private final int keepLast; + private final int keepEvery; + private final boolean logSaving; + private final boolean deleteExisting; + private final boolean saveUpdaterState; - private Integer saveEveryNEpochs; - private Integer saveEveryNIterations; - private boolean saveEveryNIterSinceLast; - private Long saveEveryAmount; - private TimeUnit saveEveryUnit; + private final Integer saveEveryNEpochs; + private final Integer saveEveryNIterations; + private final boolean saveEveryNIterSinceLast; + private final Long saveEveryAmount; + private final TimeUnit saveEveryUnit; private Long saveEveryMs; - private boolean saveEverySinceLast; + private final boolean saveEverySinceLast; private int lastCheckpointNum = -1; - private File checkpointRecordFile; + private final File checkpointRecordFile; private Checkpoint lastCheckpoint; private long startTime = -1; @@ -168,7 +168,6 @@ public class CheckpointListener extends BaseListener implements Serializable { long lastSaveTime = (lastCheckpoint != null ? lastCheckpoint.getTimestamp() : startTime); if((time - lastSaveTime) >= saveEveryMs){ saveCheckpoint(sd, at); - return; } } else { //Save periodically, regardless of when last model was saved @@ -176,7 +175,6 @@ public class CheckpointListener extends BaseListener implements Serializable { if((time - lastSave) > saveEveryMs){ saveCheckpoint(sd, at); lastSaveEveryMsNoSinceLast = time; - return; } } } @@ -215,7 +213,6 @@ public class CheckpointListener extends BaseListener implements Serializable { //Finally: determine if we should delete some old models... 
if(keepMode == null || keepMode == KeepMode.ALL){ - return; } else if(keepMode == KeepMode.LAST){ List checkpoints = availableCheckpoints(); Iterator iter = checkpoints.iterator(); @@ -423,7 +420,7 @@ public class CheckpointListener extends BaseListener implements Serializable { public static class Builder { - private File rootDir; + private final File rootDir; private String fileNamePrefix = "SameDiff"; private KeepMode keepMode; private int keepLast; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/debugging/ExecDebuggingListener.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/debugging/ExecDebuggingListener.java index a862a2fe9..ab4423020 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/debugging/ExecDebuggingListener.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/debugging/ExecDebuggingListener.java @@ -186,7 +186,7 @@ public class ExecDebuggingListener extends BaseListener { sb.append("Nd4j.exec(op);\n"); } - System.out.print(sb.toString()); + System.out.print(sb); } private static String createString(INDArray arr){ diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/impl/HistoryListener.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/impl/HistoryListener.java index 4e03b6efd..9569122fb 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/impl/HistoryListener.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/impl/HistoryListener.java @@ -42,14 +42,14 @@ public class HistoryListener extends BaseEvaluationListener { @Setter private ListenerEvaluations evaluations; - private List trainingHistory = new ArrayList<>(); - private List validationHistory = new ArrayList<>(); + private final List trainingHistory = new ArrayList<>(); + private final List validationHistory = new ArrayList<>(); private LossCurve loss = null; private long startTime; private 
long endTime; - private List validationTimes = new ArrayList<>(); + private final List validationTimes = new ArrayList<>(); private long validationStartTime; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/impl/UIListener.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/impl/UIListener.java index f5bc5c8b6..e9ef1bcf2 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/impl/UIListener.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/impl/UIListener.java @@ -83,19 +83,19 @@ public class UIListener extends BaseListener { public enum HistogramType {PARAMETERS, PARAMETER_GRADIENTS, PARAMETER_UPDATES, ACTIVATIONS, ACTIVATION_GRADIENTS} - private FileMode fileMode; - private File logFile; - private int lossPlotFreq; - private int performanceStatsFrequency; - private int updateRatioFrequency; - private UpdateRatio updateRatioType; - private int histogramFrequency; - private HistogramType[] histogramTypes; - private int opProfileFrequency; - private Map, List> trainEvalMetrics; - private int trainEvalFrequency; - private TestEvaluation testEvaluation; - private int learningRateFrequency; + private final FileMode fileMode; + private final File logFile; + private final int lossPlotFreq; + private final int performanceStatsFrequency; + private final int updateRatioFrequency; + private final UpdateRatio updateRatioType; + private final int histogramFrequency; + private final HistogramType[] histogramTypes; + private final int opProfileFrequency; + private final Map, List> trainEvalMetrics; + private final int trainEvalFrequency; + private final TestEvaluation testEvaluation; + private final int learningRateFrequency; private MultiDataSet currentIterDataSet; @@ -535,7 +535,7 @@ public class UIListener extends BaseListener { public static class Builder { private FileMode fileMode = FileMode.CREATE_OR_APPEND; - private File logFile; + private final File logFile; private 
int lossPlotFreq = 1; private int performanceStatsFrequency = -1; //Disabled by default diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/profiler/ProfilingListener.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/profiler/ProfilingListener.java index c2d20756f..0b702e259 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/profiler/ProfilingListener.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/profiler/ProfilingListener.java @@ -64,8 +64,8 @@ public class ProfilingListener extends BaseListener { private boolean logActive = false; private long opStartNano; - private Writer writer; - private ObjectMapper json; + private final Writer writer; + private final ObjectMapper json; private final Thread fileWritingThread; private final BlockingQueue writeQueue; @@ -209,7 +209,7 @@ public class ProfilingListener extends BaseListener { .pid((int)pid) .tid(tid) .ph(Phase.X) - .args(Collections.singletonMap("name", op.getName())) + .args(Collections.singletonMap("name", op.getName())) .build(); writeQueue.add(event); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/profiler/comparison/ProfileAnalyzer.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/profiler/comparison/ProfileAnalyzer.java index 68520efe7..3d81b729e 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/profiler/comparison/ProfileAnalyzer.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/profiler/comparison/ProfileAnalyzer.java @@ -551,7 +551,7 @@ public class ProfileAnalyzer { } - private static Map TF_PROFILE_ALIASES = new HashMap<>(); + private static final Map TF_PROFILE_ALIASES = new HashMap<>(); static { TF_PROFILE_ALIASES.put("_MklSoftmax", "Softmax"); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/records/EvaluationRecord.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/records/EvaluationRecord.java index 02295f330..2fe2cee27 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/records/EvaluationRecord.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/records/EvaluationRecord.java @@ -39,7 +39,7 @@ import org.nd4j.evaluation.IMetric; public class EvaluationRecord { private Map> evaluations; - private Map, IEvaluation> classEvaluations = new HashMap<>(); + private final Map, IEvaluation> classEvaluations = new HashMap<>(); private boolean isEmpty = true; public EvaluationRecord(Map> evaluations) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/records/History.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/records/History.java index dd642eb0e..9fe7f4814 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/records/History.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/records/History.java @@ -33,13 +33,13 @@ import org.nd4j.evaluation.IMetric; @Getter public class History { - private List trainingHistory; - private List validationHistory; + private final List trainingHistory; + private final List validationHistory; - private LossCurve lossCurve; + private final LossCurve lossCurve; - private long trainingTimeMillis; - private List validationTimesMillis; + private final long trainingTimeMillis; + private final List validationTimesMillis; public History(List training, List validation, LossCurve loss, long trainingTimeMillis, List validationTimesMillis){ diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/records/LossCurve.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/records/LossCurve.java index 8c68dbafd..b0f3a78e7 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/records/LossCurve.java +++ 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/listeners/records/LossCurve.java @@ -34,7 +34,7 @@ import org.nd4j.linalg.factory.Nd4j; public class LossCurve { @Getter - private List lossNames; + private final List lossNames; @Getter private INDArray lossValues; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/SDIndex.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/SDIndex.java index ea08bf132..5c5f50db2 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/SDIndex.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/SDIndex.java @@ -36,7 +36,7 @@ public class SDIndex { private boolean pointKeepDim; private Long intervalBegin = null; private Long intervalEnd = null; - private Long intervalStrides = 1l; + private Long intervalStrides = 1L; public SDIndex(){} diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/SameDiff.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/SameDiff.java index 577569ed1..559cf124f 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/SameDiff.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/SameDiff.java @@ -138,7 +138,7 @@ public class SameDiff extends SDBaseOps { // private DifferentialFunctionFactory functionFactory; // counter for auto-naming variables - private int variableId = 0; + private final int variableId = 0; //////////////////////////////////////// @@ -244,12 +244,12 @@ public class SameDiff extends SDBaseOps { return linalg; } - private Map sameDiffFunctionInstances; + private final Map sameDiffFunctionInstances; - private Table fieldVariableResolutionMapping; + private final Table fieldVariableResolutionMapping; // flag, shows if graph was already registered with libnd4j - private transient AtomicBoolean wasRegistered = new AtomicBoolean(false); + private final transient AtomicBoolean wasRegistered = new 
AtomicBoolean(false); //debug mode variables @@ -257,11 +257,11 @@ public class SameDiff extends SDBaseOps { private boolean debugMode; @Getter - private Stack argumentInterceptors = new Stack<>(); + private final Stack argumentInterceptors = new Stack<>(); @Getter - private Set pausedArgumentInterceptors = new HashSet<>(); + private final Set pausedArgumentInterceptors = new HashSet<>(); - private Set blockNames = new HashSet<>(); + private final Set blockNames = new HashSet<>(); @Getter @Setter @@ -2159,7 +2159,7 @@ public class SameDiff extends SDBaseOps { MultiDataSet ds = iterator.next(); Map placeholderMap = toPlaceholderMap(ds); - Map m = directExecHelper(placeholderMap, at, ds, Collections.emptyList(), activeListeners, requiredVarsArr); + Map m = directExecHelper(placeholderMap, at, ds, Collections.emptyList(), activeListeners, requiredVarsArr); for (Map.Entry> e : variableEvals.entrySet()) { INDArray prediction = m.get(e.getKey()); @@ -5802,8 +5802,10 @@ public class SameDiff extends SDBaseOps { // ensure that there are no variables that look like they are outputs of this op boolean varWithName = false; for (String varName : variables.keySet()) - if (varName.startsWith(name + ":") || varName.equals(name)) + if (varName.startsWith(name + ":") || varName.equals(name)) { varWithName = true; + break; + } if (!ops.containsKey(name) && !varWithName) break; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/TrainingConfig.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/TrainingConfig.java index f06cfec9c..442d47f27 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/TrainingConfig.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/TrainingConfig.java @@ -73,7 +73,7 @@ public class TrainingConfig { */ public TrainingConfig(IUpdater updater, List regularization, String dataSetFeatureMapping, String dataSetLabelMapping) { this(updater, regularization, true, 
Collections.singletonList(dataSetFeatureMapping), Collections.singletonList(dataSetLabelMapping), - Collections.emptyList(), Collections.emptyList(), null); + Collections.emptyList(), Collections.emptyList(), null); } /** @@ -154,11 +154,11 @@ public class TrainingConfig { private boolean skipValidation = false; private boolean markLabelsUnused = false; - private Map> trainEvaluations = new HashMap<>(); - private Map trainEvaluationLabels = new HashMap<>(); + private final Map> trainEvaluations = new HashMap<>(); + private final Map trainEvaluationLabels = new HashMap<>(); - private Map> validationEvaluations = new HashMap<>(); - private Map validationEvaluationLabels = new HashMap<>(); + private final Map> validationEvaluations = new HashMap<>(); + private final Map validationEvaluationLabels = new HashMap<>(); /** * Set the updater (such as {@link org.nd4j.linalg.learning.config.Adam}, {@link org.nd4j.linalg.learning.config.Nesterovs} diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/internal/AbstractSession.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/internal/AbstractSession.java index 0b0079cb1..d00efcba7 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/internal/AbstractSession.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/internal/AbstractSession.java @@ -167,10 +167,7 @@ public abstract class AbstractSession { For example, we might have a label placeholder, and we're doing inference not training */ for (String s : phNames) { - boolean required = false; - if (variables.contains(s)) { - required = true; - } + boolean required = variables.contains(s); if (!required) { Variable v = sameDiff.getVariables().get(s); if (v.getInputsForOp() != null) { @@ -973,8 +970,6 @@ public abstract class AbstractSession { */ protected enum ExecType {OP, VARIABLE, CONSTANT, PLACEHOLDER, SWITCH_L, SWITCH_R, EXEC_START, CONTROL_DEP} - ; - /** * ExecStep represents a 
single execution step, for a single op (or variable/constant etc) at a specific frame/iteration */ @@ -1022,5 +1017,4 @@ public abstract class AbstractSession { } } - ; } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/internal/FrameIter.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/internal/FrameIter.java index e1a215a36..6cbf7d4bd 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/internal/FrameIter.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/internal/FrameIter.java @@ -32,7 +32,7 @@ public class FrameIter { @Override public String toString() { - return "(\"" + frame + "\"," + iteration + (parentFrame == null ? "" : ",parent=" + parentFrame.toString()) + ")"; + return "(\"" + frame + "\"," + iteration + (parentFrame == null ? "" : ",parent=" + parentFrame) + ")"; } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/internal/InferenceSession.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/internal/InferenceSession.java index e2df94563..8d3b414ab 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/internal/InferenceSession.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/internal/InferenceSession.java @@ -74,7 +74,7 @@ public class InferenceSession extends AbstractSession arrayUseTracker = new IdentityDependencyTracker<>(); - private Map opContexts = new HashMap<>(); + private final Map opContexts = new HashMap<>(); public InferenceSession(@NonNull SameDiff sameDiff) { super(sameDiff); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/internal/memory/ArrayCacheMemoryMgr.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/internal/memory/ArrayCacheMemoryMgr.java index b07f4e094..e3624b70b 100644 --- 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/internal/memory/ArrayCacheMemoryMgr.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/internal/memory/ArrayCacheMemoryMgr.java @@ -42,10 +42,10 @@ public class ArrayCacheMemoryMgr extends AbstractMemoryMgr { private final long totalMemBytes; private long currentCacheSize = 0; - private Map arrayStores = new HashMap<>(); + private final Map arrayStores = new HashMap<>(); - private LinkedHashSet lruCache = new LinkedHashSet<>(); - private Map lruCacheValues = new HashMap<>(); + private final LinkedHashSet lruCache = new LinkedHashSet<>(); + private final Map lruCacheValues = new HashMap<>(); /** * Create an ArrayCacheMemoryMgr with default settings as per {@link ArrayCacheMemoryMgr} diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/serde/LegacyOpMapper.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/serde/LegacyOpMapper.java index 44d255345..2d3953ad8 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/serde/LegacyOpMapper.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/serde/LegacyOpMapper.java @@ -188,7 +188,7 @@ public class LegacyOpMapper { return FloorDivOp.class; case 23: return TruncateDivOp.class; - case 24:; + case 24: return And.class; case 25: return Or.class; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/validation/OpValidation.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/validation/OpValidation.java index 8746ef281..0e8c272cd 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/validation/OpValidation.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/validation/OpValidation.java @@ -451,11 +451,11 @@ public class OpValidation { private static List nonMappedLibnd4jOps; private static Map,CustomOpDescriptor>> dedupedCustomOps; private static int countTotalLibnd4jOps; - private static Map 
gradCheckCoverageCountPerClass = new LinkedHashMap<>(); - private static Map fwdPassCoverageCountPerClass = new LinkedHashMap<>(); - private static Map singleOpTestCountPerClass = new LinkedHashMap<>(); - private static Map opsWithTFMappingTFImportCounts = new LinkedHashMap<>(); - private static Map tfMappedOpsImportTestCounts = new LinkedHashMap<>(); + private static final Map gradCheckCoverageCountPerClass = new LinkedHashMap<>(); + private static final Map fwdPassCoverageCountPerClass = new LinkedHashMap<>(); + private static final Map singleOpTestCountPerClass = new LinkedHashMap<>(); + private static final Map opsWithTFMappingTFImportCounts = new LinkedHashMap<>(); + private static final Map tfMappedOpsImportTestCounts = new LinkedHashMap<>(); private static void collectCoverageInformation(TestCase testCase) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/validation/TestCase.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/validation/TestCase.java index f98b766a0..e755ea0e9 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/validation/TestCase.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/validation/TestCase.java @@ -38,7 +38,7 @@ import java.util.*; @Accessors(fluent = true) @Getter public class TestCase { - public enum TestSerialization {BEFORE_EXEC, AFTER_EXEC, BOTH, NONE}; + public enum TestSerialization {BEFORE_EXEC, AFTER_EXEC, BOTH, NONE} public static final boolean GC_DEFAULT_PRINT = false; public static final boolean GC_DEFAULT_EXIT_FIRST_FAILURE = false; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/validation/listeners/NonInplaceValidationListener.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/validation/listeners/NonInplaceValidationListener.java index f5afa8b21..534340af5 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/validation/listeners/NonInplaceValidationListener.java +++ 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/validation/listeners/NonInplaceValidationListener.java @@ -41,11 +41,11 @@ import org.nd4j.linalg.dataset.api.MultiDataSet; public class NonInplaceValidationListener extends BaseListener { @Getter - private static AtomicInteger useCounter = new AtomicInteger(); + private static final AtomicInteger useCounter = new AtomicInteger(); @Getter - private static AtomicInteger passCounter = new AtomicInteger(); + private static final AtomicInteger passCounter = new AtomicInteger(); @Getter - private static AtomicInteger failCounter = new AtomicInteger(); + private static final AtomicInteger failCounter = new AtomicInteger(); protected INDArray[] opInputs; protected INDArray[] opInputsOrig; @@ -64,7 +64,6 @@ public class NonInplaceValidationListener extends BaseListener { Op o = (Op)op.getOp(); if(oc.getInputArray(0) == null){ //No input op - return; } else if(oc.getInputArray(1) == null){ opInputsOrig = new INDArray[]{oc.getInputArray(0)}; opInputs = new INDArray[]{oc.getInputArray(0).dup()}; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/context/Nd4jContext.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/context/Nd4jContext.java index 3f1599391..5c13da776 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/context/Nd4jContext.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/context/Nd4jContext.java @@ -30,8 +30,8 @@ import java.util.Properties; @Slf4j public class Nd4jContext implements Serializable { - private Properties conf; - private static Nd4jContext INSTANCE = new Nd4jContext(); + private final Properties conf; + private static final Nd4jContext INSTANCE = new Nd4jContext(); private Nd4jContext() { conf = new Properties(); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/enums/ImageResizeMethod.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/enums/ImageResizeMethod.java index ad8c82573..82e9f7172 100644 --- 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/enums/ImageResizeMethod.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/enums/ImageResizeMethod.java @@ -28,5 +28,5 @@ public enum ImageResizeMethod { ResizeGaussian, ResizeLanczos3, ResizeLanczos5, - ResizeMitchellcubic; + ResizeMitchellcubic } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/BaseEvaluation.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/BaseEvaluation.java index ec62b7bb3..31c388bb6 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/BaseEvaluation.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/BaseEvaluation.java @@ -71,7 +71,7 @@ public abstract class BaseEvaluation implements IEvalu } catch (InvalidTypeIdException e) { if (e.getMessage().contains("Could not resolve type id")) { try { - return (T) attempFromLegacyFromJson(json, e); + return attempFromLegacyFromJson(json, e); } catch (Throwable t) { throw new RuntimeException("Cannot deserialize from JSON - JSON is invalid?", t); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/IMetric.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/IMetric.java index 18161d0a1..6372529d5 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/IMetric.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/IMetric.java @@ -25,10 +25,10 @@ public interface IMetric { /** * The {@link IEvaluation} class this metric is for */ - public Class getEvaluationClass(); + Class getEvaluationClass(); /** * Whether this metric should be minimized (aka whether lower values are better). 
*/ - public boolean minimize(); + boolean minimize(); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/classification/ConfusionMatrix.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/classification/ConfusionMatrix.java index 8e865da20..4f372ae2f 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/classification/ConfusionMatrix.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/classification/ConfusionMatrix.java @@ -32,7 +32,7 @@ import java.util.concurrent.ConcurrentHashMap; public class ConfusionMatrix> implements Serializable { @Getter - private volatile Map> matrix; + private final Map> matrix; private List classes; /** diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/classification/EvaluationBinary.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/classification/EvaluationBinary.java index 715e23d37..c3a66aa93 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/classification/EvaluationBinary.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/classification/EvaluationBinary.java @@ -363,7 +363,7 @@ public class EvaluationBinary extends BaseEvaluation { ret += accuracy(i); } - ret /= (double) numLabels(); + ret /= numLabels(); return ret; } @@ -381,7 +381,7 @@ public class EvaluationBinary extends BaseEvaluation { ret += precision(i); } - ret /= (double) numLabels(); + ret /= numLabels(); return ret; } @@ -401,7 +401,7 @@ public class EvaluationBinary extends BaseEvaluation { ret += recall(i); } - ret /= (double) numLabels(); + ret /= numLabels(); return ret; } @@ -420,7 +420,7 @@ public class EvaluationBinary extends BaseEvaluation { ret += f1(i); } - ret /= (double) numLabels(); + ret /= numLabels(); return ret; } @@ -469,7 +469,7 @@ public class EvaluationBinary extends BaseEvaluation { ret += matthewsCorrelation(i); } - ret /= (double) numLabels(); + ret /= numLabels(); return ret; } @@ -496,7 +496,7 @@ 
public class EvaluationBinary extends BaseEvaluation { ret += gMeasure(i); } - ret /= (double) numLabels(); + ret /= numLabels(); return ret; } @@ -578,7 +578,7 @@ public class EvaluationBinary extends BaseEvaluation { ret += falseAlarmRate(i); } - ret /= (double) numLabels(); + ret /= numLabels(); return ret; } @@ -657,7 +657,7 @@ public class EvaluationBinary extends BaseEvaluation { String label = (labels == null ? String.valueOf(i) : labels.get(i)); - List args = Arrays.asList(label, acc, f1, precision, recall, totalCount, + List args = Arrays.asList(label, acc, f1, precision, recall, totalCount, truePositives(i), trueNegatives(i), falsePositives(i), falseNegatives(i)); if (rocBinary != null) { args = new ArrayList<>(args); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/custom/EvaluationLambda.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/custom/EvaluationLambda.java index f228354b6..7f5a05876 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/custom/EvaluationLambda.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/custom/EvaluationLambda.java @@ -26,6 +26,6 @@ import java.util.List; import org.nd4j.linalg.api.ndarray.INDArray; public interface EvaluationLambda { - public T eval(INDArray labels, INDArray networkPredictions, INDArray maskArray, + T eval(INDArray labels, INDArray networkPredictions, INDArray maskArray, List recordMetaData); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/custom/MergeLambda.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/custom/MergeLambda.java index ee3a6966a..71a3c7962 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/custom/MergeLambda.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/custom/MergeLambda.java @@ -23,5 +23,5 @@ package org.nd4j.evaluation.custom; import java.util.List; public interface MergeLambda { - public List merge(List a, List b); + List 
merge(List a, List b); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/custom/ResultLambda.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/custom/ResultLambda.java index 722efa346..a9175d953 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/custom/ResultLambda.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/custom/ResultLambda.java @@ -23,5 +23,5 @@ package org.nd4j.evaluation.custom; import java.util.List; public interface ResultLambda { - public double toResult(List data); + double toResult(List data); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/regression/RegressionEvaluation.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/regression/RegressionEvaluation.java index d34bd5c4e..7ec1d3faa 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/regression/RegressionEvaluation.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/regression/RegressionEvaluation.java @@ -58,10 +58,7 @@ public class RegressionEvaluation extends BaseEvaluation { */ @Override public boolean minimize(){ - if(this == R2 || this == PC){ - return false; - } - return true; + return this != R2 && this != PC; } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/serde/ConfusionMatrixDeserializer.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/serde/ConfusionMatrixDeserializer.java index e01f0d51a..3dff297c1 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/serde/ConfusionMatrixDeserializer.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/serde/ConfusionMatrixDeserializer.java @@ -38,7 +38,7 @@ import java.util.Map; public class ConfusionMatrixDeserializer extends JsonDeserializer> { @Override public ConfusionMatrix deserialize(JsonParser jp, DeserializationContext ctxt) - throws IOException, JsonProcessingException { + throws IOException { JsonNode n = 
jp.getCodec().readTree(jp); //Get class names/labels diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/serde/ConfusionMatrixSerializer.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/serde/ConfusionMatrixSerializer.java index 4234fed16..9c4d0f833 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/serde/ConfusionMatrixSerializer.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/serde/ConfusionMatrixSerializer.java @@ -35,7 +35,7 @@ import java.util.Map; public class ConfusionMatrixSerializer extends JsonSerializer> { @Override public void serialize(ConfusionMatrix cm, JsonGenerator gen, SerializerProvider provider) - throws IOException, JsonProcessingException { + throws IOException { List classes = cm.getClasses(); Map> matrix = cm.getMatrix(); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/serde/ROCArraySerializer.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/serde/ROCArraySerializer.java index 6da654cae..d4c3526ed 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/serde/ROCArraySerializer.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/serde/ROCArraySerializer.java @@ -33,7 +33,7 @@ public class ROCArraySerializer extends JsonSerializer { @Override public void serialize(ROC[] rocs, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) - throws IOException, JsonProcessingException { + throws IOException { jsonGenerator.writeStartArray(); for (ROC r : rocs) { jsonGenerator.writeStartObject(); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/graph/FlatArray.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/graph/FlatArray.java index 2d740a969..23cafa242 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/graph/FlatArray.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/graph/FlatArray.java @@ -37,7 +37,7 @@ public final class FlatArray extends Table { public int 
shapeLength() { int o = __offset(4); return o != 0 ? __vector_len(o) : 0; } public ByteBuffer shapeAsByteBuffer() { return __vector_as_bytebuffer(4, 8); } public ByteBuffer shapeInByteBuffer(ByteBuffer _bb) { return __vector_in_bytebuffer(_bb, 4, 8); } - public byte buffer(int j) { int o = __offset(6); return o != 0 ? bb.get(__vector(o) + j * 1) : 0; } + public byte buffer(int j) { int o = __offset(6); return o != 0 ? bb.get(__vector(o) + j) : 0; } public int bufferLength() { int o = __offset(6); return o != 0 ? __vector_len(o) : 0; } public ByteBuffer bufferAsByteBuffer() { return __vector_as_bytebuffer(6, 1); } public ByteBuffer bufferInByteBuffer(ByteBuffer _bb) { return __vector_in_bytebuffer(_bb, 6, 1); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/graph/FlatConfiguration.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/graph/FlatConfiguration.java index 8c341212c..28b8d0ff4 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/graph/FlatConfiguration.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/graph/FlatConfiguration.java @@ -37,7 +37,7 @@ public final class FlatConfiguration extends Table { public byte executionMode() { int o = __offset(6); return o != 0 ? bb.get(o + bb_pos) : 0; } public byte profilingMode() { int o = __offset(8); return o != 0 ? bb.get(o + bb_pos) : 0; } public byte outputMode() { int o = __offset(10); return o != 0 ? bb.get(o + bb_pos) : 0; } - public boolean timestats() { int o = __offset(12); return o != 0 ? 0!=bb.get(o + bb_pos) : false; } + public boolean timestats() { int o = __offset(12); return o != 0 && 0 != bb.get(o + bb_pos); } public long footprintForward() { int o = __offset(14); return o != 0 ? bb.getLong(o + bb_pos) : 0L; } public long footprintBackward() { int o = __offset(16); return o != 0 ? bb.getLong(o + bb_pos) : 0L; } public byte direction() { int o = __offset(18); return o != 0 ? 
bb.get(o + bb_pos) : 0; } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/graph/FlatNode.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/graph/FlatNode.java index 212116196..ee45ab97d 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/graph/FlatNode.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/graph/FlatNode.java @@ -61,7 +61,7 @@ public final class FlatNode extends Table { public int extraIntegerLength() { int o = __offset(22); return o != 0 ? __vector_len(o) : 0; } public ByteBuffer extraIntegerAsByteBuffer() { return __vector_as_bytebuffer(22, 8); } public ByteBuffer extraIntegerInByteBuffer(ByteBuffer _bb) { return __vector_in_bytebuffer(_bb, 22, 8); } - public boolean extraBools(int j) { int o = __offset(24); return o != 0 ? 0!=bb.get(__vector(o) + j * 1) : false; } + public boolean extraBools(int j) { int o = __offset(24); return o != 0 && 0 != bb.get(__vector(o) + j); } public int extraBoolsLength() { int o = __offset(24); return o != 0 ? __vector_len(o) : 0; } public ByteBuffer extraBoolsAsByteBuffer() { return __vector_as_bytebuffer(24, 1); } public ByteBuffer extraBoolsInByteBuffer(ByteBuffer _bb) { return __vector_in_bytebuffer(_bb, 24, 1); } @@ -79,7 +79,7 @@ public final class FlatNode extends Table { public String opName() { int o = __offset(36); return o != 0 ? __string(o + bb_pos) : null; } public ByteBuffer opNameAsByteBuffer() { return __vector_as_bytebuffer(36, 1); } public ByteBuffer opNameInByteBuffer(ByteBuffer _bb) { return __vector_in_bytebuffer(_bb, 36, 1); } - public byte outputTypes(int j) { int o = __offset(38); return o != 0 ? bb.get(__vector(o) + j * 1) : 0; } + public byte outputTypes(int j) { int o = __offset(38); return o != 0 ? bb.get(__vector(o) + j) : 0; } public int outputTypesLength() { int o = __offset(38); return o != 0 ? 
__vector_len(o) : 0; } public ByteBuffer outputTypesAsByteBuffer() { return __vector_as_bytebuffer(38, 1); } public ByteBuffer outputTypesInByteBuffer(ByteBuffer _bb) { return __vector_in_bytebuffer(_bb, 38, 1); } @@ -91,7 +91,7 @@ public final class FlatNode extends Table { public int varControlDepsLength() { int o = __offset(44); return o != 0 ? __vector_len(o) : 0; } public String controlDepFor(int j) { int o = __offset(46); return o != 0 ? __string(__vector(o) + j * 4) : null; } public int controlDepForLength() { int o = __offset(46); return o != 0 ? __vector_len(o) : 0; } - public byte extraTypes(int j) { int o = __offset(48); return o != 0 ? bb.get(__vector(o) + j * 1) : 0; } + public byte extraTypes(int j) { int o = __offset(48); return o != 0 ? bb.get(__vector(o) + j) : 0; } public int extraTypesLength() { int o = __offset(48); return o != 0 ? __vector_len(o) : 0; } public ByteBuffer extraTypesAsByteBuffer() { return __vector_as_bytebuffer(48, 1); } public ByteBuffer extraTypesInByteBuffer(ByteBuffer _bb) { return __vector_in_bytebuffer(_bb, 48, 1); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/graph/FlatProperties.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/graph/FlatProperties.java index 96ff452b3..3371e3a8a 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/graph/FlatProperties.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/graph/FlatProperties.java @@ -51,7 +51,7 @@ public final class FlatProperties extends Table { public FlatArray a(int j) { return a(new FlatArray(), j); } public FlatArray a(FlatArray obj, int j) { int o = __offset(12); return o != 0 ? obj.__assign(__indirect(__vector(o) + j * 4), bb) : null; } public int aLength() { int o = __offset(12); return o != 0 ? __vector_len(o) : 0; } - public boolean b(int j) { int o = __offset(14); return o != 0 ? 
0!=bb.get(__vector(o) + j * 1) : false; } + public boolean b(int j) { int o = __offset(14); return o != 0 && 0 != bb.get(__vector(o) + j); } public int bLength() { int o = __offset(14); return o != 0 ? __vector_len(o) : 0; } public ByteBuffer bAsByteBuffer() { return __vector_as_bytebuffer(14, 1); } public ByteBuffer bInByteBuffer(ByteBuffer _bb) { return __vector_in_bytebuffer(_bb, 14, 1); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/converters/DifferentialFunctionClassHolder.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/converters/DifferentialFunctionClassHolder.java index 93e731576..3a1a0527c 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/converters/DifferentialFunctionClassHolder.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/converters/DifferentialFunctionClassHolder.java @@ -41,16 +41,16 @@ import java.util.*; @Slf4j public class DifferentialFunctionClassHolder { - private Map nodeConverters = ImportClassMapping.getOpNameMapping(); - private Map tensorFlowNames = ImportClassMapping.getTFOpMappingFunctions(); - private Map onnxNames = ImportClassMapping.getOnnxOpMappingFunctions(); - private Map> customOpHashToClass = new HashMap<>(); - private Map>> customOpHashToClasses = new HashMap<>(); //Only contains ops with 1 hash to multiple classes - private List missingOps = new ArrayList<>(); + private final Map nodeConverters = ImportClassMapping.getOpNameMapping(); + private final Map tensorFlowNames = ImportClassMapping.getTFOpMappingFunctions(); + private final Map onnxNames = ImportClassMapping.getOnnxOpMappingFunctions(); + private final Map> customOpHashToClass = new HashMap<>(); + private final Map>> customOpHashToClasses = new HashMap<>(); //Only contains ops with 1 hash to multiple classes + private final List missingOps = new ArrayList<>(); - private Map onnxOpDescriptors; - private Map tensorflowOpDescriptors; - private Map> fieldsForFunction; + private final Map 
onnxOpDescriptors; + private final Map tensorflowOpDescriptors; + private final Map> fieldsForFunction; private static final Set fieldNamesOpsIgnore = new LinkedHashSet(){{ add("extraArgs"); @@ -71,7 +71,7 @@ public class DifferentialFunctionClassHolder { }}; //When determining fields/properties, where should we terminate the search? //We don't wan to include every single field from every single superclass - private static final Set classesToIgnore = new HashSet<>(Arrays.asList( + private static final Set classesToIgnore = new HashSet<>(Collections.singletonList( Object.class // BaseOp.class //Exclude x/y/z, n, numProcessed, extraArgs, etc )); @@ -82,11 +82,11 @@ public class DifferentialFunctionClassHolder { } @Getter - private int countTotalTfOps; + private final int countTotalTfOps; @Getter - private int countTotalMappedOps; + private final int countTotalMappedOps; - private static DifferentialFunctionClassHolder INSTANCE = new DifferentialFunctionClassHolder(); + private static final DifferentialFunctionClassHolder INSTANCE = new DifferentialFunctionClassHolder(); /** * Get the fields for a given {@link DifferentialFunction} diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/converters/ImportClassMapping.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/converters/ImportClassMapping.java index e6a00e01d..c188fb8c3 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/converters/ImportClassMapping.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/converters/ImportClassMapping.java @@ -36,7 +36,7 @@ public class ImportClassMapping { private static final Map TF_OP_NAME_MAP = new HashMap<>(); private static final Map ONNX_OP_NAME_MAP = new HashMap<>(); - private static final List> fnClasses = Arrays.>asList( + private static final List> fnClasses = Arrays.asList( org.nd4j.linalg.api.ops.DynamicCustomOp.class, org.nd4j.linalg.api.ops.NoOp.class, org.nd4j.linalg.api.ops.impl.updaters.SgdUpdater.class, diff 
--git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/descriptors/properties/adapters/BooleanAdapter.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/descriptors/properties/adapters/BooleanAdapter.java index 602b0d184..f25243100 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/descriptors/properties/adapters/BooleanAdapter.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/descriptors/properties/adapters/BooleanAdapter.java @@ -29,6 +29,6 @@ public class BooleanAdapter implements AttributeAdapter { @Override public void mapAttributeFor(Object inputAttributeValue, Field fieldFor, DifferentialFunction on) { - on.setValueFor(fieldFor, (boolean) inputAttributeValue); + on.setValueFor(fieldFor, inputAttributeValue); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/descriptors/properties/adapters/DataTypeAdapter.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/descriptors/properties/adapters/DataTypeAdapter.java index 2c3cdb373..9552c14b6 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/descriptors/properties/adapters/DataTypeAdapter.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/descriptors/properties/adapters/DataTypeAdapter.java @@ -38,7 +38,7 @@ public class DataTypeAdapter implements AttributeAdapter { val x = dataType.getNumber(); return dtypeConv(x); - }; + } public static org.nd4j.linalg.api.buffer.DataType dtypeConv(int dataType) { @@ -58,5 +58,5 @@ public class DataTypeAdapter implements AttributeAdapter { case DataType.DT_UINT64_VALUE: return org.nd4j.linalg.api.buffer.DataType.UINT64; default: throw new UnsupportedOperationException("DataType isn't supported: " + dataType + " - " + DataType.forNumber(dataType)); } - }; + } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/graphmapper/tf/TFGraphMapper.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/graphmapper/tf/TFGraphMapper.java index 
7e1e236fb..c293bde56 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/graphmapper/tf/TFGraphMapper.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/graphmapper/tf/TFGraphMapper.java @@ -786,7 +786,7 @@ public class TFGraphMapper { } else if (!setList.getBList().isEmpty()) { break; } else if (!setList.getFList().isEmpty()) { - val floats = Floats.toArray((Collection) setList.getFList()); + val floats = Floats.toArray(setList.getFList()); if (adapter != null) { adapter.mapAttributeFor(floats, currentField, on); } else diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/graphmapper/tf/tensors/TFTensorMapper.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/graphmapper/tf/tensors/TFTensorMapper.java index 16f75542e..6204552a3 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/graphmapper/tf/tensors/TFTensorMapper.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/graphmapper/tf/tensors/TFTensorMapper.java @@ -32,7 +32,7 @@ import java.nio.ByteBuffer; */ public interface TFTensorMapper { - enum ValueSource {EMPTY, VALUE_COUNT, BINARY}; + enum ValueSource {EMPTY, VALUE_COUNT, BINARY} DataType dataType(); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/tensorflow/TensorFlowImportValidator.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/tensorflow/TensorFlowImportValidator.java index 5338f5e5d..45c59221f 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/tensorflow/TensorFlowImportValidator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/imports/tensorflow/TensorFlowImportValidator.java @@ -268,8 +268,8 @@ public class TensorFlowImportValidator { return new TFImportStatus( Collections.singletonList(path), - unsupportedOpNames.size() > 0 ? Collections.singletonList(path) : Collections.emptyList(), - Collections.emptyList(), + unsupportedOpNames.size() > 0 ? 
Collections.singletonList(path) : Collections.emptyList(), + Collections.emptyList(), opCount, opNames.size(), opNames, @@ -283,16 +283,16 @@ public class TensorFlowImportValidator { } log.warn("Failed to import model from: " + path + " - not a TensorFlow frozen model in ProtoBuf format?", t); return new TFImportStatus( - Collections.emptyList(), - Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), Collections.singletonList(path), 0, 0, - Collections.emptySet(), - Collections.emptyMap(), - Collections.emptySet(), - Collections.emptySet(), - Collections.>emptyMap()); + Collections.emptySet(), + Collections.emptyMap(), + Collections.emptySet(), + Collections.emptySet(), + Collections.emptyMap()); } } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationELU.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationELU.java index 2e7bdf90d..2498b7123 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationELU.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationELU.java @@ -34,7 +34,7 @@ import org.nd4j.common.primitives.Pair; public class ActivationELU extends BaseActivationFunction { public static final double DEFAULT_ALPHA = 1.0; - private double alpha; + private final double alpha; public ActivationELU() { this(DEFAULT_ALPHA); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationGELU.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationGELU.java index f9fe2714b..a66c6e66e 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationGELU.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationGELU.java @@ -35,7 +35,7 @@ import org.nd4j.common.primitives.Pair; @Getter public class ActivationGELU extends BaseActivationFunction { - private boolean 
precise; + private final boolean precise; public ActivationGELU(boolean precise){ this.precise = precise; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationHardTanH.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationHardTanH.java index c6eafce62..e56b654f9 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationHardTanH.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationHardTanH.java @@ -30,9 +30,11 @@ import org.nd4j.linalg.factory.Nd4j; import org.nd4j.common.primitives.Pair; /** + * {@code * ⎧ 1, if x > 1 f(x) = ⎨ -1, if x < -1 ⎩ x, otherwise +} */ @EqualsAndHashCode(callSuper = false) @Getter diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationLReLU.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationLReLU.java index 9a3d34a65..5459e7aeb 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationLReLU.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationLReLU.java @@ -39,7 +39,7 @@ import org.nd4j.common.primitives.Pair; public class ActivationLReLU extends BaseActivationFunction { public static final double DEFAULT_ALPHA = 0.01; - private double alpha; + private final double alpha; public ActivationLReLU() { this(DEFAULT_ALPHA); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationPReLU.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationPReLU.java index c7bf5778c..23e3b5a4d 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationPReLU.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationPReLU.java @@ -32,7 +32,7 @@ import org.nd4j.common.primitives.Pair; @Getter public class ActivationPReLU extends 
BaseActivationFunction { - private INDArray alpha; + private final INDArray alpha; private long[] sharedAxes = null; public ActivationPReLU(INDArray alpha, long[] sharedAxes) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationRReLU.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationRReLU.java index 56b0c9b95..88c662670 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationRReLU.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationRReLU.java @@ -39,7 +39,8 @@ public class ActivationRReLU extends BaseActivationFunction { public static final double DEFAULT_L = 1.0 / 8; public static final double DEFAULT_U = 1.0 / 3; - private double l, u; + private final double l; + private final double u; private transient INDArray alpha; //don't need to write to json, when streaming public ActivationRReLU() { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationReLU.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationReLU.java index d96088dc7..4452f28a3 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationReLU.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationReLU.java @@ -36,9 +36,9 @@ import org.nd4j.linalg.factory.Nd4j; @Getter public class ActivationReLU extends BaseActivationFunction { - private Double max; - private Double threshold; - private Double negativeSlope; + private final Double max; + private final Double threshold; + private final Double negativeSlope; public ActivationReLU(){ this(null, null, null); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationThresholdedReLU.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationThresholdedReLU.java index 9ae0df963..6173e6f3b 100644 
--- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationThresholdedReLU.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationThresholdedReLU.java @@ -31,7 +31,7 @@ import org.nd4j.common.primitives.Pair; /** * Thresholded RELU * - * f(x) = x for x > theta, f(x) = 0 otherwise. theta defaults to 1.0 + * f(x) = x for x > theta, f(x) = 0 otherwise. theta defaults to 1.0 * * @author Max Pumperla */ @@ -40,7 +40,7 @@ import org.nd4j.common.primitives.Pair; public class ActivationThresholdedReLU extends BaseActivationFunction { public static final double DEFAULT_THETA = 1.0; - private double theta; + private final double theta; public ActivationThresholdedReLU() { this(DEFAULT_THETA); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/Blas.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/Blas.java index fe5c1ff50..28d89f378 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/Blas.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/Blas.java @@ -22,7 +22,7 @@ package org.nd4j.linalg.api.blas; public interface Blas { - public enum Vendor { + enum Vendor { UNKNOWN, CUBLAS, OPENBLAS, MKL, } @@ -45,5 +45,5 @@ public interface Blas { * * @return the BLAS library vendor */ - public Vendor getBlasVendor(); + Vendor getBlasVendor(); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/BlasBufferUtil.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/BlasBufferUtil.java index df61d777d..c3ac9321e 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/BlasBufferUtil.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/BlasBufferUtil.java @@ -245,8 +245,8 @@ public class BlasBufferUtil { if (toSet.data().allocationMode() == DataBuffer.AllocationMode.HEAP) { Object array = toSet.data().array(); //data is assumed to have already been updated 
- if (array == data) - return; + if (array == data) { + } else { //copy the data over directly to the underlying array float[] d = (float[]) array; @@ -310,8 +310,8 @@ public class BlasBufferUtil { if (toSet.data().allocationMode() == DataBuffer.AllocationMode.HEAP) { Object array = toSet.data().array(); //data is assumed to have already been updated - if (array == data) - return; + if (array == data) { + } else { //copy the data over directly to the underlying array double[] d = (double[]) array; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLapack.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLapack.java index cf4487de4..3fc641421 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLapack.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLapack.java @@ -36,8 +36,8 @@ public abstract class BaseLapack implements Lapack { if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE) throw new ND4JArraySizeException(); - int m = (int) A.rows(); - int n = (int) A.columns(); + int m = A.rows(); + int n = A.columns(); INDArray INFO = Nd4j.createArrayFromShapeBuffer(Nd4j.getDataBufferFactory().createInt(1), Nd4j.getShapeInfoProvider().createShapeInformation(new long[] {1, 1}, A.dataType()).getFirst()); @@ -88,7 +88,7 @@ public abstract class BaseLapack implements Lapack { throw new ND4JArraySizeException(); byte uplo = (byte) (lower ? 'L' : 'U'); // upper or lower part of the factor desired ? - int n = (int) A.columns(); + int n = A.columns(); INDArray INFO = Nd4j.createArrayFromShapeBuffer(Nd4j.getDataBufferFactory().createInt(1), Nd4j.getShapeInfoProvider().createShapeInformation(new long[] {1, 1}, A.dataType()).getFirst()); @@ -106,7 +106,6 @@ public abstract class BaseLapack implements Lapack { throw new Error("The matrix is not positive definite! 
(potrf fails @ order " + INFO.getInt(0) + ")"); } - return; } @@ -132,8 +131,8 @@ public abstract class BaseLapack implements Lapack { if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE) throw new ND4JArraySizeException(); - int m = (int) A.rows(); - int n = (int) A.columns(); + int m = A.rows(); + int n = A.columns(); INDArray INFO = Nd4j.createArrayFromShapeBuffer(Nd4j.getDataBufferFactory().createInt(1), Nd4j.getShapeInfoProvider().createShapeInformation(new long[] {1, 1}, A.dataType()).getFirst()); @@ -187,9 +186,9 @@ public abstract class BaseLapack implements Lapack { int status = -1; if (A.data().dataType() == DataType.DOUBLE) { - status = dsyev(jobz, uplo, (int) A.rows(), A, V); + status = dsyev(jobz, uplo, A.rows(), A, V); } else if (A.data().dataType() == DataType.FLOAT) { - status = ssyev(jobz, uplo, (int) A.rows(), A, V); + status = ssyev(jobz, uplo, A.rows(), A, V); } else { throw new UnsupportedOperationException(); } @@ -218,8 +217,8 @@ public abstract class BaseLapack implements Lapack { if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE) throw new ND4JArraySizeException(); - int m = (int) A.rows(); - int n = (int) A.columns(); + int m = A.rows(); + int n = A.columns(); byte jobu = (byte) (U == null ? 'N' : 'A'); byte jobvt = (byte) (VT == null ? 
'N' : 'A'); @@ -274,8 +273,8 @@ public abstract class BaseLapack implements Lapack { if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE) throw new ND4JArraySizeException(); - int m = (int) A.rows(); - int n = (int) A.columns(); + int m = A.rows(); + int n = A.columns(); INDArray L = Nd4j.create(m, n); for (int r = 0; r < m; r++) { @@ -298,8 +297,8 @@ public abstract class BaseLapack implements Lapack { if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE) throw new ND4JArraySizeException(); - int m = (int) A.rows(); - int n = (int) A.columns(); + int m = A.rows(); + int n = A.columns(); INDArray U = Nd4j.create(n, n); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLevel2.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLevel2.java index 1743e993e..e331a0d6a 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLevel2.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLevel2.java @@ -119,11 +119,11 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, X, Y); if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE || A.size(0) > Integer.MAX_VALUE) throw new ND4JArraySizeException(); - dgbmv(order, TransA, (int) A.rows(), (int) A.columns(), KL, KU, alpha, A, (int) A.size(0), X, X.stride(-1), beta, Y, + dgbmv(order, TransA, A.rows(), A.columns(), KL, KU, alpha, A, (int) A.size(0), X, X.stride(-1), beta, Y, Y.stride(-1)); } else { DefaultOpExecutioner.validateDataType(DataType.FLOAT, A, X, Y); - sgbmv(order, TransA, (int) A.rows(), (int) A.columns(), KL, KU, (float) alpha, A, (int) A.size(0), X, X.stride(-1), (float) beta, Y, Y.stride(-1)); + sgbmv(order, TransA, A.rows(), A.columns(), KL, KU, (float) alpha, A, (int) A.size(0), X, X.stride(-1), (float) beta, Y, Y.stride(-1)); } OpExecutionerUtil.checkForAny(Y); @@ -148,10 
+148,10 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, X, Y); if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE || A.size(0) > Integer.MAX_VALUE) throw new ND4JArraySizeException(); - dger(order, (int) A.rows(), (int) A.columns(), alpha, X, X.stride(-1), Y, Y.stride(-1), A, (int) A.size(0)); + dger(order, A.rows(), A.columns(), alpha, X, X.stride(-1), Y, Y.stride(-1), A, (int) A.size(0)); } else { DefaultOpExecutioner.validateDataType(DataType.FLOAT, A, X, Y); - sger(order, (int) A.rows(), (int) A.columns(), (float) alpha, X, X.stride(-1), Y, Y.stride(-1), A, (int) A.size(0)); + sger(order, A.rows(), A.columns(), (float) alpha, X, X.stride(-1), Y, Y.stride(-1), A, (int) A.size(0)); } OpExecutionerUtil.checkForAny(A); @@ -180,11 +180,11 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { } if (X.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, X, Y); - dsbmv(order, Uplo, (int) X.length(), (int) A.columns(), alpha, A, (int) A.size(0), X, X.stride(-1), beta, Y, + dsbmv(order, Uplo, (int) X.length(), A.columns(), alpha, A, (int) A.size(0), X, X.stride(-1), beta, Y, Y.stride(-1)); } else { DefaultOpExecutioner.validateDataType(DataType.FLOAT, A, X, Y); - ssbmv(order, Uplo, (int) X.length(), (int) A.columns(), (float) alpha, A, (int) A.size(0), X, X.stride(-1), (float) beta, + ssbmv(order, Uplo, (int) X.length(), A.columns(), (float) alpha, A, (int) A.size(0), X, X.stride(-1), (float) beta, Y, Y.stride(-1)); } @@ -392,10 +392,10 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { if (X.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, X); - dtbmv(order, Uplo, TransA, Diag, (int) X.length(), (int) A.columns(), A, (int) A.size(0), X, X.stride(-1)); + dtbmv(order, Uplo, TransA, Diag, (int) X.length(), A.columns(), A, (int) A.size(0), 
X, X.stride(-1)); } else { DefaultOpExecutioner.validateDataType(DataType.FLOAT, A, X); - stbmv(order, Uplo, TransA, Diag, (int) X.length(), (int) A.columns(), A, (int) A.size(0), X, X.stride(-1)); + stbmv(order, Uplo, TransA, Diag, (int) X.length(), A.columns(), A, (int) A.size(0), X, X.stride(-1)); } } @@ -420,10 +420,10 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { if (X.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, X); - dtbsv(order, Uplo, TransA, Diag, (int) X.length(), (int) A.columns(), A, (int) A.size(0), X, X.stride(-1)); + dtbsv(order, Uplo, TransA, Diag, (int) X.length(), A.columns(), A, (int) A.size(0), X, X.stride(-1)); } else { DefaultOpExecutioner.validateDataType(DataType.FLOAT, A, X); - stbsv(order, Uplo, TransA, Diag, (int) X.length(), (int) A.columns(), A, (int) A.size(0), X, X.stride(-1)); + stbsv(order, Uplo, TransA, Diag, (int) X.length(), A.columns(), A, (int) A.size(0), X, X.stride(-1)); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLevel3.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLevel3.java index 958396b81..35eb2af49 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLevel3.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLevel3.java @@ -142,10 +142,10 @@ public abstract class BaseLevel3 extends BaseLevel implements Level3 { if (A.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, B, C); - dsymm(Order, Side, Uplo, (int) C.rows(), (int) C.columns(), alpha, A, (int) A.size(0), B, (int) B.size(0), beta, C, (int) C.size(0)); + dsymm(Order, Side, Uplo, C.rows(), C.columns(), alpha, A, (int) A.size(0), B, (int) B.size(0), beta, C, (int) C.size(0)); } else { DefaultOpExecutioner.validateDataType(DataType.FLOAT, A, B, C); - ssymm(Order, Side, Uplo, (int) C.rows(), (int) 
C.columns(), (float) alpha, A, (int) A.size(0), B, (int) B.size(0), (float) beta, C, + ssymm(Order, Side, Uplo, C.rows(), C.columns(), (float) alpha, A, (int) A.size(0), B, (int) B.size(0), (float) beta, C, (int) C.size(0)); } @@ -180,10 +180,10 @@ public abstract class BaseLevel3 extends BaseLevel implements Level3 { if (A.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, C); - dsyrk(Order, Uplo, Trans, (int) C.rows(), 1, alpha, A, (int) A.size(0), beta, C, (int) C.size(0)); + dsyrk(Order, Uplo, Trans, C.rows(), 1, alpha, A, (int) A.size(0), beta, C, (int) C.size(0)); } else { DefaultOpExecutioner.validateDataType(DataType.FLOAT, A, C); - ssyrk(Order, Uplo, Trans, (int) C.rows(), 1, (float) alpha, A, (int) A.size(0), (float) beta, C, (int) C.size(0)); + ssyrk(Order, Uplo, Trans, C.rows(), 1, (float) alpha, A, (int) A.size(0), (float) beta, C, (int) C.size(0)); } OpExecutionerUtil.checkForAny(C); @@ -218,10 +218,10 @@ public abstract class BaseLevel3 extends BaseLevel implements Level3 { if (A.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, B, C); - dsyr2k(Order, Uplo, Trans, (int) A.rows(), (int) A.columns(), alpha, A, (int) A.size(0), B, (int) B.size(0), beta, C, (int) C.size(0)); + dsyr2k(Order, Uplo, Trans, A.rows(), A.columns(), alpha, A, (int) A.size(0), B, (int) B.size(0), beta, C, (int) C.size(0)); } else { DefaultOpExecutioner.validateDataType(DataType.FLOAT, A, B, C); - ssyr2k(Order, Uplo, Trans, (int) A.rows(), (int) A.columns(), (float) alpha, A, (int) A.size(0), B, (int) B.size(0), (float) beta, C, (int) C.size(0)); + ssyr2k(Order, Uplo, Trans, A.rows(), A.columns(), (float) alpha, A, (int) A.size(0), B, (int) B.size(0), (float) beta, C, (int) C.size(0)); } OpExecutionerUtil.checkForAny(C); @@ -257,10 +257,10 @@ public abstract class BaseLevel3 extends BaseLevel implements Level3 { if (A.data().dataType() == DataType.DOUBLE) { 
DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, B, C); - dtrmm(Order, Side, Uplo, TransA, Diag, (int) A.rows(), (int) A.columns(), alpha, A, (int) A.size(0), B, (int) B.size(0)); + dtrmm(Order, Side, Uplo, TransA, Diag, A.rows(), A.columns(), alpha, A, (int) A.size(0), B, (int) B.size(0)); } else { DefaultOpExecutioner.validateDataType(DataType.FLOAT, A, B, C); - strmm(Order, Side, Uplo, TransA, Diag, (int) A.rows(), (int) A.columns(), (float) alpha, A, (int) A.size(0), B, (int) B.size(0)); + strmm(Order, Side, Uplo, TransA, Diag, A.rows(), A.columns(), (float) alpha, A, (int) A.size(0), B, (int) B.size(0)); } OpExecutionerUtil.checkForAny(C); @@ -295,10 +295,10 @@ public abstract class BaseLevel3 extends BaseLevel implements Level3 { if (A.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, B); - dtrsm(Order, Side, Uplo, TransA, Diag, (int) A.rows(), (int) A.columns(), alpha, A, (int) A.size(0), B, (int) B.size(0)); + dtrsm(Order, Side, Uplo, TransA, Diag, A.rows(), A.columns(), alpha, A, (int) A.size(0), B, (int) B.size(0)); } else { DefaultOpExecutioner.validateDataType(DataType.FLOAT, A, B); - strsm(Order, Side, Uplo, TransA, Diag, (int) A.rows(), (int) A.columns(), (float) alpha, A, (int) A.size(0), B, (int) B.size(0)); + strsm(Order, Side, Uplo, TransA, Diag, A.rows(), A.columns(), (float) alpha, A, (int) A.size(0), B, (int) B.size(0)); } OpExecutionerUtil.checkForAny(B); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/params/GemmParams.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/params/GemmParams.java index 9d436c542..532bbbe53 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/params/GemmParams.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/params/GemmParams.java @@ -78,18 +78,18 @@ public @Data class GemmParams { this.b = copyIfNeccessary(b); this.c = c; if (ordering == 'c') { - this.m = (int) 
c.columns(); - this.n = (int) c.rows(); - this.k = (int) a.columns(); + this.m = c.columns(); + this.n = c.rows(); + this.k = a.columns(); } else { - this.m = (int) c.rows(); - this.n = (int) c.columns(); - this.k = (int) b.columns(); + this.m = c.rows(); + this.n = c.columns(); + this.k = b.columns(); } - this.lda = (int) a.rows(); - this.ldb = (int) b.rows(); - this.ldc = (int) c.rows(); + this.lda = a.rows(); + this.ldb = b.rows(); + this.ldc = c.rows(); this.transA = 'N'; this.transB = 'N'; @@ -101,15 +101,15 @@ public @Data class GemmParams { this.b = b.dup(a.ordering()); this.c = c; - this.m = (int) c.rows(); - this.n = (int) c.columns(); - this.k = (int) a.columns(); + this.m = c.rows(); + this.n = c.columns(); + this.k = a.columns(); this.ordering = a.ordering(); - this.lda = (int) a.rows(); - this.ldb = (int) b.rows(); - this.ldc = (int) c.rows(); + this.lda = a.rows(); + this.ldb = b.rows(); + this.ldc = c.rows(); this.transA = 'N'; this.transB = 'N'; @@ -124,14 +124,14 @@ public @Data class GemmParams { this.b = copyIfNeccessary(b); this.c = c; - this.m = (int) c.rows(); - this.n = (int) c.columns(); - this.k = (int) a.columns(); + this.m = c.rows(); + this.n = c.columns(); + this.k = a.columns(); //always fortran ordering - this.lda = (int) (this.a.ordering() == 'f' ? this.a.rows() : this.a.columns()); //Leading dimension of a, as declared. But swap if 'c' order - this.ldb = (int) (this.b.ordering() == 'f' ? this.b.rows() : this.b.columns()); //Leading dimension of b, as declared. But swap if 'c' order - this.ldc = (int) c.rows(); + this.lda = this.a.ordering() == 'f' ? this.a.rows() : this.a.columns(); //Leading dimension of a, as declared. But swap if 'c' order + this.ldb = this.b.ordering() == 'f' ? this.b.rows() : this.b.columns(); //Leading dimension of b, as declared. But swap if 'c' order + this.ldc = c.rows(); this.transA = (this.a.ordering() == 'c' ? 'T' : 'N'); this.transB = (this.b.ordering() == 'c' ? 
'T' : 'N'); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/params/GemvParameters.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/params/GemvParameters.java index 1e9822a08..7593f3f2b 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/params/GemvParameters.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/params/GemvParameters.java @@ -44,19 +44,19 @@ public @Data class GemvParameters { if (a.ordering() == 'f' && a.isMatrix()) { - this.m = (int) a.rows(); - this.n = (int) a.columns(); - this.lda = (int) a.rows(); + this.m = a.rows(); + this.n = a.columns(); + this.lda = a.rows(); } else if (a.ordering() == 'c' && a.isMatrix()) { - this.m = (int) a.columns(); - this.n = (int) a.rows(); - this.lda = (int) a.columns(); + this.m = a.columns(); + this.n = a.rows(); + this.lda = a.columns(); aOrdering = 'T'; } else { - this.m = (int) a.rows(); - this.n = (int) a.columns(); + this.m = a.rows(); + this.n = a.columns(); this.lda = (int) a.size(0); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/params/MMulTranspose.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/params/MMulTranspose.java index a97049b58..ef7d0b3be 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/params/MMulTranspose.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/params/MMulTranspose.java @@ -33,7 +33,7 @@ import java.util.Map; @Getter @EqualsAndHashCode public class MMulTranspose implements Serializable { - private static MMulTranspose allFalse = MMulTranspose.builder().build(); + private static final MMulTranspose allFalse = MMulTranspose.builder().build(); private boolean transposeA; private boolean transposeB; private boolean transposeResult; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/buffer/BaseDataBuffer.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/buffer/BaseDataBuffer.java index ada11553f..ecb2110c2 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/buffer/BaseDataBuffer.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/buffer/BaseDataBuffer.java @@ -864,17 +864,17 @@ public abstract class BaseDataBuffer implements DataBuffer { case LONG: return ((LongIndexer) indexer).get(i); case UINT32: - return (long) ((UIntIndexer) indexer).get(i); + return ((UIntIndexer) indexer).get(i); case INT: - return (long) ((IntIndexer) indexer).get(i); + return ((IntIndexer) indexer).get(i); case UINT16: - return (long) ((UShortIndexer) indexer).get(i); + return ((UShortIndexer) indexer).get(i); case SHORT: - return (long) ((ShortIndexer) indexer).get(i); + return ((ShortIndexer) indexer).get(i); case BYTE: - return (long) ((ByteIndexer) indexer).get(i); + return ((ByteIndexer) indexer).get(i); case UBYTE: - return (long) ((UByteIndexer) indexer).get(i); + return ((UByteIndexer) indexer).get(i); case BOOL: return ((BooleanIndexer) indexer).get(i) ? 
1L : 0L; default: @@ -908,7 +908,7 @@ public abstract class BaseDataBuffer implements DataBuffer { case SHORT: return ((ShortIndexer) indexer).get(i); case BYTE: - return (short) ((ByteIndexer) indexer).get(i); + return ((ByteIndexer) indexer).get(i); case UINT64: case LONG: return (short) ((LongIndexer) indexer).get(i); @@ -945,7 +945,7 @@ public abstract class BaseDataBuffer implements DataBuffer { case UINT16: return ((UShortIndexer) indexer).get(i); case SHORT: - return (float) ((ShortIndexer) indexer).get(i); + return ((ShortIndexer) indexer).get(i); case BFLOAT16: return ((Bfloat16Indexer) indexer).get(i); case HALF: @@ -953,7 +953,7 @@ public abstract class BaseDataBuffer implements DataBuffer { case UBYTE: return (float) ((UByteIndexer) indexer).get(i); case BYTE: - return (float) ((ByteIndexer) indexer).get(i); + return ((ByteIndexer) indexer).get(i); case UINT64: //Fall through case LONG: return (float) ((LongIndexer) indexer).get(i); @@ -1041,7 +1041,7 @@ public abstract class BaseDataBuffer implements DataBuffer { switch (dataType()) { case BOOL: - ((BooleanIndexer) indexer).put(i, element == 0.0 ? false : true); + ((BooleanIndexer) indexer).put(i, element != 0.0); break; case BYTE: ((ByteIndexer) indexer).put(i, (byte) element); @@ -1137,7 +1137,7 @@ public abstract class BaseDataBuffer implements DataBuffer { switch (dataType()) { case BOOL: - ((BooleanIndexer) indexer).put(i, element == 0 ? false : true); + ((BooleanIndexer) indexer).put(i, element != 0); break; case BYTE: ((ByteIndexer) indexer).put(i, (byte) element); @@ -1233,7 +1233,7 @@ public abstract class BaseDataBuffer implements DataBuffer { switch (dataType()) { case BOOL: - ((BooleanIndexer) indexer).put(i, element == 0 ? 
false : true); + ((BooleanIndexer) indexer).put(i, element != 0); break; case BYTE: ((ByteIndexer) indexer).put(i, (byte) element); @@ -1297,7 +1297,7 @@ public abstract class BaseDataBuffer implements DataBuffer { if (offset() == 0) { return wrappedBuffer().asIntBuffer(); } else - return (IntBuffer) wrappedBuffer().asIntBuffer().position((int) offset()); + return wrappedBuffer().asIntBuffer().position((int) offset()); } @Override @@ -1308,7 +1308,7 @@ public abstract class BaseDataBuffer implements DataBuffer { if (offset() == 0) { return wrappedBuffer().asLongBuffer(); } else - return (LongBuffer) wrappedBuffer().asLongBuffer().position((int) offset()); + return wrappedBuffer().asLongBuffer().position((int) offset()); } @Override @@ -1319,7 +1319,7 @@ public abstract class BaseDataBuffer implements DataBuffer { if (offset() == 0) { return wrappedBuffer().asDoubleBuffer(); } else { - return (DoubleBuffer) wrappedBuffer().asDoubleBuffer().position((int) (offset())); + return wrappedBuffer().asDoubleBuffer().position((int) (offset())); } } @@ -1331,7 +1331,7 @@ public abstract class BaseDataBuffer implements DataBuffer { if (offset() == 0) { return wrappedBuffer().asFloatBuffer(); } else { - return (FloatBuffer) wrappedBuffer().asFloatBuffer().position((int) (offset())); + return wrappedBuffer().asFloatBuffer().position((int) (offset())); } } @@ -1899,10 +1899,7 @@ public abstract class BaseDataBuffer implements DataBuffer { if (released || isAttached() || isConstant()) return false; - if (wrappedDataBuffer != null && wrappedDataBuffer != this) - return false; - - return true; + return wrappedDataBuffer == null || wrappedDataBuffer == this; } protected void markReleased() { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/buffer/util/DataTypeUtil.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/buffer/util/DataTypeUtil.java index 61f4a6b1d..25e49d37f 100644 --- 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/buffer/util/DataTypeUtil.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/buffer/util/DataTypeUtil.java @@ -28,7 +28,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; public class DataTypeUtil { - private volatile transient static DataType dtype; + private volatile static DataType dtype; private static final ReadWriteLock lock = new ReentrantReadWriteLock(); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/FirstAxisIterator.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/FirstAxisIterator.java index 80bf4b574..f37bb7d79 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/FirstAxisIterator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/FirstAxisIterator.java @@ -25,7 +25,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import java.util.Iterator; public class FirstAxisIterator implements Iterator { - private INDArray iterateOver; + private final INDArray iterateOver; private int i = 0; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/FlatIterator.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/FlatIterator.java index 7c0d8ab0a..c6be2d059 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/FlatIterator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/FlatIterator.java @@ -26,10 +26,10 @@ import java.util.Iterator; public class FlatIterator implements Iterator { - private int[] shape; + private final int[] shape; private int runningDimension; - private int[] currentCoord; - private int length; + private final int[] currentCoord; + private final int length; private int current = 0; public FlatIterator(int[] shape) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/INDArrayIterator.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/INDArrayIterator.java index e1eb7a98e..e4740b316 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/INDArrayIterator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/INDArrayIterator.java @@ -29,7 +29,7 @@ import java.util.Iterator; * @author Adam Gibson */ public class INDArrayIterator implements Iterator { - private INDArray iterateOver; + private final INDArray iterateOver; private int i = 0; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/LinearIndexLookup.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/LinearIndexLookup.java index c3308ff23..865283f3e 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/LinearIndexLookup.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/LinearIndexLookup.java @@ -26,11 +26,11 @@ import org.nd4j.common.util.ArrayUtil; import java.io.Serializable; public class LinearIndexLookup implements Serializable { - private char ordering; - private long[][] indexes; - private long[] shape; - private boolean[] exists; - private long numIndexes; + private final char ordering; + private final long[][] indexes; + private final long[] shape; + private final boolean[] exists; + private final long numIndexes; /** * diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/NdIndexIterator.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/NdIndexIterator.java index cd01f89fd..5c6baa572 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/NdIndexIterator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/iter/NdIndexIterator.java @@ -31,10 +31,10 @@ import java.util.Map; public class NdIndexIterator implements Iterator { private int length = -1; private int i = 0; - private long[] shape; + private final long[] shape; private char order = 'c'; private boolean cache 
= false; - private static Map, LinearIndexLookup> lookupMap = new HashMap<>(); + private static final Map, LinearIndexLookup> lookupMap = new HashMap<>(); private LinearIndexLookup lookup; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/AllocationsTracker.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/AllocationsTracker.java index 0f450224d..a9b320f96 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/AllocationsTracker.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/AllocationsTracker.java @@ -30,7 +30,7 @@ import java.util.concurrent.ConcurrentHashMap; @Slf4j public class AllocationsTracker { private static final AllocationsTracker INSTANCE = new AllocationsTracker(); - private Map devices = new ConcurrentHashMap<>(); + private final Map devices = new ConcurrentHashMap<>(); protected AllocationsTracker() { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/BasicMemoryManager.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/BasicMemoryManager.java index f718a9907..2141c3172 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/BasicMemoryManager.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/BasicMemoryManager.java @@ -53,9 +53,9 @@ public abstract class BasicMemoryManager implements MemoryManager { protected Queue intervals = new ConcurrentLinkedQueue<>(); - private ThreadLocal workspace = new ThreadLocal<>(); + private final ThreadLocal workspace = new ThreadLocal<>(); - private ThreadLocal tempWorkspace = new ThreadLocal<>(); + private final ThreadLocal tempWorkspace = new ThreadLocal<>(); /** * This method returns diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/DeviceAllocationsTracker.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/DeviceAllocationsTracker.java index 21a3f1d16..b2a6fc360 100644 
--- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/DeviceAllocationsTracker.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/DeviceAllocationsTracker.java @@ -31,7 +31,7 @@ import java.util.concurrent.atomic.AtomicLong; @Slf4j public class DeviceAllocationsTracker { - private Map bytesMap = new HashMap<>(); + private final Map bytesMap = new HashMap<>(); public DeviceAllocationsTracker() { for (val e:AllocationKind.values()) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/deallocation/DeallocatorService.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/deallocation/DeallocatorService.java index a53e539e9..5934a6d14 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/deallocation/DeallocatorService.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/deallocation/DeallocatorService.java @@ -37,10 +37,10 @@ import java.util.concurrent.atomic.AtomicLong; @Slf4j public class DeallocatorService { - private Thread[] deallocatorThreads; - private ReferenceQueue[] queues; - private Map referenceMap = new ConcurrentHashMap<>(); - private List>> deviceMap = new ArrayList<>(); + private final Thread[] deallocatorThreads; + private final ReferenceQueue[] queues; + private final Map referenceMap = new ConcurrentHashMap<>(); + private final List>> deviceMap = new ArrayList<>(); private final transient AtomicLong counter = new AtomicLong(0); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/pointers/ImmortalFloatPointer.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/pointers/ImmortalFloatPointer.java index d6d2a128c..b6980d7e4 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/pointers/ImmortalFloatPointer.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/pointers/ImmortalFloatPointer.java @@ -26,7 +26,7 @@ import 
org.bytedeco.javacpp.Pointer; @Slf4j public class ImmortalFloatPointer extends FloatPointer { - private Pointer pointer; + private final Pointer pointer; public ImmortalFloatPointer(PagedPointer pointer) { this.pointer = pointer; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/provider/BasicWorkspaceManager.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/provider/BasicWorkspaceManager.java index e03ae02d8..0ae612d0e 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/provider/BasicWorkspaceManager.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/memory/provider/BasicWorkspaceManager.java @@ -73,7 +73,7 @@ public abstract class BasicWorkspaceManager implements MemoryWorkspaceManager { */ @Override public String getUUID() { - return "Workspace_" + String.valueOf(counter.incrementAndGet()); + return "Workspace_" + counter.incrementAndGet(); } /** @@ -351,7 +351,7 @@ public abstract class BasicWorkspaceManager implements MemoryWorkspaceManager { log.info("Number of workspaces in current thread: {}", map.size()); log.info("Workspace name: Allocated / external (spilled) / external (pinned)"); for (String key : map.keySet()) { - long current = ((Nd4jWorkspace) map.get(key)).getCurrentSize(); + long current = map.get(key).getCurrentSize(); long spilled = ((Nd4jWorkspace) map.get(key)).getSpilledSize(); long pinned = ((Nd4jWorkspace) map.get(key)).getPinnedSize(); log.info(String.format("%-26s %8s / %8s / %8s (%11d / %11d / %11d)", (key + ":"), diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ndarray/BaseNDArray.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ndarray/BaseNDArray.java index c948afee7..9887ddadb 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ndarray/BaseNDArray.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ndarray/BaseNDArray.java @@ -419,7 +419,7 @@ public abstract class 
BaseNDArray implements INDArray, Iterable { public BaseNDArray(long newRows, long newColumns, char ordering) { Shape.assertValidOrder(ordering); - this.data = Nd4j.createBuffer((long) newRows * newColumns); + this.data = Nd4j.createBuffer(newRows * newColumns); long[] shape = new long[] {newRows, newColumns}; long[] stride = Nd4j.getStrides(shape, ordering); setShapeInformation(Nd4j.getShapeInfoProvider().createShapeInformation(shape, stride, @@ -522,14 +522,14 @@ public abstract class BaseNDArray implements INDArray, Iterable { public BaseNDArray(float[] data, int[] shape, int[] stride, long offset, char ordering) { Shape.assertValidOrder(ordering); setShapeInformation(Nd4j.getShapeInfoProvider().createShapeInformation(ArrayUtil.toLongArray(shape), ArrayUtil.toLongArray(stride), - Shape.elementWiseStride(shape, stride, ordering == 'f'), ordering, DataType.FLOAT, data != null && data.length > 0 ? false : true)); + Shape.elementWiseStride(shape, stride, ordering == 'f'), ordering, DataType.FLOAT, data == null || data.length <= 0)); if (data != null && data.length > 0) { val perfD = PerformanceTracker.getInstance().helperStartTransaction(); this.data = internalCreateBuffer(data, offset); - PerformanceTracker.getInstance().helperRegisterTransaction(0, perfD, data.length * Nd4j.sizeOfDataType(DataType.FLOAT), MemcpyDirection.HOST_TO_HOST); + PerformanceTracker.getInstance().helperRegisterTransaction(0, perfD, (long) data.length * Nd4j.sizeOfDataType(DataType.FLOAT), MemcpyDirection.HOST_TO_HOST); if (offset >= data.length) throw new IllegalArgumentException("invalid offset: must be < data.length"); @@ -541,7 +541,7 @@ public abstract class BaseNDArray implements INDArray, Iterable { public BaseNDArray(float[] data, long[] shape, long[] stride, long offset, char ordering) { Shape.assertValidOrder(ordering); setShapeInformation(Nd4j.getShapeInfoProvider().createShapeInformation(shape, stride, - Shape.elementWiseStride(shape, stride, ordering == 'f'), ordering, 
DataType.FLOAT, data != null && data.length > 0 ? false : true)); + Shape.elementWiseStride(shape, stride, ordering == 'f'), ordering, DataType.FLOAT, data == null || data.length <= 0)); if (data != null && data.length > 0) { this.data = Nd4j.createTypedBuffer(data, DataType.FLOAT); if (offset >= data.length) @@ -554,7 +554,7 @@ public abstract class BaseNDArray implements INDArray, Iterable { public BaseNDArray(double[] data, long[] shape, long[] stride, long offset, char ordering) { Shape.assertValidOrder(ordering); setShapeInformation(Nd4j.getShapeInfoProvider().createShapeInformation(shape, stride, - Shape.elementWiseStride(shape, stride, ordering == 'f'), ordering, DataType.DOUBLE, data != null && data.length > 0 ? false : true)); + Shape.elementWiseStride(shape, stride, ordering == 'f'), ordering, DataType.DOUBLE, data == null || data.length <= 0)); if (data != null && data.length > 0) { this.data = Nd4j.createBuffer(data, offset); if (offset >= data.length) @@ -673,7 +673,7 @@ public abstract class BaseNDArray implements INDArray, Iterable { val perfX = PerformanceTracker.getInstance().helperStartTransaction(); val buffer = Nd4j.createBuffer(data); - PerformanceTracker.getInstance().helperRegisterTransaction(0, perfX, data.length * Nd4j.sizeOfDataType(buffer.dataType()), MemcpyDirection.HOST_TO_HOST); + PerformanceTracker.getInstance().helperRegisterTransaction(0, perfX, (long) data.length * Nd4j.sizeOfDataType(buffer.dataType()), MemcpyDirection.HOST_TO_HOST); return buffer; } @@ -682,7 +682,7 @@ public abstract class BaseNDArray implements INDArray, Iterable { val perfX = PerformanceTracker.getInstance().helperStartTransaction(); val buffer = Nd4j.createBuffer(data); - PerformanceTracker.getInstance().helperRegisterTransaction(0, perfX, data.length * Nd4j.sizeOfDataType(buffer.dataType()), MemcpyDirection.HOST_TO_HOST); + PerformanceTracker.getInstance().helperRegisterTransaction(0, perfX, (long) data.length * Nd4j.sizeOfDataType(buffer.dataType()), 
MemcpyDirection.HOST_TO_HOST); return buffer; } @@ -691,7 +691,7 @@ public abstract class BaseNDArray implements INDArray, Iterable { val perfX = PerformanceTracker.getInstance().helperStartTransaction(); val buffer = Nd4j.createBuffer(data); - PerformanceTracker.getInstance().helperRegisterTransaction(0, perfX, data.length * Nd4j.sizeOfDataType(buffer.dataType()), MemcpyDirection.HOST_TO_HOST); + PerformanceTracker.getInstance().helperRegisterTransaction(0, perfX, (long) data.length * Nd4j.sizeOfDataType(buffer.dataType()), MemcpyDirection.HOST_TO_HOST); return buffer; } @@ -700,7 +700,7 @@ public abstract class BaseNDArray implements INDArray, Iterable { val perfX = PerformanceTracker.getInstance().helperStartTransaction(); val buffer = Nd4j.createBuffer(data, offset); - PerformanceTracker.getInstance().helperRegisterTransaction(0, perfX, data.length * Nd4j.sizeOfDataType(buffer.dataType()), MemcpyDirection.HOST_TO_HOST); + PerformanceTracker.getInstance().helperRegisterTransaction(0, perfX, (long) data.length * Nd4j.sizeOfDataType(buffer.dataType()), MemcpyDirection.HOST_TO_HOST); return buffer; } @@ -709,7 +709,7 @@ public abstract class BaseNDArray implements INDArray, Iterable { val perfX = PerformanceTracker.getInstance().helperStartTransaction(); val buffer = Nd4j.createBuffer(data, offset); - PerformanceTracker.getInstance().helperRegisterTransaction(0, perfX, data.length * Nd4j.sizeOfDataType(buffer.dataType()), MemcpyDirection.HOST_TO_HOST); + PerformanceTracker.getInstance().helperRegisterTransaction(0, perfX, (long) data.length * Nd4j.sizeOfDataType(buffer.dataType()), MemcpyDirection.HOST_TO_HOST); return buffer; } @@ -1086,7 +1086,7 @@ public abstract class BaseNDArray implements INDArray, Iterable { int length = ArrayUtil.prod(tensorShape); int tensorLength = ArrayUtil.prod(tensorShape); - long offset = index * tensorLength / NDArrayMath.lengthPerSlice(ret2); + long offset = (long) index * tensorLength / NDArrayMath.lengthPerSlice(ret2); if 
(sliceIdx == 0 && length == NDArrayMath.lengthPerSlice(ret2)) { if (offset > Integer.MAX_VALUE) @@ -1460,7 +1460,7 @@ public abstract class BaseNDArray implements INDArray, Iterable { long size_2 = jvmShapeInfo.javaShapeInformation[1 + 2]; if (size_0 != 1) - offset += dim0 * jvmShapeInfo.javaShapeInformation[1 + 0 + 3]; + offset += dim0 * jvmShapeInfo.javaShapeInformation[1 + 3]; if (size_1 != 1) offset += dim1 * jvmShapeInfo.javaShapeInformation[1 + 1 + 3]; if (size_2 != 1) @@ -1900,7 +1900,7 @@ public abstract class BaseNDArray implements INDArray, Iterable { @Override public INDArray getWhere(Number comp, Condition condition) { - return BooleanIndexing.chooseFrom(new INDArray[]{this},Arrays.asList(comp.doubleValue()),Collections.emptyList(),condition); + return BooleanIndexing.chooseFrom(new INDArray[]{this}, Collections.singletonList(comp.doubleValue()),Collections.emptyList(),condition); } @Override @@ -1985,7 +1985,6 @@ public abstract class BaseNDArray implements INDArray, Iterable { Preconditions.checkArgument(slice < slices(), "Invalid slice specified: slice %s must be in range 0 (inclusive) to numSlices=%s (exclusive)", slice, slices()); long[] sliceShape = put.shape(); if (Shape.isRowVectorShape(sliceShape)) { - return; } else { long[] requiredShape = ArrayUtil.removeIndex(shape(), 0); @@ -4111,7 +4110,7 @@ public abstract class BaseNDArray implements INDArray, Iterable { if (isVector()) return Nd4j.pullRows(this, 1, rindices); else { - INDArray ret = Nd4j.createUninitialized(this.dataType(), new long[] {rindices.length, columns()}); + INDArray ret = Nd4j.createUninitialized(this.dataType(), rindices.length, columns()); for (int i = 0; i < rindices.length; i++) ret.putRow(i, getRow(rindices[i])); return ret; @@ -4146,8 +4145,7 @@ public abstract class BaseNDArray implements INDArray, Iterable { // Padding remaining dimensions with all() index if too few indices provided if (indexes.length - numNewAxis < this.rank()) { val newIndexes = new 
INDArrayIndex[this.rank() + numNewAxis]; - for (int e = 0; e < indexes.length; e++) - newIndexes[e] = indexes[e]; + System.arraycopy(indexes, 0, newIndexes, 0, indexes.length); for (int e = indexes.length; e < newIndexes.length; e++) { numAll++; @@ -4312,7 +4310,7 @@ public abstract class BaseNDArray implements INDArray, Iterable { if (isVector()) { return Nd4j.pullRows(this, 0, cindices, this.ordering()); } else { - INDArray ret = Nd4j.createUninitialized(this.dataType(), new long[]{rows(), cindices.length}); + INDArray ret = Nd4j.createUninitialized(this.dataType(), rows(), cindices.length); for (int i = 0; i < cindices.length; i++) ret.putColumn(i, getColumn(cindices[i])); return ret; @@ -5509,8 +5507,7 @@ public abstract class BaseNDArray implements INDArray, Iterable { if (e != 0) { val t = ArrayOptionsHelper.dataType(jvmShapeInfo.javaShapeInformation); - if (t != DataType.UNKNOWN) - return t; + return t; } return DataType.UNKNOWN; @@ -5623,10 +5620,7 @@ public abstract class BaseNDArray implements INDArray, Iterable { @Override public boolean wasClosed() { // data can be null if that's empty array - if (released || (data() != null && data().wasClosed())) - return true; - - return false; + return released || (data() != null && data().wasClosed()); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/BaseOp.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/BaseOp.java index 0fb2db284..be3a172d1 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/BaseOp.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/BaseOp.java @@ -44,6 +44,7 @@ import org.tensorflow.framework.NodeDef; import java.nio.Buffer; import java.util.Arrays; import java.util.Map; +import java.util.Objects; @Data public abstract class BaseOp extends DifferentialFunction implements Op { @@ -145,7 +146,7 @@ public abstract class BaseOp extends DifferentialFunction implements Op { if (extraArgs != null) { 
if (Shape.isZ(dtype) || Shape.isB(dtype)) { - long extraz[] = new long[extraArgs.length]; + long[] extraz = new long[extraArgs.length]; for (int i = 0; i < extraArgs.length; i++) { if (extraArgs[i] instanceof Number) { Number arg = (Number) extraArgs[i]; @@ -156,7 +157,7 @@ public abstract class BaseOp extends DifferentialFunction implements Op { extraArgz = Nd4j.getConstantHandler().getConstantBuffer(extraz, dtype); return extraArgz; } else if (Shape.isR(dtype)) { - double extraz[] = new double[extraArgs.length]; + double[] extraz = new double[extraArgs.length]; for (int i = 0; i < extraArgs.length; i++) { if (!(extraArgs[i] instanceof Number)) continue; @@ -318,12 +319,12 @@ public abstract class BaseOp extends DifferentialFunction implements Op { BaseOp baseOp = (BaseOp) o; - if (x != null ? !x.equals(baseOp.x) : baseOp.x != null) return false; - if (y != null ? !y.equals(baseOp.y) : baseOp.y != null) return false; - if (z != null ? !z.equals(baseOp.z) : baseOp.z != null) return false; + if (!Objects.equals(x, baseOp.x)) return false; + if (!Objects.equals(y, baseOp.y)) return false; + if (!Objects.equals(z, baseOp.z)) return false; // Probably incorrect - comparing Object[] arrays with Arrays.equals if (!Arrays.equals(extraArgs, baseOp.extraArgs)) return false; - return extraArgz != null ? 
extraArgz.equals(baseOp.extraArgz) : baseOp.extraArgz == null; + return Objects.equals(extraArgz, baseOp.extraArgz); } @Override @@ -369,9 +370,9 @@ public abstract class BaseOp extends DifferentialFunction implements Op { if (z.isR()) return new Double(z.getDouble(0)); else if (z.isZ()) - return new Long(z.getInt(0)); + return Long.valueOf(z.getInt(0)); else if (z.isB()) - return new Integer(z.getInt(0)); + return Integer.valueOf(z.getInt(0)); throw new ND4JIllegalStateException("???"); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/BaseOpContext.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/BaseOpContext.java index 8f818b37a..eebc9c8eb 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/BaseOpContext.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/BaseOpContext.java @@ -96,8 +96,7 @@ public abstract class BaseOpContext implements OpContext { @Override public void setDArguments(DataType... 
arguments) { fastpath_d.clear(); - for (val v:arguments) - fastpath_d.add(v); + Collections.addAll(fastpath_d, arguments); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformSameOp.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformSameOp.java index ef23a963d..d83509801 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformSameOp.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformSameOp.java @@ -143,6 +143,6 @@ public abstract class BaseTransformSameOp extends BaseTransformOp implements Tra check = dataType; } } - return Arrays.asList(dataTypes.get(0)); + return Collections.singletonList(dataTypes.get(0)); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/DynamicCustomOp.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/DynamicCustomOp.java index 304f40b99..c1fe9da10 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/DynamicCustomOp.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/DynamicCustomOp.java @@ -135,7 +135,7 @@ public class DynamicCustomOp extends DifferentialFunction implements CustomOp { if(iArguments != null) { for (val a : iArguments) - this.iArguments.add((Long) a.longValue()); + this.iArguments.add(a.longValue()); } bArguments = new ArrayList<>(); dArguments = new ArrayList<>(); @@ -160,7 +160,7 @@ public class DynamicCustomOp extends DifferentialFunction implements CustomOp { * @param outputs the outputs of the op, may be null */ public DynamicCustomOp(String opName, INDArray[] inputs, INDArray[] outputs) { - this(opName, inputs, outputs, Lists.newArrayList(), Lists.newArrayList()); + this(opName, inputs, outputs, Lists.newArrayList(), Lists.newArrayList()); } /** @@ -313,7 +313,7 @@ public class DynamicCustomOp extends DifferentialFunction implements CustomOp { private void addIArgument(Integer... 
arg) { for (val a: arg) - addIArgument((Long) a.longValue()); + addIArgument(a.longValue()); } @Override @@ -690,12 +690,12 @@ public class DynamicCustomOp extends DifferentialFunction implements CustomOp { protected long opHash; protected List outputShapes = new ArrayList<>(); - private List inputArguments = new ArrayList<>(); - private List outputArguments = new ArrayList<>(); - private List tArguments = new ArrayList<>(); - private List iArguments = new ArrayList<>(); - private List dArguments = new ArrayList<>(); - private List bArguments = new ArrayList<>(); + private final List inputArguments = new ArrayList<>(); + private final List outputArguments = new ArrayList<>(); + private final List tArguments = new ArrayList<>(); + private final List iArguments = new ArrayList<>(); + private final List dArguments = new ArrayList<>(); + private final List bArguments = new ArrayList<>(); protected DynamicCustomOpsBuilder(String opName, long hash, int numInputs, int numOutputs, boolean inplaceAllowed, int numTArguments, int numIArguments) { this.opHash = hash; @@ -727,8 +727,7 @@ public class DynamicCustomOp extends DifferentialFunction implements CustomOp { throw new ND4JIllegalStateException("CustomOp [" + opName + "] expects at least " + numInputs + " arguments, but " + inputs.length + " was passed to constructor"); } - for (val in : inputs) - inputArguments.add(in); + Collections.addAll(inputArguments, inputs); return this; } @@ -752,8 +751,7 @@ public class DynamicCustomOp extends DifferentialFunction implements CustomOp { throw new ND4JIllegalStateException("CustomOp [" + opName + "] expects at least " + numOutputs + " arguments, but " + outputs.length + " was passed to constructor"); } - for (val in : outputs) - outputArguments.add(in); + Collections.addAll(outputArguments, outputs); return this; } @@ -873,8 +871,7 @@ public class DynamicCustomOp extends DifferentialFunction implements CustomOp { throw new ND4JIllegalStateException("CustomOp [" + opName + "] 
expects at least " + numTArguments + " integer arguments, but " + targs.length + " was passed to constructor"); } - for (val in : targs) - tArguments.add(in); + Collections.addAll(tArguments, targs); return this; } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/BaseAggregate.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/BaseAggregate.java index d5aaedd5b..1db99da47 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/BaseAggregate.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/BaseAggregate.java @@ -73,11 +73,11 @@ public abstract class BaseAggregate implements Aggregate { @Override public long getRequiredBatchMemorySize() { - long result = maxIntArrays() * maxIntArraySize() * 4; - result += maxArguments() * 8; // pointers - result += maxShapes() * 8; // pointers - result += maxIndexArguments() * 4; - result += maxRealArguments() * (Nd4j.dataType() == DataType.DOUBLE ? 8 + long result = (long) maxIntArrays() * maxIntArraySize() * 4; + result += maxArguments() * 8L; // pointers + result += maxShapes() * 8L; // pointers + result += maxIndexArguments() * 4L; + result += (long) maxRealArguments() * (Nd4j.dataType() == DataType.DOUBLE ? 8 : Nd4j.dataType() == DataType.FLOAT ? 
4 : 2); result += 5 * 4; // numArgs diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/Batch.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/Batch.java index 32801a893..97f67e3b6 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/Batch.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/Batch.java @@ -48,12 +48,12 @@ public class Batch { // all aggregates within this batch @Getter - private List aggregates; + private final List aggregates; @Getter - private T sample; + private final T sample; @Getter - private int numAggregates; + private final int numAggregates; /** * This constructor takes List of Aggregates, and builds Batch instance, usable with Nd4j executioner. diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/AggregateAxpy.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/AggregateAxpy.java index 9f7d5439f..6f61dd546 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/AggregateAxpy.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/AggregateAxpy.java @@ -27,7 +27,7 @@ import org.nd4j.linalg.factory.Nd4j; @Deprecated public class AggregateAxpy extends BaseAggregate { - private int vectorLength; + private final int vectorLength; public AggregateAxpy(@NonNull INDArray x, @NonNull INDArray y, double alpha) { this.arguments.add(x); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/custom/Flatten.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/custom/Flatten.java index 7981803e8..a9cb4a502 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/custom/Flatten.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/custom/Flatten.java @@ -32,6 +32,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import 
org.nd4j.linalg.api.ops.DynamicCustomOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; @Data @@ -43,10 +44,9 @@ public class Flatten extends DynamicCustomOp { public Flatten(char order, INDArray... inputs) { this.order = order; - for (val in:inputs) - inputArguments.add(in); + Collections.addAll(inputArguments, inputs); - iArguments.add(Long.valueOf((int) this.order)); + iArguments.add(Long.valueOf(this.order)); } public Flatten(INDArray output, INDArray... inputs) { @@ -70,6 +70,6 @@ public class Flatten extends DynamicCustomOp { public List calculateOutputDataTypes(List inputDataTypes) { int n = args().length; Preconditions.checkState(inputDataTypes != null && inputDataTypes.size() == n, "Expected %s input data types for %s, got %s", n, getClass(), inputDataTypes); - return Arrays.asList(inputDataTypes.get(0)); + return Collections.singletonList(inputDataTypes.get(0)); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/custom/FusedBatchNorm.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/custom/FusedBatchNorm.java index 751f37ede..8a21451b4 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/custom/FusedBatchNorm.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/custom/FusedBatchNorm.java @@ -78,7 +78,7 @@ public class FusedBatchNorm extends DynamicCustomOp { @Override public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map attributesForNode, GraphDef graph) { boolean isNchw = attributesForNode.containsKey("data_format") && attributesForNode.get("data_format").getS().toStringUtf8().equalsIgnoreCase("NCHW"); - boolean training = !attributesForNode.containsKey("is_training") ? true : attributesForNode.get("is_training").getB(); + boolean training = !attributesForNode.containsKey("is_training") || attributesForNode.get("is_training").getB(); addIArgument(isNchw ? 1 : 0); addIArgument(training ? 
1 : 0); if(attributesForNode.containsKey("T")){ diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/custom/LinearSolve.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/custom/LinearSolve.java index fe4b9b214..60b19c739 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/custom/LinearSolve.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/custom/LinearSolve.java @@ -67,7 +67,7 @@ public class LinearSolve extends DynamicCustomOp { @Override public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map attributesForNode, GraphDef graph) { - boolean adjoint = attributesForNode.containsKey("adjoint") ? attributesForNode.get("adjoint").getB() : false; + boolean adjoint = attributesForNode.containsKey("adjoint") && attributesForNode.get("adjoint").getB(); addBArgument(adjoint); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastEqualTo.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastEqualTo.java index d0b5149dc..c20054198 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastEqualTo.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastEqualTo.java @@ -27,6 +27,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseBroadcastBoolOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class BroadcastEqualTo extends BaseBroadcastBoolOp { @@ -67,7 +68,7 @@ public class BroadcastEqualTo extends BaseBroadcastBoolOp { @Override public List doDiff(List f1) { - return Arrays.asList(outputVariables()[0]); + return Collections.singletonList(outputVariables()[0]); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastLessThan.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastLessThan.java index 23c09413f..e3b6f8d4c 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastLessThan.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastLessThan.java @@ -27,6 +27,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseBroadcastBoolOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class BroadcastLessThan extends BaseBroadcastBoolOp { @@ -88,6 +89,6 @@ public class BroadcastLessThan extends BaseBroadcastBoolOp { @Override public List doDiff(List f1) { - return Arrays.asList(outputVariables()[0]); + return Collections.singletonList(outputVariables()[0]); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastLessThanOrEqual.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastLessThanOrEqual.java index dfa830801..cbfedd015 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastLessThanOrEqual.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastLessThanOrEqual.java @@ -27,6 +27,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseBroadcastBoolOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class BroadcastLessThanOrEqual extends BaseBroadcastBoolOp { @@ -88,6 +89,6 @@ public class BroadcastLessThanOrEqual extends BaseBroadcastBoolOp { @Override public List doDiff(List f1) { - return Arrays.asList(outputVariables()[0]); + return Collections.singletonList(outputVariables()[0]); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/grid/BaseGridOp.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/grid/BaseGridOp.java index 82e11f321..6e38a738b 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/grid/BaseGridOp.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/grid/BaseGridOp.java @@ -30,6 +30,7 @@ import org.nd4j.linalg.api.ops.grid.GridPointers; import org.nd4j.linalg.api.ops.grid.OpDescriptor; import java.util.ArrayList; +import java.util.Collections; import java.util.List; public abstract class BaseGridOp extends BaseOp implements GridOp { @@ -60,9 +61,7 @@ public abstract class BaseGridOp extends BaseOp implements GridOp { } protected BaseGridOp(GridPointers... pointers) { - for (GridPointers ptr : pointers) { - grid.add(ptr); - } + Collections.addAll(grid, pointers); } protected BaseGridOp(List ops) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/image/CropAndResize.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/image/CropAndResize.java index cf5739b6c..2270912bf 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/image/CropAndResize.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/image/CropAndResize.java @@ -36,7 +36,8 @@ import java.util.*; @NoArgsConstructor public class CropAndResize extends DynamicCustomOp { - public enum Method {BILINEAR, NEAREST}; + public enum Method {BILINEAR, NEAREST} + protected Method method = Method.BILINEAR; protected double extrapolationValue = 0.0; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/image/ResizeArea.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/image/ResizeArea.java index f37f8c33f..28a2a9488 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/image/ResizeArea.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/image/ResizeArea.java @@ -78,7 +78,7 @@ 
public class ResizeArea extends DynamicCustomOp { TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph); val attrC = attributesForNode.get("align_corners"); - this.alignCorners = attrC != null ? attrC.getB() : false; + this.alignCorners = attrC != null && attrC.getB(); addArgs(); } @@ -86,7 +86,7 @@ public class ResizeArea extends DynamicCustomOp { protected void addArgs() { iArguments.clear(); if(height != null && width != null){ - INDArray size = Nd4j.createFromArray(new int[]{height,width}); + INDArray size = Nd4j.createFromArray(height,width); addInputArgument(size); //iArguments.add(Long.valueOf(height)); //iArguments.add(Long.valueOf(width)); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/image/ResizeBilinear.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/image/ResizeBilinear.java index 6f39345fe..094844939 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/image/ResizeBilinear.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/image/ResizeBilinear.java @@ -84,8 +84,8 @@ public class ResizeBilinear extends DynamicCustomOp { val attrC = attributesForNode.get("align_corners"); val attrH = attributesForNode.get("half_pixel_centers"); - this.alignCorners = attrC != null ? attrC.getB() : false; - this.halfPixelCenters = attrH != null ? 
attrH.getB() : false; + this.alignCorners = attrC != null && attrC.getB(); + this.halfPixelCenters = attrH != null && attrH.getB(); addArgs(); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/AvgPooling2D.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/AvgPooling2D.java index 725f8c2d2..302009030 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/AvgPooling2D.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/AvgPooling2D.java @@ -274,7 +274,7 @@ public class AvgPooling2D extends DynamicCustomOp { public void initFromOnnx(Onnx.NodeProto node, SameDiff initWith, Map attributesForNode, Onnx.GraphProto graph) { val paddingVal = !attributesForNode.containsKey("auto_pad") ? "VALID" : attributesForNode.get("auto_pad").getS().toStringUtf8(); val kernelShape = attributesForNode.get("kernel_shape").getIntsList(); - val padding = !attributesForNode.containsKey("pads") ? Arrays.asList(1L) : attributesForNode.get("pads").getIntsList(); + val padding = !attributesForNode.containsKey("pads") ? 
Collections.singletonList(1L) : attributesForNode.get("pads").getIntsList(); val strides = attributesForNode.get("strides").getIntsList(); Pooling2DConfig pooling2DConfig = Pooling2DConfig.builder() diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/DeConv2DTF.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/DeConv2DTF.java index 14f594611..cd2899948 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/DeConv2DTF.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/DeConv2DTF.java @@ -245,7 +245,7 @@ public class DeConv2DTF extends DynamicCustomOp { int n = args().length; Preconditions.checkState(inputDataTypes != null && inputDataTypes.size() == n, "Expected %s input data types for %s, got %s", n, getClass(), inputDataTypes); if(!dArguments.isEmpty()) { - return Arrays.asList(dArguments.get(0)); + return Collections.singletonList(dArguments.get(0)); } return Collections.singletonList(inputDataTypes.get(2)); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/DepthToSpace.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/DepthToSpace.java index d897e6fe5..261240ca7 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/DepthToSpace.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/DepthToSpace.java @@ -72,7 +72,7 @@ public class DepthToSpace extends DynamicCustomOp { // Gradient to DepthToSpace is just SpaceToDepth of same block size and data format. 
SDVariable gradient = i_v.get(0); SDVariable ret = new SpaceToDepth(sameDiff, new SDVariable[]{gradient}, blockSize, dataFormat).outputVariable(); - return Arrays.asList(ret); + return Collections.singletonList(ret); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/LocalResponseNormalization.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/LocalResponseNormalization.java index b10c53c0a..f967dcf44 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/LocalResponseNormalization.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/LocalResponseNormalization.java @@ -124,7 +124,7 @@ public class LocalResponseNormalization extends DynamicCustomOp { .alpha(alpha) .beta(beta) .bias(bias) - .depth((int) depth) + .depth(depth) .build(); this.config = localResponseNormalizationConfig; addArgs(); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/SConv2D.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/SConv2D.java index 58437aa57..c772ad44b 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/SConv2D.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/SConv2D.java @@ -79,9 +79,7 @@ public class SConv2D extends Conv2D { inputs.add(arg(0)); inputs.add(f1.get(0)); SDVariable[] args = args(); - for( int i=1; i attributesForNode, GraphDef graph) { TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph); - boolean isNHWC = dataFormat == null ? true : dataFormat.equals(DataFormat.NHWC); + boolean isNHWC = dataFormat == null || dataFormat.equals(DataFormat.NHWC); addIArgument(blockSize, isNHWC ? 
1 : 0); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/GRUCellOutputs.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/GRUCellOutputs.java index 9524a82ef..40ba2f959 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/GRUCellOutputs.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/GRUCellOutputs.java @@ -32,22 +32,22 @@ public class GRUCellOutputs { /** * Reset gate output [batchSize, numUnits]. */ - private SDVariable r; + private final SDVariable r; /** * Update gate output [batchSize, numUnits]. */ - private SDVariable u; + private final SDVariable u; /** * Cell gate output [batchSize, numUnits]. */ - private SDVariable c; + private final SDVariable c; /** * Current cell output [batchSize, numUnits]. */ - private SDVariable h; + private final SDVariable h; public GRUCellOutputs(SDVariable[] outputs){ Preconditions.checkArgument(outputs.length == 4, diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/LSTMCellOutputs.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/LSTMCellOutputs.java index 6949a984e..a64a6ec87 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/LSTMCellOutputs.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/LSTMCellOutputs.java @@ -32,37 +32,37 @@ public class LSTMCellOutputs { /** * Output - input modulation gate activations [batchSize, numUnits]. */ - private SDVariable i; + private final SDVariable i; /** * Activations, cell state (pre tanh) [batchSize, numUnits]. */ - private SDVariable c; + private final SDVariable c; /** * Output - forget gate activations [batchSize, numUnits]. 
*/ - private SDVariable f; + private final SDVariable f; /** * Output - output gate activations [batchSize, numUnits]. */ - private SDVariable o; + private final SDVariable o; /** * Output - input gate activations [batchSize, numUnits]. */ - private SDVariable z; + private final SDVariable z; /** * Cell state, post tanh [batchSize, numUnits]. */ - private SDVariable h; + private final SDVariable h; /** * Current cell output [batchSize, numUnits]. */ - private SDVariable y; + private final SDVariable y; public LSTMCellOutputs(SDVariable[] outputs){ Preconditions.checkArgument(outputs.length == 7, diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/LSTMLayerOutputs.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/LSTMLayerOutputs.java index e1a24e0d7..f0cacbe4c 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/LSTMLayerOutputs.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/LSTMLayerOutputs.java @@ -37,7 +37,7 @@ public class LSTMLayerOutputs { /** * The LSTM layer data format ({@link LSTMDataFormat}. 
*/ - private LSTMDataFormat dataFormat; + private final LSTMDataFormat dataFormat; /** @@ -51,21 +51,21 @@ public class LSTMLayerOutputs { * [sL, 2, bS, nOut] when directionMode == 4 && dataFormat == 3 * numbers mean index in corresponding enums {@link LSTMDataFormat} and {@link LSTMDirectionMode} */ - private SDVariable timeSeriesOutput; + private final SDVariable timeSeriesOutput; /** * cell state at last step cL: * [bS, nOut] when directionMode FWD or BWD * 2, bS, nOut] when directionMode BIDIR_SUM, BIDIR_CONCAT or BIDIR_EXTRA_DIM */ - private SDVariable lastCellStateOutput; + private final SDVariable lastCellStateOutput; /** * output at last step hL: * [bS, nOut] when directionMode FWD or BWD * 2, bS, nOut] when directionMode BIDIR_SUM, BIDIR_CONCAT or BIDIR_EXTRA_DIM */ - private SDVariable lastTimeStepOutput; + private final SDVariable lastTimeStepOutput; public LSTMLayerOutputs(SDVariable[] outputs, LSTMLayerConfig lstmLayerConfig) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/SRUCellOutputs.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/SRUCellOutputs.java index a6612cc65..603b82ec1 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/SRUCellOutputs.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/SRUCellOutputs.java @@ -33,12 +33,12 @@ public class SRUCellOutputs { /** * Current cell output [batchSize, numUnits]. */ - private SDVariable h; + private final SDVariable h; /** * Current cell state [batchSize, numUnits]. 
*/ - private SDVariable c; + private final SDVariable c; public SRUCellOutputs(SDVariable[] outputs){ Preconditions.checkArgument(outputs.length == 2, diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/SRULayerOutputs.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/SRULayerOutputs.java index 5052c16d2..0c66ffd51 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/SRULayerOutputs.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/recurrent/outputs/SRULayerOutputs.java @@ -35,12 +35,12 @@ public class SRULayerOutputs { /** * Current cell output [batchSize, inSize, timeSeriesLength]. */ - private SDVariable h; + private final SDVariable h; /** * Current cell state [batchSize, inSize, timeSeriesLength]. */ - private SDVariable c; + private final SDVariable c; public SRULayerOutputs(SDVariable[] outputs){ Preconditions.checkArgument(outputs.length == 2, @@ -90,7 +90,7 @@ public class SRULayerOutputs { return lastOutput; } - private SDVariable lastState = null; + private final SDVariable lastState = null; /** * Get c, the state of the cell, for the last time step. 
diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/loss/SparseSoftmaxCrossEntropyLossWithLogits.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/loss/SparseSoftmaxCrossEntropyLossWithLogits.java index 53ccdb578..6e3a4441c 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/loss/SparseSoftmaxCrossEntropyLossWithLogits.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/loss/SparseSoftmaxCrossEntropyLossWithLogits.java @@ -89,7 +89,7 @@ public class SparseSoftmaxCrossEntropyLossWithLogits extends DynamicCustomOp { public List calculateOutputDataTypes(List inputDataTypes){ Preconditions.checkState(inputDataTypes != null && inputDataTypes.size() == 2, "Expected 2 input datatypes for %s, got %s", getClass(), inputDataTypes); if(dArguments != null && !dArguments.isEmpty()) - return Arrays.asList(dArguments.get(0)); + return Collections.singletonList(dArguments.get(0)); return Collections.singletonList(inputDataTypes.get(1)); //Same as predictions (logits) } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/reduce/Mmul.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/reduce/Mmul.java index be291a9c3..586c39780 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/reduce/Mmul.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/reduce/Mmul.java @@ -253,8 +253,8 @@ public class Mmul extends DynamicCustomOp { @Override public void initFromOnnx(Onnx.NodeProto node, SameDiff initWith, Map attributesForNode, Onnx.GraphProto graph) { - val isTransposeA = !attributesForNode.containsKey("transA") ? false : attributesForNode.get("transA").getI() > 0; - val isTransposeB = !attributesForNode.containsKey("transB") ? 
false : attributesForNode.get("transB").getI() > 0; + val isTransposeA = attributesForNode.containsKey("transA") && attributesForNode.get("transA").getI() > 0; + val isTransposeB = attributesForNode.containsKey("transB") && attributesForNode.get("transB").getI() > 0; MMulTranspose mMulTranspose = MMulTranspose.builder() .transposeA(isTransposeA).transposeB(isTransposeB) .build(); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/reduce/TensorMmul.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/reduce/TensorMmul.java index c80c7cfb6..fb64d7b7d 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/reduce/TensorMmul.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/reduce/TensorMmul.java @@ -308,8 +308,8 @@ public class TensorMmul extends DynamicCustomOp { @Override public void initFromOnnx(Onnx.NodeProto node, SameDiff initWith, Map attributesForNode, Onnx.GraphProto graph) { - val isTransposeA = !attributesForNode.containsKey("transA") ? false : attributesForNode.get("transA").getI() > 0; - val isTransposeB = !attributesForNode.containsKey("transB") ? false : attributesForNode.get("transB").getI() > 0; + val isTransposeA = attributesForNode.containsKey("transA") && attributesForNode.get("transA").getI() > 0; + val isTransposeB = attributesForNode.containsKey("transB") && attributesForNode.get("transB").getI() > 0; MMulTranspose mMulTranspose = MMulTranspose.builder() .transposeA(isTransposeA).transposeB(isTransposeB) .build(); @@ -325,7 +325,7 @@ public class TensorMmul extends DynamicCustomOp { if (addedEdges != that.addedEdges) return false; if (!Arrays.deepEquals(axes, that.axes)) return false; - return mMulTranspose != null ? 
mMulTranspose.equals(that.mMulTranspose) : that.mMulTranspose == null; + return Objects.equals(mMulTranspose, that.mMulTranspose); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/reduce3/EqualsWithEps.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/reduce3/EqualsWithEps.java index 154a8661a..170c5274d 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/reduce3/EqualsWithEps.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/reduce3/EqualsWithEps.java @@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class EqualsWithEps extends BaseReduce3Op { @@ -68,6 +69,6 @@ public class EqualsWithEps extends BaseReduce3Op { @Override public List doDiff(List f1) { - return Arrays.asList(outputVariables()[0]); + return Collections.singletonList(outputVariables()[0]); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/ScalarDivision.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/ScalarDivision.java index 12c1e7abc..8eb55dec3 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/ScalarDivision.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/ScalarDivision.java @@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseScalarOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class ScalarDivision extends BaseScalarOp { @@ -72,6 +73,6 @@ public class ScalarDivision extends BaseScalarOp { @Override public List doDiff(List i_v1) { SDVariable ret = i_v1.get(0).div(scalarValue.getDouble(0)); - return Arrays.asList(ret); + return Collections.singletonList(ret); } } diff --git 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/ScalarSubtraction.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/ScalarSubtraction.java index 6ebcb274f..9303ba72e 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/ScalarSubtraction.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/ScalarSubtraction.java @@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseScalarOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class ScalarSubtraction extends BaseScalarOp { @@ -68,6 +69,6 @@ public class ScalarSubtraction extends BaseScalarOp { public List doDiff(List i_v1) { SDVariable g = i_v1.get(0); - return Arrays.asList(g); + return Collections.singletonList(g); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarAnd.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarAnd.java index c8434bb1e..8d0c71531 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarAnd.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarAnd.java @@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseScalarBoolOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -69,6 +70,6 @@ public class ScalarAnd extends BaseScalarBoolOp { public List doDiff(List f1) { //Not continuously differentiable, but 0 gradient in most places - return Arrays.asList(sameDiff.zerosLike(arg())); + return Collections.singletonList(sameDiff.zerosLike(arg())); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarEps.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarEps.java index c3e4cc306..1845fb542 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarEps.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarEps.java @@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseScalarBoolOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -83,7 +84,7 @@ public class ScalarEps extends BaseScalarBoolOp { public List doDiff(List f1) { //Not continuously differentiable, but 0 gradient in most places - return Arrays.asList(sameDiff.zerosLike(arg())); + return Collections.singletonList(sameDiff.zerosLike(arg())); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarEquals.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarEquals.java index 095420c5a..320afe6d5 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarEquals.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarEquals.java @@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseScalarBoolOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -82,7 +83,7 @@ public class ScalarEquals extends BaseScalarBoolOp { @Override public List doDiff(List f1) { //Not continuously differentiable, but 0 gradient in most places - return Arrays.asList(sameDiff.zerosLike(arg())); + return Collections.singletonList(sameDiff.zerosLike(arg())); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarGreaterThan.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarGreaterThan.java index 4a19f53cc..587ba1c05 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarGreaterThan.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarGreaterThan.java @@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseScalarBoolOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -77,6 +78,6 @@ public class ScalarGreaterThan extends BaseScalarBoolOp { public List doDiff(List f1) { //Not continuously differentiable, but 0 gradient in most places - return Arrays.asList(sameDiff.zerosLike(arg())); + return Collections.singletonList(sameDiff.zerosLike(arg())); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarGreaterThanOrEqual.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarGreaterThanOrEqual.java index 5e183ddbc..63ddda993 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarGreaterThanOrEqual.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarGreaterThanOrEqual.java @@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseScalarBoolOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -76,7 +77,7 @@ public class ScalarGreaterThanOrEqual extends BaseScalarBoolOp { public List doDiff(List f1) { //Not continuously differentiable, but 0 gradient in most places - return Arrays.asList(sameDiff.zerosLike(arg())); + return Collections.singletonList(sameDiff.zerosLike(arg())); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarLessThan.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarLessThan.java index 393728d06..fa804b30a 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarLessThan.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarLessThan.java @@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseScalarBoolOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -73,6 +74,6 @@ public class ScalarLessThan extends BaseScalarBoolOp { public List doDiff(List f1) { //Not continuously differentiable, but 0 gradient in most places - return Arrays.asList(sameDiff.zerosLike(arg())); + return Collections.singletonList(sameDiff.zerosLike(arg())); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarLessThanOrEqual.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarLessThanOrEqual.java index 3dce29315..ee8ffffde 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarLessThanOrEqual.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarLessThanOrEqual.java @@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseScalarBoolOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -69,7 +70,7 @@ public class ScalarLessThanOrEqual extends BaseScalarBoolOp { public List doDiff(List f1) { //Not continuously differentiable, but 0 gradient in most places - return Arrays.asList(sameDiff.zerosLike(arg())); + return Collections.singletonList(sameDiff.zerosLike(arg())); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarNot.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarNot.java index f1f0e78f1..e89ed9264 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarNot.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarNot.java @@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseScalarBoolOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -74,6 +75,6 @@ public class ScalarNot extends BaseScalarBoolOp { public List doDiff(List f1) { //Not continuously differentiable, but 0 gradient in most places - return Arrays.asList(sameDiff.zerosLike(arg())); + return Collections.singletonList(sameDiff.zerosLike(arg())); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarNotEquals.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarNotEquals.java index 21a6e14c4..923cc12cf 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarNotEquals.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarNotEquals.java @@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseScalarBoolOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -68,6 +69,6 @@ public class ScalarNotEquals extends BaseScalarBoolOp { public List doDiff(List f1) { //Not continuously differentiable, but 0 gradient in most places - return Arrays.asList(sameDiff.zerosLike(arg())); + return Collections.singletonList(sameDiff.zerosLike(arg())); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarOr.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarOr.java index 7a3fb3ab7..5e0c4cacf 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarOr.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarOr.java @@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseScalarBoolOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -75,6 +76,6 @@ public class ScalarOr extends BaseScalarBoolOp { public List doDiff(List f1) { //Not continuously differentiable, but 0 gradient in most places - return Arrays.asList(sameDiff.zerosLike(arg())); + return Collections.singletonList(sameDiff.zerosLike(arg())); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarSetValue.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarSetValue.java index cc488277e..8d65c1c62 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarSetValue.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarSetValue.java @@ -27,6 +27,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseScalarOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class ScalarSetValue extends BaseScalarOp { @@ -82,6 +83,6 @@ public class ScalarSetValue extends BaseScalarOp { @Override public List doDiff(List f1) { - return Arrays.asList(sameDiff.zerosLike(arg())); + return Collections.singletonList(sameDiff.zerosLike(arg())); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarXor.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarXor.java index 
54d368bed..b92bcd705 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarXor.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/comparison/ScalarXor.java @@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseScalarBoolOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; /** @@ -74,6 +75,6 @@ public class ScalarXor extends BaseScalarBoolOp { public List doDiff(List f1) { //Not continuously differentiable, but 0 gradient in most places - return Arrays.asList(sameDiff.zerosLike(arg())); + return Collections.singletonList(sameDiff.zerosLike(arg())); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterAdd.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterAdd.java index a5dffd37d..eca904de8 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterAdd.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterAdd.java @@ -71,7 +71,7 @@ public class ScatterAdd extends DynamicCustomOp { TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph); if (nodeDef.containsAttr("use_locking")) { - if (nodeDef.getAttrOrThrow("use_locking").getB() == true) { + if (nodeDef.getAttrOrThrow("use_locking").getB()) { bArguments.add(true); } else { bArguments.add(false); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterDiv.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterDiv.java index ae7c12d8b..34e3278fe 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterDiv.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterDiv.java @@ -96,7 +96,7 @@ public class ScatterDiv extends 
DynamicCustomOp { TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph); if (nodeDef.containsAttr("use_locking")) { - if (nodeDef.getAttrOrThrow("use_locking").getB() == true) { + if (nodeDef.getAttrOrThrow("use_locking").getB()) { bArguments.add(true); } else { bArguments.add(false); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterMax.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterMax.java index b035772db..73b5eb688 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterMax.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterMax.java @@ -68,7 +68,7 @@ public class ScatterMax extends DynamicCustomOp { TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph); if (nodeDef.containsAttr("use_locking")) { - if (nodeDef.getAttrOrThrow("use_locking").getB() == true) { + if (nodeDef.getAttrOrThrow("use_locking").getB()) { bArguments.add(true); } else { bArguments.add(false); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterMin.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterMin.java index 77ecd2404..3d138c2f0 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterMin.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterMin.java @@ -68,7 +68,7 @@ public class ScatterMin extends DynamicCustomOp { TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph); if (nodeDef.containsAttr("use_locking")) { - if (nodeDef.getAttrOrThrow("use_locking").getB() == true) { + if (nodeDef.getAttrOrThrow("use_locking").getB()) { bArguments.add(true); } else { bArguments.add(false); diff --git 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterMul.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterMul.java index 1b2a458b0..db22db585 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterMul.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterMul.java @@ -71,7 +71,7 @@ public class ScatterMul extends DynamicCustomOp { TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph); if (nodeDef.containsAttr("use_locking")) { - if (nodeDef.getAttrOrThrow("use_locking").getB() == true) { + if (nodeDef.getAttrOrThrow("use_locking").getB()) { bArguments.add(true); } else { bArguments.add(false); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterNd.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterNd.java index e680313b2..2700c096a 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterNd.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterNd.java @@ -69,7 +69,7 @@ public class ScatterNd extends DynamicCustomOp { TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph); if (nodeDef.containsAttr("use_locking")) { - if (nodeDef.getAttrOrThrow("use_locking").getB() == true) { + if (nodeDef.getAttrOrThrow("use_locking").getB()) { bArguments.add(true); } else { bArguments.add(false); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterNdAdd.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterNdAdd.java index 781382d2b..96cdbdeb8 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterNdAdd.java +++ 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterNdAdd.java @@ -69,7 +69,7 @@ public class ScatterNdAdd extends DynamicCustomOp { TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph); if (nodeDef.containsAttr("use_locking")) { - if (nodeDef.getAttrOrThrow("use_locking").getB() == true) { + if (nodeDef.getAttrOrThrow("use_locking").getB()) { bArguments.add(true); } else { bArguments.add(false); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterNdSub.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterNdSub.java index 546cb5055..da50aba66 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterNdSub.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterNdSub.java @@ -69,7 +69,7 @@ public class ScatterNdSub extends DynamicCustomOp { TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph); if (nodeDef.containsAttr("use_locking")) { - if (nodeDef.getAttrOrThrow("use_locking").getB() == true) { + if (nodeDef.getAttrOrThrow("use_locking").getB()) { bArguments.add(true); } else { bArguments.add(false); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterNdUpdate.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterNdUpdate.java index 825965fb0..157fb7bf9 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterNdUpdate.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterNdUpdate.java @@ -69,7 +69,7 @@ public class ScatterNdUpdate extends DynamicCustomOp { TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph); if (nodeDef.containsAttr("use_locking")) { - if 
(nodeDef.getAttrOrThrow("use_locking").getB() == true) { + if (nodeDef.getAttrOrThrow("use_locking").getB()) { bArguments.add(true); } else { bArguments.add(false); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterSub.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterSub.java index 37cdbb495..db344f0f6 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterSub.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterSub.java @@ -87,7 +87,7 @@ public class ScatterSub extends DynamicCustomOp { TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph); if (nodeDef.containsAttr("use_locking")) { - if (nodeDef.getAttrOrThrow("use_locking").getB() == true) { + if (nodeDef.getAttrOrThrow("use_locking").getB()) { bArguments.add(true); } else { bArguments.add(false); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterUpdate.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterUpdate.java index 59bd0a744..5cf1146be 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterUpdate.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/scatter/ScatterUpdate.java @@ -38,7 +38,7 @@ import java.util.Map; public class ScatterUpdate extends DynamicCustomOp { - public static enum UpdateOp { + public enum UpdateOp { ADD, SUBTRACT, MULTIPLY, @@ -76,7 +76,7 @@ public class ScatterUpdate extends DynamicCustomOp { @Override public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map attributesForNode, GraphDef graph) { if (nodeDef.containsAttr("use_locking")) { - if (nodeDef.getAttrOrThrow("use_locking").getB() == true) { + if (nodeDef.getAttrOrThrow("use_locking").getB()) { bArguments.add(true); } else { bArguments.add(false); diff --git 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/ApplyGradientDescent.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/ApplyGradientDescent.java index 62d1878af..1171bf080 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/ApplyGradientDescent.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/ApplyGradientDescent.java @@ -29,6 +29,7 @@ import org.tensorflow.framework.GraphDef; import org.tensorflow.framework.NodeDef; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; @@ -108,7 +109,7 @@ public class ApplyGradientDescent extends DynamicCustomOp { @Override public List doDiff(List i_v) { SDVariable ret = this.outputVariables()[0]; - return Arrays.asList(ret); + return Collections.singletonList(ret); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Create.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Create.java index d4d177688..f4c2ed15c 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Create.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Create.java @@ -81,7 +81,7 @@ public class Create extends DynamicCustomOp { protected void addArgs() { addBArgument(initialize); - addIArgument((int) order,outputType.toInt()); + addIArgument(order,outputType.toInt()); } @Override @@ -121,7 +121,7 @@ public class Create extends DynamicCustomOp { @Override public List doDiff(List i_v) { SDVariable ret = sameDiff.zerosLike(outputVariables()[0]); - return Arrays.asList(ret); + return Collections.singletonList(ret); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/ExpandDims.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/ExpandDims.java index 179565dfe..34dd74ca7 100644 --- 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/ExpandDims.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/ExpandDims.java @@ -151,7 +151,7 @@ public class ExpandDims extends DynamicCustomOp { public List doDiff(List i_v) { //Simply need a reshape to remove the dimension... SDVariable ret = sameDiff.squeeze(i_v.get(0), jaxis); - return Arrays.asList(ret); + return Collections.singletonList(ret); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Eye.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Eye.java index 8b2b7e6a0..5642ef253 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Eye.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Eye.java @@ -127,7 +127,7 @@ public class Eye extends DynamicCustomOp { } } - addTArgument((double) dataType.toInt()); + addTArgument(dataType.toInt()); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/OnesLike.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/OnesLike.java index 80ea9ac62..00aa5b59a 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/OnesLike.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/OnesLike.java @@ -110,7 +110,7 @@ public class OnesLike extends DynamicCustomOp { @Override public List doDiff(List i_v) { SDVariable ret = sameDiff.zerosLike(outputVariables()[0]); - return Arrays.asList(ret); + return Collections.singletonList(ret); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Repeat.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Repeat.java index 9e18a18fa..96200d9ee 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Repeat.java +++ 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Repeat.java @@ -67,7 +67,7 @@ public class Repeat extends DynamicCustomOp { @Override public Map propertiesForFunction() { - return Collections.singletonMap("axis", axis); + return Collections.singletonMap("axis", axis); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Reshape.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Reshape.java index 977241e23..7d6c9f12f 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Reshape.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Reshape.java @@ -86,7 +86,6 @@ public class Reshape extends DynamicCustomOp { public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map attributesForNode, GraphDef graph) { if (!nodeDef.containsAttr("TShape") && nodeDef.getInputCount() == 1) { this.shape = new long[]{}; - return; } else if(nodeDef.getInputCount() == 1){ val shape = nodeDef.getAttrOrThrow("Tshape"); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Squeeze.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Squeeze.java index b25762f3a..d319ac8bf 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Squeeze.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/shape/Squeeze.java @@ -86,14 +86,13 @@ public class Squeeze extends DynamicCustomOp { for (int d : squeezeDims) { ret = sameDiff.expandDims(ret, d); } - ; - return Arrays.asList(ret); + return Collections.singletonList(ret); } @Override public List calculateOutputDataTypes(List dataTypes){ Preconditions.checkState(!dataTypes.isEmpty(), "Expected list with at least 1 datatype for %s, got %s", getClass(), dataTypes); //Output type is same as input type - return Arrays.asList(dataTypes.get(0)); + return 
Collections.singletonList(dataTypes.get(0)); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/MaxOut.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/MaxOut.java index eb0a7947e..7fc1953c0 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/MaxOut.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/MaxOut.java @@ -115,10 +115,7 @@ public class MaxOut extends BaseTransformOp { if (y != null && !y().isR()) return false; - if (z != null && z().dataType() != x().dataType()) - return false; - - return true; + return z == null || z().dataType() == x().dataType(); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/any/IsMax.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/any/IsMax.java index be8c99fef..ff48dfa31 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/any/IsMax.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/any/IsMax.java @@ -32,7 +32,7 @@ import java.util.Collections; import java.util.List; /** - * [1, 2, 3, 1] -> [0, 0, 1, 0] + * [1, 2, 3, 1] -> [0, 0, 1, 0] * @author Adam Gibson */ public class IsMax extends DynamicCustomOp { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/clip/ClipByValue.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/clip/ClipByValue.java index 431764efd..ad93c34fd 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/clip/ClipByValue.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/clip/ClipByValue.java @@ -98,6 +98,6 @@ public class ClipByValue extends DynamicCustomOp { public List calculateOutputDataTypes(List inputDataTypes){ 
Preconditions.checkState(inputDataTypes != null && !inputDataTypes.isEmpty() , "Expected at least 1 input datatype for %s, got %s", getClass(), inputDataTypes); //get the final data type (sometimes model import passes in 2 dummy data types that aren't relevant) - return Arrays.asList(inputDataTypes.get(inputDataTypes.size() - 1)); + return Collections.singletonList(inputDataTypes.get(inputDataTypes.size() - 1)); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/BatchToSpace.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/BatchToSpace.java index de05d6476..3bc42556c 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/BatchToSpace.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/BatchToSpace.java @@ -89,7 +89,7 @@ public class BatchToSpace extends DynamicCustomOp { public List doDiff(List i_v) { // Inverse of batch to space is space to batch with same blocks and padding as crops SDVariable gradient = sameDiff.setupFunction(i_v.get(0)); - return Arrays.asList(sameDiff.cnn().spaceToBatch(gradient, blocks, crops[0], crops[1])); + return Collections.singletonList(sameDiff.cnn().spaceToBatch(gradient, blocks, crops[0], crops[1])); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/BatchToSpaceND.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/BatchToSpaceND.java index d9661edd3..df8e50566 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/BatchToSpaceND.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/BatchToSpaceND.java @@ -71,7 +71,7 @@ public class BatchToSpaceND extends DynamicCustomOp { public List doDiff(List i_v) { // Inverse of batch to space is space to batch with same blocks and 
padding as crops SDVariable gradient = sameDiff.setupFunction(i_v.get(0)); - return Arrays.asList(sameDiff.cnn().spaceToBatch(gradient, blocks, crops[0], crops[1])); + return Collections.singletonList(sameDiff.cnn().spaceToBatch(gradient, blocks, crops[0], crops[1])); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/Choose.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/Choose.java index 69665508a..44e8ea079 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/Choose.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/Choose.java @@ -69,7 +69,7 @@ public class Choose extends DynamicCustomOp { * @param condition */ public Choose(INDArray[] inputs,Condition condition) { - this(inputs, Collections.emptyList(),Collections.emptyList(),condition); + this(inputs, Collections.emptyList(),Collections.emptyList(),condition); } /** diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/SpaceToBatch.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/SpaceToBatch.java index f00743762..362d16392 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/SpaceToBatch.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/SpaceToBatch.java @@ -80,7 +80,7 @@ public class SpaceToBatch extends DynamicCustomOp { public List doDiff(List i_v) { // Inverse of space to batch is batch to space with same blocks and crops as padding SDVariable gradient = sameDiff.setupFunction(i_v.get(0)); - return Arrays.asList(sameDiff.cnn().batchToSpace(gradient, blocks, padding[0], padding[1])); + return Collections.singletonList(sameDiff.cnn().batchToSpace(gradient, blocks, padding[0], padding[1])); } @Override diff --git 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/SpaceToBatchND.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/SpaceToBatchND.java index 327252215..432f69931 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/SpaceToBatchND.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/SpaceToBatchND.java @@ -71,7 +71,7 @@ public class SpaceToBatchND extends DynamicCustomOp { public List doDiff(List i_v) { // Inverse of space to batch is batch to space with same blocks and crops as padding SDVariable gradient = sameDiff.setupFunction(i_v.get(0)); - return Arrays.asList(sameDiff.cnn().batchToSpace(gradient, blocks, padding[0], padding[1])); + return Collections.singletonList(sameDiff.cnn().batchToSpace(gradient, blocks, padding[0], padding[1])); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/StandardizeBp.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/StandardizeBp.java index df0679604..6d4d9d79b 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/StandardizeBp.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/custom/StandardizeBp.java @@ -28,6 +28,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.DynamicCustomOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class StandardizeBp extends DynamicCustomOp { @@ -70,6 +71,6 @@ public class StandardizeBp extends DynamicCustomOp { Preconditions.checkState(dataTypes != null && dataTypes.size() == 2, "Expected exactly 2 input datatype for %s, got %s", getClass(), dataTypes); Preconditions.checkState(dataTypes.get(0).isFPType(), "Input 0 must be a floating point type, got %s", dataTypes.get(0)); 
Preconditions.checkState(dataTypes.get(1).isFPType(), "Input 1 must be a floating point type, got %s", dataTypes.get(1)); - return Arrays.asList(dataTypes.get(0)); + return Collections.singletonList(dataTypes.get(0)); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/pairwise/arithmetic/SquaredDifferenceOp.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/pairwise/arithmetic/SquaredDifferenceOp.java index 6df9ff2d9..6894fffb4 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/pairwise/arithmetic/SquaredDifferenceOp.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/pairwise/arithmetic/SquaredDifferenceOp.java @@ -47,7 +47,7 @@ public class SquaredDifferenceOp extends BaseDynamicTransformOp { } public SquaredDifferenceOp(INDArray x, INDArray y) { - addInputArgument(new INDArray[]{x,y}); + addInputArgument(x,y); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Abs.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Abs.java index 905263b0d..ce2bd3c52 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Abs.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Abs.java @@ -77,7 +77,7 @@ public class Abs extends BaseTransformSameOp { @Override public List doDiff(List i_v) { SDVariable ret = sameDiff.math.sign(arg()).mul(i_v.get(0)); - return Arrays.asList(ret); + return Collections.singletonList(ret); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Ceil.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Ceil.java index 787545aa0..1591596f8 100644 --- 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Ceil.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Ceil.java @@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseTransformSameOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class Ceil extends BaseTransformSameOp { @@ -73,6 +74,6 @@ public class Ceil extends BaseTransformSameOp { public List doDiff(List f1) { //not continuously differentiable, but dOut/dIn = 0 in most places - return Arrays.asList(sameDiff.zerosLike(arg())); + return Collections.singletonList(sameDiff.zerosLike(arg())); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Floor.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Floor.java index bd95aa501..e7d755da7 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Floor.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Floor.java @@ -27,6 +27,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseTransformSameOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; @NoArgsConstructor @@ -71,7 +72,7 @@ public class Floor extends BaseTransformSameOp { public List doDiff(List i_v) { //Floor op: non-continuous at integers, but 0 gradient otherwise - return Arrays.asList(sameDiff.zerosLike(arg())); + return Collections.singletonList(sameDiff.zerosLike(arg())); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Identity.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Identity.java index f5249b4bf..e6b285281 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Identity.java +++ 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Identity.java @@ -77,7 +77,7 @@ public class Identity extends BaseDynamicTransformOp { public List calculateOutputDataTypes(List dataTypes) { Preconditions.checkState(dataTypes != null && dataTypes.size() == 1, "Expected exactly 1 input datatype for %s, got input %s", getClass(), dataTypes); if(!dArguments.isEmpty()) - return Arrays.asList(dArguments.get(0)); + return Collections.singletonList(dArguments.get(0)); return dataTypes; } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Negative.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Negative.java index b9ef68cbe..2945feab2 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Negative.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Negative.java @@ -27,6 +27,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseTransformSameOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; @NoArgsConstructor @@ -70,7 +71,7 @@ public class Negative extends BaseTransformSameOp { @Override public List doDiff(List i_v) { - return Arrays.asList(sameDiff.math.neg(i_v.get(0))); + return Collections.singletonList(sameDiff.math.neg(i_v.get(0))); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Round.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Round.java index b99f04f91..fe55e245c 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Round.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Round.java @@ -28,6 +28,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseTransformSameOp; import java.util.Arrays; 
+import java.util.Collections; import java.util.List; @NoArgsConstructor @@ -73,6 +74,6 @@ public class Round extends BaseTransformSameOp { @Override public List doDiff(List f1) { - return Arrays.asList(sameDiff.zerosLike(arg())); + return Collections.singletonList(sameDiff.zerosLike(arg())); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Sign.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Sign.java index 12962cdba..945b9f7d1 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Sign.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/same/Sign.java @@ -27,6 +27,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseTransformSameOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class Sign extends BaseTransformSameOp { @@ -73,7 +74,7 @@ public class Sign extends BaseTransformSameOp { @Override public List doDiff(List i_v) { SDVariable ret = sameDiff.zerosLike(arg()); - return Arrays.asList(ret); + return Collections.singletonList(ret); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/ACosh.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/ACosh.java index 92eeb2df4..8ef16b7eb 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/ACosh.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/ACosh.java @@ -28,6 +28,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseTransformStrictOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; @NoArgsConstructor @@ -75,7 +76,7 @@ public class ACosh extends BaseTransformStrictOp { //dacosh(x)/dx = 1/(sqrt(x^2-1)) -- note that domain is x >= 1 
SDVariable xSqPlus1 = sameDiff.math().square(arg()).sub(1.0); SDVariable sqrt = sameDiff.math().sqrt(xSqPlus1); - return Arrays.asList(i_v.get(0).div(sqrt)); + return Collections.singletonList(i_v.get(0).div(sqrt)); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/ASinh.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/ASinh.java index 2975aabf8..864b04867 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/ASinh.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/ASinh.java @@ -27,6 +27,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseTransformStrictOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class ASinh extends BaseTransformStrictOp { @@ -74,6 +75,6 @@ public class ASinh extends BaseTransformStrictOp { //dasinh(x)/dx = 1 / sqrt(x^2+1) SDVariable xSqPlus1 = sameDiff.math.square(arg()).add(1.0); SDVariable ret = i_v.get(0).div(sameDiff.math.sqrt(xSqPlus1)); - return Arrays.asList(ret); + return Collections.singletonList(ret); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/ATanh.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/ATanh.java index 6953a8df1..d63cee3e1 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/ATanh.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/ATanh.java @@ -27,6 +27,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseTransformStrictOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class ATanh extends BaseTransformStrictOp { @@ -77,7 +78,7 @@ public class ATanh extends BaseTransformStrictOp { SDVariable oneMinusX2 = 
sameDiff.math().square(arg()).rsub(1.0); SDVariable ret = oneMinusX2.rdiv(1.0).mul(i_v.get(0)); - return Arrays.asList(ret); + return Collections.singletonList(ret); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Cos.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Cos.java index 7a519db06..2a300e5e7 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Cos.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Cos.java @@ -28,6 +28,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseTransformStrictOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; @NoArgsConstructor @@ -62,7 +63,7 @@ public class Cos extends BaseTransformStrictOp { @Override public List doDiff(List i_v) { SDVariable ret = sameDiff.math.neg(sameDiff.math.sin(arg())).mul(i_v.get(0)); - return Arrays.asList(ret); + return Collections.singletonList(ret); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Cosh.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Cosh.java index 8b5b9dbcd..9f9ee4e52 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Cosh.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Cosh.java @@ -28,6 +28,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseTransformStrictOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; @NoArgsConstructor @@ -72,7 +73,7 @@ public class Cosh extends BaseTransformStrictOp { @Override public List doDiff(List i_v) { SDVariable ret = sameDiff.math.sinh(arg()).mul(i_v.get(0)); - return Arrays.asList(ret); + return Collections.singletonList(ret); } } diff --git 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Exp.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Exp.java index 7974c6197..02d946052 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Exp.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Exp.java @@ -27,6 +27,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseTransformStrictOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; @NoArgsConstructor @@ -71,7 +72,7 @@ public class Exp extends BaseTransformStrictOp { @Override public List doDiff(List i_v) { SDVariable ret = sameDiff.math.mul(sameDiff.math.exp(arg()), i_v.get(0)); - return Arrays.asList(ret); + return Collections.singletonList(ret); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Expm1.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Expm1.java index 6733a26b7..cec890024 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Expm1.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Expm1.java @@ -28,6 +28,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseTransformStrictOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; @NoArgsConstructor @@ -72,7 +73,7 @@ public class Expm1 extends BaseTransformStrictOp { @Override public List doDiff(List i_v) { SDVariable ret = sameDiff.math.mul(sameDiff.math.exp(arg()), i_v.get(0)); - return Arrays.asList(ret); + return Collections.singletonList(ret); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Swish.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Swish.java index d8e5ce1f6..bb899e850 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Swish.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/Swish.java @@ -28,6 +28,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.BaseTransformStrictOp; import java.util.Arrays; +import java.util.Collections; import java.util.List; @NoArgsConstructor @@ -72,7 +73,7 @@ public class Swish extends BaseTransformStrictOp { @Override public List doDiff(List i_v) { SDVariable ret = new SwishDerivative(sameDiff, arg()).outputVariable().mul(i_v.get(0)); - return Arrays.asList(ret); + return Collections.singletonList(ret); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/performance/PerformanceTracker.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/performance/PerformanceTracker.java index dbed287b3..e49397525 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/performance/PerformanceTracker.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/performance/PerformanceTracker.java @@ -35,8 +35,8 @@ import java.util.Map; public class PerformanceTracker { private static final PerformanceTracker INSTANCE = new PerformanceTracker(); - private Map bandwidth = new HashMap<>(); - private Map operations = new HashMap<>(); + private final Map bandwidth = new HashMap<>(); + private final Map operations = new HashMap<>(); private PerformanceTracker() { // we put in initial holders, one per device @@ -77,7 +77,7 @@ public class PerformanceTracker { */ public long addMemoryTransaction(int deviceId, long timeSpentNanos, long numberOfBytes, @NonNull MemcpyDirection direction) { // we calculate bytes per microsecond now - val bw = (long) (numberOfBytes / (timeSpentNanos / (double) 1000.0)); + val bw = (long) 
(numberOfBytes / (timeSpentNanos / 1000.0)); // we skip too small values if (bw > 0) diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/random/compat/RandomStandardNormal.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/random/compat/RandomStandardNormal.java index e1e7dcdce..baae1a0a6 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/random/compat/RandomStandardNormal.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/random/compat/RandomStandardNormal.java @@ -60,7 +60,7 @@ public class RandomStandardNormal extends DynamicCustomOp { addTArgument(0.0, 1.0); } - public RandomStandardNormal(long shape[]) { + public RandomStandardNormal(long[] shape) { this(Nd4j.create(ArrayUtil.toDouble(shape)), Nd4j.create(shape)); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/random/custom/RandomPoisson.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/random/custom/RandomPoisson.java index 8f24a5ca4..d73b28ba0 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/random/custom/RandomPoisson.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/ops/random/custom/RandomPoisson.java @@ -81,7 +81,7 @@ public class RandomPoisson extends DynamicCustomOp { getClass(), inputDataTypes.size()); if(!dArguments.isEmpty()) - return Arrays.asList(dArguments.get(0)); + return Collections.singletonList(dArguments.get(0)); return Collections.singletonList(outputDataType); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/rng/DefaultRandom.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/rng/DefaultRandom.java index bf49e168d..d7d813176 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/rng/DefaultRandom.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/rng/DefaultRandom.java @@ -52,7 +52,7 @@ public class DefaultRandom implements Random, 
RandomGenerator { @Override public void setSeed(int seed) { - this.seed = (long) seed; + this.seed = seed; getRandomGenerator().setSeed(seed); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/ConstantDistribution.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/ConstantDistribution.java index 6e929fa8d..b56722c30 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/ConstantDistribution.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/ConstantDistribution.java @@ -43,7 +43,7 @@ public class ConstantDistribution extends BaseDistribution { /** * Mean of this distribution. */ - private double value; + private final double value; public ConstantDistribution(double value) { this.value = value; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/OrthogonalDistribution.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/OrthogonalDistribution.java index bd7cff94d..3b1faaf71 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/OrthogonalDistribution.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/OrthogonalDistribution.java @@ -48,7 +48,7 @@ public class OrthogonalDistribution extends BaseDistribution { /** * Mean of this distribution. 
*/ - private double gain; + private final double gain; private INDArray gains; public OrthogonalDistribution(double gain) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/UniformDistribution.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/UniformDistribution.java index b06896146..07627f05c 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/UniformDistribution.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/UniformDistribution.java @@ -30,7 +30,8 @@ import org.nd4j.linalg.api.rng.distribution.BaseDistribution; import org.nd4j.linalg.factory.Nd4j; public class UniformDistribution extends BaseDistribution { - private double upper, lower; + private final double upper; + private final double lower; /** * Create a uniform real distribution using the given lower and upper diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/shape/LongShapeDescriptor.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/shape/LongShapeDescriptor.java index 0263533b6..bcd6fad49 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/shape/LongShapeDescriptor.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/shape/LongShapeDescriptor.java @@ -32,20 +32,20 @@ import java.util.Arrays; public class LongShapeDescriptor { @Getter - private char order; + private final char order; - private long offset; + private final long offset; - private long ews; + private final long ews; - private long hashShape = 0; - private long hashStride = 0; + private final long hashShape = 0; + private final long hashStride = 0; @Getter - private long[] shape; + private final long[] shape; @Getter - private long[] stride; + private final long[] stride; @Getter @Setter private long extras; @@ -107,7 +107,7 @@ public class LongShapeDescriptor { @Override public int hashCode() { - int result = 
(int) order; + int result = order; result = 31 * result + longHashCode(offset); result = 31 * result + longHashCode(ews); @@ -120,13 +120,11 @@ public class LongShapeDescriptor { @Override public String toString() { - StringBuilder builder = new StringBuilder(); + String builder = shape.length + "," + Arrays.toString(shape) + "," + + Arrays.toString(stride) + "," + extras + "," + ews + "," + + order; - builder.append(shape.length).append(",").append(Arrays.toString(shape)).append(",") - .append(Arrays.toString(stride)).append(",").append(extras).append(",").append(ews).append(",") - .append(order); - - String result = builder.toString().replaceAll("\\]", "").replaceAll("\\[", ""); + String result = builder.replaceAll("\\]", "").replaceAll("\\[", ""); result = "[" + result + "]"; return result; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/shape/Shape.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/shape/Shape.java index cacc677cb..0f2174fd1 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/shape/Shape.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/shape/Shape.java @@ -907,7 +907,7 @@ public class Shape { throw new IllegalArgumentException( String.format("J: Index [%d] must not be >= shape[%d]=%d.", i, i, shape[i])); if (shape[i] != 1) { - offset += indices[i] * stride[i]; + offset += (long) indices[i] * stride[i]; } } @@ -995,7 +995,7 @@ public class Shape { throw new IllegalArgumentException( String.format("J: Index [%d] must not be >= shape[%d]=%d.", i, i, size_dimi)); if (size_dimi != 1) { - offset += indices[i] * stride(shapeInformation, i); + offset += (long) indices[i] * stride(shapeInformation, i); } } return offset; @@ -1058,9 +1058,9 @@ public class Shape { + Arrays.toString(shape(shapeInformation)) + " NDArray"); if (size_0 != 1) - offset += row * strideUnsafe(shapeInformation, 0, 2); + offset += (long) row * strideUnsafe(shapeInformation, 0, 2); if (size_1 != 1) - offset 
+= col * strideUnsafe(shapeInformation, 1, 2); + offset += (long) col * strideUnsafe(shapeInformation, 1, 2); return offset; } @@ -1075,9 +1075,9 @@ public class Shape { + Arrays.toString(shape(shapeInformation)) + " NDArray"); if (size_0 != 1) - offset += row * strideUnsafe(shapeInformation, 0, 2); + offset += (long) row * strideUnsafe(shapeInformation, 0, 2); if (size_1 != 1) - offset += col * strideUnsafe(shapeInformation, 1, 2); + offset += (long) col * strideUnsafe(shapeInformation, 1, 2); return offset; } @@ -1118,9 +1118,9 @@ public class Shape { + Arrays.toString(shape(shapeInformation)) + " NDArray"); if (size_0 != 1) - offset += row * stride(shapeInformation, 0); + offset += (long) row * stride(shapeInformation, 0); if (size_1 != 1) - offset += col * stride(shapeInformation, 1); + offset += (long) col * stride(shapeInformation, 1); return offset; } @@ -1147,11 +1147,11 @@ public class Shape { + "] from a " + Arrays.toString(shape(shapeInformation)) + " NDArray"); if (size_0 != 1) - offset += dim0 * stride(shapeInformation, 0); + offset += (long) dim0 * stride(shapeInformation, 0); if (size_1 != 1) - offset += dim1 * stride(shapeInformation, 1); + offset += (long) dim1 * stride(shapeInformation, 1); if (size_2 != 1) - offset += dim2 * stride(shapeInformation, 2); + offset += (long) dim2 * stride(shapeInformation, 2); return offset; } @@ -1185,11 +1185,11 @@ public class Shape { + "] from a " + Arrays.toString(shape(shapeInformation)) + " NDArray"); if (size_0 != 1) - offset += dim0 * strideUnsafe(shapeInformation, 0, 3); + offset += (long) dim0 * strideUnsafe(shapeInformation, 0, 3); if (size_1 != 1) - offset += dim1 * strideUnsafe(shapeInformation, 1, 3); + offset += (long) dim1 * strideUnsafe(shapeInformation, 1, 3); if (size_2 != 1) - offset += dim2 * strideUnsafe(shapeInformation, 2, 3); + offset += (long) dim2 * strideUnsafe(shapeInformation, 2, 3); return offset; } @@ -1237,13 +1237,13 @@ public class Shape { + dim3 + "] from a " + 
Arrays.toString(shape(shapeInformation)) + " NDArray"); if (size_0 != 1) - offset += dim0 * stride(shapeInformation, 0); + offset += (long) dim0 * stride(shapeInformation, 0); if (size_1 != 1) - offset += dim1 * stride(shapeInformation, 1); + offset += (long) dim1 * stride(shapeInformation, 1); if (size_2 != 1) - offset += dim2 * stride(shapeInformation, 2); + offset += (long) dim2 * stride(shapeInformation, 2); if (size_3 != 1) - offset += dim3 * stride(shapeInformation, 3); + offset += (long) dim3 * stride(shapeInformation, 3); return offset; } @@ -1276,13 +1276,13 @@ public class Shape { + dim3 + "] from a " + Arrays.toString(shape(shapeInformation)) + " NDArray"); if (size_0 != 1) - offset += dim0 * strideUnsafe(shapeInformation, 0, 4); + offset += (long) dim0 * strideUnsafe(shapeInformation, 0, 4); if (size_1 != 1) - offset += dim1 * strideUnsafe(shapeInformation, 1, 4); + offset += (long) dim1 * strideUnsafe(shapeInformation, 1, 4); if (size_2 != 1) - offset += dim2 * strideUnsafe(shapeInformation, 2, 4); + offset += (long) dim2 * strideUnsafe(shapeInformation, 2, 4); if (size_3 != 1) - offset += dim3 * strideUnsafe(shapeInformation, 3, 4); + offset += (long) dim3 * strideUnsafe(shapeInformation, 3, 4); return offset; } @@ -1299,13 +1299,13 @@ public class Shape { + dim3 + "] from a " + Arrays.toString(shape(shapeInformation)) + " NDArray"); if (size_0 != 1) - offset += dim0 * strideUnsafe(shapeInformation, 0, 4); + offset += (long) dim0 * strideUnsafe(shapeInformation, 0, 4); if (size_1 != 1) - offset += dim1 * strideUnsafe(shapeInformation, 1, 4); + offset += (long) dim1 * strideUnsafe(shapeInformation, 1, 4); if (size_2 != 1) - offset += dim2 * strideUnsafe(shapeInformation, 2, 4); + offset += (long) dim2 * strideUnsafe(shapeInformation, 2, 4); if (size_3 != 1) - offset += dim3 * strideUnsafe(shapeInformation, 3, 4); + offset += (long) dim3 * strideUnsafe(shapeInformation, 3, 4); return offset; } @@ -1630,21 +1630,13 @@ public class Shape { public static 
boolean scalarEquals(int[] shape1, int[] shape2) { if (shape1.length == 0 && shape2.length == 1 && shape2[0] == 1) { return true; - } else if (shape2.length == 0 && shape1.length == 1 && shape1[0] == 1) { - return true; - } - - return false; + } else return shape2.length == 0 && shape1.length == 1 && shape1[0] == 1; } public static boolean scalarEquals(long[] shape1, long[] shape2) { if (shape1.length == 0 && shape2.length == 1 && shape2[0] == 1) { return true; - } else if (shape2.length == 0 && shape1.length == 1 && shape1[0] == 1) { - return true; - } - - return false; + } else return shape2.length == 0 && shape1.length == 1 && shape1[0] == 1; } /** @@ -2310,7 +2302,7 @@ public class Shape { long index = 0; int shift = 1; for (int i = 0; i < shape.length; i++) { - index += shift * indices[i]; + index += (long) shift * indices[i]; shift *= shape[i]; } return index; @@ -2891,13 +2883,13 @@ public class Shape { * @return */ public static IntBuffer shapeOf(IntBuffer buffer) { - Buffer buffer2 = (Buffer) buffer; + Buffer buffer2 = buffer; IntBuffer ret = (IntBuffer) buffer2.position(1); return ret.slice(); } public static LongBuffer shapeOf(LongBuffer buffer) { - Buffer buffer2 = (Buffer) buffer; + Buffer buffer2 = buffer; val ret = (LongBuffer) buffer2.position(1); return ret.slice(); } @@ -3230,7 +3222,7 @@ public class Shape { @Deprecated public static void setOrder(IntBuffer buffer, char order) { int length = Shape.shapeInfoLength(Shape.rank(buffer)); - buffer.put(length - 1, (int) order); + buffer.put(length - 1, order); throw new RuntimeException("setOrder called"); } @@ -3449,7 +3441,7 @@ public class Shape { */ public static boolean contentEquals(int[] arr, IntBuffer other) { for (int i = 0; i < arr.length; i++) { - Buffer buffer2 = (Buffer) other; + Buffer buffer2 = other; buffer2.position(i); if (arr[i] != other.get()) { return false; @@ -3644,7 +3636,7 @@ public class Shape { //Length is simply 1 + the buffer index of the last element long length = 1; 
for(int i=0; i> getAllTestMatricesWithShape(char ordering, int rows, int cols, int seed, DataType dataType) { List> all = new ArrayList<>(); Nd4j.getRandom().setSeed(seed); - all.add(new Pair<>(Nd4j.linspace(1, rows * cols, rows * cols, dataType).reshape(ordering, rows, cols), + all.add(new Pair<>(Nd4j.linspace(1, (long) rows * cols, (long) rows * cols, dataType).reshape(ordering, rows, cols), "Nd4j..linspace(1,rows * cols,rows * cols).reshape(rows,cols)")); all.add(getTransposedMatrixWithShape(ordering, rows, cols, seed, dataType)); @@ -96,7 +96,7 @@ public class NDArrayCreationUtil { List> all = new ArrayList<>(); if (rank == 0) { //scalar - all.add(new Pair<>(Nd4j.scalar(dataType, Nd4j.rand(dataType, new int[]{1, 1}).getDouble(0)), "{}")); + all.add(new Pair<>(Nd4j.scalar(dataType, Nd4j.rand(dataType, 1, 1).getDouble(0)), "{}")); return all; } //generate all possible combinations with a 1 and a 2 @@ -128,7 +128,7 @@ public class NDArrayCreationUtil { public static Pair getTransposedMatrixWithShape(char ordering, int rows, int cols, int seed, DataType dataType) { Nd4j.getRandom().setSeed(seed); - INDArray out = Nd4j.linspace(1, rows * cols, rows * cols, dataType).reshape(ordering, cols, rows); + INDArray out = Nd4j.linspace(1, (long) rows * cols, (long) rows * cols, dataType).reshape(ordering, cols, rows); return new Pair<>(out.transpose(), "getTransposedMatrixWithShape(" + rows + "," + cols + "," + seed + ")"); } @@ -181,7 +181,7 @@ public class NDArrayCreationUtil { out[1] = temp01.tensorAlongDimension(2, 0, 1).reshape(rows, cols); Nd4j.getRandom().setSeed(seed); - INDArray temp02 = Nd4j.linspace(1, len, len, dataType).reshape(new long[] {cols, 4, rows}); + INDArray temp02 = Nd4j.linspace(1, len, len, dataType).reshape(cols, 4, rows); out[2] = temp02.tensorAlongDimension(0, 0, 2).reshape(rows, cols); temp02 = Nd4j.linspace(1, len, len, dataType).reshape(cols, 4, rows); out[3] = temp02.tensorAlongDimension(2, 0, 2).reshape(rows, cols); diff --git 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/compression/BasicNDArrayCompressor.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/compression/BasicNDArrayCompressor.java index 283694f56..76bb501b8 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/compression/BasicNDArrayCompressor.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/compression/BasicNDArrayCompressor.java @@ -86,7 +86,7 @@ public class BasicNDArrayCompressor { builder.append("[").append(comp).append("] "); } - System.out.println(builder.toString()); + System.out.println(builder); } /** diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/compression/CompressedDataBuffer.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/compression/CompressedDataBuffer.java index cdbd52d3b..29c5ebe90 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/compression/CompressedDataBuffer.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/compression/CompressedDataBuffer.java @@ -44,7 +44,7 @@ public class CompressedDataBuffer extends BaseDataBuffer { @Getter @Setter protected CompressionDescriptor compressionDescriptor; - private static Logger logger = LoggerFactory.getLogger(CompressedDataBuffer.class); + private static final Logger logger = LoggerFactory.getLogger(CompressedDataBuffer.class); public CompressedDataBuffer(Pointer pointer, @NonNull CompressionDescriptor descriptor) { this.compressionDescriptor = descriptor; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/compression/CompressionUtils.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/compression/CompressionUtils.java index 03ce9b4b5..ff101063f 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/compression/CompressionUtils.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/compression/CompressionUtils.java @@ -27,16 +27,10 @@ public class CompressionUtils { public static boolean 
goingToDecompress(@NonNull DataTypeEx from, @NonNull DataTypeEx to) { // TODO: eventually we want FLOAT16 here - if (to.equals(DataTypeEx.FLOAT) || to.equals(DataTypeEx.DOUBLE) ) - return true; - - return false; + return to.equals(DataTypeEx.FLOAT) || to.equals(DataTypeEx.DOUBLE); } public static boolean goingToCompress(@NonNull DataTypeEx from, @NonNull DataTypeEx to) { - if (!goingToDecompress(from, to) && goingToDecompress(to, from)) - return true; - - return false; + return !goingToDecompress(from, to) && goingToDecompress(to, from); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/convolution/Convolution.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/convolution/Convolution.java index 4d91be1ba..b2cb74a30 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/convolution/Convolution.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/convolution/Convolution.java @@ -75,7 +75,7 @@ public class Convolution { if (col.rank() != 6) throw new IllegalArgumentException("col2im input array must be rank 6"); - INDArray output = Nd4j.create(col.dataType(), new long[]{col.size(0), col.size(1), kH, kW}); + INDArray output = Nd4j.create(col.dataType(), col.size(0), col.size(1), kH, kW); val cfg = Conv2DConfig.builder() .sH(sH) @@ -289,8 +289,8 @@ public class Convolution { output = Nd4j.createUninitialized(img.dataType(), new long[]{img.size(0), img.size(1), kh, kw, oH, oW}, 'c'); } else { - long oH = (img.size(2) - (kh + (kh - 1) * (1 - 1)) + 2 * ph) / sy + 1; - long oW = (img.size(3) - (kw + (kw - 1) * (1 - 1)) + 2 * pw) / sx + 1; + long oH = (img.size(2) - (kh + 0) + 2L * ph) / sy + 1; + long oW = (img.size(3) - (kw + 0) + 2L * pw) / sx + 1; output = Nd4j.createUninitialized(img.dataType(), new long[]{img.size(0), img.size(1), kh, kw, oH, oW}, 'c'); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/convolution/OldConvolution.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/convolution/OldConvolution.java index 383e807b1..f898598b1 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/convolution/OldConvolution.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/convolution/OldConvolution.java @@ -71,7 +71,7 @@ public class OldConvolution { //out width long outW = col.size(5); - INDArray img = Nd4j.create(n, c, h + 2 * ph + sy - 1, w + 2 * pw + sx - 1); + INDArray img = Nd4j.create(n, c, h + 2L * ph + sy - 1, w + 2L * pw + sx - 1); for (int i = 0; i < kh; i++) { //iterate over the kernel rows long iLim = i + sy * outH; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/AsyncDataSetIterator.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/AsyncDataSetIterator.java index 842b770d3..e57f29072 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/AsyncDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/AsyncDataSetIterator.java @@ -117,7 +117,7 @@ public class AsyncDataSetIterator implements DataSetIterator { this.buffer = queue; this.prefetchSize = queueSize; this.backedIterator = iterator; - this.workspaceId = "ADSI_ITER-" + java.util.UUID.randomUUID().toString(); + this.workspaceId = "ADSI_ITER-" + java.util.UUID.randomUUID(); if (iterator.resetSupported() && !iterator.hasNext()) this.backedIterator.reset(); @@ -364,11 +364,11 @@ public class AsyncDataSetIterator implements DataSetIterator { } protected class AsyncPrefetchThread extends Thread implements Runnable { - private BlockingQueue queue; - private DataSetIterator iterator; - private DataSet terminator; + private final BlockingQueue queue; + private final DataSetIterator iterator; + private final DataSet terminator; private boolean isShutdown = false; // locked around `this` - private WorkspaceConfiguration configuration = WorkspaceConfiguration.builder().minSize(10 * 1024L * 1024L) + private final 
WorkspaceConfiguration configuration = WorkspaceConfiguration.builder().minSize(10 * 1024L * 1024L) .overallocationLimit(prefetchSize + 2).policyReset(ResetPolicy.ENDOFBUFFER_REACHED) .policyLearning(LearningPolicy.FIRST_LOOP).policyAllocation(AllocationPolicy.OVERALLOCATE) .policySpill(SpillPolicy.REALLOCATE).build(); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/AsyncMultiDataSetIterator.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/AsyncMultiDataSetIterator.java index 7b32dca06..822fa3ce2 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/AsyncMultiDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/AsyncMultiDataSetIterator.java @@ -103,7 +103,7 @@ public class AsyncMultiDataSetIterator implements MultiDataSetIterator { this.backedIterator = iterator; this.useWorkspaces = useWorkspace; this.prefetchSize = queueSize; - this.workspaceId = "AMDSI_ITER-" + java.util.UUID.randomUUID().toString(); + this.workspaceId = "AMDSI_ITER-" + java.util.UUID.randomUUID(); this.deviceId = deviceId; if (iterator.resetSupported() && !iterator.hasNext()) @@ -312,11 +312,11 @@ public class AsyncMultiDataSetIterator implements MultiDataSetIterator { } protected class AsyncPrefetchThread extends Thread implements Runnable { - private BlockingQueue queue; - private MultiDataSetIterator iterator; - private MultiDataSet terminator; + private final BlockingQueue queue; + private final MultiDataSetIterator iterator; + private final MultiDataSet terminator; private boolean isShutdown = false; // locked around `this` - private WorkspaceConfiguration configuration = WorkspaceConfiguration.builder().minSize(10 * 1024L * 1024L) + private final WorkspaceConfiguration configuration = WorkspaceConfiguration.builder().minSize(10 * 1024L * 1024L) .overallocationLimit(prefetchSize + 1).policyReset(ResetPolicy.ENDOFBUFFER_REACHED) 
.policyLearning(LearningPolicy.FIRST_LOOP).policyAllocation(AllocationPolicy.OVERALLOCATE) .policySpill(SpillPolicy.REALLOCATE).build(); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/ExistingMiniBatchDataSetIterator.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/ExistingMiniBatchDataSetIterator.java index 87f430b19..1447ec78c 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/ExistingMiniBatchDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/ExistingMiniBatchDataSetIterator.java @@ -33,7 +33,7 @@ public class ExistingMiniBatchDataSetIterator implements DataSetIterator { public static final String DEFAULT_PATTERN = "dataset-%d.bin"; private int currIdx; - private File rootDir; + private final File rootDir; private int totalBatches = -1; private DataSetPreProcessor dataSetPreProcessor; private final String pattern; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/MiniBatchFileDataSetIterator.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/MiniBatchFileDataSetIterator.java index 5a9222865..7ea2e04c1 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/MiniBatchFileDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/MiniBatchFileDataSetIterator.java @@ -35,12 +35,12 @@ import java.util.UUID; @Slf4j public class MiniBatchFileDataSetIterator implements DataSetIterator { - private int batchSize; - private List paths; + private final int batchSize; + private final List paths; private int currIdx; private File rootDir; - private int totalExamples; - private int totalLabels; + private final int totalExamples; + private final int totalLabels; private int totalBatches = -1; private DataSetPreProcessor dataSetPreProcessor; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/ViewIterator.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/ViewIterator.java index b6def37c8..dd4ba90a7 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/ViewIterator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/ViewIterator.java @@ -34,7 +34,7 @@ import java.util.List; public class ViewIterator implements DataSetIterator { private int batchSize = -1; private int cursor = 0; - private DataSet data; + private final DataSet data; private DataSetPreProcessor preProcessor; public ViewIterator(DataSet data, int batchSize) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/adapter/MultiDataSetIteratorAdapter.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/adapter/MultiDataSetIteratorAdapter.java index 7ca5f922a..e100166b8 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/adapter/MultiDataSetIteratorAdapter.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/adapter/MultiDataSetIteratorAdapter.java @@ -26,7 +26,7 @@ import org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator; public class MultiDataSetIteratorAdapter implements MultiDataSetIterator { - private org.nd4j.linalg.dataset.api.iterator.DataSetIterator iter; + private final org.nd4j.linalg.dataset.api.iterator.DataSetIterator iter; private MultiDataSetPreProcessor preProcessor; public MultiDataSetIteratorAdapter(org.nd4j.linalg.dataset.api.iterator.DataSetIterator iter) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/DataSetUtil.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/DataSetUtil.java index 967fe995d..5545569d7 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/DataSetUtil.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/DataSetUtil.java @@ -107,7 +107,7 @@ public class DataSetUtil { mask = mask.dup('f'); } - INDArray mask1d = 
mask.reshape('f', new long[] {mask.length(), 1}); + INDArray mask1d = mask.reshape('f', mask.length(), 1); //Assume masks are 0s and 1s: then sum == number of elements int numElements = mask.sumNumber().intValue(); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/CachingDataSetIterator.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/CachingDataSetIterator.java index 31a571ba3..b69207188 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/CachingDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/CachingDataSetIterator.java @@ -31,12 +31,12 @@ import java.util.List; public class CachingDataSetIterator implements DataSetIterator { private static final Logger log = LoggerFactory.getLogger(DataSetCache.class); - private DataSetIterator sourceIterator; - private DataSetCache cache; - private String namespace; + private final DataSetIterator sourceIterator; + private final DataSetCache cache; + private final String namespace; private int currentIndex = 0; private boolean usingCache = false; - private boolean allowPrefetching; + private final boolean allowPrefetching; public CachingDataSetIterator(DataSetIterator sourceIterator, DataSetCache cache, String namespace) { this(sourceIterator, cache, namespace, false); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/MultipleEpochsIterator.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/MultipleEpochsIterator.java index 6ebea0682..fe3117040 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/MultipleEpochsIterator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/MultipleEpochsIterator.java @@ -33,9 +33,9 @@ import java.util.List; @Deprecated public class MultipleEpochsIterator implements DataSetIterator { private static final 
Logger log = LoggerFactory.getLogger(MultipleEpochsIterator.class); - private int numPasses; + private final int numPasses; private int batch = 0; - private DataSetIterator iter; + private final DataSetIterator iter; private int passes = 0; private DataSetPreProcessor preProcessor; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/SamplingDataSetIterator.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/SamplingDataSetIterator.java index 87c6eafae..606700099 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/SamplingDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/SamplingDataSetIterator.java @@ -32,9 +32,9 @@ import java.util.List; * @author Adam Gibson */ public class SamplingDataSetIterator implements DataSetIterator { - private DataSet sampleFrom; - private int batchSize; - private int totalNumberSamples; + private final DataSet sampleFrom; + private final int batchSize; + private final int totalNumberSamples; private int numTimesSampled; private boolean replace = false; private DataSetPreProcessor preProcessor; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/StandardScaler.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/StandardScaler.java index 757f459ac..240e35f20 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/StandardScaler.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/StandardScaler.java @@ -33,7 +33,7 @@ import java.io.IOException; @Deprecated public class StandardScaler { - private static Logger logger = LoggerFactory.getLogger(StandardScaler.class); + private static final Logger logger = LoggerFactory.getLogger(StandardScaler.class); private INDArray mean, std; private long runningTotal = 0; private long batchCount = 0; diff --git 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/TestDataSetIterator.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/TestDataSetIterator.java index 982987645..ff82d068c 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/TestDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/TestDataSetIterator.java @@ -31,7 +31,7 @@ public class TestDataSetIterator implements DataSetIterator { private static final long serialVersionUID = -7569201667767185411L; private int curr = 0; private int batch = 10; - private List list; + private final List list; private DataSetPreProcessor preProcessor; public TestDataSetIterator(DataSet dataset, int batch) { @@ -73,12 +73,12 @@ public class TestDataSetIterator implements DataSetIterator { @Override public int inputColumns() { - return (int)list.get(0).getFeatures().columns(); + return list.get(0).getFeatures().columns(); } @Override public int totalOutcomes() { - return (int) list.get(0).getLabels().columns(); + return list.get(0).getLabels().columns(); } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/TestMultiDataSetIterator.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/TestMultiDataSetIterator.java index 17ce251e9..b0af492bc 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/TestMultiDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/TestMultiDataSetIterator.java @@ -33,7 +33,7 @@ import java.util.List; public class TestMultiDataSetIterator implements MultiDataSetIterator { private int curr = 0; private int batch = 10; - private List list; + private final List list; private MultiDataSetPreProcessor preProcessor; /** diff --git 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/cache/InFileAndMemoryDataSetCache.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/cache/InFileAndMemoryDataSetCache.java index 47e985ef3..d274372eb 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/cache/InFileAndMemoryDataSetCache.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/cache/InFileAndMemoryDataSetCache.java @@ -27,8 +27,8 @@ import java.nio.file.Path; public class InFileAndMemoryDataSetCache implements DataSetCache { - private InFileDataSetCache fileCache; - private InMemoryDataSetCache memoryCache; + private final InFileDataSetCache fileCache; + private final InMemoryDataSetCache memoryCache; public InFileAndMemoryDataSetCache(File cacheDirectory) { this.fileCache = new InFileDataSetCache(cacheDirectory); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/cache/InFileDataSetCache.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/cache/InFileDataSetCache.java index 2eb7f5dc8..bf415379a 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/cache/InFileDataSetCache.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/cache/InFileDataSetCache.java @@ -27,7 +27,7 @@ import java.io.IOException; import java.nio.file.Path; public class InFileDataSetCache implements DataSetCache { - private File cacheDirectory; + private final File cacheDirectory; public InFileDataSetCache(File cacheDirectory) { if (cacheDirectory.exists() && !cacheDirectory.isDirectory()) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/cache/InMemoryDataSetCache.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/cache/InMemoryDataSetCache.java index 75a2a9177..df7e10c42 100644 --- 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/cache/InMemoryDataSetCache.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/cache/InMemoryDataSetCache.java @@ -35,8 +35,8 @@ public class InMemoryDataSetCache implements DataSetCache { private static final Logger log = LoggerFactory.getLogger(DataSetCache.class); - private Map cache = new HashMap<>(); - private Set completeNamespaces = new HashSet<>(); + private final Map cache = new HashMap<>(); + private final Set completeNamespaces = new HashSet<>(); @Override public boolean isComplete(String namespace) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/CompositeDataSetPreProcessor.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/CompositeDataSetPreProcessor.java index f39e5eed2..dde3a3963 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/CompositeDataSetPreProcessor.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/CompositeDataSetPreProcessor.java @@ -27,7 +27,7 @@ import org.nd4j.linalg.dataset.api.DataSetPreProcessor; public class CompositeDataSetPreProcessor implements DataSetPreProcessor { private final boolean stopOnEmptyDataSet; - private DataSetPreProcessor[] preProcessors; + private final DataSetPreProcessor[] preProcessors; /** * @param preProcessors Preprocessors to apply. 
They will be applied in this order diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/CompositeMultiDataSetPreProcessor.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/CompositeMultiDataSetPreProcessor.java index 248b66798..5a972d632 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/CompositeMultiDataSetPreProcessor.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/CompositeMultiDataSetPreProcessor.java @@ -25,7 +25,7 @@ import org.nd4j.linalg.dataset.api.MultiDataSetPreProcessor; public class CompositeMultiDataSetPreProcessor implements MultiDataSetPreProcessor { - private MultiDataSetPreProcessor[] preProcessors; + private final MultiDataSetPreProcessor[] preProcessors; /** * @param preProcessors Preprocessors to apply. They will be applied in this order diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/ImageMultiPreProcessingScaler.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/ImageMultiPreProcessingScaler.java index 8e367f620..8393c9b01 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/ImageMultiPreProcessingScaler.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/ImageMultiPreProcessingScaler.java @@ -28,9 +28,10 @@ import org.nd4j.linalg.dataset.api.preprocessor.serializer.NormalizerType; public class ImageMultiPreProcessingScaler implements MultiDataNormalization { - private double minRange, maxRange; - private double maxPixelVal; - private int[] featureIndices; + private final double minRange; + private final double maxRange; + private final double maxPixelVal; + private final int[] featureIndices; public ImageMultiPreProcessingScaler(int... 
featureIndices) { this(0, 1, 8, featureIndices); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/MinMaxStrategy.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/MinMaxStrategy.java index 1644bcba1..40c2c413e 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/MinMaxStrategy.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/MinMaxStrategy.java @@ -37,8 +37,8 @@ import java.io.Serializable; @Getter @EqualsAndHashCode public class MinMaxStrategy implements NormalizerStrategy, Serializable { - private double minRange; - private double maxRange; + private final double minRange; + private final double maxRange; public MinMaxStrategy() { this(0, 1); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/classimbalance/BaseUnderSamplingPreProcessor.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/classimbalance/BaseUnderSamplingPreProcessor.java index 9812b240e..de942aa04 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/classimbalance/BaseUnderSamplingPreProcessor.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/classimbalance/BaseUnderSamplingPreProcessor.java @@ -138,7 +138,7 @@ public abstract class BaseUnderSamplingPreProcessor { INDArray floatMask = labelMask.castTo(label.dataType()); if (!sum1.equals(floatMask)) { throw new IllegalArgumentException("Labels of size minibatchx2xtimesteps are expected to be one hot." 
- + label.toString() + "\n is not one-hot"); + + label + "\n is not one-hot"); } } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/classimbalance/UnderSamplingByMaskingMultiDataSetPreProcessor.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/classimbalance/UnderSamplingByMaskingMultiDataSetPreProcessor.java index d4fe5e792..ae9609c2c 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/classimbalance/UnderSamplingByMaskingMultiDataSetPreProcessor.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/classimbalance/UnderSamplingByMaskingMultiDataSetPreProcessor.java @@ -31,8 +31,8 @@ import java.util.Map; public class UnderSamplingByMaskingMultiDataSetPreProcessor extends BaseUnderSamplingPreProcessor implements MultiDataSetPreProcessor { - private Map targetMinorityDistMap; - private Map minorityLabelMap = new HashMap<>(); + private final Map targetMinorityDistMap; + private final Map minorityLabelMap = new HashMap<>(); /** * The target distribution to approximate. Valid values are between (0,0.5]. 
diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/classimbalance/UnderSamplingByMaskingPreProcessor.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/classimbalance/UnderSamplingByMaskingPreProcessor.java index 565190197..c7a6d2222 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/classimbalance/UnderSamplingByMaskingPreProcessor.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/classimbalance/UnderSamplingByMaskingPreProcessor.java @@ -26,7 +26,7 @@ import org.nd4j.linalg.dataset.api.DataSetPreProcessor; public class UnderSamplingByMaskingPreProcessor extends BaseUnderSamplingPreProcessor implements DataSetPreProcessor { - private double targetMinorityDist; + private final double targetMinorityDist; private int minorityLabel = 1; /** diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/serializer/NormalizerSerializer.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/serializer/NormalizerSerializer.java index 5a90aaeb1..2a1ce15a2 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/serializer/NormalizerSerializer.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/serializer/NormalizerSerializer.java @@ -34,7 +34,7 @@ public class NormalizerSerializer { private static final String HEADER = "NORMALIZER"; private static NormalizerSerializer defaultSerializer; - private List strategies = new ArrayList<>(); + private final List strategies = new ArrayList<>(); /** * Serialize a normalizer to the given file diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dimensionalityreduction/PCA.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dimensionalityreduction/PCA.java index 9d877025c..4c244f014 100644 --- 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dimensionalityreduction/PCA.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dimensionalityreduction/PCA.java @@ -118,7 +118,7 @@ public class PCA { * @return A matrix of size count rows by N columns */ public INDArray generateGaussianSamples(long count) { - INDArray samples = Nd4j.randn(new long[] {count, eigenvalues.columns()}); + INDArray samples = Nd4j.randn(count, eigenvalues.columns()); INDArray factors = Transforms.pow(eigenvalues, -0.5, true); samples.muliRowVector(factors); return Nd4j.tensorMmul(eigenvectors, samples, new int[][] {{1}, {1}}).transposei().addiRowVector(mean); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dimensionalityreduction/RandomProjection.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dimensionalityreduction/RandomProjection.java index 779365b50..326b9d45f 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dimensionalityreduction/RandomProjection.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dimensionalityreduction/RandomProjection.java @@ -35,9 +35,9 @@ import java.util.List; public class RandomProjection { private int components; - private Random rng; + private final Random rng; private double eps; - private boolean autoMode; + private final boolean autoMode; public RandomProjection(double eps, Random rng){ this.rng = rng; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/env/impl/WorkspacesDebugAction.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/env/impl/WorkspacesDebugAction.java index 2bb87f2bc..e6406a5ae 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/env/impl/WorkspacesDebugAction.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/env/impl/WorkspacesDebugAction.java @@ -35,8 +35,8 @@ public class WorkspacesDebugAction implements EnvironmentalAction { switch (value.toUpperCase()) { case "SPILL_EVERYTHING": { 
Nd4j.getWorkspaceManager().setDebugMode(DebugMode.SPILL_EVERYTHING); - }; - break; + } + break; case "BYPASS_EVERYTHING": { Nd4j.getWorkspaceManager().setDebugMode(DebugMode.BYPASS_EVERYTHING); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/BaseBlasWrapper.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/BaseBlasWrapper.java index 62a4cb445..24f92f98e 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/BaseBlasWrapper.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/BaseBlasWrapper.java @@ -187,7 +187,7 @@ public abstract class BaseBlasWrapper implements BlasWrapper { LinAlgExceptions.assertMatrix(a); if (a.data().dataType() == DataType.DOUBLE) { - return gemv((double) alpha, a, x, (double) beta, y); + return gemv(alpha, a, x, (double) beta, y); } level2().gemv('N', 'N', alpha, a, x, beta, y); return y; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/BaseNDArrayFactory.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/BaseNDArrayFactory.java index 667432f6c..3458ed06b 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/BaseNDArrayFactory.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/BaseNDArrayFactory.java @@ -749,7 +749,7 @@ public abstract class BaseNDArrayFactory implements NDArrayFactory { INDArray ret = Nd4j.createUninitialized(new long[] {indexes.length, vectorLength}, order); for (int cnt = 0; cnt < indexes.length; cnt++) { - ret.putRow(cnt, source.tensorAlongDimension((int) indexes[cnt], sourceDimension)); + ret.putRow(cnt, source.tensorAlongDimension(indexes[cnt], sourceDimension)); } return ret; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/NDArrayFactory.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/NDArrayFactory.java index 6f319e31d..fe162c8e2 100644 --- 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/NDArrayFactory.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/NDArrayFactory.java @@ -1374,7 +1374,7 @@ public interface NDArrayFactory { * @param file the file to create the map from * @return Map */ - public Map createFromNpzFile(File file) throws Exception; + Map createFromNpzFile(File file) throws Exception; /** * Convert an {@link INDArray} diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/Nd4j.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/Nd4j.java index 46f538dfc..2dfff5fbd 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/Nd4j.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/Nd4j.java @@ -259,7 +259,7 @@ public class Nd4j { private static AffinityManager affinityManager; private static MemoryManager memoryManager; - private static AtomicBoolean fallbackMode; + private static final AtomicBoolean fallbackMode; protected static Properties props = new Properties(); @@ -1961,7 +1961,7 @@ public class Nd4j { return Nd4j.getExecutioner().exec(new Linspace((double) lower, num, (double)step, dtype)); } else { - throw new IllegalStateException("Illegal data type for linspace: " + dtype.toString()); + throw new IllegalStateException("Illegal data type for linspace: " + dtype); } } @@ -1999,7 +1999,7 @@ public class Nd4j { return linspace((double) lower, (double)upper, (int) num, dtype); } else { - throw new IllegalStateException("Illegal data type for linspace: " + dtype.toString()); + throw new IllegalStateException("Illegal data type for linspace: " + dtype); } } @@ -4482,7 +4482,7 @@ public class Nd4j { * @return the created ndarray */ public static INDArray valueArrayOf(long[] shape, float value) { - return valueArrayOf(shape, (double)value, DataType.FLOAT); + return valueArrayOf(shape, value, DataType.FLOAT); } /** @@ -5214,7 +5214,7 @@ public class Nd4j { try { 
action.process(value); } catch (Exception e2) { - logger.info("Failed to process env variable [" + e + "], got exception: " + e2.toString()); + logger.info("Failed to process env variable [" + e + "], got exception: " + e2); } } } @@ -5776,7 +5776,7 @@ public class Nd4j { val doubles = new float[prod]; val sb = bb.order(_order).asShortBuffer(); for (int e = 0; e < prod; e++) - doubles[e] = HalfIndexer.toFloat((int) sb.get(e)); + doubles[e] = HalfIndexer.toFloat(sb.get(e)); return Nd4j.create(doubles, shapeOf, stridesOf, ordering, DataType.HALF); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/Nd4jBackend.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/Nd4jBackend.java index cbce0c2f5..b56410cd3 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/Nd4jBackend.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/Nd4jBackend.java @@ -220,15 +220,15 @@ public abstract class Nd4jBackend { java.net.URLClassLoader loader = (URLClassLoader) ND4JClassLoading.getNd4jClassloader(); java.net.URL url = jar.toURI().toURL(); /*Disallow if already loaded*/ - for (java.net.URL it : java.util.Arrays.asList(loader.getURLs())) { + for (java.net.URL it : loader.getURLs()) { if (it.equals(url)) { return; } } java.lang.reflect.Method method = - java.net.URLClassLoader.class.getDeclaredMethod("addURL", new Class[] {java.net.URL.class}); + java.net.URLClassLoader.class.getDeclaredMethod("addURL", java.net.URL.class); method.setAccessible(true); /*promote the method to public access*/ - method.invoke(loader, new Object[] {url}); + method.invoke(loader, url); } catch (final java.lang.NoSuchMethodException | java.lang.IllegalAccessException | java.net.MalformedURLException | java.lang.reflect.InvocationTargetException e) { throw new NoAvailableBackendException(e); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/RandomFactory.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/RandomFactory.java index 93911850d..c97f4261a 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/RandomFactory.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/RandomFactory.java @@ -25,8 +25,8 @@ import org.nd4j.linalg.api.rng.Random; import java.lang.reflect.Constructor; public class RandomFactory { - private ThreadLocal threadRandom = new ThreadLocal<>(); - private Class randomClass; + private final ThreadLocal threadRandom = new ThreadLocal<>(); + private final Class randomClass; public RandomFactory(Class randomClass) { this.randomClass = randomClass; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/heartbeat/Heartbeat.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/heartbeat/Heartbeat.java index 32f6c7263..4e73774e2 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/heartbeat/Heartbeat.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/heartbeat/Heartbeat.java @@ -30,7 +30,7 @@ import java.util.concurrent.atomic.AtomicBoolean; public class Heartbeat { private static final Heartbeat INSTANCE = new Heartbeat(); private volatile long serialVersionID; - private AtomicBoolean enabled = new AtomicBoolean(true); + private final AtomicBoolean enabled = new AtomicBoolean(true); protected Heartbeat() { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/heartbeat/reports/Environment.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/heartbeat/reports/Environment.java index f58ab7ea7..abccf8706 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/heartbeat/reports/Environment.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/heartbeat/reports/Environment.java @@ -64,7 +64,6 @@ public class Environment implements Serializable { public String toCompactString() { - StringBuilder builder = new StringBuilder(); /* new format is: @@ -79,10 +78,10 @@ 
public class Environment implements Serializable { builder.append(backendUsed).append(" "); */ - builder.append(backendUsed).append(" (").append(numCores).append(" cores ") - .append(Math.max(availableMemory / 1024 / 1024 / 1024, 1)).append("GB ").append(osName) - .append(" ").append(osArch).append(")"); + String builder = backendUsed + " (" + numCores + " cores " + + Math.max(availableMemory / 1024 / 1024 / 1024, 1) + "GB " + osName + + " " + osArch + ")"; - return builder.toString(); + return builder; } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/heartbeat/reports/Task.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/heartbeat/reports/Task.java index b9df4d545..a0b32ccae 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/heartbeat/reports/Task.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/heartbeat/reports/Task.java @@ -42,12 +42,11 @@ public class Task { private int numSamples; public String toCompactString() { - StringBuilder builder = new StringBuilder(); - builder.append("F: ").append(numFeatures).append("/"); - builder.append("L: ").append(numLabels).append("/"); - builder.append("S: ").append(numSamples).append(" "); + String builder = "F: " + numFeatures + "/" + + "L: " + numLabels + "/" + + "S: " + numSamples + " "; - return builder.toString(); + return builder; } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/heartbeat/utils/EnvironmentUtils.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/heartbeat/utils/EnvironmentUtils.java index e8ecf94cd..a5f2945bf 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/heartbeat/utils/EnvironmentUtils.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/heartbeat/utils/EnvironmentUtils.java @@ -67,12 +67,12 @@ public class EnvironmentUtils { return random.nextLong(); } catch (Exception e) { - ; // do nothing, just skip to next interface + // do nothing, just skip to next interface } } } 
catch (Exception e) { - ; // do nothing here + // do nothing here } return ret; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/BooleanIndexing.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/BooleanIndexing.java index 8d203ce56..f5fe68632 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/BooleanIndexing.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/BooleanIndexing.java @@ -56,10 +56,7 @@ public class BooleanIndexing { if (cond instanceof BaseCondition) { long val = (long) Nd4j.getExecutioner().exec(new MatchCondition(n, cond)).getDouble(0); - if (val == n.length()) - return true; - else - return false; + return val == n.length(); } else { throw new RuntimeException("Can only execute BaseCondition conditions using this method"); @@ -85,10 +82,7 @@ public class BooleanIndexing { long tadLength = Shape.getTADLength(n.shape(), dimension); for (int i = 0; i < arr.length(); i++) { - if (arr.getDouble(i) == tadLength) - result[i] = true; - else - result[i] = false; + result[i] = arr.getDouble(i) == tadLength; } return result; @@ -113,10 +107,7 @@ public class BooleanIndexing { boolean[] result = new boolean[(int) arr.length()]; for (int i = 0; i < arr.length(); i++) { - if (arr.getDouble(i) > 0) - result[i] = true; - else - result[i] = false; + result[i] = arr.getDouble(i) > 0; } return result; @@ -133,10 +124,7 @@ public class BooleanIndexing { if (cond instanceof BaseCondition) { long val = (long) Nd4j.getExecutioner().exec(new MatchCondition(n, cond)).getDouble(0); - if (val > 0) - return true; - else - return false; + return val > 0; } else { throw new RuntimeException("Can only execute BaseCondition conditions using this method"); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/IndexInfo.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/IndexInfo.java index 8499f9f58..9f7c134bd 100644 --- 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/IndexInfo.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/IndexInfo.java @@ -24,7 +24,7 @@ package org.nd4j.linalg.indexing; * @author Adam Gibson */ public class IndexInfo { - private INDArrayIndex[] indexes; + private final INDArrayIndex[] indexes; private boolean[] point; private boolean[] newAxis; private int numNewAxes = 0; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/NDArrayIndex.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/NDArrayIndex.java index cfd583d63..8c856bcae 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/NDArrayIndex.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/NDArrayIndex.java @@ -39,8 +39,8 @@ import java.util.List; @Slf4j public abstract class NDArrayIndex implements INDArrayIndex { - private long[] indices; - private static NewAxis NEW_AXIS = new NewAxis(); + private final long[] indices; + private static final NewAxis NEW_AXIS = new NewAxis(); /** @@ -655,9 +655,7 @@ public abstract class NDArrayIndex implements INDArrayIndex { NDArrayIndex that = (NDArrayIndex) o; - if (!Arrays.equals(indices, that.indices)) - return false; - return true; + return Arrays.equals(indices, that.indices); } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/PointIndex.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/PointIndex.java index acd69d529..796a2a3da 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/PointIndex.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/PointIndex.java @@ -28,7 +28,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; */ @EqualsAndHashCode public class PointIndex implements INDArrayIndex { - private long point; + private final long point; /** * diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/SpecifiedIndex.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/SpecifiedIndex.java index f2109e8d2..702f315bc 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/SpecifiedIndex.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/SpecifiedIndex.java @@ -130,7 +130,7 @@ public class SpecifiedIndex implements INDArrayIndex { */ public static class SpecifiedIndexesGenerator implements Generator>> { private int index = 0; - private INDArrayIndex[] indexes; + private final INDArrayIndex[] indexes; /** * The indexes to generate from @@ -166,7 +166,7 @@ public class SpecifiedIndex implements INDArrayIndex { */ public static class SparseSpecifiedIndexesGenerator implements Generator>> { private int index = 0; - private INDArrayIndex[] indexes; + private final INDArrayIndex[] indexes; /** * The indexes to generate from diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/conditions/And.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/conditions/And.java index 82178d754..cff235d50 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/conditions/And.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/conditions/And.java @@ -22,7 +22,7 @@ package org.nd4j.linalg.indexing.conditions; public class And implements Condition { - private Condition[] conditions; + private final Condition[] conditions; public And(Condition... 
conditions) { this.conditions = conditions; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/conditions/ConditionEquals.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/conditions/ConditionEquals.java index cd8e78b87..aeedb79f2 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/conditions/ConditionEquals.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/conditions/ConditionEquals.java @@ -22,7 +22,7 @@ package org.nd4j.linalg.indexing.conditions; public class ConditionEquals implements Condition { - private Condition[] conditions; + private final Condition[] conditions; public ConditionEquals(Condition... conditions) { this.conditions = conditions; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/conditions/Not.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/conditions/Not.java index db6cfd6a7..182924c91 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/conditions/Not.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/conditions/Not.java @@ -22,7 +22,7 @@ package org.nd4j.linalg.indexing.conditions; public class Not implements Condition { - private Condition opposite; + private final Condition opposite; /** * Returns condition ID for native side diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/conditions/Or.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/conditions/Or.java index 6c1f6adee..5a7e971d8 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/conditions/Or.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/conditions/Or.java @@ -22,7 +22,7 @@ package org.nd4j.linalg.indexing.conditions; public class Or implements Condition { - private Condition[] conditions; + private final Condition[] conditions; public Or(Condition... 
conditions) { this.conditions = conditions; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/functions/StableNumber.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/functions/StableNumber.java index 62d1e23c3..b6528b1ec 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/functions/StableNumber.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/functions/StableNumber.java @@ -29,7 +29,7 @@ import org.nd4j.common.function.Function; * or nan */ public class StableNumber implements Function { - private Type type; + private final Type type; public enum Type { DOUBLE, FLOAT diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/functions/Value.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/functions/Value.java index 6464a7db7..94207dfd7 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/functions/Value.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/indexing/functions/Value.java @@ -23,7 +23,7 @@ package org.nd4j.linalg.indexing.functions; import org.nd4j.common.function.Function; public class Value implements Function { - private Number number; + private final Number number; public Value(Number number) { this.number = number; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/lossfunctions/impl/LossBinaryXENT.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/lossfunctions/impl/LossBinaryXENT.java index 70ad03d19..9cf173894 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/lossfunctions/impl/LossBinaryXENT.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/lossfunctions/impl/LossBinaryXENT.java @@ -117,7 +117,7 @@ public class LossBinaryXENT implements ILossFunction { INDArray scoreArr; if (activationFn instanceof ActivationSoftmax) { //TODO Post GPU support for custom ops: Use LogSoftMax op to avoid numerical issues when calculating 
score - INDArray logsoftmax = Nd4j.exec((CustomOp) new SoftMax(preOutput, preOutput.ulike(), -1))[0]; + INDArray logsoftmax = Nd4j.exec(new SoftMax(preOutput, preOutput.ulike(), -1))[0]; Transforms.log(logsoftmax, false); scoreArr = logsoftmax.muli(labels); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/lossfunctions/impl/LossMixtureDensity.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/lossfunctions/impl/LossMixtureDensity.java index 14894362b..84babae8c 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/lossfunctions/impl/LossMixtureDensity.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/lossfunctions/impl/LossMixtureDensity.java @@ -85,7 +85,7 @@ public class LossMixtureDensity implements ILossFunction { // through Nd4j operations in order to increase performance. public MixtureDensityComponents extractComponents(INDArray output) { long outputSize = output.size(1); - if (outputSize != (mLabelWidth + 2) * mMixtures) { + if (outputSize != (long) (mLabelWidth + 2) * mMixtures) { throw new IllegalArgumentException( "Network output size " + outputSize + " must be (labels+2)*mixtures where labels = " + mLabelWidth + " and mixtures = " + mMixtures); @@ -114,7 +114,7 @@ public class LossMixtureDensity implements ILossFunction { // Alpha is a softmax because // the alpha should all sum to 1 for a given gaussian mixture. - mdc.alpha = Nd4j.exec((CustomOp) new SoftMax(mdc.alpha, mdc.alpha, -1))[0]; + mdc.alpha = Nd4j.exec(new SoftMax(mdc.alpha, mdc.alpha, -1))[0]; // Mu comes directly from the network as an unmolested value. 
// Note that this effectively means that the output layer of @@ -254,10 +254,10 @@ public class LossMixtureDensity implements ILossFunction { INDArray dLdZMu = Nd4j.create(nSamples, mMixtures, mLabelWidth); for (int k = 0; k < mLabelWidth; k++) { dLdZMu.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(k)}, - labelsMinusMu.get(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all(), - NDArrayIndex.point(k)}).muli(pi).divi(variance).negi()); + labelsMinusMu.get(NDArrayIndex.all(), NDArrayIndex.all(), + NDArrayIndex.point(k)).muli(pi).divi(variance).negi()); } - dLdZMu = dLdZMu.reshape(nSamples, mMixtures * mLabelWidth); + dLdZMu = dLdZMu.reshape(nSamples, (long) mMixtures * mLabelWidth); // Place components of gradient into gradient holder. gradient.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.interval(0, mMixtures)}, dLdZAlpha); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/ops/transforms/Transforms.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/ops/transforms/Transforms.java index bffdffdf5..fe44374d9 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/ops/transforms/Transforms.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/ops/transforms/Transforms.java @@ -519,7 +519,7 @@ public class Transforms { * @return */ public static INDArray softmax(INDArray in, boolean copy) { - return Nd4j.getExecutioner().exec((CustomOp) new SoftMax(in, (copy ? in.ulike() : in), -1))[0]; + return Nd4j.getExecutioner().exec(new SoftMax(in, (copy ? 
in.ulike() : in), -1))[0]; } /** diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/OpProfiler.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/OpProfiler.java index 067b25370..65339d4f0 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/OpProfiler.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/OpProfiler.java @@ -511,7 +511,7 @@ public class OpProfiler { if (operands[e] == null) buffer.append("null"); else - buffer.append(new String("" + operands[e].ordering()).toUpperCase()); + buffer.append(("" + operands[e].ordering()).toUpperCase()); if (e < operands.length - 1) buffer.append(" x "); @@ -631,8 +631,8 @@ public class OpProfiler { if (x == z || y == z) { return processOperands(x, y); } else { - PenaltyCause causeXY[] = processOperands(x, y); - PenaltyCause causeXZ[] = processOperands(x, z); + PenaltyCause[] causeXY = processOperands(x, y); + PenaltyCause[] causeXZ = processOperands(x, z); if ((causeXY.length == 1 && causeXY[0] == NONE) && (causeXZ.length == 1 && causeXZ[0] == NONE)) { return causeXY; @@ -675,7 +675,7 @@ public class OpProfiler { if (operands[e] == null && operands[e + 1] == null) continue; - PenaltyCause lc[] = processOperands(operands[e], operands[e + 1]); + PenaltyCause[] lc = processOperands(operands[e], operands[e + 1]); for (PenaltyCause cause : lc) { if (cause != NONE && !causes.contains(cause)) diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/StackAggregator.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/StackAggregator.java index 5383dead5..5151a745e 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/StackAggregator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/StackAggregator.java @@ -24,7 +24,7 @@ import org.nd4j.linalg.profiler.data.primitives.StackDescriptor; import org.nd4j.linalg.profiler.data.primitives.StackTree; 
public class StackAggregator { - private StackTree tree = new StackTree(); + private final StackTree tree = new StackTree(); public StackAggregator() { // nothing to do here so far diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/StringAggregator.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/StringAggregator.java index 465bdfa4e..7eb8086da 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/StringAggregator.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/StringAggregator.java @@ -32,8 +32,8 @@ import java.util.concurrent.atomic.AtomicLong; public class StringAggregator { - private Map times = new ConcurrentHashMap<>(); - private Map longCalls = new ConcurrentHashMap<>(); + private final Map times = new ConcurrentHashMap<>(); + private final Map longCalls = new ConcurrentHashMap<>(); private static final long THRESHOLD = 100000; diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/StringCounter.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/StringCounter.java index 5054b680a..ab9784de6 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/StringCounter.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/StringCounter.java @@ -28,8 +28,8 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; public class StringCounter { - private Map counter = new ConcurrentHashMap<>(); - private AtomicLong totals = new AtomicLong(0); + private final Map counter = new ConcurrentHashMap<>(); + private final AtomicLong totals = new AtomicLong(0); public StringCounter() { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/primitives/StackDescriptor.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/primitives/StackDescriptor.java index 60656802c..3121b9785 100644 
--- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/primitives/StackDescriptor.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/primitives/StackDescriptor.java @@ -33,9 +33,9 @@ import java.util.Arrays; @Slf4j public class StackDescriptor { @Getter - protected StackTraceElement stackTrace[]; + protected StackTraceElement[] stackTrace; - public StackDescriptor(@NonNull StackTraceElement stack[]) { + public StackDescriptor(@NonNull StackTraceElement[] stack) { // we cut off X first elements from stack, because they belong to profiler // basically, we just want to make sure, no profiler-related code is mentioned in stack trace int start = 0; @@ -46,7 +46,6 @@ public class StackDescriptor { // in tests it's quite possible to have no DefaultOpExecutioner calls being used if (start == stack.length) { - ; for (start = 0; start < stack.length; start++) { if (!stack[start + 1].getClassName().contains("OpProfiler") && !stack[start + 1].getClassName().contains("StackAggregator")) diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/primitives/StackNode.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/primitives/StackNode.java index 316170700..eefe67394 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/primitives/StackNode.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/primitives/StackNode.java @@ -49,12 +49,12 @@ public class StackNode implements Comparable { builder.append(" "); } - builder.append("").append(nodeURI); + builder.append(nodeURI); if (displayCounts) builder.append(" ").append(counter.get()).append(" us"); - System.out.println(builder.toString()); + System.out.println(builder); for (StackNode node : entries.values()) { node.traverse(ownLevel + 1, displayCounts); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/primitives/StackTree.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/primitives/StackTree.java index 1785737f3..60d3a1e4b 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/primitives/StackTree.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/primitives/StackTree.java @@ -41,14 +41,14 @@ public class StackTree { } public String renderTree(boolean displayCounts) { - StringBuilder builder = new StringBuilder(); + String builder = ""; // we'll always have single entry here, but let's keep loop here for (StackNode cNode : basement.values()) { cNode.traverse(0, displayCounts); } - return builder.toString(); + return builder; } public void consumeStackTrace(@NonNull StackDescriptor descriptor) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/primitives/TimeSet.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/primitives/TimeSet.java index 97f36f73a..0062b2633 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/primitives/TimeSet.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/profiler/data/primitives/TimeSet.java @@ -25,7 +25,7 @@ import java.util.List; public class TimeSet implements Comparable { - private List times = new ArrayList<>(); + private final List times = new ArrayList<>(); private long sum = 0; public void addTime(long time) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/schedule/MapSchedule.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/schedule/MapSchedule.java index 578996bb9..ce28a5901 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/schedule/MapSchedule.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/schedule/MapSchedule.java @@ -91,8 +91,8 @@ public class MapSchedule implements ISchedule { */ public static class Builder { - private ScheduleType scheduleType; - private Map values = new HashMap<>(); + private final 
ScheduleType scheduleType; + private final Map values = new HashMap<>(); /** * @param scheduleType Schedule opType to use diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/string/NDArrayStrings.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/string/NDArrayStrings.java index efcd327df..dae8056c0 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/string/NDArrayStrings.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/string/NDArrayStrings.java @@ -215,11 +215,10 @@ public class NDArrayStrings { } else if (arr.isRowVector()) { //a slice from a higher dim array if (offset == 0) { - StringBuilder sb = new StringBuilder(); - sb.append("["); - sb.append(vectorToString(arr, summarize)); - sb.append("]"); - return sb.toString(); + String sb = "[" + + vectorToString(arr, summarize) + + "]"; + return sb; } return vectorToString(arr, summarize); } else { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/util/DataSetUtils.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/util/DataSetUtils.java index 5ec3fc2de..1e374bab3 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/util/DataSetUtils.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/util/DataSetUtils.java @@ -32,7 +32,7 @@ import org.nd4j.common.tools.SIS; public class DataSetUtils { // - private SIS sis; + private final SIS sis; // public DataSetUtils( SIS sis, @@ -179,7 +179,7 @@ public class DataSetUtils { if (in_INDA.rows() > Integer.MAX_VALUE) { throw new ND4JArraySizeException(); } - int i_CharsCount = BTools.getIndexCharsCount( (int) in_INDA.rows() - 1 ); + int i_CharsCount = BTools.getIndexCharsCount( in_INDA.rows() - 1 ); // oinfo = ""; oinfo += BTools.getMtLvESS( mtLv ); @@ -201,7 +201,7 @@ public class DataSetUtils { // int c_I = 0; // - for ( int j = (int) in_INDA.columns() - 1; j >= 0; j-- ) { + for (int j = in_INDA.columns() - 1; j >= 0; j-- ) { // if ( c_I > c_End_I ) break; // @@ -221,7 
+221,7 @@ public class DataSetUtils { if ( ot_INDA != null ) { if (ot_INDA.columns() - 1 > Integer.MAX_VALUE) throw new ND4JArraySizeException(); - for ( int j = (int) ot_INDA.columns() - 1; j >= 0; j-- ) { + for (int j = ot_INDA.columns() - 1; j >= 0; j-- ) { // if ( c_I > c_End_I ) break; // @@ -349,7 +349,7 @@ public class DataSetUtils { double j_Dbl = -1; if (INDA.rows() - 1 > Integer.MAX_VALUE) throw new ND4JArraySizeException(); - int i_CharsCount = BTools.getIndexCharsCount( (int) INDA.rows() - 1 ); + int i_CharsCount = BTools.getIndexCharsCount( INDA.rows() - 1 ); // if ( !turned ) { //= standard oinfo = ""; @@ -370,7 +370,7 @@ public class DataSetUtils { int c_I = 0; if (INDA.columns() - 1 > Integer.MAX_VALUE) throw new ND4JArraySizeException(); - for ( int j = (int) INDA.columns() - 1; j >= 0; j-- ) { + for (int j = INDA.columns() - 1; j >= 0; j-- ) { // if ( c_I > c_End_I ) break; // diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/util/ND4JTestUtils.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/util/ND4JTestUtils.java index 41018c288..ad386be2f 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/util/ND4JTestUtils.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/util/ND4JTestUtils.java @@ -49,7 +49,7 @@ public class ND4JTestUtils { } /** - * A function for use with {@link #validateSerializedArrays(File, File, boolean, BiFunction)} using {@link INDArray#equals(Object)} + * A function for use with {@link #validateSerializedArrays(File, File, boolean, BiFunction)} using {@code INDArray#equals(Object)} */ public static class EqualsFn implements BiFunction { @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/util/NDArrayMath.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/util/NDArrayMath.java index e20b79f30..cf55c721a 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/util/NDArrayMath.java +++ 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/util/NDArrayMath.java @@ -98,7 +98,7 @@ public class NDArrayMath { */ public static long vectorsPerSlice(INDArray arr) { if (arr.rank() > 2) { - return ArrayUtil.prodLong(new long[] {arr.size(-1), arr.size(-2)}); + return ArrayUtil.prodLong(arr.size(-1), arr.size(-2)); } return arr.slices(); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/workspace/BaseWorkspaceMgr.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/workspace/BaseWorkspaceMgr.java index af0c04d08..9baf97578 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/workspace/BaseWorkspaceMgr.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/workspace/BaseWorkspaceMgr.java @@ -129,9 +129,7 @@ public abstract class BaseWorkspaceMgr> implements WorkspaceMg @Override public void setWorkspace(@NonNull T forEnum, @NonNull String wsName, @NonNull WorkspaceConfiguration configuration) { - if(scopeOutOfWs.contains(forEnum)){ - scopeOutOfWs.remove(forEnum); - } + scopeOutOfWs.remove(forEnum); setWorkspaceName(forEnum, wsName); setConfiguration(forEnum, configuration); } @@ -169,7 +167,7 @@ public abstract class BaseWorkspaceMgr> implements WorkspaceMg throw new ND4JWorkspaceException("Assertion failed: expected current workspace to be \"" + getWorkspaceName(arrayType) + "\" (for array type " + arrayType + ") - actual current workspace is " + (curr == null ? null : curr.getId()) + (msg == null ? 
"" : ": " + msg)); - }; + } } @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/serde/json/JsonMappers.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/serde/json/JsonMappers.java index 8190f6141..573d0ff81 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/serde/json/JsonMappers.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/serde/json/JsonMappers.java @@ -38,8 +38,8 @@ import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; @Slf4j public class JsonMappers { - private static ObjectMapper jsonMapper = configureMapper(new ObjectMapper()); - private static ObjectMapper yamlMapper = configureMapper(new ObjectMapper(new YAMLFactory())); + private static final ObjectMapper jsonMapper = configureMapper(new ObjectMapper()); + private static final ObjectMapper yamlMapper = configureMapper(new ObjectMapper(new YAMLFactory())); /** * @return The default/primary ObjectMapper for deserializing JSON objects diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/systeminfo/SystemInfo.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/systeminfo/SystemInfo.java index f292cf8eb..89175c137 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/systeminfo/SystemInfo.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/systeminfo/SystemInfo.java @@ -305,7 +305,7 @@ public class SystemInfo { sb.append(String.format(wsFormat, ws.getId(), (ws.isScopeActive() ? 
"OPEN" : "CLOSED"), fBytes(ws.getCurrentSize()), - String.valueOf(numCycles))).append("\n"); + numCycles)).append("\n"); } } sb.append(fBytes("Workspaces total size", totalWsSize)); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/versioncheck/VersionCheck.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/versioncheck/VersionCheck.java index 5ca7116f0..43b868342 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/versioncheck/VersionCheck.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/versioncheck/VersionCheck.java @@ -234,7 +234,7 @@ public class VersionCheck { try { URI uri = u.toURI(); - try (FileSystem fileSystem = (uri.getScheme().equals("jar") ? FileSystems.newFileSystem(uri, Collections.emptyMap()) : null)) { + try (FileSystem fileSystem = (uri.getScheme().equals("jar") ? FileSystems.newFileSystem(uri, Collections.emptyMap()) : null)) { Path myPath = Paths.get(uri); Files.walkFileTree(myPath, new SimpleFileVisitor() { @Override diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/BaseWeightInitScheme.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/BaseWeightInitScheme.java index be0abbcf5..898e97c5a 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/BaseWeightInitScheme.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/BaseWeightInitScheme.java @@ -29,7 +29,7 @@ import java.util.Arrays; @EqualsAndHashCode public abstract class BaseWeightInitScheme implements WeightInitScheme { - private char order; + private final char order; /** * Initialize with c weight ordering by default diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/ConstantInitScheme.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/ConstantInitScheme.java index 74370926c..7177e0302 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/ConstantInitScheme.java +++ 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/ConstantInitScheme.java @@ -32,7 +32,7 @@ import org.nd4j.weightinit.WeightInit; * @author Adam Gibson */ public class ConstantInitScheme extends BaseWeightInitScheme { - private double constant; + private final double constant; @Builder public ConstantInitScheme(char order,double constant) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/DistributionInitScheme.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/DistributionInitScheme.java index b9cc7469c..ac4c36698 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/DistributionInitScheme.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/DistributionInitScheme.java @@ -32,7 +32,7 @@ import org.nd4j.weightinit.WeightInit; * @author Adam Gibson */ public class DistributionInitScheme extends BaseWeightInitScheme { - private Distribution distribution; + private final Distribution distribution; @Builder public DistributionInitScheme(char order, Distribution distribution) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/LecunUniformInitScheme.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/LecunUniformInitScheme.java index 91c398424..7272bea70 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/LecunUniformInitScheme.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/LecunUniformInitScheme.java @@ -34,7 +34,7 @@ import org.nd4j.weightinit.WeightInit; */ public class LecunUniformInitScheme extends BaseWeightInitScheme { - private double fanIn; + private final double fanIn; @Builder public LecunUniformInitScheme(char order, double fanIn) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/ReluInitScheme.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/ReluInitScheme.java index 4843cd6cf..f12857f55 100644 --- 
a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/ReluInitScheme.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/ReluInitScheme.java @@ -35,7 +35,7 @@ import org.nd4j.weightinit.WeightInit; */ public class ReluInitScheme extends BaseWeightInitScheme { - private double fanIn; + private final double fanIn; @Builder public ReluInitScheme(char order,double fanIn) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/ReluUniformInitScheme.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/ReluUniformInitScheme.java index 0ffa0d6ef..907165d9b 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/ReluUniformInitScheme.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/ReluUniformInitScheme.java @@ -35,7 +35,7 @@ import org.nd4j.weightinit.WeightInit; */ public class ReluUniformInitScheme extends BaseWeightInitScheme { - private double fanIn; + private final double fanIn; @Builder public ReluUniformInitScheme(char order, double fanIn) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/SigmoidUniformInitScheme.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/SigmoidUniformInitScheme.java index 615070802..43284932d 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/SigmoidUniformInitScheme.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/SigmoidUniformInitScheme.java @@ -35,8 +35,8 @@ import org.nd4j.weightinit.WeightInit; */ public class SigmoidUniformInitScheme extends BaseWeightInitScheme { - private double fanIn; - private double fanOut; + private final double fanIn; + private final double fanOut; @Builder public SigmoidUniformInitScheme(char order, double fanIn,double fanOut) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/UniformInitScheme.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/UniformInitScheme.java index 680b5ce4a..7aa09e5eb 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/UniformInitScheme.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/UniformInitScheme.java @@ -35,7 +35,7 @@ import org.nd4j.weightinit.WeightInit; */ public class UniformInitScheme extends BaseWeightInitScheme { - private double fanIn; + private final double fanIn; @Builder public UniformInitScheme(char order, double fanIn) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalFanAvgInitScheme.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalFanAvgInitScheme.java index b384973a6..cc7530e5a 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalFanAvgInitScheme.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalFanAvgInitScheme.java @@ -35,8 +35,8 @@ import org.nd4j.weightinit.WeightInit; */ public class VarScalingNormalFanAvgInitScheme extends BaseWeightInitScheme { - private double fanIn; - private double fanOut; + private final double fanIn; + private final double fanOut; @Builder public VarScalingNormalFanAvgInitScheme(char order, double fanIn, double fanOut) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalFanInInitScheme.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalFanInInitScheme.java index e3839efc0..c0d7057ec 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalFanInInitScheme.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalFanInInitScheme.java @@ -35,7 +35,7 @@ import org.nd4j.weightinit.WeightInit; */ public class VarScalingNormalFanInInitScheme extends BaseWeightInitScheme { - private double fanIn; + private final double fanIn; 
@Builder public VarScalingNormalFanInInitScheme(char order, double fanIn) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalFanOutInitScheme.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalFanOutInitScheme.java index 24fa4c944..5bdaa2066 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalFanOutInitScheme.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalFanOutInitScheme.java @@ -36,7 +36,7 @@ import org.nd4j.weightinit.WeightInit; */ public class VarScalingNormalFanOutInitScheme extends BaseWeightInitScheme { - private double fanOut; + private final double fanOut; @Builder public VarScalingNormalFanOutInitScheme(char order, double fanOut) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalUniformFanInInitScheme.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalUniformFanInInitScheme.java index 4a44b6a36..74ace0a67 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalUniformFanInInitScheme.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalUniformFanInInitScheme.java @@ -35,7 +35,7 @@ import org.nd4j.weightinit.WeightInit; */ public class VarScalingNormalUniformFanInInitScheme extends BaseWeightInitScheme { - private double fanIn; + private final double fanIn; @Builder public VarScalingNormalUniformFanInInitScheme(char order, double fanIn) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalUniformFanOutInitScheme.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalUniformFanOutInitScheme.java index a2e5c49e0..ffe71b716 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalUniformFanOutInitScheme.java +++ 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingNormalUniformFanOutInitScheme.java @@ -34,7 +34,7 @@ import org.nd4j.weightinit.WeightInit; */ public class VarScalingNormalUniformFanOutInitScheme extends BaseWeightInitScheme { - private double fanOut; + private final double fanOut; @Builder public VarScalingNormalUniformFanOutInitScheme(char order, double fanOut) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingUniformFanAvgInitScheme.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingUniformFanAvgInitScheme.java index a1aa1a54a..fe7485d81 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingUniformFanAvgInitScheme.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/VarScalingUniformFanAvgInitScheme.java @@ -35,8 +35,8 @@ import org.nd4j.weightinit.WeightInit; */ public class VarScalingUniformFanAvgInitScheme extends BaseWeightInitScheme { - private double fanIn; - private double fanOut; + private final double fanIn; + private final double fanOut; @Builder public VarScalingUniformFanAvgInitScheme(char order, double fanIn, double fanOut) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/XavierFanInInitScheme.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/XavierFanInInitScheme.java index d361d645d..dd9ec6ab6 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/XavierFanInInitScheme.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/XavierFanInInitScheme.java @@ -35,7 +35,7 @@ import org.nd4j.weightinit.WeightInit; */ public class XavierFanInInitScheme extends BaseWeightInitScheme { - private double fanIn; + private final double fanIn; @Builder public XavierFanInInitScheme(char order, double fanIn) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/XavierInitScheme.java 
b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/XavierInitScheme.java index c10a40ce5..a10026407 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/XavierInitScheme.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/XavierInitScheme.java @@ -35,8 +35,8 @@ import org.nd4j.weightinit.WeightInit; */ public class XavierInitScheme extends BaseWeightInitScheme { - private double fanIn; - private double fanOut; + private final double fanIn; + private final double fanOut; @Builder public XavierInitScheme(char order, double fanIn, double fanOut) { diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/XavierUniformInitScheme.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/XavierUniformInitScheme.java index 2bfebd419..ecf3c928e 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/XavierUniformInitScheme.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/weightinit/impl/XavierUniformInitScheme.java @@ -29,8 +29,8 @@ import org.nd4j.weightinit.WeightInit; public class XavierUniformInitScheme extends BaseWeightInitScheme { - private double fanIn; - private double fanOut; + private final double fanIn; + private final double fanOut; @Builder public XavierUniformInitScheme(char order, double fanIn, double fanOut) { diff --git a/cavis-dnn/cavis-dnn-common-tests/src/main/java/org/deeplearning4j/BaseDL4JTest.java b/cavis-dnn/cavis-dnn-common-tests/src/main/java/org/deeplearning4j/BaseDL4JTest.java index cfaae7561..f364c49d4 100644 --- a/cavis-dnn/cavis-dnn-common-tests/src/main/java/org/deeplearning4j/BaseDL4JTest.java +++ b/cavis-dnn/cavis-dnn-common-tests/src/main/java/org/deeplearning4j/BaseDL4JTest.java @@ -156,7 +156,7 @@ public abstract class BaseDL4JTest { int threadsAfter = ManagementFactory.getThreadMXBean().getThreadCount(); long duration = System.currentTimeMillis() - startTime; - 
sb.append(getClass().getSimpleName()).append(".").append("") + sb.append(getClass().getSimpleName()).append(".") .append(": ").append(duration).append(" ms") .append(", threadCount: (").append(threadCountBefore).append("->").append(threadsAfter).append(")") .append(", jvmTotal=").append(jvmTotal) diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/base/Preconditions.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/base/Preconditions.java index c8bc3966d..c709d29b8 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/base/Preconditions.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/base/Preconditions.java @@ -687,12 +687,12 @@ public final class Preconditions { } else { if(nextCustom < 0 || (nextIdx > 0 && nextIdx < nextCustom)){ //%s tag - sb.append(message.substring(indexOfStart, nextIdx)) + sb.append(message, indexOfStart, nextIdx) .append(formatArg(args[i])); indexOfStart = nextIdx + 2; } else { //Custom tag - sb.append(message.substring(indexOfStart, nextCustom)); + sb.append(message, indexOfStart, nextCustom); String s = FORMATTERS.get(nextCustomTag).format(nextCustomTag, args[i]); sb.append(s); indexOfStart = nextCustom + nextCustomTag.length(); diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/CompactHeapStringList.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/CompactHeapStringList.java index b7a25f248..dec86b34b 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/CompactHeapStringList.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/CompactHeapStringList.java @@ -286,7 +286,7 @@ public class CompactHeapStringList implements List { while (e1.hasNext() && e2.hasNext()) { String o1 = e1.next(); Object o2 = e2.next(); - if (!(o1 == null ? 
o2 == null : o1.equals(o2))) + if (!(Objects.equals(o1, o2))) return false; } return !(e1.hasNext() || e2.hasNext()); diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/IntArrayKeyMap.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/IntArrayKeyMap.java index 17de2f5a1..e4895051b 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/IntArrayKeyMap.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/IntArrayKeyMap.java @@ -28,7 +28,7 @@ import java.util.*; public class IntArrayKeyMap implements Map { - private Map map = new LinkedHashMap<>(); + private final Map map = new LinkedHashMap<>(); @Override public int size() { @@ -120,7 +120,7 @@ public class IntArrayKeyMap implements Map { public static class IntArray implements Comparable { @Getter - private int[] backingArray; + private final int[] backingArray; public IntArray(int[] backingArray) { Preconditions.checkNotNull(backingArray,"Backing array must not be null!"); diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/IntArrayKeySet.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/IntArrayKeySet.java index 1a8893cda..b1db74f72 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/IntArrayKeySet.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/IntArrayKeySet.java @@ -23,7 +23,7 @@ package org.nd4j.common.collection; import java.util.*; public class IntArrayKeySet implements Set { - private Set set = new LinkedHashSet<>(); + private final Set set = new LinkedHashSet<>(); @Override public int size() { return set.size(); diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/MultiDimensionalMap.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/MultiDimensionalMap.java index a88871152..03ec92701 100644 --- 
a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/MultiDimensionalMap.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/MultiDimensionalMap.java @@ -361,7 +361,7 @@ public class MultiDimensionalMap implements Serializable { MultiDimensionalMap that = (MultiDimensionalMap) o; - return !(backedMap != null ? !backedMap.equals(that.backedMap) : that.backedMap != null); + return !(!Objects.equals(backedMap, that.backedMap)); } diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/MultiDimensionalSet.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/MultiDimensionalSet.java index c5712d3eb..d16c190cb 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/MultiDimensionalSet.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/collection/MultiDimensionalSet.java @@ -28,7 +28,7 @@ import java.util.concurrent.ConcurrentSkipListSet; public class MultiDimensionalSet implements Set> { - private Set> backedSet; + private final Set> backedSet; public MultiDimensionalSet(Set> backedSet) { this.backedSet = backedSet; diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/holder/ObjectMapperHolder.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/holder/ObjectMapperHolder.java index 0cd5166a1..9df59f5f7 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/holder/ObjectMapperHolder.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/holder/ObjectMapperHolder.java @@ -26,7 +26,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; public class ObjectMapperHolder { - private static ObjectMapper objectMapper = getMapper(); + private static final ObjectMapper objectMapper = getMapper(); private ObjectMapperHolder() {} diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/AbstractFileResolvingResource.java 
b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/AbstractFileResolvingResource.java index eb21e75a6..9774b5cd3 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/AbstractFileResolvingResource.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/AbstractFileResolvingResource.java @@ -124,7 +124,7 @@ public abstract class AbstractFileResolvingResource extends AbstractResource { ((HttpURLConnection) con).setRequestMethod("HEAD"); } - return (long) con.getContentLength(); + return con.getContentLength(); } } diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/AbstractResource.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/AbstractResource.java index a6595a0e3..cf7ac3f38 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/AbstractResource.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/AbstractResource.java @@ -79,8 +79,7 @@ public abstract class AbstractResource implements Resource { long size = 0L; int read; - for (byte[] buf = new byte[255]; (read = is.read(buf)) != -1; size += (long) read) { - ; + for (byte[] buf = new byte[255]; (read = is.read(buf)) != -1; size += read) { } long var6 = size; @@ -89,7 +88,6 @@ public abstract class AbstractResource implements Resource { try { is.close(); } catch (IOException var14) { - ; } } diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/ClassPathResource.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/ClassPathResource.java index 7c5687ef9..b79d395a9 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/ClassPathResource.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/ClassPathResource.java @@ -358,7 +358,7 @@ public class ClassPathResource extends AbstractFileResolvingResource { private ZipFile zipFile; private ZipEntry entry; private InputStream stream; - private String resourceName; + private final String 
resourceName; public GetStreamFromZip(URL url, String resourceName) { this.url = url; diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/CollectionUtils.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/CollectionUtils.java index 4212e69e9..bb256403f 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/CollectionUtils.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/CollectionUtils.java @@ -50,10 +50,7 @@ public abstract class CollectionUtils { Object[] arr$ = arr; int len$ = arr.length; - for (int i$ = 0; i$ < len$; ++i$) { - Object elem = arr$[i$]; - collection.add(elem); - } + collection.addAll(Arrays.asList(arr$).subList(0, len$)); } } @@ -157,7 +154,7 @@ public abstract class CollectionUtils { } public static T findValueOfType(Collection collection, Class type) { - if (isEmpty((Collection) collection)) { + if (isEmpty(collection)) { return null; } else { Object value = null; @@ -179,7 +176,7 @@ public abstract class CollectionUtils { } public static Object findValueOfType(Collection collection, Class[] types) { - if (!isEmpty((Collection) collection) && !ObjectUtils.isEmpty(types)) { + if (!isEmpty(collection) && !ObjectUtils.isEmpty(types)) { Class[] arr$ = types; int len$ = types.length; @@ -260,7 +257,7 @@ public abstract class CollectionUtils { } public static MultiValueMap unmodifiableMultiValueMap(MultiValueMap map) { - Assert.notNull(map, "\'map\' must not be null"); + Assert.notNull(map, "'map' must not be null"); LinkedHashMap result = new LinkedHashMap(map.size()); Iterator unmodifiableMap = map.entrySet().iterator(); @@ -278,7 +275,7 @@ public abstract class CollectionUtils { private final Map> map; public MultiValueMapAdapter(Map> map) { - Assert.notNull(map, "\'map\' must not be null"); + Assert.notNull(map, "'map' must not be null"); this.map = map; } @@ -374,7 +371,7 @@ public abstract class CollectionUtils { } public boolean equals(Object other) { - return this == 
other ? true : this.map.equals(other); + return this == other || this.map.equals(other); } public int hashCode() { @@ -387,7 +384,7 @@ public abstract class CollectionUtils { } private static class EnumerationIterator implements Iterator { - private Enumeration enumeration; + private final Enumeration enumeration; public EnumerationIterator(Enumeration enumeration) { this.enumeration = enumeration; diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/ObjectUtils.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/ObjectUtils.java index e1dcf32e9..43f6db46b 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/ObjectUtils.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/ObjectUtils.java @@ -122,7 +122,7 @@ public abstract class ObjectUtils { } throw new IllegalArgumentException(String.format("constant [%s] does not exist in enum opType %s", - new Object[] {constant, enumValues.getClass().getComponentType().getName()})); + constant, enumValues.getClass().getComponentType().getName())); } public static A[] addObjectToArray(A[] array, O obj) { @@ -479,7 +479,7 @@ public abstract class ObjectUtils { sb.append(", "); } - sb.append(String.valueOf(array[i])); + sb.append(array[i]); } sb.append("}"); @@ -557,7 +557,7 @@ public abstract class ObjectUtils { sb.append(", "); } - sb.append("\'").append(array[i]).append("\'"); + sb.append("'").append(array[i]).append("'"); } sb.append("}"); diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/ReflectionUtils.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/ReflectionUtils.java index 5c41dba38..f973d1a26 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/ReflectionUtils.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/ReflectionUtils.java @@ -289,7 +289,7 @@ public abstract class ReflectionUtils { mc.doWith(superIfc); } catch (IllegalAccessException var9) { throw new 
IllegalStateException( - "Shouldn\'t be illegal to access method \'" + superIfc.getName() + "\': " + var9); + "Shouldn't be illegal to access method '" + superIfc.getName() + "': " + var9); } } } @@ -374,7 +374,7 @@ public abstract class ReflectionUtils { fc.doWith(field); } catch (IllegalAccessException var10) { throw new IllegalStateException( - "Shouldn\'t be illegal to access field \'" + field.getName() + "\': " + var10); + "Shouldn't be illegal to access field '" + field.getName() + "': " + var10); } } } diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/StringUtils.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/StringUtils.java index 9f4fecbec..264f76cf5 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/StringUtils.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/StringUtils.java @@ -242,7 +242,7 @@ public abstract class StringUtils { int index = inString.indexOf(oldPattern); for (int patLen = oldPattern.length(); index >= 0; index = inString.indexOf(oldPattern, pos)) { - sb.append(inString.substring(pos, index)); + sb.append(inString, pos, index); sb.append(newPattern); pos = index + patLen; } @@ -276,7 +276,7 @@ public abstract class StringUtils { } public static String quote(String str) { - return str != null ? "\'" + str + "\'" : null; + return str != null ? 
"'" + str + "'" : null; } public static Object quoteIfString(Object obj) { @@ -536,10 +536,7 @@ public abstract class StringUtils { String[] arr$ = array; int len$ = array.length; - for (int i$ = 0; i$ < len$; ++i$) { - String element = arr$[i$]; - set.add(element); - } + set.addAll(Arrays.asList(arr$).subList(0, len$)); return toStringArray(set); } @@ -656,10 +653,7 @@ public abstract class StringUtils { String[] arr$ = tokens; int len$ = tokens.length; - for (int i$ = 0; i$ < len$; ++i$) { - String token = arr$[i$]; - set.add(token); - } + set.addAll(Arrays.asList(arr$).subList(0, len$)); return set; } diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/VfsUtils.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/VfsUtils.java index 93b11b937..cb4cc04b1 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/VfsUtils.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/io/VfsUtils.java @@ -43,14 +43,14 @@ public abstract class VfsUtils { private static Method VFS_METHOD_GET_ROOT_URL = null; private static Method VFS_METHOD_GET_ROOT_URI = null; private static Method VIRTUAL_FILE_METHOD_EXISTS = null; - private static Method VIRTUAL_FILE_METHOD_GET_INPUT_STREAM; - private static Method VIRTUAL_FILE_METHOD_GET_SIZE; - private static Method VIRTUAL_FILE_METHOD_GET_LAST_MODIFIED; - private static Method VIRTUAL_FILE_METHOD_TO_URL; - private static Method VIRTUAL_FILE_METHOD_TO_URI; - private static Method VIRTUAL_FILE_METHOD_GET_NAME; - private static Method VIRTUAL_FILE_METHOD_GET_PATH_NAME; - private static Method VIRTUAL_FILE_METHOD_GET_CHILD; + private static final Method VIRTUAL_FILE_METHOD_GET_INPUT_STREAM; + private static final Method VIRTUAL_FILE_METHOD_GET_SIZE; + private static final Method VIRTUAL_FILE_METHOD_GET_LAST_MODIFIED; + private static final Method VIRTUAL_FILE_METHOD_TO_URL; + private static final Method VIRTUAL_FILE_METHOD_TO_URI; + private static final Method 
VIRTUAL_FILE_METHOD_GET_NAME; + private static final Method VIRTUAL_FILE_METHOD_GET_PATH_NAME; + private static final Method VIRTUAL_FILE_METHOD_GET_CHILD; protected static Class VIRTUAL_FILE_VISITOR_INTERFACE; protected static Method VIRTUAL_FILE_METHOD_VISIT; private static Method VFS_UTILS_METHOD_IS_NESTED_FILE = null; @@ -122,11 +122,11 @@ public abstract class VfsUtils { } static Object getRelative(URL url) throws IOException { - return invokeVfsMethod(VFS_METHOD_GET_ROOT_URL, null, new Object[] {url}); + return invokeVfsMethod(VFS_METHOD_GET_ROOT_URL, null, url); } static Object getChild(Object vfsResource, String path) throws IOException { - return invokeVfsMethod(VIRTUAL_FILE_METHOD_GET_CHILD, vfsResource, new Object[] {path}); + return invokeVfsMethod(VIRTUAL_FILE_METHOD_GET_CHILD, vfsResource, path); } static File getFile(Object vfsResource) throws IOException { @@ -148,11 +148,11 @@ public abstract class VfsUtils { } static Object getRoot(URI url) throws IOException { - return invokeVfsMethod(VFS_METHOD_GET_ROOT_URI, null, new Object[] {url}); + return invokeVfsMethod(VFS_METHOD_GET_ROOT_URI, null, url); } protected static Object getRoot(URL url) throws IOException { - return invokeVfsMethod(VFS_METHOD_GET_ROOT_URL, null, new Object[] {url}); + return invokeVfsMethod(VFS_METHOD_GET_ROOT_URL, null, url); } protected static Object doGetVisitorAttribute() { @@ -195,8 +195,8 @@ public abstract class VfsUtils { try { String ex = VFS_VER.V3.equals(version) ? 
"getChild" : "getRoot"; - VFS_METHOD_GET_ROOT_URL = ReflectionUtils.findMethod(vfsClass, ex, new Class[] {URL.class}); - VFS_METHOD_GET_ROOT_URI = ReflectionUtils.findMethod(vfsClass, ex, new Class[] {URI.class}); + VFS_METHOD_GET_ROOT_URL = ReflectionUtils.findMethod(vfsClass, ex, URL.class); + VFS_METHOD_GET_ROOT_URI = ReflectionUtils.findMethod(vfsClass, ex, URI.class); Class virtualFile = loader.loadClass(pkg + "VirtualFile"); VIRTUAL_FILE_METHOD_EXISTS = ReflectionUtils.findMethod(virtualFile, "exists"); VIRTUAL_FILE_METHOD_GET_INPUT_STREAM = ReflectionUtils.findMethod(virtualFile, "openStream"); @@ -208,15 +208,15 @@ public abstract class VfsUtils { VIRTUAL_FILE_METHOD_GET_PATH_NAME = ReflectionUtils.findMethod(virtualFile, "getPathName"); GET_PHYSICAL_FILE = ReflectionUtils.findMethod(virtualFile, "getPhysicalFile"); ex = VFS_VER.V3.equals(version) ? "getChild" : "findChild"; - VIRTUAL_FILE_METHOD_GET_CHILD = ReflectionUtils.findMethod(virtualFile, ex, new Class[] {String.class}); + VIRTUAL_FILE_METHOD_GET_CHILD = ReflectionUtils.findMethod(virtualFile, ex, String.class); Class utilsClass = loader.loadClass(pkg + "VFSUtils"); VFS_UTILS_METHOD_GET_COMPATIBLE_URI = - ReflectionUtils.findMethod(utilsClass, "getCompatibleURI", new Class[] {virtualFile}); + ReflectionUtils.findMethod(utilsClass, "getCompatibleURI", virtualFile); VFS_UTILS_METHOD_IS_NESTED_FILE = - ReflectionUtils.findMethod(utilsClass, "isNestedFile", new Class[] {virtualFile}); + ReflectionUtils.findMethod(utilsClass, "isNestedFile", virtualFile); VIRTUAL_FILE_VISITOR_INTERFACE = loader.loadClass(pkg + "VirtualFileVisitor"); VIRTUAL_FILE_METHOD_VISIT = ReflectionUtils.findMethod(virtualFile, "visit", - new Class[] {VIRTUAL_FILE_VISITOR_INTERFACE}); + VIRTUAL_FILE_VISITOR_INTERFACE); Class visitorAttributesClass = loader.loadClass(pkg + "VisitorAttributes"); VISITOR_ATTRIBUTES_FIELD_RECURSE = ReflectionUtils.findField(visitorAttributesClass, "RECURSE"); } catch (ClassNotFoundException var7) { @@ 
-224,9 +224,9 @@ public abstract class VfsUtils { } } - private static enum VFS_VER { + private enum VFS_VER { V2, V3; - private VFS_VER() {} + VFS_VER() {} } } diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/CounterMap.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/CounterMap.java index 1cc6758e6..597513300 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/CounterMap.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/CounterMap.java @@ -192,7 +192,7 @@ public class CounterMap implements Serializable{ public Iterator> getIterator() { return new Iterator>() { - Iterator outerIt; + final Iterator outerIt; Iterator innerIt; F curKey; diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/serde/JsonDeserializerAtomicBoolean.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/serde/JsonDeserializerAtomicBoolean.java index 6c807feea..13a9e523a 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/serde/JsonDeserializerAtomicBoolean.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/serde/JsonDeserializerAtomicBoolean.java @@ -31,7 +31,7 @@ import java.io.IOException; public class JsonDeserializerAtomicBoolean extends JsonDeserializer { @Override - public AtomicBoolean deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { + public AtomicBoolean deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException { JsonNode node = jsonParser.getCodec().readTree(jsonParser); boolean value = node.asBoolean(); return new AtomicBoolean(value); diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/serde/JsonDeserializerAtomicDouble.java 
b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/serde/JsonDeserializerAtomicDouble.java index d777b0072..2b152e750 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/serde/JsonDeserializerAtomicDouble.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/serde/JsonDeserializerAtomicDouble.java @@ -31,7 +31,7 @@ import java.io.IOException; public class JsonDeserializerAtomicDouble extends JsonDeserializer { @Override - public AtomicDouble deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { + public AtomicDouble deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException { JsonNode node = jsonParser.getCodec().readTree(jsonParser); double value = node.asDouble(); return new AtomicDouble(value); diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/serde/JsonSerializerAtomicBoolean.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/serde/JsonSerializerAtomicBoolean.java index c10f1bc95..e2d51b105 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/serde/JsonSerializerAtomicBoolean.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/serde/JsonSerializerAtomicBoolean.java @@ -30,7 +30,7 @@ import java.io.IOException; public class JsonSerializerAtomicBoolean extends JsonSerializer { @Override - public void serialize(AtomicBoolean atomicDouble, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException, JsonProcessingException { + public void serialize(AtomicBoolean atomicDouble, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException { jsonGenerator.writeBoolean(atomicDouble.get()); } } diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/serde/JsonSerializerAtomicDouble.java 
b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/serde/JsonSerializerAtomicDouble.java index 1f9041ccd..9e00819d4 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/serde/JsonSerializerAtomicDouble.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/primitives/serde/JsonSerializerAtomicDouble.java @@ -30,7 +30,7 @@ import java.io.IOException; public class JsonSerializerAtomicDouble extends JsonSerializer { @Override - public void serialize(AtomicDouble atomicDouble, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException, JsonProcessingException { + public void serialize(AtomicDouble atomicDouble, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException { jsonGenerator.writeNumber(atomicDouble.doubleValue()); } } diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/resources/Resources.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/resources/Resources.java index f8fa974f4..aec97ba3e 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/resources/Resources.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/resources/Resources.java @@ -31,7 +31,7 @@ import java.util.*; @Slf4j public class Resources { - private static Resources INSTANCE = new Resources(); + private static final Resources INSTANCE = new Resources(); protected final List resolvers; @@ -123,7 +123,7 @@ public class Resources { } throw new IllegalStateException("Cannot resolve resource (not found): none of " + resolvers.size() + - " resolvers can resolve resource \"" + resourcePath + "\" - available resolvers: " + resolvers.toString()); + " resolvers can resolve resource \"" + resourcePath + "\" - available resolvers: " + resolvers); } public InputStream getAsStream(String resourcePath) { @@ -135,7 +135,7 @@ public class Resources { } throw new IllegalStateException("Cannot resolve resource (not found): none of " + 
resolvers.size() + - " resolvers can resolve resource \"" + resourcePath + "\" - available resolvers: " + resolvers.toString()); + " resolvers can resolve resource \"" + resourcePath + "\" - available resolvers: " + resolvers); } public void copyDir(String directoryPath, File destinationDir) { diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/resources/strumpf/ResourceFile.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/resources/strumpf/ResourceFile.java index 0141be02f..8bdeae89c 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/resources/strumpf/ResourceFile.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/resources/strumpf/ResourceFile.java @@ -118,10 +118,7 @@ public class ResourceFile { Preconditions.checkState(expSha256 != null, "Expected JSON property %s was not found in resource reference file %s", sha256Property, filePath); String actualSha256 = sha256(file); - if (!expSha256.equals(actualSha256)) { - return false; - } - return true; + return expSha256.equals(actualSha256); } /** diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/resources/strumpf/StrumpfResolver.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/resources/strumpf/StrumpfResolver.java index 54ff89459..ba879f740 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/resources/strumpf/StrumpfResolver.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/resources/strumpf/StrumpfResolver.java @@ -94,11 +94,7 @@ public class StrumpfResolver implements Resolver { } cpr = new ClassPathResource(resourcePath); - if (cpr.exists()) { - return true; - } - - return false; + return cpr.exists(); } @Override @@ -116,11 +112,7 @@ public class StrumpfResolver implements Resolver { //Second: Check classpath ClassPathResource cpr = new ClassPathResource(dirPath); - if (cpr.exists()) { - return true; - } - - return false; + return cpr.exists(); } @Override diff --git 
a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/tools/BTools.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/tools/BTools.java index 7e4d06b49..d22b22998 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/tools/BTools.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/tools/BTools.java @@ -272,10 +272,10 @@ public class BTools { // String FormatS = ""; if ( LeadingChar == '0' ) { - FormatS = "%" + LeadingChar + Integer.toString( CharsCount ) + "d"; + FormatS = "%" + LeadingChar + CharsCount + "d"; } else { - FormatS = "%" + Integer.toString( CharsCount ) + "d"; + FormatS = "%" + CharsCount + "d"; } // Result = String.format( FormatS, Value ); diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/tools/SIS.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/tools/SIS.java index b10296fcc..a2ee4f925 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/tools/SIS.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/tools/SIS.java @@ -33,7 +33,7 @@ import java.time.format.DateTimeFormatter; public class SIS { // System Informations Saving // - private String baseModuleCode = "SIS"; + private final String baseModuleCode = "SIS"; private String moduleCode = "?"; // private PrintStream out; diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/ArchiveUtils.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/ArchiveUtils.java index 317c5a23d..cd682f3b2 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/ArchiveUtils.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/ArchiveUtils.java @@ -80,7 +80,7 @@ public class ArchiveUtils { new File(dest).mkdirs(); FileInputStream fin = new FileInputStream(target); int BUFFER = 2048; - byte data[] = new byte[BUFFER]; + byte[] data = new byte[BUFFER]; if (file.endsWith(".zip") || file.endsWith(".jar")) { try(ZipInputStream zis = new 
ZipInputStream(fin)) { @@ -152,7 +152,7 @@ public class ArchiveUtils { else { int count; try(FileOutputStream fos = new FileOutputStream(dest + File.separator + entry.getName()); - BufferedOutputStream destStream = new BufferedOutputStream(fos, BUFFER);) { + BufferedOutputStream destStream = new BufferedOutputStream(fos, BUFFER)) { while ((count = tarIn.read(data, 0, BUFFER)) != -1) { destStream.write(data, 0, count); } diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/ArrayUtil.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/ArrayUtil.java index 8a30f0e48..13780f3a6 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/ArrayUtil.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/ArrayUtil.java @@ -295,7 +295,7 @@ public class ArrayUtil { public static long[] toLongs(byte[] data) { val ret = new long[data.length]; for (int i = 0; i < ret.length; i++) { - ret[i] = (long) data[i]; + ret[i] = data[i]; } return ret; } @@ -311,7 +311,7 @@ public class ArrayUtil { public static long[] toLongs(short[] data) { val ret = new long[data.length]; for (int i = 0; i < ret.length; i++) { - ret[i] = (long) data[i]; + ret[i] = data[i]; } return ret; } @@ -319,7 +319,7 @@ public class ArrayUtil { public static long[] toLongs(int[] data) { val ret = new long[data.length]; for (int i = 0; i < ret.length; i++) { - ret[i] = (long) data[i]; + ret[i] = data[i]; } return ret; } @@ -1105,7 +1105,7 @@ public class ArrayUtil { public static double[] toDoubles(int[] ints) { double[] ret = new double[ints.length]; for (int i = 0; i < ints.length; i++) - ret[i] = (double) ints[i]; + ret[i] = ints[i]; return ret; } @@ -1119,7 +1119,7 @@ public class ArrayUtil { public static double[] toDoubles(float[] ints) { double[] ret = new double[ints.length]; for (int i = 0; i < ints.length; i++) - ret[i] = (double) ints[i]; + ret[i] = ints[i]; return ret; } diff --git 
a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/Index.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/Index.java index cc64e145d..ff91a9a4e 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/Index.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/Index.java @@ -23,14 +23,15 @@ package org.nd4j.common.util; import java.io.Serializable; import java.util.Map; +import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; @SuppressWarnings({"rawtypes", "unchecked"}) public class Index implements Serializable { private static final long serialVersionUID = 1160629777026141078L; - private Map objects = new ConcurrentHashMap<>(); - private Map indexes = new ConcurrentHashMap<>(); + private final Map objects = new ConcurrentHashMap<>(); + private final Map indexes = new ConcurrentHashMap<>(); public synchronized boolean add(Object o, int idx) { if (o instanceof String && o.toString().isEmpty()) { @@ -103,9 +104,9 @@ public class Index implements Serializable { Index index = (Index) o; - if (objects != null ? !objects.equals(index.objects) : index.objects != null) + if (!Objects.equals(objects, index.objects)) return false; - return !(indexes != null ? 
!indexes.equals(index.indexes) : index.indexes != null); + return !(!Objects.equals(indexes, index.indexes)); } diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/MathUtils.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/MathUtils.java index 58d72eace..6e249ffbd 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/MathUtils.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/MathUtils.java @@ -163,7 +163,7 @@ public class MathUtils { * @param targetAttribute target attribute vector * @return the correlation coefficient or r */ - public static double correlation(double[] residuals, double targetAttribute[]) { + public static double correlation(double[] residuals, double[] targetAttribute) { double[] predictedValues = new double[residuals.length]; for (int i = 0; i < predictedValues.length; i++) { predictedValues[i] = targetAttribute[i] - residuals[i]; @@ -1042,7 +1042,7 @@ public class MathUtils { */ public static /*@pure@*/ double roundDouble(double value, int afterDecimalPoint) { - double mask = Math.pow(10.0, (double) afterDecimalPoint); + double mask = Math.pow(10.0, afterDecimalPoint); return (double) (Math.round(value * mask)) / mask; }//end roundDouble diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/Rational.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/Rational.java index 404874016..e9914479c 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/Rational.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/Rational.java @@ -234,10 +234,10 @@ class Rational implements Cloneable { public Rational pow(BigInteger exponent) throws NumberFormatException { /* test for overflow */ if (exponent.compareTo(MAX_INT) == 1) { - throw new NumberFormatException("Exponent " + exponent.toString() + " too large."); + throw new NumberFormatException("Exponent " + exponent + " too large."); } if 
(exponent.compareTo(MIN_INT) == -1) { - throw new NumberFormatException("Exponent " + exponent.toString() + " too small."); + throw new NumberFormatException("Exponent " + exponent + " too small."); } /* promote to the simpler interface above */ return pow(exponent.intValue()); diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/SynchronizedTable.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/SynchronizedTable.java index ace0bf5f1..37c16114e 100644 --- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/SynchronizedTable.java +++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/util/SynchronizedTable.java @@ -27,7 +27,7 @@ import java.util.Map; import java.util.Set; public class SynchronizedTable implements Table { - private Table wrapped; + private final Table wrapped; public SynchronizedTable(Table wrapped) { this.wrapped = wrapped; diff --git a/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/function/FunctionalUtilsTest.java b/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/function/FunctionalUtilsTest.java index 88a4ba98d..e75d8638a 100644 --- a/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/function/FunctionalUtilsTest.java +++ b/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/function/FunctionalUtilsTest.java @@ -44,9 +44,9 @@ public class FunctionalUtilsTest { //[(fish,([],[alex])), (dog,([adam],[steve])), (cat,([adam],[alice]))] Map,List>> assertion = new HashMap<>(); - assertion.put("cat",Pair.of(Arrays.asList("adam"),Arrays.asList("alice"))); - assertion.put("dog",Pair.of(Arrays.asList("adam"),Arrays.asList("steve"))); - assertion.put("fish",Pair.of(Collections.emptyList(),Arrays.asList("alex"))); + assertion.put("cat",Pair.of(Collections.singletonList("adam"), Collections.singletonList("alice"))); + assertion.put("dog",Pair.of(Collections.singletonList("adam"), Collections.singletonList("steve"))); + 
assertion.put("fish",Pair.of(Collections.emptyList(), Collections.singletonList("alex"))); Map, List>> cogroup = FunctionalUtils.cogroup(leftMap, rightMap); assertEquals(assertion,cogroup); diff --git a/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/io/ClassPathResourceTest.java b/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/io/ClassPathResourceTest.java index 8e7a22e6d..1d49a22ad 100644 --- a/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/io/ClassPathResourceTest.java +++ b/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/io/ClassPathResourceTest.java @@ -39,7 +39,7 @@ public class ClassPathResourceTest { ClassPathResource cpr = new ClassPathResource("somedir"); - File f = new File(FileUtils.getTempDirectoryPath()+File.separatorChar+ UUID.randomUUID().toString()); + File f = new File(FileUtils.getTempDirectoryPath()+File.separatorChar+ UUID.randomUUID()); FileUtils.forceMkdir(f); cpr.copyDirectory(f); diff --git a/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/loader/TestFileBatch.java b/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/loader/TestFileBatch.java index bd3f9c569..201dcf5f5 100644 --- a/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/loader/TestFileBatch.java +++ b/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/loader/TestFileBatch.java @@ -81,7 +81,7 @@ public class TestFileBatch { //Check that it is indeed a valid zip file: - File f = new File(FileUtils.getTempDirectoryPath()+"/"+UUID.randomUUID().toString()); + File f = new File(FileUtils.getTempDirectoryPath()+"/"+ UUID.randomUUID()); f.delete(); fb.writeAsZip(f); diff --git a/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/tools/InfoValuesTest.java b/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/tools/InfoValuesTest.java index 1dc5860d7..19e5dfadc 100644 --- a/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/tools/InfoValuesTest.java +++ 
b/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/tools/InfoValuesTest.java @@ -26,9 +26,9 @@ import static org.junit.jupiter.api.Assertions.*; public class InfoValuesTest { // - private String[] t1_titleA = { "T0", "T1", "T2", "T3", "T4", "T5" }; + private final String[] t1_titleA = { "T0", "T1", "T2", "T3", "T4", "T5" }; // - private String[] t2_titleA = { "", "T1", "T2" }; + private final String[] t2_titleA = { "", "T1", "T2" }; // @Test diff --git a/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/tools/SISTest.java b/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/tools/SISTest.java index 34953554c..d40d32406 100644 --- a/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/tools/SISTest.java +++ b/cavis-dnn/cavis-dnn-common/src/test/java/org/nd4j/common/tools/SISTest.java @@ -48,7 +48,7 @@ public class SISTest { // assertEquals( 33, fFName.length() ); assertEquals( "Z", fFName.substring( 0, 1 ) ); - assertEquals( "_Test_ABC.txt", fFName.substring( fFName.length() - 13, fFName.length() ) ); + assertEquals( "_Test_ABC.txt", fFName.substring( fFName.length() - 13) ); // assertEquals( "", fFName ); // assertEquals( "", tmpFld.getRoot().getAbsolutePath() ); // diff --git a/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/datasets/test/TestDataSetIterator.java b/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/datasets/test/TestDataSetIterator.java index 2bda23111..076756b71 100644 --- a/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/datasets/test/TestDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/datasets/test/TestDataSetIterator.java @@ -32,7 +32,7 @@ public class TestDataSetIterator implements DataSetIterator { * */ private static final long serialVersionUID = -3042802726018263331L; - private DataSetIterator wrapped; + private final DataSetIterator wrapped; private int numDataSets = 0; @Getter private DataSetPreProcessor preProcessor; diff --git 
a/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/evaluation/EvaluationTools.java b/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/evaluation/EvaluationTools.java index 6d4295e1f..8f0712eb9 100644 --- a/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/evaluation/EvaluationTools.java +++ b/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/evaluation/EvaluationTools.java @@ -173,7 +173,7 @@ public class EvaluationTools { if (classNames != null && classNames.size() > i) { headerText += " (" + classNames.get(i) + ")"; } - headerText += " vs. All";; + headerText += " vs. All"; Component headerDivPad = new ComponentDiv(HEADER_DIV_PAD_STYLE); components.add(headerDivPad); diff --git a/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/listener/SystemPolling.java b/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/listener/SystemPolling.java index 954be2065..b74a2382b 100644 --- a/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/listener/SystemPolling.java +++ b/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/listener/SystemPolling.java @@ -36,10 +36,10 @@ import java.util.concurrent.TimeUnit; public class SystemPolling { private ScheduledExecutorService scheduledExecutorService; - private long pollEveryMillis; - private File outputDirectory; - private NameProvider nameProvider; - private ObjectMapper objectMapper = new ObjectMapper(new YAMLFactory()); + private final long pollEveryMillis; + private final File outputDirectory; + private final NameProvider nameProvider; + private final ObjectMapper objectMapper = new ObjectMapper(new YAMLFactory()); private SystemPolling(long pollEveryMillis,File outputDirectory,NameProvider nameProvider) { this.pollEveryMillis = pollEveryMillis; diff --git a/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/parallelism/AsyncIterator.java 
b/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/parallelism/AsyncIterator.java index 0ecab4f12..9fb604e89 100644 --- a/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/parallelism/AsyncIterator.java +++ b/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/parallelism/AsyncIterator.java @@ -102,9 +102,9 @@ public class AsyncIterator implements Iterator { } private class ReaderThread extends Thread implements Runnable { - private BlockingQueue buffer; - private Iterator iterator; - private T terminator; + private final BlockingQueue buffer; + private final Iterator iterator; + private final T terminator; public ReaderThread(Iterator iterator, BlockingQueue buffer, T terminator) { this.buffer = buffer; @@ -133,8 +133,6 @@ public class AsyncIterator implements Iterator { } catch (Exception e) { // TODO: pass that forward throw new RuntimeException(e); - } finally { - //log.info("AsyncReader [{}] stopped", Thread.currentThread().getId()); } } } diff --git a/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/storage/impl/RemoteUIStatsStorageRouter.java b/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/storage/impl/RemoteUIStatsStorageRouter.java index b67798056..df21ac930 100644 --- a/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/storage/impl/RemoteUIStatsStorageRouter.java +++ b/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/storage/impl/RemoteUIStatsStorageRouter.java @@ -74,8 +74,8 @@ public class RemoteUIStatsStorageRouter implements StatsStorageRouter, Serializa private transient Thread postThread; - private AtomicBoolean shutdown = new AtomicBoolean(false); - private AtomicLong shutdownWarnCount = new AtomicLong(0); + private final AtomicBoolean shutdown = new AtomicBoolean(false); + private final AtomicLong shutdownWarnCount = new AtomicLong(0); private static final ObjectMapper objectMapper = new ObjectMapper(); @@ -368,7 +368,7 @@ public class 
RemoteUIStatsStorageRouter implements StatsStorageRouter, Serializa in.close(); log.warn("Error posting to remote UI - received response code {}\tContent: {}", response, - response.toString()); + response); return false; } diff --git a/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/ui/UiConnectionInfo.java b/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/ui/UiConnectionInfo.java index a2c81576c..47689316e 100644 --- a/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/ui/UiConnectionInfo.java +++ b/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/ui/UiConnectionInfo.java @@ -49,11 +49,8 @@ public class UiConnectionInfo { * @return */ public String getFirstPart() { - StringBuilder builder = new StringBuilder(); - builder.append(useHttps ? "https" : "http").append("://").append(address).append(":").append(port).append(""); - - return builder.toString(); + return (useHttps ? "https" : "http") + "://" + address + ":" + port; } public String getSecondPart() { @@ -89,7 +86,7 @@ public class UiConnectionInfo { } public static class Builder { - private UiConnectionInfo info = new UiConnectionInfo(); + private final UiConnectionInfo info = new UiConnectionInfo(); /** * This method allows you to specify sessionId for this UiConnectionInfo instance diff --git a/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/util/ModelGuesser.java b/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/util/ModelGuesser.java index 8aabe3fe4..70b250978 100644 --- a/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/util/ModelGuesser.java +++ b/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/util/ModelGuesser.java @@ -122,7 +122,7 @@ public class ModelGuesser { */ public static Object loadConfigGuess(InputStream stream) throws Exception { String p = System.getProperty(DL4JSystemProperties.DL4J_TEMP_DIR_PROPERTY); - File tmp = DL4JFileUtils.createTempFile("model-" + 
UUID.randomUUID().toString(), "bin"); + File tmp = DL4JFileUtils.createTempFile("model-" + UUID.randomUUID(), "bin"); BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(new FileOutputStream(tmp)); IOUtils.copy(stream, bufferedOutputStream); bufferedOutputStream.flush(); diff --git a/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/util/MovingWindowMatrix.java b/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/util/MovingWindowMatrix.java index 853a2bd98..d25fdb1fb 100644 --- a/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/util/MovingWindowMatrix.java +++ b/cavis-dnn/cavis-dnn-core/src/main/java/org/deeplearning4j/core/util/MovingWindowMatrix.java @@ -43,7 +43,7 @@ public class MovingWindowMatrix { private int windowRowSize = 28; private int windowColumnSize = 28; - private INDArray toSlice; + private final INDArray toSlice; private boolean addRotate = false; diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/datavec/RecordReaderDataSetiteratorTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/datavec/RecordReaderDataSetiteratorTest.java index 4a037de0d..c7f354937 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/datavec/RecordReaderDataSetiteratorTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/datavec/RecordReaderDataSetiteratorTest.java @@ -46,7 +46,7 @@ import org.deeplearning4j.datasets.datavec.exception.ZeroLengthSequenceException import org.deeplearning4j.datasets.datavec.tools.SpecialImageRecordReader; import org.junit.jupiter.api.Timeout; import org.junit.jupiter.api.io.TempDir; -import org.nd4j.linalg.dataset.AsyncDataSetIterator;; +import org.nd4j.linalg.dataset.AsyncDataSetIterator; import org.junit.jupiter.api.Test; import org.nd4j.linalg.api.buffer.DataType; @@ -98,11 +98,11 @@ public class RecordReaderDataSetiteratorTest extends BaseDL4JTest { 
recordReader.initialize(csv); DataSetIterator iter = new RecordReaderDataSetIterator(recordReader, 10, -1, -1, 2); DataSet ds = iter.next(); - assertFalse(ds == null); + assertNotNull(ds); assertEquals(10, ds.numExamples()); iter.hasNext(); iter.next(); - assertEquals(false, iter.hasNext()); + assertFalse(iter.hasNext()); } @Test @@ -841,14 +841,14 @@ public class RecordReaderDataSetiteratorTest extends BaseDL4JTest { public void testSeqRRDSIArrayWritableOneReader() { List> sequence1 = new ArrayList<>(); - sequence1.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {1, 2, 3}, new long[]{1,3})), + sequence1.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {1, 2, 3}, 1,3)), new IntWritable(0))); - sequence1.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {4, 5, 6}, new long[]{1,3})), + sequence1.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {4, 5, 6}, 1,3)), new IntWritable(1))); List> sequence2 = new ArrayList<>(); - sequence2.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {7, 8, 9}, new long[]{1,3})), + sequence2.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {7, 8, 9}, 1,3)), new IntWritable(2))); - sequence2.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {10, 11, 12}, new long[]{1,3})), + sequence2.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {10, 11, 12}, 1,3)), new IntWritable(3))); @@ -874,15 +874,15 @@ public class RecordReaderDataSetiteratorTest extends BaseDL4JTest { public void testSeqRRDSIArrayWritableOneReaderRegression() { //Regression, where the output is an array writable List> sequence1 = new ArrayList<>(); - sequence1.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {1, 2, 3}, new long[]{1,3})), - new NDArrayWritable(Nd4j.create(new double[] {100, 200, 300}, new long[]{1,3})))); - sequence1.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new 
double[] {4, 5, 6}, new long[]{1,3})), - new NDArrayWritable(Nd4j.create(new double[] {400, 500, 600}, new long[]{1,3})))); + sequence1.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {1, 2, 3}, 1,3)), + new NDArrayWritable(Nd4j.create(new double[] {100, 200, 300}, 1,3)))); + sequence1.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {4, 5, 6}, 1,3)), + new NDArrayWritable(Nd4j.create(new double[] {400, 500, 600}, 1,3)))); List> sequence2 = new ArrayList<>(); - sequence2.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {7, 8, 9}, new long[]{1,3})), - new NDArrayWritable(Nd4j.create(new double[] {700, 800, 900}, new long[]{1,3})))); - sequence2.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {10, 11, 12}, new long[]{1,3})), - new NDArrayWritable(Nd4j.create(new double[] {1000, 1100, 1200}, new long[]{1,3})))); + sequence2.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {7, 8, 9}, 1,3)), + new NDArrayWritable(Nd4j.create(new double[] {700, 800, 900}, 1,3)))); + sequence2.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {10, 11, 12}, 1,3)), + new NDArrayWritable(Nd4j.create(new double[] {1000, 1100, 1200}, 1,3)))); SequenceRecordReader rr = new CollectionSequenceRecordReader(Arrays.asList(sequence1, sequence2)); @@ -910,15 +910,15 @@ public class RecordReaderDataSetiteratorTest extends BaseDL4JTest { //Input with multiple array writables: List> sequence1 = new ArrayList<>(); - sequence1.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {1, 2, 3}, new long[]{1,3})), - new NDArrayWritable(Nd4j.create(new double[] {100, 200, 300}, new long[]{1,3})), new IntWritable(0))); - sequence1.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {4, 5, 6}, new long[]{1,3})), - new NDArrayWritable(Nd4j.create(new double[] {400, 500, 600}, new long[]{1,3})), new IntWritable(1))); + sequence1.add(Arrays.asList(new 
NDArrayWritable(Nd4j.create(new double[] {1, 2, 3}, 1,3)), + new NDArrayWritable(Nd4j.create(new double[] {100, 200, 300}, 1,3)), new IntWritable(0))); + sequence1.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {4, 5, 6}, 1,3)), + new NDArrayWritable(Nd4j.create(new double[] {400, 500, 600}, 1,3)), new IntWritable(1))); List> sequence2 = new ArrayList<>(); - sequence2.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {7, 8, 9}, new long[]{1,3})), - new NDArrayWritable(Nd4j.create(new double[] {700, 800, 900}, new long[]{1,3})), new IntWritable(2))); - sequence2.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {10, 11, 12}, new long[]{1,3})), - new NDArrayWritable(Nd4j.create(new double[] {1000, 1100, 1200}, new long[]{1,3})), new IntWritable(3))); + sequence2.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {7, 8, 9}, 1,3)), + new NDArrayWritable(Nd4j.create(new double[] {700, 800, 900}, 1,3)), new IntWritable(2))); + sequence2.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {10, 11, 12}, 1,3)), + new NDArrayWritable(Nd4j.create(new double[] {1000, 1100, 1200}, 1,3)), new IntWritable(3))); SequenceRecordReader rr = new CollectionSequenceRecordReader(Arrays.asList(sequence1, sequence2)); @@ -944,26 +944,26 @@ public class RecordReaderDataSetiteratorTest extends BaseDL4JTest { @Test public void testSeqRRDSIArrayWritableTwoReaders() { List> sequence1 = new ArrayList<>(); - sequence1.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {1, 2, 3}, new long[]{1,3})), + sequence1.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {1, 2, 3}, 1,3)), new IntWritable(100))); - sequence1.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {4, 5, 6}, new long[]{1,3})), + sequence1.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {4, 5, 6}, 1,3)), new IntWritable(200))); List> sequence2 = new ArrayList<>(); - 
sequence2.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {7, 8, 9}, new long[]{1,3})), + sequence2.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {7, 8, 9}, 1,3)), new IntWritable(300))); - sequence2.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {10, 11, 12}, new long[]{1,3})), + sequence2.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {10, 11, 12}, 1,3)), new IntWritable(400))); SequenceRecordReader rrFeatures = new CollectionSequenceRecordReader(Arrays.asList(sequence1, sequence2)); List> sequence1L = new ArrayList<>(); - sequence1L.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {100, 200, 300}, new long[]{1,3})), + sequence1L.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {100, 200, 300}, 1,3)), new IntWritable(101))); - sequence1L.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {400, 500, 600}, new long[]{1,3})), + sequence1L.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {400, 500, 600}, 1,3)), new IntWritable(201))); List> sequence2L = new ArrayList<>(); - sequence2L.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {700, 800, 900}, new long[]{1,3})), + sequence2L.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {700, 800, 900}, 1,3)), new IntWritable(301))); - sequence2L.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] {1000, 1100, 1200}, new long[]{1,3})), + sequence2L.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {1000, 1100, 1200}, 1,3)), new IntWritable(401))); SequenceRecordReader rrLabels = new CollectionSequenceRecordReader(Arrays.asList(sequence1L, sequence2L)); @@ -1050,12 +1050,12 @@ public class RecordReaderDataSetiteratorTest extends BaseDL4JTest { Collection> data = new ArrayList<>(); - data.add(Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), - new NDArrayWritable(Nd4j.create(new 
double[] {1.1, 2.1, 3.1}, new long[]{1,3})))); - data.add(Arrays.asList(new DoubleWritable(2), new DoubleWritable(3), - new NDArrayWritable(Nd4j.create(new double[] {4.1, 5.1, 6.1}, new long[]{1,3})))); - data.add(Arrays.asList(new DoubleWritable(4), new DoubleWritable(5), - new NDArrayWritable(Nd4j.create(new double[] {7.1, 8.1, 9.1}, new long[]{1,3})))); + data.add(Arrays.asList(new DoubleWritable(0), new DoubleWritable(1), + new NDArrayWritable(Nd4j.create(new double[] {1.1, 2.1, 3.1}, 1,3)))); + data.add(Arrays.asList(new DoubleWritable(2), new DoubleWritable(3), + new NDArrayWritable(Nd4j.create(new double[] {4.1, 5.1, 6.1}, 1,3)))); + data.add(Arrays.asList(new DoubleWritable(4), new DoubleWritable(5), + new NDArrayWritable(Nd4j.create(new double[] {7.1, 8.1, 9.1}, 1,3)))); RecordReader rr = new CollectionRecordReader(data); int batchSize = 3; @@ -1075,12 +1075,12 @@ public class RecordReaderDataSetiteratorTest extends BaseDL4JTest { //ALSO: test if we have NDArrayWritables for BOTH the features and the labels data = new ArrayList<>(); - data.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {0, 1}, new long[]{1,2})), - new NDArrayWritable(Nd4j.create(new double[] {1.1, 2.1, 3.1}, new long[]{1,3})))); - data.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {2, 3}, new long[]{1,2})), - new NDArrayWritable(Nd4j.create(new double[] {4.1, 5.1, 6.1}, new long[]{1,3})))); - data.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {4, 5}, new long[]{1,2})), - new NDArrayWritable(Nd4j.create(new double[] {7.1, 8.1, 9.1}, new long[]{1,3})))); + data.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {0, 1}, 1,2)), + new NDArrayWritable(Nd4j.create(new double[] {1.1, 2.1, 3.1}, 1,3)))); + data.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {2, 3}, 1,2)), + new NDArrayWritable(Nd4j.create(new double[] {4.1, 5.1, 6.1}, 1,3)))); + data.add(Arrays.asList(new NDArrayWritable(Nd4j.create(new double[] {4, 5}, 
1,2)), + new NDArrayWritable(Nd4j.create(new double[] {7.1, 8.1, 9.1}, 1,3)))); labelIndexFrom = 1; labelIndexTo = 1; @@ -1203,7 +1203,7 @@ public class RecordReaderDataSetiteratorTest extends BaseDL4JTest { //[DoubleWritable, DoubleWritable, NDArrayWritable([1,10]), IntWritable] -> concatenate to a [1,13] feature vector automatically. - List l = Arrays.asList(new DoubleWritable(1), + List l = Arrays.asList(new DoubleWritable(1), new NDArrayWritable(Nd4j.create(new double[] {2, 3, 4})), new DoubleWritable(5), new NDArrayWritable(Nd4j.create(new double[] {6, 7, 8})), new IntWritable(9), new IntWritable(1)); @@ -1241,12 +1241,12 @@ public class RecordReaderDataSetiteratorTest extends BaseDL4JTest { //Idea: input vector is like [f,f,f,f,l,l,f,f] or similar - i.e., label writables aren't start/end - List l = Arrays.asList(new DoubleWritable(1), - new NDArrayWritable(Nd4j.create(new float[] {2, 3, 4}, new long[]{1,3})), new DoubleWritable(5), - new NDArrayWritable(Nd4j.create(new float[] {6, 7, 8}, new long[]{1,3}))); + List l = Arrays.asList(new DoubleWritable(1), + new NDArrayWritable(Nd4j.create(new float[] {2, 3, 4}, 1,3)), new DoubleWritable(5), + new NDArrayWritable(Nd4j.create(new float[] {6, 7, 8}, 1,3))); - INDArray expF = Nd4j.create(new float[] {1, 6, 7, 8}, new long[]{1,4}); - INDArray expL = Nd4j.create(new float[] {2, 3, 4, 5}, new long[]{1,4}); + INDArray expF = Nd4j.create(new float[] {1, 6, 7, 8}, 1,4); + INDArray expL = Nd4j.create(new float[] {2, 3, 4, 5}, 1,4); RecordReader rr = new CollectionRecordReader(Collections.singletonList(l)); @@ -1368,12 +1368,12 @@ public class RecordReaderDataSetiteratorTest extends BaseDL4JTest { @Test public void testSeqRRDSINoLabels(){ List> sequence1 = new ArrayList<>(); - sequence1.add(Arrays.asList((Writable) new DoubleWritable(1), new DoubleWritable(2))); - sequence1.add(Arrays.asList((Writable) new DoubleWritable(3), new DoubleWritable(4))); - sequence1.add(Arrays.asList((Writable) new DoubleWritable(5), new 
DoubleWritable(6))); + sequence1.add(Arrays.asList(new DoubleWritable(1), new DoubleWritable(2))); + sequence1.add(Arrays.asList(new DoubleWritable(3), new DoubleWritable(4))); + sequence1.add(Arrays.asList(new DoubleWritable(5), new DoubleWritable(6))); List> sequence2 = new ArrayList<>(); - sequence2.add(Arrays.asList((Writable) new DoubleWritable(10), new DoubleWritable(20))); - sequence2.add(Arrays.asList((Writable) new DoubleWritable(30), new DoubleWritable(40))); + sequence2.add(Arrays.asList(new DoubleWritable(10), new DoubleWritable(20))); + sequence2.add(Arrays.asList(new DoubleWritable(30), new DoubleWritable(40))); SequenceRecordReader rrFeatures = new CollectionSequenceRecordReader(Arrays.asList(sequence1, sequence2)); SequenceRecordReaderDataSetIterator iter = new SequenceRecordReaderDataSetIterator(rrFeatures, 2, -1, -1); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/datavec/RecordReaderMultiDataSetIteratorTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/datavec/RecordReaderMultiDataSetIteratorTest.java index 7b163892e..0341ac846 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/datavec/RecordReaderMultiDataSetIteratorTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/datavec/RecordReaderMultiDataSetIteratorTest.java @@ -657,17 +657,17 @@ public class RecordReaderMultiDataSetIteratorTest extends BaseDL4JTest { //2 in, 2 out, 3 total sequences of length [1,3,5] List> seq1 = - Arrays.asList(Arrays.asList(new DoubleWritable(1.0), new DoubleWritable(2.0))); + Collections.singletonList(Arrays.asList(new DoubleWritable(1.0), new DoubleWritable(2.0))); List> seq2 = - Arrays.asList(Arrays.asList(new DoubleWritable(10.0), new DoubleWritable(11.0)), - Arrays.asList(new DoubleWritable(20.0), new DoubleWritable(21.0)), - Arrays.asList(new DoubleWritable(30.0), new DoubleWritable(31.0))); + Arrays.asList(Arrays.asList(new 
DoubleWritable(10.0), new DoubleWritable(11.0)), + Arrays.asList(new DoubleWritable(20.0), new DoubleWritable(21.0)), + Arrays.asList(new DoubleWritable(30.0), new DoubleWritable(31.0))); List> seq3 = - Arrays.asList(Arrays.asList(new DoubleWritable(100.0), new DoubleWritable(101.0)), - Arrays.asList(new DoubleWritable(200.0), new DoubleWritable(201.0)), - Arrays.asList(new DoubleWritable(300.0), new DoubleWritable(301.0)), - Arrays.asList(new DoubleWritable(400.0), new DoubleWritable(401.0)), - Arrays.asList(new DoubleWritable(500.0), new DoubleWritable(501.0))); + Arrays.asList(Arrays.asList(new DoubleWritable(100.0), new DoubleWritable(101.0)), + Arrays.asList(new DoubleWritable(200.0), new DoubleWritable(201.0)), + Arrays.asList(new DoubleWritable(300.0), new DoubleWritable(301.0)), + Arrays.asList(new DoubleWritable(400.0), new DoubleWritable(401.0)), + Arrays.asList(new DoubleWritable(500.0), new DoubleWritable(501.0))); Collection>> seqs = Arrays.asList(seq1, seq2, seq3); @@ -732,8 +732,8 @@ public class RecordReaderMultiDataSetIteratorTest extends BaseDL4JTest { features.add(Arrays.asList(l(new DoubleWritable(1)), l(new DoubleWritable(2)), l(new DoubleWritable(3)))); features.add(Arrays.asList(l(new DoubleWritable(4)), l(new DoubleWritable(5)))); - labels.add(Arrays.asList(l(new IntWritable(0)))); - labels.add(Arrays.asList(l(new IntWritable(1)))); + labels.add(Collections.singletonList(l(new IntWritable(0)))); + labels.add(Collections.singletonList(l(new IntWritable(1)))); CollectionSequenceRecordReader fR = new CollectionSequenceRecordReader(features); CollectionSequenceRecordReader lR = new CollectionSequenceRecordReader(labels); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/datavec/tools/SpecialImageRecordReader.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/datavec/tools/SpecialImageRecordReader.java index e1ec7d90b..545be93e3 100644 --- 
a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/datavec/tools/SpecialImageRecordReader.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/datavec/tools/SpecialImageRecordReader.java @@ -39,11 +39,15 @@ import java.util.concurrent.atomic.AtomicInteger; @Slf4j public class SpecialImageRecordReader extends ImageRecordReader { - private AtomicInteger counter = new AtomicInteger(0); - private AtomicInteger labelsCounter = new AtomicInteger(0); - private int limit, channels, width, height, numClasses; - private List labels = new ArrayList<>(); - private INDArray zFeatures; + private final AtomicInteger counter = new AtomicInteger(0); + private final AtomicInteger labelsCounter = new AtomicInteger(0); + private final int limit; + private final int channels; + private final int width; + private final int height; + private final int numClasses; + private final List labels = new ArrayList<>(); + private final INDArray zFeatures; diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/AbstractDataSetIteratorTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/AbstractDataSetIteratorTest.java index a67078e8a..471515e30 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/AbstractDataSetIteratorTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/AbstractDataSetIteratorTest.java @@ -63,7 +63,7 @@ public class AbstractDataSetIteratorTest extends BaseDL4JTest { @Override public Iterator> iterator() { return new Iterator>() { - private AtomicInteger cnt = new AtomicInteger(0); + private final AtomicInteger cnt = new AtomicInteger(0); @Override public boolean hasNext() { @@ -72,8 +72,8 @@ public class AbstractDataSetIteratorTest extends BaseDL4JTest { @Override public Pair next() { - float features[] = new float[numColumns]; - float labels[] = new float[numColumns]; + float[] features = new 
float[numColumns]; + float[] labels = new float[numColumns]; for (int i = 0; i < numColumns; i++) { features[i] = (float) i; labels[i] = RandomUtils.nextFloat(0, 5); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/AsyncDataSetIteratorTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/AsyncDataSetIteratorTest.java index e999aee23..c4157cddd 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/AsyncDataSetIteratorTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/AsyncDataSetIteratorTest.java @@ -188,7 +188,7 @@ public class AsyncDataSetIteratorTest extends BaseDL4JTest { DataSet ds = adsi.next(); //log.info("Features ptr: {}", AtomicAllocator.getInstance().getPointer(mds.getFeatures()[0].data()).address()); - assertEquals((double) cnt, ds.getFeatures().meanNumber().doubleValue(), 1e-10, "Failed on epoch " + e + "; iteration: " + cnt + ";"); + assertEquals(cnt, ds.getFeatures().meanNumber().doubleValue(), 1e-10, "Failed on epoch " + e + "; iteration: " + cnt + ";"); assertEquals( (double) cnt + 0.25, ds.getLabels().meanNumber().doubleValue(), 1e-10,"Failed on epoch " + e + "; iteration: " + cnt + ";"); assertEquals((double) cnt + 0.5, @@ -219,7 +219,7 @@ public class AsyncDataSetIteratorTest extends BaseDL4JTest { ds.detach(); //log.info("Features ptr: {}", AtomicAllocator.getInstance().getPointer(mds.getFeatures()[0].data()).address()); - assertEquals((double) cnt, + assertEquals(cnt, ds.getFeatures().meanNumber().doubleValue(), 1e-10, "Failed on epoch " + e + "; iteration: " + cnt + ";"); assertEquals((double) cnt + 0.25, ds.getLabels().meanNumber().doubleValue(), 1e-10, "Failed on epoch " + e + "; iteration: " + cnt + ";"); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/AsyncMultiDataSetIteratorTest.java 
b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/AsyncMultiDataSetIteratorTest.java index 2952382b7..5887bfe90 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/AsyncMultiDataSetIteratorTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/AsyncMultiDataSetIteratorTest.java @@ -57,7 +57,7 @@ public class AsyncMultiDataSetIteratorTest extends BaseDL4JTest { //log.info("Features ptr: {}", AtomicAllocator.getInstance().getPointer(mds.getFeatures()[0].data()).address()); - assertEquals( (double) cnt, + assertEquals(cnt, mds.getFeatures()[0].meanNumber().doubleValue(), 1e-10,"Failed on epoch " + e + "; iteration: " + cnt + ";"); assertEquals((double) cnt + 0.25, mds.getLabels()[0].meanNumber().doubleValue(), 1e-10, "Failed on epoch " + e + "; iteration: " + cnt + ";"); @@ -96,7 +96,7 @@ public class AsyncMultiDataSetIteratorTest extends BaseDL4JTest { //log.info("Features ptr: {}", AtomicAllocator.getInstance().getPointer(mds.getFeatures()[0].data()).address()); - assertEquals((double) cnt, + assertEquals(cnt, mds.getFeatures()[0].meanNumber().doubleValue(), 1e-10, "Failed on epoch " + e + "; iteration: " + cnt + ";"); assertEquals((double) cnt + 0.25, mds.getLabels()[0].meanNumber().doubleValue(), 1e-10, "Failed on epoch " + e + "; iteration: " + cnt + ";"); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/DataSetIteratorTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/DataSetIteratorTest.java index 138298e89..dc9b3ffcf 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/DataSetIteratorTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/DataSetIteratorTest.java @@ -70,7 +70,7 @@ public class DataSetIteratorTest extends BaseDL4JTest { while (iris.hasNext()) { irisC++; DataSet ds = iris.next(); - 
assertTrue(ds.getLabels().sum(Integer.MAX_VALUE).getDouble(0) == 1.0); + assertEquals(1.0, ds.getLabels().sum(Integer.MAX_VALUE).getDouble(0)); } assertEquals(5, irisC); } @@ -84,7 +84,7 @@ public class DataSetIteratorTest extends BaseDL4JTest { while (mnist.hasNext()) { mnistC++; DataSet ds = mnist.next(); - assertTrue(ds.getLabels().sum(Integer.MAX_VALUE).getDouble(0) == 1.0); + assertEquals(1.0, ds.getLabels().sum(Integer.MAX_VALUE).getDouble(0)); } assertEquals(5, mnistC); } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/DataSetSplitterTests.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/DataSetSplitterTests.java index 26302914e..b98c31929 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/DataSetSplitterTests.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/DataSetSplitterTests.java @@ -245,7 +245,7 @@ public class DataSetSplitterTests extends BaseDL4JTest { trained = true; val ds = trainIter.next(); assertNotNull(ds); - assertEquals( (double) globalIter, ds.getFeatures().getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); + assertEquals(globalIter, ds.getFeatures().getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); globalIter++; } assertTrue(trained, "Failed at epoch [" + e + "]"); @@ -260,7 +260,7 @@ public class DataSetSplitterTests extends BaseDL4JTest { val ds = testIter.next(); assertNotNull(ds); - assertEquals((double) globalIter, ds.getFeatures().getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); + assertEquals(globalIter, ds.getFeatures().getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); globalIter++; } assertTrue(tested, "Failed at epoch [" + e + "]"); @@ -275,7 +275,7 @@ public class DataSetSplitterTests extends BaseDL4JTest { val ds = validationIter.next(); assertNotNull(ds); - assertEquals((double) globalIter, 
ds.getFeatures().getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); + assertEquals(globalIter, ds.getFeatures().getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); globalIter++; } assertTrue(validated, "Failed at epoch [" + e + "]"); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/EarlyTerminationDataSetIteratorTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/EarlyTerminationDataSetIteratorTest.java index d95c63ce7..559865b22 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/EarlyTerminationDataSetIteratorTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/EarlyTerminationDataSetIteratorTest.java @@ -51,7 +51,7 @@ public class EarlyTerminationDataSetIteratorTest extends BaseDL4JTest { List seenData = new ArrayList<>(); while (earlyEndIter.hasNext()) { DataSet path = earlyEndIter.next(); - assertFalse(path == null); + assertNotNull(path); seenData.add(path); batchesSeen++; } @@ -76,10 +76,10 @@ public class EarlyTerminationDataSetIteratorTest extends BaseDL4JTest { EarlyTerminationDataSetIterator earlyEndIter = new EarlyTerminationDataSetIterator(iter, terminateAfter); earlyEndIter.next(10); - assertEquals(false, earlyEndIter.hasNext()); + assertFalse(earlyEndIter.hasNext()); earlyEndIter.reset(); - assertEquals(true, earlyEndIter.hasNext()); + assertTrue(earlyEndIter.hasNext()); } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/EarlyTerminationMultiDataSetIteratorTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/EarlyTerminationMultiDataSetIteratorTest.java index b05240ac7..f5e956653 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/EarlyTerminationMultiDataSetIteratorTest.java +++ 
b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/EarlyTerminationMultiDataSetIteratorTest.java @@ -32,8 +32,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; public class EarlyTerminationMultiDataSetIteratorTest extends BaseDL4JTest { @@ -90,10 +89,10 @@ public class EarlyTerminationMultiDataSetIteratorTest extends BaseDL4JTest { new EarlyTerminationMultiDataSetIterator(iter, terminateAfter); earlyEndIter.next(10); - assertEquals(false, earlyEndIter.hasNext()); + assertFalse(earlyEndIter.hasNext()); earlyEndIter.reset(); - assertEquals(true, earlyEndIter.hasNext()); + assertTrue(earlyEndIter.hasNext()); } @Test diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/JointParallelDataSetIteratorTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/JointParallelDataSetIteratorTest.java index 6705f6430..6bc8d8a6c 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/JointParallelDataSetIteratorTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/JointParallelDataSetIteratorTest.java @@ -59,7 +59,7 @@ public class JointParallelDataSetIteratorTest extends BaseDL4JTest { // ds.detach(); //ds.migrate(); - assertEquals( (double) example, ds.getFeatures().meanNumber().doubleValue(), 0.001, "Failed on iteration " + cnt); + assertEquals(example, ds.getFeatures().meanNumber().doubleValue(), 0.001, "Failed on iteration " + cnt); assertEquals( (double) example + 0.5, ds.getLabels().meanNumber().doubleValue(), 0.001, "Failed on iteration " + cnt); cnt++; @@ -96,7 +96,7 @@ public class JointParallelDataSetIteratorTest extends BaseDL4JTest { nulls++; if (cnt % 2 == 2) { - assertEquals( (double) example, 
ds.getFeatures().meanNumber().doubleValue(), 0.001, "Failed on iteration " + cnt); + assertEquals(example, ds.getFeatures().meanNumber().doubleValue(), 0.001, "Failed on iteration " + cnt); assertEquals((double) example + 0.5, ds.getLabels().meanNumber().doubleValue(), 0.001, "Failed on iteration " + cnt); } @@ -130,7 +130,7 @@ public class JointParallelDataSetIteratorTest extends BaseDL4JTest { DataSet ds = jpdsi.next(); assertNotNull( ds, "Failed on iteration " + cnt); - assertEquals((double) example, ds.getFeatures().meanNumber().doubleValue(),0.001, "Failed on iteration " + cnt); + assertEquals(example, ds.getFeatures().meanNumber().doubleValue(),0.001, "Failed on iteration " + cnt); assertEquals((double) example + 0.5, ds.getLabels().meanNumber().doubleValue(), 0.001, "Failed on iteration " + cnt); @@ -169,14 +169,14 @@ public class JointParallelDataSetIteratorTest extends BaseDL4JTest { assertNotNull( ds, "Failed on iteration " + cnt); if (cnt % 2 == 0) { - assertEquals( (double) example, ds.getFeatures().meanNumber().doubleValue(), 0.001, "Failed on iteration " + cnt); + assertEquals(example, ds.getFeatures().meanNumber().doubleValue(), 0.001, "Failed on iteration " + cnt); assertEquals((double) example + 0.5, ds.getLabels().meanNumber().doubleValue(), 0.001, "Failed on iteration " + cnt); } else { if (cnt <= 200) { - assertEquals( (double) example, ds.getFeatures().meanNumber().doubleValue(), 0.001, "Failed on iteration " + cnt); + assertEquals(example, ds.getFeatures().meanNumber().doubleValue(), 0.001, "Failed on iteration " + cnt); assertEquals( (double) example + 0.5, ds.getLabels().meanNumber().doubleValue(), 0.001, "Failed on iteration " + cnt); } else { - assertEquals( (double) example_sec, ds.getFeatures().meanNumber().doubleValue(), 0.001, "Failed on iteration " + cnt + ", second iteration " + cnt_sec); + assertEquals(example_sec, ds.getFeatures().meanNumber().doubleValue(), 0.001, "Failed on iteration " + cnt + ", second iteration " + cnt_sec); 
assertEquals((double) example_sec + 0.5, ds.getLabels().meanNumber().doubleValue(), 0.001, "Failed on iteration " + cnt + ", second iteration " + cnt_sec); } } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/MultiDataSetSplitterTests.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/MultiDataSetSplitterTests.java index 27ffe5bba..26dc7803a 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/MultiDataSetSplitterTests.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/MultiDataSetSplitterTests.java @@ -185,7 +185,7 @@ public class MultiDataSetSplitterTests extends BaseDL4JTest { assertNotNull(ds); for (int i = 0; i < ds.getFeatures().length; ++i) { - assertEquals((double) globalIter, ds.getFeatures()[i].getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); + assertEquals(globalIter, ds.getFeatures()[i].getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); } globalIter++; } @@ -202,7 +202,7 @@ public class MultiDataSetSplitterTests extends BaseDL4JTest { assertNotNull(ds); for (int i = 0; i < ds.getFeatures().length; ++i) { - assertEquals((double) globalIter, ds.getFeatures()[i].getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); + assertEquals(globalIter, ds.getFeatures()[i].getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); } globalIter++; } @@ -219,7 +219,7 @@ public class MultiDataSetSplitterTests extends BaseDL4JTest { assertNotNull(ds); for (int i = 0; i < ds.getFeatures().length; ++i) { - assertEquals((double) globalIter, ds.getFeatures()[i].getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); + assertEquals(globalIter, ds.getFeatures()[i].getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); } globalIter++; } @@ -298,7 +298,7 @@ public class MultiDataSetSplitterTests extends BaseDL4JTest { assertNotNull(ds); for (int i = 0; i < 
ds.getFeatures().length; ++i) { - assertEquals((double) globalIter, ds.getFeatures()[i].getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); + assertEquals(globalIter, ds.getFeatures()[i].getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); } globalIter++; } @@ -314,7 +314,7 @@ public class MultiDataSetSplitterTests extends BaseDL4JTest { val ds = testIter.next(); assertNotNull(ds); for (int i = 0; i < ds.getFeatures().length; ++i) { - assertEquals((double) globalIter, ds.getFeatures()[i].getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); + assertEquals(globalIter, ds.getFeatures()[i].getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); } globalIter++; } @@ -331,7 +331,7 @@ public class MultiDataSetSplitterTests extends BaseDL4JTest { assertNotNull(ds); for (int i = 0; i < ds.getFeatures().length; ++i) { - assertEquals((double) globalIter, ds.getFeatures()[i].getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); + assertEquals(globalIter, ds.getFeatures()[i].getDouble(0), 1e-5f, "Failed at iteration [" + globalIter + "]"); } globalIter++; } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/MultipleEpochsIteratorTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/MultipleEpochsIteratorTest.java index 3b221afd9..69a33aa75 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/MultipleEpochsIteratorTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/MultipleEpochsIteratorTest.java @@ -54,7 +54,7 @@ public class MultipleEpochsIteratorTest extends BaseDL4JTest { assertTrue(multiIter.hasNext()); while (multiIter.hasNext()) { DataSet path = multiIter.next(); - assertFalse(path == null); + assertNotNull(path); } assertEquals(epochs, multiIter.epochs); } diff --git 
a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/TestAsyncIterator.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/TestAsyncIterator.java index 199953dbc..6d2097a4b 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/TestAsyncIterator.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/TestAsyncIterator.java @@ -175,9 +175,9 @@ public class TestAsyncIterator extends BaseDL4JTest { private static class TestIterator implements DataSetIterator { - private int size; + private final int size; private int cursor; - private long delayMSOnNext; + private final long delayMSOnNext; private TestIterator(int size, long delayMSOnNext) { this.size = size; diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/tools/SimpleVariableGenerator.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/tools/SimpleVariableGenerator.java index 172a80167..cf0f578af 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/tools/SimpleVariableGenerator.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/datasets/iterator/tools/SimpleVariableGenerator.java @@ -30,13 +30,13 @@ import java.util.List; import java.util.concurrent.atomic.AtomicInteger; public class SimpleVariableGenerator implements DataSetIterator { - private long seed; - private int numBatches; - private int batchSize; - private int numFeatures; - private int numLabels; + private final long seed; + private final int numBatches; + private final int batchSize; + private final int numFeatures; + private final int numLabels; - private AtomicInteger counter = new AtomicInteger(0); + private final AtomicInteger counter = new AtomicInteger(0); public SimpleVariableGenerator(long seed, int numBatches, int batchSize, int numFeatures, int numLabels) { this.seed = seed; diff --git 
a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/earlystopping/TestEarlyStopping.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/earlystopping/TestEarlyStopping.java index 2774e9961..13ae46efb 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/earlystopping/TestEarlyStopping.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/earlystopping/TestEarlyStopping.java @@ -537,7 +537,7 @@ public class TestEarlyStopping extends BaseDL4JTest { private static class LoggingEarlyStoppingListener implements EarlyStoppingListener { - private static Logger log = LoggerFactory.getLogger(LoggingEarlyStoppingListener.class); + private static final Logger log = LoggerFactory.getLogger(LoggingEarlyStoppingListener.class); private int onStartCallCount = 0; private int onEpochCallCount = 0; private int onCompletionCallCount = 0; @@ -852,7 +852,7 @@ public class TestEarlyStopping extends BaseDL4JTest { int outputs = 2; DataSet ds = new DataSet( - Nd4j.rand(new int[]{3, 10, 50}), + Nd4j.rand(3, 10, 50), TestUtils.randomOneHotTimeSeries(3, outputs, 50, 12345)); DataSetIterator train = new ExistingDataSetIterator( Arrays.asList(ds, ds, ds, ds, ds, ds, ds, ds, ds, ds)); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/earlystopping/TestEarlyStoppingCompGraph.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/earlystopping/TestEarlyStoppingCompGraph.java index 1a02ffd7f..4209f8dd3 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/earlystopping/TestEarlyStoppingCompGraph.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/earlystopping/TestEarlyStoppingCompGraph.java @@ -263,7 +263,7 @@ public class TestEarlyStoppingCompGraph extends BaseDL4JTest { private static class LoggingEarlyStoppingListener implements EarlyStoppingListener { - private static Logger log = LoggerFactory.getLogger(LoggingEarlyStoppingListener.class); + private static final Logger log = 
LoggerFactory.getLogger(LoggingEarlyStoppingListener.class); private int onStartCallCount = 0; private int onEpochCallCount = 0; private int onCompletionCallCount = 0; diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/eval/EvalTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/eval/EvalTest.java index c33a69c87..024804c0c 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/eval/EvalTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/eval/EvalTest.java @@ -328,10 +328,10 @@ public class EvalTest extends BaseDL4JTest { for(boolean useMask : new boolean[]{false, true}) { - INDArray in1 = Nd4j.rand(new int[]{3, nIn, tsLength}); + INDArray in1 = Nd4j.rand(3, nIn, tsLength); INDArray out1 = TestUtils.randomOneHotTimeSeries(3, nOut, tsLength); - INDArray in2 = Nd4j.rand(new int[]{5, nIn, tsLength}); + INDArray in2 = Nd4j.rand(5, nIn, tsLength); INDArray out2 = TestUtils.randomOneHotTimeSeries(5, nOut, tsLength); INDArray lMask1 = null; @@ -409,10 +409,10 @@ public class EvalTest extends BaseDL4JTest { for (boolean useMask : new boolean[]{false, true}) { - INDArray in1 = Nd4j.rand(new int[]{3, nIn, tsLength}); + INDArray in1 = Nd4j.rand(3, nIn, tsLength); INDArray out1 = TestUtils.randomOneHotTimeSeries(3, nOut, tsLength); - INDArray in2 = Nd4j.rand(new int[]{5, nIn, tsLength}); + INDArray in2 = Nd4j.rand(5, nIn, tsLength); INDArray out2 = TestUtils.randomOneHotTimeSeries(5, nOut, tsLength); INDArray lMask1 = null; @@ -442,11 +442,11 @@ public class EvalTest extends BaseDL4JTest { @Test public void testEvalSplitting2(){ List> seqFeatures = new ArrayList<>(); - List step = Arrays.asList(new FloatWritable(0), new FloatWritable(0), new FloatWritable(0)); + List step = Arrays.asList(new FloatWritable(0), new FloatWritable(0), new FloatWritable(0)); for( int i=0; i<30; i++ ){ seqFeatures.add(step); } - List> seqLabels = Collections.singletonList(Collections.singletonList(new FloatWritable(0))); + 
List> seqLabels = Collections.singletonList(Collections.singletonList(new FloatWritable(0))); SequenceRecordReader fsr = new CollectionSequenceRecordReader(Collections.singletonList(seqFeatures)); SequenceRecordReader lsr = new CollectionSequenceRecordReader(Collections.singletonList(seqLabels)); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/eval/ROCTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/eval/ROCTest.java index 629ce0d9b..5684a76d6 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/eval/ROCTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/eval/ROCTest.java @@ -47,8 +47,8 @@ import static org.junit.jupiter.api.Assertions.*; public class ROCTest extends BaseDL4JTest { - private static Map expTPR; - private static Map expFPR; + private static final Map expTPR; + private static final Map expFPR; static { expTPR = new HashMap<>(); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/eval/RegressionEvalTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/eval/RegressionEvalTest.java index 23e69502c..b5e2b994e 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/eval/RegressionEvalTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/eval/RegressionEvalTest.java @@ -102,7 +102,7 @@ public class RegressionEvalTest extends BaseDL4JTest { re.eval(l, predictions, mask); - double[] mse = new double[] {(10 * 10) / 1.0, (2 * 2 + 20 * 20 + 10 * 10) / 3, (3 * 3) / 1.0}; + double[] mse = new double[] {(10 * 10), (2 * 2 + 20 * 20 + 10 * 10) / 3, (3 * 3)}; double[] mae = new double[] {10.0, (2 + 20 + 10) / 3.0, 3.0}; @@ -118,11 +118,11 @@ public class RegressionEvalTest extends BaseDL4JTest { @Test public void testRegressionEvalTimeSeriesSplit(){ - INDArray out1 = Nd4j.rand(new int[]{3, 5, 20}); + INDArray out1 = Nd4j.rand(3, 5, 20); INDArray outSub1 = out1.get(all(), all(), interval(0,10)); INDArray outSub2 = 
out1.get(all(), all(), interval(10, 20)); - INDArray label1 = Nd4j.rand(new int[]{3, 5, 20}); + INDArray label1 = Nd4j.rand(3, 5, 20); INDArray labelSub1 = label1.get(all(), all(), interval(0,10)); INDArray labelSub2 = label1.get(all(), all(), interval(10, 20)); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/exceptions/TestRecordReaders.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/exceptions/TestRecordReaders.java index 868ec0809..3ec50df59 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/exceptions/TestRecordReaders.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/exceptions/TestRecordReaders.java @@ -37,6 +37,7 @@ import org.nd4j.linalg.dataset.api.iterator.DataSetIterator; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -45,8 +46,8 @@ public class TestRecordReaders extends BaseDL4JTest { @Test public void testClassIndexOutsideOfRangeRRDSI() { Collection> c = new ArrayList<>(); - c.add(Arrays.asList(new DoubleWritable(0.5), new IntWritable(0))); - c.add(Arrays.asList(new DoubleWritable(1.0), new IntWritable(2))); + c.add(Arrays.asList(new DoubleWritable(0.5), new IntWritable(0))); + c.add(Arrays.asList(new DoubleWritable(1.0), new IntWritable(2))); CollectionRecordReader crr = new CollectionRecordReader(c); @@ -67,13 +68,13 @@ public class TestRecordReaders extends BaseDL4JTest { Collection>> c = new ArrayList<>(); Collection> seq1 = new ArrayList<>(); - seq1.add(Arrays.asList(new DoubleWritable(0.0), new IntWritable(0))); - seq1.add(Arrays.asList(new DoubleWritable(0.0), new IntWritable(1))); + seq1.add(Arrays.asList(new DoubleWritable(0.0), new IntWritable(0))); + seq1.add(Arrays.asList(new DoubleWritable(0.0), new IntWritable(1))); c.add(seq1); Collection> seq2 = new ArrayList<>(); - seq2.add(Arrays.asList(new DoubleWritable(0.0), new 
IntWritable(0))); - seq2.add(Arrays.asList(new DoubleWritable(0.0), new IntWritable(2))); + seq2.add(Arrays.asList(new DoubleWritable(0.0), new IntWritable(0))); + seq2.add(Arrays.asList(new DoubleWritable(0.0), new IntWritable(2))); c.add(seq2); CollectionSequenceRecordReader csrr = new CollectionSequenceRecordReader(c); @@ -94,24 +95,24 @@ public class TestRecordReaders extends BaseDL4JTest { Collection>> c1 = new ArrayList<>(); Collection> seq1 = new ArrayList<>(); - seq1.add(Arrays.asList(new DoubleWritable(0.0))); - seq1.add(Arrays.asList(new DoubleWritable(0.0))); + seq1.add(Collections.singletonList(new DoubleWritable(0.0))); + seq1.add(Collections.singletonList(new DoubleWritable(0.0))); c1.add(seq1); Collection> seq2 = new ArrayList<>(); - seq2.add(Arrays.asList(new DoubleWritable(0.0))); - seq2.add(Arrays.asList(new DoubleWritable(0.0))); + seq2.add(Collections.singletonList(new DoubleWritable(0.0))); + seq2.add(Collections.singletonList(new DoubleWritable(0.0))); c1.add(seq2); Collection>> c2 = new ArrayList<>(); Collection> seq1a = new ArrayList<>(); - seq1a.add(Arrays.asList(new IntWritable(0))); - seq1a.add(Arrays.asList(new IntWritable(1))); + seq1a.add(Collections.singletonList(new IntWritable(0))); + seq1a.add(Collections.singletonList(new IntWritable(1))); c2.add(seq1a); Collection> seq2a = new ArrayList<>(); - seq2a.add(Arrays.asList(new IntWritable(0))); - seq2a.add(Arrays.asList(new IntWritable(2))); + seq2a.add(Collections.singletonList(new IntWritable(0))); + seq2a.add(Collections.singletonList(new IntWritable(2))); c2.add(seq2a); CollectionSequenceRecordReader csrr = new CollectionSequenceRecordReader(c1); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/AttentionLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/AttentionLayerTest.java index 739168b31..e375aa180 100644 --- 
a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/AttentionLayerTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/AttentionLayerTest.java @@ -62,7 +62,7 @@ public class AttentionLayerTest extends BaseDL4JTest { for (int mb : new int[]{1, 3}) { for (boolean inputMask : new boolean[]{false, true}) { for (boolean projectInput : new boolean[]{false, true}) { - INDArray in = Nd4j.rand(DataType.DOUBLE, new int[]{mb, nIn, tsLength}); + INDArray in = Nd4j.rand(DataType.DOUBLE, mb, nIn, tsLength); INDArray labels = TestUtils.randomOneHot(mb, nOut); String maskType = (inputMask ? "inputMask" : "none"); @@ -123,7 +123,7 @@ public class AttentionLayerTest extends BaseDL4JTest { for (boolean inputMask : new boolean[]{false, true}) { for (int mb : new int[]{3, 1}) { for (boolean projectInput : new boolean[]{false, true}) { - INDArray in = Nd4j.rand(DataType.DOUBLE, new int[]{mb, nIn, tsLength}); + INDArray in = Nd4j.rand(DataType.DOUBLE, mb, nIn, tsLength); INDArray labels = TestUtils.randomOneHot(mb, nOut); String maskType = (inputMask ? "inputMask" : "none"); @@ -205,7 +205,7 @@ public class AttentionLayerTest extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); for (int mb : new int[]{3, 1}) { - INDArray in = Nd4j.rand(DataType.DOUBLE, new int[]{mb, nIn, tsLength}); + INDArray in = Nd4j.rand(DataType.DOUBLE, mb, nIn, tsLength); INDArray labels = TestUtils.randomOneHot(mb, nOut); String maskType = (inputMask ? 
"inputMask" : "none"); @@ -257,11 +257,11 @@ public class AttentionLayerTest extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - final INDArray initialInput = Nd4j.rand(new int[]{8, nIn, 7}); - final INDArray goodNextInput = Nd4j.rand(new int[]{8, nIn, 7}); - final INDArray badNextInput = Nd4j.rand(new int[]{8, nIn, 12}); + final INDArray initialInput = Nd4j.rand(8, nIn, 7); + final INDArray goodNextInput = Nd4j.rand(8, nIn, 7); + final INDArray badNextInput = Nd4j.rand(8, nIn, 12); - final INDArray labels = Nd4j.rand(new int[]{8, nOut}); + final INDArray labels = Nd4j.rand(8, nOut); net.fit(initialInput, labels); net.fit(goodNextInput, labels); @@ -281,7 +281,7 @@ public class AttentionLayerTest extends BaseDL4JTest { for (int mb : new int[]{3, 1}) { for (boolean inputMask : new boolean[]{true, false}) { - INDArray in = Nd4j.rand(DataType.DOUBLE, new int[]{mb, nIn, tsLength}); + INDArray in = Nd4j.rand(DataType.DOUBLE, mb, nIn, tsLength); INDArray labels = TestUtils.randomOneHot(mb, nOut); String maskType = (inputMask ? "inputMask" : "none"); @@ -339,7 +339,7 @@ public class AttentionLayerTest extends BaseDL4JTest { for (boolean inputMask : new boolean[]{false, true}) { for (int mb : new int[]{3, 1}) { for (boolean projectInput : new boolean[]{false, true}) { - INDArray in = Nd4j.rand(DataType.DOUBLE, new int[]{mb, nIn, tsLength}); + INDArray in = Nd4j.rand(DataType.DOUBLE, mb, nIn, tsLength); INDArray labels = TestUtils.randomOneHot(mb, nOut); String maskType = (inputMask ? "inputMask" : "none"); @@ -403,7 +403,7 @@ public class AttentionLayerTest extends BaseDL4JTest { for (boolean inputMask : new boolean[]{false, true}) { for (int mb : new int[]{3, 1}) { for (boolean projectInput : new boolean[]{false, true}) { - INDArray in = Nd4j.rand(new int[]{mb, nIn, tsLength}); + INDArray in = Nd4j.rand(mb, nIn, tsLength); INDArray labels = TestUtils.randomOneHot(mb, nOut); String maskType = (inputMask ? 
"inputMask" : "none"); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/BNGradientCheckTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/BNGradientCheckTest.java index 3d945b27e..65f8787d8 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/BNGradientCheckTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/BNGradientCheckTest.java @@ -111,7 +111,7 @@ public class BNGradientCheckTest extends BaseDL4JTest { int depth = 1; int hw = 4; int nOut = 4; - INDArray input = Nd4j.rand(new int[]{minibatch, depth, hw, hw}); + INDArray input = Nd4j.rand(minibatch, depth, hw, hw); INDArray labels = Nd4j.zeros(minibatch, nOut); Random r = new Random(12345); for (int i = 0; i < minibatch; i++) { @@ -171,7 +171,7 @@ public class BNGradientCheckTest extends BaseDL4JTest { int depth = 2; int hw = 5; int nOut = 2; - INDArray input = Nd4j.rand(new int[]{minibatch, depth, hw, hw}).muli(5).subi(2.5); + INDArray input = Nd4j.rand(minibatch, depth, hw, hw).muli(5).subi(2.5); INDArray labels = TestUtils.randomOneHot(minibatch, nOut); DataSet ds = new DataSet(input, labels); @@ -277,7 +277,7 @@ public class BNGradientCheckTest extends BaseDL4JTest { int minibatch = 10; int nIn = 5; int nOut = 3; - INDArray input = Nd4j.rand(new int[]{minibatch, nIn}); + INDArray input = Nd4j.rand(minibatch, nIn); INDArray labels = Nd4j.zeros(minibatch, nOut); Random r = new Random(12345); for (int i = 0; i < minibatch; i++) { @@ -406,7 +406,7 @@ public class BNGradientCheckTest extends BaseDL4JTest { int depth = 1; int hw = 4; int nOut = 4; - INDArray input = Nd4j.rand(new int[]{minibatch, depth, hw, hw}); + INDArray input = Nd4j.rand(minibatch, depth, hw, hw); INDArray labels = Nd4j.zeros(minibatch, nOut); Random r = new Random(12345); for (int i = 0; i < minibatch; i++) { @@ -470,7 +470,7 @@ public class BNGradientCheckTest extends BaseDL4JTest { net.init(); Random r = 
new Random(12345); - INDArray input = Nd4j.rand(new int[]{minibatchSize, channels, height, width}); //Order: examples, channels, height, width + INDArray input = Nd4j.rand(minibatchSize, channels, height, width); //Order: examples, channels, height, width INDArray labels = Nd4j.zeros(minibatchSize, numClasses); for (int i = 0; i < minibatchSize; i++) { labels.putScalar(new int[]{i, r.nextInt(numClasses)}, 1.0); @@ -510,7 +510,7 @@ public class BNGradientCheckTest extends BaseDL4JTest { int depth = 2; int hw = 5; int nOut = 3; - INDArray input = Nd4j.rand(new int[]{minibatch, depth, hw, hw}); + INDArray input = Nd4j.rand(minibatch, depth, hw, hw); INDArray labels = Nd4j.zeros(minibatch, nOut); Random r = new Random(12345); for (int i = 0; i < minibatch; i++) { diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/CNN1DGradientCheckTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/CNN1DGradientCheckTest.java index 094034320..b61c1fe24 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/CNN1DGradientCheckTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/CNN1DGradientCheckTest.java @@ -82,7 +82,7 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest { for (Activation afn : activations) { for (int minibatchSize : minibatchSizes) { for (int kernel : kernels) { - INDArray input = Nd4j.rand(new int[]{minibatchSize, convNIn, length}); + INDArray input = Nd4j.rand(minibatchSize, convNIn, length); INDArray labels = Nd4j.zeros(minibatchSize, finalNOut, length); for (int i = 0; i < minibatchSize; i++) { for (int j = 0; j < length; j++) { @@ -162,7 +162,7 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest { for (SubsamplingLayer.PoolingType poolingType : poolingTypes) { for (int minibatchSize : minibatchSizes) { for (int kernel : kernels) { - INDArray input = Nd4j.rand(new int[]{minibatchSize, convNIn, length}); + INDArray input = 
Nd4j.rand(minibatchSize, convNIn, length); INDArray labels = Nd4j.zeros(minibatchSize, finalNOut, croppedLength); for (int i = 0; i < minibatchSize; i++) { for (int j = 0; j < croppedLength; j++) { @@ -243,7 +243,7 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest { for (SubsamplingLayer.PoolingType poolingType : poolingTypes) { for (int minibatchSize : minibatchSizes) { for (int kernel : kernels) { - INDArray input = Nd4j.rand(new int[]{minibatchSize, convNIn, length}); + INDArray input = Nd4j.rand(minibatchSize, convNIn, length); INDArray labels = Nd4j.zeros(minibatchSize, finalNOut, paddedLength); for (int i = 0; i < minibatchSize; i++) { for (int j = 0; j < paddedLength; j++) { @@ -322,7 +322,7 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest { for (SubsamplingLayer.PoolingType poolingType : poolingTypes) { for (int minibatchSize : minibatchSizes) { for (int kernel : kernels) { - INDArray input = Nd4j.rand(new int[]{minibatchSize, convNIn, length}); + INDArray input = Nd4j.rand(minibatchSize, convNIn, length); INDArray labels = Nd4j.zeros(minibatchSize, finalNOut, length); for (int i = 0; i < minibatchSize; i++) { for (int j = 0; j < length; j++) { @@ -418,7 +418,7 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - INDArray f = Nd4j.rand(new int[]{2, convNIn, length}); + INDArray f = Nd4j.rand(2, convNIn, length); INDArray fm = Nd4j.create(2, length); fm.get(NDArrayIndex.point(0), NDArrayIndex.all()).assign(1); fm.get(NDArrayIndex.point(1), NDArrayIndex.interval(0,6)).assign(1); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/CNN3DGradientCheckTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/CNN3DGradientCheckTest.java index 2c8f4dead..4d3de0bfb 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/CNN3DGradientCheckTest.java +++ 
b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/CNN3DGradientCheckTest.java @@ -103,9 +103,9 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest { INDArray input; if(df == Convolution3D.DataFormat.NDHWC){ - input = Nd4j.rand(new int[]{miniBatchSize, depth, height, width, convNIn}); + input = Nd4j.rand(miniBatchSize, depth, height, width, convNIn); } else { - input = Nd4j.rand(new int[]{miniBatchSize, convNIn, depth, height, width}); + input = Nd4j.rand(miniBatchSize, convNIn, depth, height, width); } INDArray labels = Nd4j.zeros(miniBatchSize, finalNOut); for (int i = 0; i < miniBatchSize; i++) { @@ -142,7 +142,7 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest { String msg = "DataFormat = " + df + ", minibatch size = " + miniBatchSize + ", activationFn=" + afn + ", kernel = " + Arrays.toString(kernel) + ", stride = " - + Arrays.toString(stride) + ", mode = " + mode.toString() + + Arrays.toString(stride) + ", mode = " + mode + ", input depth " + depth + ", input height " + height + ", input width " + width; @@ -209,7 +209,7 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest { outHeight += zeroPadding[2] + zeroPadding[3]; outWidth += zeroPadding[4] + zeroPadding[5]; - INDArray input = Nd4j.rand(new int[]{miniBatchSize, convNIn, depth, height, width}); + INDArray input = Nd4j.rand(miniBatchSize, convNIn, depth, height, width); INDArray labels = Nd4j.zeros(miniBatchSize, finalNOut); for (int i = 0; i < miniBatchSize; i++) { labels.putScalar(new int[]{i, i % finalNOut}, 1.0); @@ -245,7 +245,7 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest { net.init(); String msg = "Minibatch size = " + miniBatchSize + ", activationFn=" + afn - + ", kernel = " + Arrays.toString(kernel) + ", mode = " + mode.toString() + + ", kernel = " + Arrays.toString(kernel) + ", mode = " + mode + ", input depth " + depth + ", input height " + height + ", input width " + width; @@ -337,7 +337,7 @@ public class CNN3DGradientCheckTest 
extends BaseDL4JTest { net.init(); String msg = "Minibatch size = " + miniBatchSize + ", activationFn=" + afn - + ", kernel = " + Arrays.toString(kernel) + ", mode = " + mode.toString() + + ", kernel = " + Arrays.toString(kernel) + ", mode = " + mode + ", input depth " + depth + ", input height " + height + ", input width " + width + ", dataFormat=" + df; @@ -424,7 +424,7 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest { net.init(); String msg = "Minibatch size = " + miniBatchSize + ", activationFn=" + afn - + ", kernel = " + Arrays.toString(upsamplingSize) + ", mode = " + mode.toString() + + ", kernel = " + Arrays.toString(upsamplingSize) + ", mode = " + mode + ", input depth " + depth + ", input height " + height + ", input width " + width; @@ -487,7 +487,7 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest { outHeight -= cropping[2] + cropping[3]; outWidth -= cropping[4] + cropping[5]; - INDArray input = Nd4j.rand(new int[]{miniBatchSize, convNIn, depth, height, width}); + INDArray input = Nd4j.rand(miniBatchSize, convNIn, depth, height, width); INDArray labels = Nd4j.zeros(miniBatchSize, finalNOut); for (int i = 0; i < miniBatchSize; i++) { labels.putScalar(new int[]{i, i % finalNOut}, 1.0); @@ -523,7 +523,7 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest { net.init(); String msg = "Minibatch size = " + miniBatchSize + ", activationFn=" + afn - + ", kernel = " + Arrays.toString(kernel) + ", mode = " + mode.toString() + + ", kernel = " + Arrays.toString(kernel) + ", mode = " + mode + ", input depth " + depth + ", input height " + height + ", input width " + width; @@ -583,9 +583,9 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest { INDArray input; if (df == Convolution3D.DataFormat.NDHWC) { - input = Nd4j.rand(new int[]{miniBatchSize, depth, height, width, convNIn}); + input = Nd4j.rand(miniBatchSize, depth, height, width, convNIn); } else { - input = Nd4j.rand(new int[]{miniBatchSize, convNIn, depth, height, width}); + 
input = Nd4j.rand(miniBatchSize, convNIn, depth, height, width); } INDArray labels = Nd4j.zeros(miniBatchSize, finalNOut); for (int j = 0; j < miniBatchSize; j++) { @@ -618,7 +618,7 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest { String msg = "DataFormat = " + df + ", minibatch size = " + miniBatchSize + ", activationFn=" + afn + ", kernel = " + Arrays.toString(kernel) + ", stride = " - + Arrays.toString(stride) + ", mode = " + mode.toString() + + Arrays.toString(stride) + ", mode = " + mode + ", input depth " + depth + ", input height " + height + ", input width " + width; diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/CNNGradientCheckTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/CNNGradientCheckTest.java index 3772741d5..b9536ee41 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/CNNGradientCheckTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/CNNGradientCheckTest.java @@ -63,7 +63,7 @@ public class CNNGradientCheckTest extends BaseDL4JTest { Nd4j.setDataType(DataType.DOUBLE); } - private CNN2DFormat format; + private final CNN2DFormat format; public CNNGradientCheckTest(CNN2DFormat format){ this.format = format; diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/DropoutGradientCheck.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/DropoutGradientCheck.java index 193ede7ac..9aafd297c 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/DropoutGradientCheck.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/DropoutGradientCheck.java @@ -125,7 +125,7 @@ public class DropoutGradientCheck extends BaseDL4JTest { INDArray f; if(cnn){ - f = Nd4j.rand(new int[]{minibatch, 2, 6, 6}).muli(10).subi(5); + f = Nd4j.rand(minibatch, 2, 6, 6).muli(10).subi(5); } else { f = 
Nd4j.rand(minibatch, 6).muli(10).subi(5); } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/GlobalPoolingGradientCheckTests.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/GlobalPoolingGradientCheckTests.java index f4b9d4dc5..7cb10f83b 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/GlobalPoolingGradientCheckTests.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/GlobalPoolingGradientCheckTests.java @@ -276,7 +276,7 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest { mln.init(); Random r = new Random(12345L); - INDArray input = Nd4j.rand(new int[] {miniBatchSize, inputDepth, inputH, inputW}).subi(0.5); + INDArray input = Nd4j.rand(miniBatchSize, inputDepth, inputH, inputW).subi(0.5); INDArray inputMask; if (miniBatchSize == 1) { diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTestsComputationGraph.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTestsComputationGraph.java index be641898e..ec99f3852 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTestsComputationGraph.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTestsComputationGraph.java @@ -472,7 +472,7 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { ComputationGraph graph = new ComputationGraph(conf); graph.init(); - INDArray input = Nd4j.rand(new int[] {batchSize, inLength, timeSeriesLength}); + INDArray input = Nd4j.rand(batchSize, inLength, timeSeriesLength); INDArray labels = TestUtils.randomOneHotTimeSeries(batchSize, 2, timeSeriesLength); if (PRINT_RESULTS) { @@ -509,7 +509,7 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { graph.init(); Random r = new Random(12345); - INDArray input = Nd4j.rand(new int[] {2, 3, 
4}); + INDArray input = Nd4j.rand(2, 3, 4); INDArray labels = TestUtils.randomOneHot(2, 2); //Here: labels are 2d (due to LastTimeStepVertex) if (PRINT_RESULTS) { @@ -572,8 +572,8 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { graph.init(); Random r = new Random(12345); - INDArray input1 = Nd4j.rand(new int[] {batchSize, 3, 4}); - INDArray input2 = Nd4j.rand(new int[] {batchSize, 2, 4}); + INDArray input1 = Nd4j.rand(batchSize, 3, 4); + INDArray input2 = Nd4j.rand(batchSize, 2, 4); INDArray labels = TestUtils.randomOneHotTimeSeries(batchSize, outSize, timeSeriesLength); if (PRINT_RESULTS) { @@ -622,7 +622,7 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { graph.init(); Random r = new Random(12345); - INDArray input = Nd4j.rand(new int[] {2, 2, 4}); + INDArray input = Nd4j.rand(2, 2, 4); INDArray labels = TestUtils.randomOneHotTimeSeries(2, 2, 4); if (PRINT_RESULTS) { @@ -813,7 +813,7 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { int[] minibatchSizes = {1, 3}; for (int mb : minibatchSizes) { - INDArray input = Nd4j.rand(new int[] {mb, 2, inH, inW}).muli(4); //Order: examples, channels, height, width + INDArray input = Nd4j.rand(mb, 2, inH, inW).muli(4); //Order: examples, channels, height, width INDArray out = Nd4j.rand(mb, 2); String msg = "testMultipleOutputsMergeVertex() - minibatchSize = " + mb; @@ -991,7 +991,7 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - INDArray example = Nd4j.rand(new int[] {150, inputDepth, inputH, inputW}); + INDArray example = Nd4j.rand(150, inputDepth, inputH, inputW); INDArray labels = Nd4j.zeros(150, numLabels); Random r = new Random(12345); @@ -1001,7 +1001,7 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (train) { for (int i = 0; i < 10; i++) { - INDArray f = Nd4j.rand(new int[] {10, inputDepth, inputH, inputW}); + INDArray f = 
Nd4j.rand(10, inputDepth, inputH, inputW); INDArray l = Nd4j.zeros(10, numLabels); for (int j = 0; j < 10; j++) { l.putScalar(j, r.nextInt(numLabels), 1.0); @@ -1227,15 +1227,15 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { int[] mbSizes = new int[] {1, 2, 3}; for (int minibatch : mbSizes) { - INDArray in1 = Nd4j.rand(new int[] {minibatch, layerSizes, 4}); - INDArray in2 = Nd4j.rand(new int[] {minibatch, layerSizes, 5}); + INDArray in1 = Nd4j.rand(minibatch, layerSizes, 4); + INDArray in2 = Nd4j.rand(minibatch, layerSizes, 5); INDArray inMask1 = Nd4j.zeros(minibatch, 4); inMask1.get(NDArrayIndex.all(), NDArrayIndex.interval(0, 3)).assign(1); INDArray inMask2 = Nd4j.zeros(minibatch, 5); inMask2.get(NDArrayIndex.all(), NDArrayIndex.interval(0, 4)).assign(1); - INDArray labels1 = Nd4j.rand(new int[] {minibatch, 2}); - INDArray labels2 = Nd4j.rand(new int[] {minibatch, 2}); + INDArray labels1 = Nd4j.rand(minibatch, 2); + INDArray labels2 = Nd4j.rand(minibatch, 2); String testName = "testBasicStackUnstackVariableLengthTS() - minibatch = " + minibatch; @@ -1389,7 +1389,7 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { int[] mbSizes = new int[] {1, 3, 10}; for (int minibatch : mbSizes) { - INDArray in1 = Nd4j.rand(new int[] {minibatch, dIn, h, w}); + INDArray in1 = Nd4j.rand(minibatch, dIn, h, w); INDArray labels1 = Nd4j.rand(minibatch, 2); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTestsMasking.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTestsMasking.java index a444e1146..4efd20ee7 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTestsMasking.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTestsMasking.java @@ -411,7 +411,7 @@ public class GradientCheckTestsMasking extends BaseDL4JTest { MultiLayerNetwork net = new 
MultiLayerNetwork(conf); net.init(); - INDArray f = Nd4j.rand(new int[]{mb, 3, tsLength}); + INDArray f = Nd4j.rand(mb, 3, tsLength); INDArray l = TestUtils.randomOneHot(mb, 3); INDArray lm = TestUtils.randomBernoulli(mb, 1); @@ -468,7 +468,7 @@ public class GradientCheckTestsMasking extends BaseDL4JTest { ComputationGraph net = new ComputationGraph(conf); net.init(); - INDArray f = Nd4j.rand(new int[]{mb, 3, tsLength}); + INDArray f = Nd4j.rand(mb, 3, tsLength); INDArray l = TestUtils.randomOneHot(mb, 3); INDArray lm = TestUtils.randomBernoulli(mb, 1); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/LRNGradientCheckTests.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/LRNGradientCheckTests.java index ad1b564db..9d982818a 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/LRNGradientCheckTests.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/LRNGradientCheckTests.java @@ -61,7 +61,7 @@ public class LRNGradientCheckTests extends BaseDL4JTest { int depth = 6; int hw = 5; int nOut = 4; - INDArray input = Nd4j.rand(new int[] {minibatch, depth, hw, hw}); + INDArray input = Nd4j.rand(minibatch, depth, hw, hw); INDArray labels = Nd4j.zeros(minibatch, nOut); Random r = new Random(12345); for (int i = 0; i < minibatch; i++) { diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/LSTMGradientCheckTests.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/LSTMGradientCheckTests.java index 452742f10..c1e20d858 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/LSTMGradientCheckTests.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/LSTMGradientCheckTests.java @@ -418,7 +418,7 @@ public class LSTMGradientCheckTests extends BaseDL4JTest { //Generate Nd4j.getRandom().setSeed(12345); - INDArray input = Nd4j.rand(new int[] 
{miniBatchSize, inputSize, timeSeriesLength}); + INDArray input = Nd4j.rand(miniBatchSize, inputSize, timeSeriesLength); INDArray labels = Nd4j.zeros(miniBatchSize, nClasses, timeSeriesLength); Random r = new Random(12345); for (int i = 0; i < miniBatchSize; i++) { diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/LossFunctionGradientCheck.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/LossFunctionGradientCheck.java index fe4c1eb3b..74b142845 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/LossFunctionGradientCheck.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/LossFunctionGradientCheck.java @@ -343,7 +343,7 @@ public class LossFunctionGradientCheck extends BaseDL4JTest { lossFunctions[i] = lf2; } catch(IOException ex) { ex.printStackTrace(); - assertTrue(false, "Tests failed: serialization of " + lossFunctions[i]); + fail("Tests failed: serialization of " + lossFunctions[i]); } Nd4j.getRandom().setSeed(12345); @@ -362,8 +362,8 @@ public class LossFunctionGradientCheck extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - assertTrue(((LossLayer) net.getLayer(1).conf().getLayer()).getLossFn().getClass() == lossFunctions[i] - .getClass()); + assertSame(((LossLayer) net.getLayer(1).conf().getLayer()).getLossFn().getClass(), lossFunctions[i] + .getClass()); INDArray[] inOut = getFeaturesAndLabels(lossFunctions[i], minibatchSizes[j], 4, nOut[i], 12345); INDArray input = inOut[0]; @@ -421,22 +421,22 @@ public class LossFunctionGradientCheck extends BaseDL4JTest { labels = Nd4j.diag(Nd4j.ones(3)); gradientAndScore = lossMultiLabel.computeGradientAndScore(labels, preOutput, activationFn, null, true); - assertTrue(!gradientAndScore.getFirst().isNaN()); - assertTrue(!gradientAndScore.getFirst().isInfinite()); + assertFalse(gradientAndScore.getFirst().isNaN()); + 
assertFalse(gradientAndScore.getFirst().isInfinite()); // Edge Case: Labels are all 1 labels = Nd4j.ones(3, 3); gradientAndScore = lossMultiLabel.computeGradientAndScore(labels, preOutput, activationFn, null, true); - assertTrue(!gradientAndScore.getFirst().isNaN()); - assertTrue(!gradientAndScore.getFirst().isInfinite()); + assertFalse(gradientAndScore.getFirst().isNaN()); + assertFalse(gradientAndScore.getFirst().isInfinite()); // Edge Case: Labels are all 0 labels = Nd4j.zeros(3, 3); gradientAndScore = lossMultiLabel.computeGradientAndScore(labels, preOutput, activationFn, null, true); - assertTrue(!gradientAndScore.getFirst().isNaN()); - assertTrue(!gradientAndScore.getFirst().isInfinite()); + assertFalse(gradientAndScore.getFirst().isNaN()); + assertFalse(gradientAndScore.getFirst().isInfinite()); } public static INDArray[] getFeaturesAndLabels(ILossFunction l, long minibatch, long nIn, long nOut, long seed) { diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/NoBiasGradientCheckTests.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/NoBiasGradientCheckTests.java index 8acbf157e..5cfec0631 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/NoBiasGradientCheckTests.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/NoBiasGradientCheckTests.java @@ -135,7 +135,7 @@ public class NoBiasGradientCheckTests extends BaseDL4JTest { int layerSize = 6; for (int minibatch : new int[]{1, 4}) { - INDArray input = Nd4j.rand(new int[]{minibatch, nIn, tsLength}); + INDArray input = Nd4j.rand(minibatch, nIn, tsLength); INDArray labels = TestUtils.randomOneHotTimeSeries(minibatch, nOut, tsLength); for (boolean rnnOutHasBias : new boolean[]{true, false}) { @@ -292,9 +292,9 @@ public class NoBiasGradientCheckTests extends BaseDL4JTest { net.init(); if(cnnHasBias){ - assertEquals(3 * 2 * kernel[0] * kernel[1] + 2, net.getLayer(2).numParams()); + 
assertEquals(3L * 2 * kernel[0] * kernel[1] + 2, net.getLayer(2).numParams()); } else { - assertEquals(3 * 2 * kernel[0] * kernel[1], net.getLayer(2).numParams()); + assertEquals(3L * 2 * kernel[0] * kernel[1], net.getLayer(2).numParams()); } String msg = "testCnnWithSubsamplingNoBias(), minibatch = " + minibatchSize + ", cnnHasBias = " + cnnHasBias; diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/OutputLayerGradientChecks.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/OutputLayerGradientChecks.java index f11daf9ec..1c1da4cee 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/OutputLayerGradientChecks.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/OutputLayerGradientChecks.java @@ -67,7 +67,7 @@ public class OutputLayerGradientChecks extends BaseDL4JTest { for (int maskType = 0; maskType < 3; maskType++) { Random r = new Random(12345L); - INDArray input = Nd4j.rand(new int[]{miniBatchSize, nIn, timeSeriesLength}); + INDArray input = Nd4j.rand(miniBatchSize, nIn, timeSeriesLength); INDArray labelMask; String mt; @@ -172,7 +172,7 @@ public class OutputLayerGradientChecks extends BaseDL4JTest { for (int maskType = 0; maskType < 4; maskType++) { Random r = new Random(12345L); - INDArray input = Nd4j.rand(new int[]{miniBatchSize, dIn, h, w}); + INDArray input = Nd4j.rand(miniBatchSize, dIn, h, w); INDArray labelMask; String mt; @@ -190,13 +190,13 @@ public class OutputLayerGradientChecks extends BaseDL4JTest { break; case 2: //Per x/y masking (3d mask, shape [minibatch, h, w]) - labelMask = Nd4j.createUninitialized(new int[]{miniBatchSize, h, w}); + labelMask = Nd4j.createUninitialized(miniBatchSize, h, w); Nd4j.getExecutioner().exec(new BernoulliDistribution(labelMask, 0.5)); mt = "PerXY"; break; case 3: //Per output masking (4d mask, same shape as output [minibatch, c, h, w]) - labelMask = Nd4j.createUninitialized(new 
int[]{miniBatchSize, dOut, h, w}); + labelMask = Nd4j.createUninitialized(miniBatchSize, dOut, h, w); Nd4j.getExecutioner().exec(new BernoulliDistribution(labelMask, 0.5)); mt = "PerOutput"; break; @@ -208,7 +208,7 @@ public class OutputLayerGradientChecks extends BaseDL4JTest { INDArray labels; if (lf instanceof LossMSE) { - labels = Nd4j.rand(new int[]{miniBatchSize, dOut, h, w}); + labels = Nd4j.rand(miniBatchSize, dOut, h, w); } else { labels = Nd4j.zeros(miniBatchSize, dOut, h, w); for (int mb = 0; mb < miniBatchSize; mb++) { @@ -283,9 +283,9 @@ public class OutputLayerGradientChecks extends BaseDL4JTest { Random r = new Random(12345L); INDArray input; if(dataFormat == Convolution3D.DataFormat.NCDHW) { - input = Nd4j.rand(new int[]{miniBatchSize, chIn, d, h, w}); + input = Nd4j.rand(miniBatchSize, chIn, d, h, w); } else { - input = Nd4j.rand(new int[]{miniBatchSize, d, h, w, chIn}); + input = Nd4j.rand(miniBatchSize, d, h, w, chIn); } INDArray labelMask; @@ -298,16 +298,16 @@ public class OutputLayerGradientChecks extends BaseDL4JTest { break; case 1: //Per example masking (shape [minibatch, 1, 1, 1, 1] - labelMask = Nd4j.createUninitialized(new int[]{miniBatchSize, 1, 1, 1, 1}); + labelMask = Nd4j.createUninitialized(miniBatchSize, 1, 1, 1, 1); Nd4j.getExecutioner().exec(new BernoulliDistribution(labelMask, 0.5)); mt = "PerExample"; break; case 2: //Per channel masking (5d mask, shape [minibatch, d, 1, 1, 1] or [minibatch, 1, 1, 1, d]) if(dataFormat == Convolution3D.DataFormat.NCDHW) { - labelMask = Nd4j.createUninitialized(new int[]{miniBatchSize, chOut, 1, 1, 1}); + labelMask = Nd4j.createUninitialized(miniBatchSize, chOut, 1, 1, 1); } else { - labelMask = Nd4j.createUninitialized(new int[]{miniBatchSize, 1, 1, 1, chOut}); + labelMask = Nd4j.createUninitialized(miniBatchSize, 1, 1, 1, chOut); } Nd4j.getExecutioner().exec(new BernoulliDistribution(labelMask, 0.5)); mt = "PerChannel"; @@ -315,9 +315,9 @@ public class OutputLayerGradientChecks extends 
BaseDL4JTest { case 3: //Per output masking (5d mask, same shape as output [minibatch, c, h, w]) if(dataFormat == Convolution3D.DataFormat.NCDHW) { - labelMask = Nd4j.createUninitialized(new int[]{miniBatchSize, chOut, d, h, w}); + labelMask = Nd4j.createUninitialized(miniBatchSize, chOut, d, h, w); } else { - labelMask = Nd4j.createUninitialized(new int[]{miniBatchSize, d, h, w, chOut}); + labelMask = Nd4j.createUninitialized(miniBatchSize, d, h, w, chOut); } Nd4j.getExecutioner().exec(new BernoulliDistribution(labelMask, 0.5)); mt = "PerOutput"; @@ -336,9 +336,9 @@ public class OutputLayerGradientChecks extends BaseDL4JTest { INDArray labels; if (lf instanceof LossMSE) { if(dataFormat == Convolution3D.DataFormat.NCDHW) { - labels = Nd4j.rand(new int[]{miniBatchSize, chOut, d, h, w}); + labels = Nd4j.rand(miniBatchSize, chOut, d, h, w); } else { - labels = Nd4j.rand(new int[]{miniBatchSize, d, h, w, chOut}); + labels = Nd4j.rand(miniBatchSize, d, h, w, chOut); } } else { if(dataFormat == Convolution3D.DataFormat.NCDHW) { diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/RnnGradientChecks.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/RnnGradientChecks.java index 4555904ca..87a42e4e0 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/RnnGradientChecks.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/RnnGradientChecks.java @@ -74,7 +74,7 @@ public class RnnGradientChecks extends BaseDL4JTest { if(!simple && hasLayerNorm) continue; - INDArray in = Nd4j.rand(new int[]{mb, nIn, tsLength}); + INDArray in = Nd4j.rand(mb, nIn, tsLength); INDArray labels = Nd4j.create(mb, nOut, tsLength); for (int i = 0; i < mb; i++) { for (int j = 0; j < tsLength; j++) { @@ -159,7 +159,7 @@ public class RnnGradientChecks extends BaseDL4JTest { if(r.nextInt(5) != 0) continue; - INDArray in = Nd4j.rand(new int[]{mb, nIn, tsLength}); + INDArray in = 
Nd4j.rand(mb, nIn, tsLength); INDArray labels = Nd4j.create(mb, nOut, tsLength); for (int i = 0; i < mb; i++) { for (int j = 0; j < tsLength; j++) { @@ -236,7 +236,7 @@ public class RnnGradientChecks extends BaseDL4JTest { continue; - INDArray in = Nd4j.rand(new int[]{mb, nIn, tsLength}); + INDArray in = Nd4j.rand(mb, nIn, tsLength); INDArray labels = Nd4j.create(mb, nOut); for (int i = 0; i < mb; i++) { labels.putScalar(i, r.nextInt(nOut), 1.0); @@ -306,7 +306,7 @@ public class RnnGradientChecks extends BaseDL4JTest { for (boolean inputMask : new boolean[]{false, true}) { - INDArray in = Nd4j.rand(new int[]{mb, nIn, tsLength}); + INDArray in = Nd4j.rand(mb, nIn, tsLength); INDArray labels = TestUtils.randomOneHotTimeSeries(mb, nOut, tsLength); String maskType = (inputMask ? "inputMask" : "none"); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/YoloGradientCheckTests.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/YoloGradientCheckTests.java index 105fcb284..9ae3e598a 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/YoloGradientCheckTests.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/gradientcheck/YoloGradientCheckTests.java @@ -60,7 +60,7 @@ public class YoloGradientCheckTests extends BaseDL4JTest { Nd4j.setDataType(DataType.DOUBLE); } - private CNN2DFormat format; + private final CNN2DFormat format; public YoloGradientCheckTests(CNN2DFormat format){ this.format = format; } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/MultiLayerNeuralNetConfigurationTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/MultiLayerNeuralNetConfigurationTest.java index 33d8856cd..a10a9a3c7 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/MultiLayerNeuralNetConfigurationTest.java +++ 
b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/MultiLayerNeuralNetConfigurationTest.java @@ -159,7 +159,7 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest { .setInputType(InputType.convolutional(32, 32, 1)).build(); String str = conf.toJson(); - MultiLayerConfiguration fromJson = conf.fromJson(str); + MultiLayerConfiguration fromJson = MultiLayerConfiguration.fromJson(str); assertEquals(conf, fromJson); } @@ -253,7 +253,7 @@ public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest { private static MultiLayerConfiguration getConf() { - MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345l).list() + MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345L).list() .layer(0, new DenseLayer.Builder().nIn(2).nOut(2) .dist(new NormalDistribution(0, 1)).build()) .layer(1, new OutputLayer.Builder().nIn(2).nOut(1) diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/MultiNeuralNetConfLayerBuilderTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/MultiNeuralNetConfLayerBuilderTest.java index 76fb090ea..08e162b7a 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/MultiNeuralNetConfLayerBuilderTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/MultiNeuralNetConfLayerBuilderTest.java @@ -37,8 +37,7 @@ import org.nd4j.linalg.dataset.DataSet; import org.nd4j.linalg.lossfunctions.LossFunctions; import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction; -import static org.junit.jupiter.api.Assertions.assertArrayEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.*; /** * @author Jeffrey Tang. 
@@ -84,6 +83,6 @@ public class MultiNeuralNetConfLayerBuilderTest extends BaseDL4JTest { NeuralNetConfiguration firstLayer = multiConf1.getConf(0); NeuralNetConfiguration secondLayer = multiConf1.getConf(1); - assertFalse(firstLayer.equals(secondLayer)); + assertNotEquals(firstLayer, secondLayer); } } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/constraints/TestConstraints.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/constraints/TestConstraints.java index fda02a451..37260087d 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/constraints/TestConstraints.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/constraints/TestConstraints.java @@ -452,9 +452,9 @@ public class TestConstraints extends BaseDL4JTest { for( int i=0; i<100; i++ ){ - INDArray in1 = Nd4j.rand(new int[]{1, nIn, 5}); - INDArray in2 = Nd4j.rand(new int[]{1, 1}); - INDArray label = Nd4j.rand(new int[]{1, 1}); + INDArray in1 = Nd4j.rand(1, nIn, 5); + INDArray in2 = Nd4j.rand(1, 1); + INDArray label = Nd4j.rand(1, 1); g.fit(new INDArray[]{in1, in2}, new INDArray[]{label}); for(Map.Entry e : g.paramTable().entrySet()){ diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/graph/ElementWiseVertexTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/graph/ElementWiseVertexTest.java index 941309304..046cf0f63 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/graph/ElementWiseVertexTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/graph/ElementWiseVertexTest.java @@ -56,7 +56,7 @@ public class ElementWiseVertexTest extends BaseDL4JTest { * from @agibsonccc: check for the basics: like 0 numParams */ - ElementWiseVertex.Op ops[] = new ElementWiseVertex.Op[] {ElementWiseVertex.Op.Add, + ElementWiseVertex.Op[] ops = new ElementWiseVertex.Op[] {ElementWiseVertex.Op.Add, 
ElementWiseVertex.Op.Subtract, ElementWiseVertex.Op.Product}; for (ElementWiseVertex.Op op : ops) { @@ -706,5 +706,5 @@ public class ElementWiseVertexTest extends BaseDL4JTest { return clean; } - private double epsilon = 1e-10; + private final double epsilon = 1e-10; } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/graph/ShiftVertexTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/graph/ShiftVertexTest.java index 766854407..acab33814 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/graph/ShiftVertexTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/graph/ShiftVertexTest.java @@ -258,5 +258,5 @@ public class ShiftVertexTest extends BaseDL4JTest { return clean; } - private double epsilon = 1e-10; + private final double epsilon = 1e-10; } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/layers/LayerBuilderTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/layers/LayerBuilderTest.java index 96a2bc739..484da1ff9 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/layers/LayerBuilderTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/layers/LayerBuilderTest.java @@ -184,7 +184,7 @@ public class LayerBuilderTest extends BaseDL4JTest { assertEquals(numIn, bN.nIn); assertEquals(numOut, bN.nOut); - assertEquals(true, bN.isLockGammaBeta()); + assertTrue(bN.isLockGammaBeta()); assertEquals(0.5, bN.decay, 1e-4); assertEquals(2, bN.gamma, 1e-4); assertEquals(1, bN.beta, 1e-4); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/layers/LayerConfigTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/layers/LayerConfigTest.java index 635926f7c..60b549714 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/layers/LayerConfigTest.java +++ 
b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/layers/LayerConfigTest.java @@ -290,11 +290,11 @@ public class LayerConfigTest extends BaseDL4JTest { net.init(); assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, - ((BaseLayer) conf.getConf(0).getLayer()).getGradientNormalization()); + conf.getConf(0).getLayer().getGradientNormalization()); assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, - ((BaseLayer) conf.getConf(1).getLayer()).getGradientNormalization()); - assertEquals(10, ((BaseLayer) conf.getConf(0).getLayer()).getGradientNormalizationThreshold(), 0.0); - assertEquals(10, ((BaseLayer) conf.getConf(1).getLayer()).getGradientNormalizationThreshold(), 0.0); + conf.getConf(1).getLayer().getGradientNormalization()); + assertEquals(10, conf.getConf(0).getLayer().getGradientNormalizationThreshold(), 0.0); + assertEquals(10, conf.getConf(1).getLayer().getGradientNormalizationThreshold(), 0.0); //With: conf = new NeuralNetConfiguration.Builder() @@ -310,10 +310,10 @@ public class LayerConfigTest extends BaseDL4JTest { net.init(); assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, - ((BaseLayer) conf.getConf(0).getLayer()).getGradientNormalization()); - assertEquals(GradientNormalization.None, ((BaseLayer) conf.getConf(1).getLayer()).getGradientNormalization()); - assertEquals(10, ((BaseLayer) conf.getConf(0).getLayer()).getGradientNormalizationThreshold(), 0.0); - assertEquals(2.5, ((BaseLayer) conf.getConf(1).getLayer()).getGradientNormalizationThreshold(), 0.0); + conf.getConf(0).getLayer().getGradientNormalization()); + assertEquals(GradientNormalization.None, conf.getConf(1).getLayer().getGradientNormalization()); + assertEquals(10, conf.getConf(0).getLayer().getGradientNormalizationThreshold(), 0.0); + assertEquals(2.5, conf.getConf(1).getLayer().getGradientNormalizationThreshold(), 0.0); } diff --git 
a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/preprocessor/CNNProcessorTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/preprocessor/CNNProcessorTest.java index 79878cd4c..48112c682 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/preprocessor/CNNProcessorTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/preprocessor/CNNProcessorTest.java @@ -43,11 +43,11 @@ import static org.junit.jupiter.api.Assertions.*; **/ public class CNNProcessorTest extends BaseDL4JTest { - private static int rows = 28; - private static int cols = 28; - private static INDArray in2D = Nd4j.create(DataType.FLOAT, 1, 784); - private static INDArray in3D = Nd4j.create(DataType.FLOAT, 20, 784, 7); - private static INDArray in4D = Nd4j.create(DataType.FLOAT, 20, 1, 28, 28); + private static final int rows = 28; + private static final int cols = 28; + private static final INDArray in2D = Nd4j.create(DataType.FLOAT, 1, 784); + private static final INDArray in3D = Nd4j.create(DataType.FLOAT, 20, 784, 7); + private static final INDArray in4D = Nd4j.create(DataType.FLOAT, 20, 1, 28, 28); @Test @@ -56,12 +56,12 @@ public class CNNProcessorTest extends BaseDL4JTest { INDArray check2to4 = convProcessor.preProcess(in2D, -1, LayerWorkspaceMgr.noWorkspaces()); int val2to4 = check2to4.shape().length; - assertTrue(val2to4 == 4); + assertEquals(4, val2to4); assertEquals(Nd4j.create(DataType.FLOAT, 1, 1, 28, 28), check2to4); INDArray check4to4 = convProcessor.preProcess(in4D, -1, LayerWorkspaceMgr.noWorkspaces()); int val4to4 = check4to4.shape().length; - assertTrue(val4to4 == 4); + assertEquals(4, val4to4); assertEquals(Nd4j.create(DataType.FLOAT, 20, 1, 28, 28), check4to4); } @@ -134,7 +134,7 @@ public class CNNProcessorTest extends BaseDL4JTest { INDArray check2to2 = convProcessor.backprop(in2D, -1, LayerWorkspaceMgr.noWorkspaces()); int val2to2 = check2to2.shape().length; - assertTrue(val2to2 == 
2); + assertEquals(2, val2to2); assertEquals(Nd4j.create(DataType.FLOAT, 1, 784), check2to2); } @@ -144,12 +144,12 @@ public class CNNProcessorTest extends BaseDL4JTest { INDArray check2to4 = convProcessor.backprop(in2D, -1, LayerWorkspaceMgr.noWorkspaces()); int val2to4 = check2to4.shape().length; - assertTrue(val2to4 == 4); + assertEquals(4, val2to4); assertEquals(Nd4j.create(DataType.FLOAT, 1, 1, 28, 28), check2to4); INDArray check4to4 = convProcessor.backprop(in4D, -1, LayerWorkspaceMgr.noWorkspaces()); int val4to4 = check4to4.shape().length; - assertTrue(val4to4 == 4); + assertEquals(4, val4to4); assertEquals(Nd4j.create(DataType.FLOAT, 20, 1, 28, 28), check4to4); } @@ -160,12 +160,12 @@ public class CNNProcessorTest extends BaseDL4JTest { INDArray check2to2 = convProcessor.preProcess(in2D, -1, LayerWorkspaceMgr.noWorkspaces()); int val2to2 = check2to2.shape().length; - assertTrue(val2to2 == 2); + assertEquals(2, val2to2); assertEquals(Nd4j.create(DataType.FLOAT, 1, 784), check2to2); INDArray check4to2 = convProcessor.preProcess(in4D, -1, LayerWorkspaceMgr.noWorkspaces()); int val4to2 = check4to2.shape().length; - assertTrue(val4to2 == 2); + assertEquals(2, val4to2); assertEquals(Nd4j.create(DataType.FLOAT, 20, 784), check4to2); } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/preprocessor/TestPreProcessors.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/preprocessor/TestPreProcessors.java index 3f6741b89..56c6cfb1d 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/preprocessor/TestPreProcessors.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/preprocessor/TestPreProcessors.java @@ -239,8 +239,8 @@ public class TestPreProcessors extends BaseDL4JTest { (ConvolutionLayer) nnc.getLayer().instantiate(nnc, null, 0, params, true, params.dataType()); layer.setInputMiniBatchSize(miniBatchSize); - INDArray activationsCnn = Nd4j.rand(new int[] {miniBatchSize 
* timeSeriesLength, nChannels, - inputHeight, inputWidth}); + INDArray activationsCnn = Nd4j.rand(miniBatchSize * timeSeriesLength, nChannels, + inputHeight, inputWidth); //Check shape of outputs: val prod = nChannels * inputHeight * inputWidth; @@ -262,8 +262,8 @@ public class TestPreProcessors extends BaseDL4JTest { INDArray activationsRnnComp = compProc.preProcess(activationsCnn, miniBatchSize, LayerWorkspaceMgr.noWorkspaces()); assertEquals(activationsRnnComp, activationsRnn, msg); - INDArray epsilonsRnn = Nd4j.rand(new int[] {miniBatchSize, - nChannels * inputHeight * inputWidth, timeSeriesLength}); + INDArray epsilonsRnn = Nd4j.rand(miniBatchSize, + nChannels * inputHeight * inputWidth, timeSeriesLength); INDArray epsilonsCnnComp = compProc.backprop(epsilonsRnn, miniBatchSize, LayerWorkspaceMgr.noWorkspaces()); INDArray epsilonsCnn = proc.backprop(epsilonsRnn, miniBatchSize, LayerWorkspaceMgr.noWorkspaces()); if (!epsilonsCnn.equals(epsilonsCnnComp)) { diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/weightnoise/TestWeightNoise.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/weightnoise/TestWeightNoise.java index f495e8fb0..d4bae91a6 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/weightnoise/TestWeightNoise.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/conf/weightnoise/TestWeightNoise.java @@ -259,7 +259,7 @@ public class TestWeightNoise extends BaseDL4JTest { DropConnect d = new DropConnect(0.5); INDArray outTest = d.getParameter(l, "W", 0, 0, false, LayerWorkspaceMgr.noWorkspaces()); - assertTrue(l.getParam("W") == outTest); //Should be same object + assertSame(l.getParam("W"), outTest); //Should be same object INDArray outTrain = d.getParameter(l, "W", 0, 0, true, LayerWorkspaceMgr.noWorkspaces()); assertNotEquals(l.getParam("W"), outTrain); diff --git 
a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/ComputationGraphTestRNN.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/ComputationGraphTestRNN.java index 2bddca70a..eb8c1cbcc 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/ComputationGraphTestRNN.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/ComputationGraphTestRNN.java @@ -89,7 +89,7 @@ public class ComputationGraphTestRNN extends BaseDL4JTest { ComputationGraph graph = new ComputationGraph(conf); graph.init(); - INDArray input = Nd4j.rand(new int[] {3, 5, timeSeriesLength}); + INDArray input = Nd4j.rand(3, 5, timeSeriesLength); Map allOutputActivations = graph.feedForward(input, true); INDArray fullOutL0 = allOutputActivations.get("0"); @@ -117,7 +117,7 @@ public class ComputationGraphTestRNN extends BaseDL4JTest { INDArray inputSubset = input.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(startTimeRange, endTimeRange)); if (inLength > 1) - assertTrue(inputSubset.size(2) == inLength); + assertEquals(inputSubset.size(2), inLength); INDArray[] outArr = graph.rnnTimeStep(inputSubset); assertEquals(1, outArr.length); @@ -173,7 +173,7 @@ public class ComputationGraphTestRNN extends BaseDL4JTest { ComputationGraph graph = new ComputationGraph(conf); graph.init(); - INDArray input3d = Nd4j.rand(new int[] {3, 5, timeSeriesLength}); + INDArray input3d = Nd4j.rand(3, 5, timeSeriesLength); INDArray out3d = graph.rnnTimeStep(input3d)[0]; assertArrayEquals(out3d.shape(), new long[] {3, 4, timeSeriesLength}); @@ -191,12 +191,12 @@ public class ComputationGraphTestRNN extends BaseDL4JTest { //Check same but for input of size [3,5,1]. 
Expect [3,4,1] out graph.rnnClearPreviousState(); for (int i = 0; i < timeSeriesLength; i++) { - INDArray temp = Nd4j.create(new int[] {3, 5, 1}); + INDArray temp = Nd4j.create(3, 5, 1); temp.tensorAlongDimension(0, 1, 0).assign(input3d.tensorAlongDimension(i, 1, 0)); INDArray out3dSlice = graph.rnnTimeStep(temp)[0]; assertArrayEquals(out3dSlice.shape(), new long[] {3, 4, 1}); - assertTrue(out3dSlice.tensorAlongDimension(0, 1, 0).equals(out3d.tensorAlongDimension(i, 1, 0))); + assertEquals(out3dSlice.tensorAlongDimension(0, 1, 0), out3d.tensorAlongDimension(i, 1, 0)); } } @@ -245,8 +245,8 @@ public class ComputationGraphTestRNN extends BaseDL4JTest { ComputationGraph graph = new ComputationGraph(conf); graph.init(); - INDArray input0 = Nd4j.rand(new int[] {3, 5, timeSeriesLength}); - INDArray input1 = Nd4j.rand(new int[] {3, 4, timeSeriesLength}); + INDArray input0 = Nd4j.rand(3, 5, timeSeriesLength); + INDArray input1 = Nd4j.rand(3, 4, timeSeriesLength); Map allOutputActivations = graph.feedForward(new INDArray[] {input0, input1}, true); INDArray fullActLSTM0 = allOutputActivations.get("lstm0"); @@ -276,12 +276,12 @@ public class ComputationGraphTestRNN extends BaseDL4JTest { INDArray inputSubset0 = input0.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(startTimeRange, endTimeRange)); if (inLength > 1) - assertTrue(inputSubset0.size(2) == inLength); + assertEquals(inputSubset0.size(2), inLength); INDArray inputSubset1 = input1.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(startTimeRange, endTimeRange)); if (inLength > 1) - assertTrue(inputSubset1.size(2) == inLength); + assertEquals(inputSubset1.size(2), inLength); INDArray[] outArr = graph.rnnTimeStep(inputSubset0, inputSubset1); assertEquals(2, outArr.length); @@ -395,8 +395,8 @@ public class ComputationGraphTestRNN extends BaseDL4JTest { assertEquals(timeSeriesLength, graphTBPTT.getConfiguration().getTbpttFwdLength()); assertEquals(timeSeriesLength, 
graphTBPTT.getConfiguration().getTbpttBackLength()); - INDArray inputData = Nd4j.rand(new int[] {miniBatchSize, nIn, timeSeriesLength}); - INDArray labels = Nd4j.rand(new int[] {miniBatchSize, nOut, timeSeriesLength}); + INDArray inputData = Nd4j.rand(miniBatchSize, nIn, timeSeriesLength); + INDArray labels = Nd4j.rand(miniBatchSize, nOut, timeSeriesLength); graph.setInput(0, inputData); graph.setLabel(0, labels); @@ -479,8 +479,8 @@ public class ComputationGraphTestRNN extends BaseDL4JTest { ComputationGraph graph = new ComputationGraph(conf); graph.init(); - INDArray inputLong = Nd4j.rand(new int[] {miniBatchSize, nIn, nTimeSlices * timeSeriesLength}); - INDArray labelsLong = Nd4j.rand(new int[] {miniBatchSize, nOut, nTimeSlices * timeSeriesLength}); + INDArray inputLong = Nd4j.rand(miniBatchSize, nIn, nTimeSlices * timeSeriesLength); + INDArray labelsLong = Nd4j.rand(miniBatchSize, nOut, nTimeSlices * timeSeriesLength); graph.fit(new INDArray[] {inputLong}, new INDArray[] {labelsLong}); } @@ -517,8 +517,8 @@ public class ComputationGraphTestRNN extends BaseDL4JTest { ComputationGraph graph = new ComputationGraph(conf); graph.init(); - INDArray inputLong = Nd4j.rand(new int[] {miniBatchSize, nIn, timeSeriesLength}); - INDArray labelsLong = Nd4j.rand(new int[] {miniBatchSize, nOut, timeSeriesLength}); + INDArray inputLong = Nd4j.rand(miniBatchSize, nIn, timeSeriesLength); + INDArray labelsLong = Nd4j.rand(miniBatchSize, nOut, timeSeriesLength); INDArray initialParams = graph.params().dup(); graph.fit(new INDArray[] {inputLong}, new INDArray[] {labelsLong}); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestCompGraphCNN.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestCompGraphCNN.java index 96532aa69..95691fed6 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestCompGraphCNN.java +++ 
b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestCompGraphCNN.java @@ -93,7 +93,7 @@ public class TestCompGraphCNN extends BaseDL4JTest { } protected static int getNumParams() { - return 2 * (3 * 1 * 4 * 4 * 3 + 3) + (7 * 14 * 14 * 6 + 7) + (7 * 10 + 10); + return 2 * (3 * 4 * 4 * 3 + 3) + (7 * 14 * 14 * 6 + 7) + (7 * 10 + 10); } @BeforeEach diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestComputationGraphNetwork.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestComputationGraphNetwork.java index 6b1191a51..7a918a674 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestComputationGraphNetwork.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestComputationGraphNetwork.java @@ -1205,7 +1205,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest { NeuralNetConfiguration nnc = new NeuralNetConfiguration(); nnc.setLayer(new DenseLayer.Builder().build()); GraphVertex[] singleInputVertices = new GraphVertex[]{new L2NormalizeVertex(), new LayerVertex(nnc, null), - new PoolHelperVertex(), new PreprocessorVertex(), new ReshapeVertex(new int[]{1, 1}), + new PoolHelperVertex(), new PreprocessorVertex(), new ReshapeVertex(1, 1), new ScaleVertex(1.0), new ShiftVertex(1.0), new SubsetVertex(1, 1), new UnstackVertex(0, 2), new DuplicateToTimeSeriesVertex("in1"), new LastTimeStepVertex("in1")}; @@ -1971,7 +1971,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest { IDropout d1 = model.getLayer(0).conf().getLayer().getIDropout(); IDropout d2 = cg2.getLayer(0).conf().getLayer().getIDropout(); - assertFalse(d1 == d2); //Should not be same object! + assertNotSame(d1, d2); //Should not be same object! 
assertEquals(d1, d2); //But should be equal } @@ -1988,9 +1988,9 @@ public class TestComputationGraphNetwork extends BaseDL4JTest { .addInputs("x_emb") .addLayer("agg_lstm", new Bidirectional(CONCAT, new LSTM.Builder().nOut(hiddenSize/2).build()), "x_emb") .addLayer("agg_att", new DenseLayer.Builder().nIn(100).nOut(1).activation(Activation.SOFTMAX).build(), "agg_lstm") - .addVertex("att", new PreprocessorVertex(new ComposableInputPreProcessor(new FeedForwardToRnnPreProcessor(), new PermutePreprocessor(new int[] {0,2,1}), new RnnToFeedForwardPreProcessor())), "agg_att") + .addVertex("att", new PreprocessorVertex(new ComposableInputPreProcessor(new FeedForwardToRnnPreProcessor(), new PermutePreprocessor(0,2,1), new RnnToFeedForwardPreProcessor())), "agg_att") .addLayer("att_repeat", new RepeatVector.Builder(hiddenSize).build(),"att") - .addVertex("att_trans", new PreprocessorVertex(new PermutePreprocessor(new int[] {0, 2, 1})), "att_repeat") + .addVertex("att_trans", new PreprocessorVertex(new PermutePreprocessor(0, 2, 1)), "att_repeat") .addVertex("mult", new ElementWiseVertex(ElementWiseVertex.Op.Product), "agg_lstm", "att_trans") .addLayer("sum", new GlobalPoolingLayer.Builder().build(), "mult") .addLayer("agg_out", new DenseLayer.Builder().nIn(100).nOut(6).activation(Activation.TANH).build(), "sum") @@ -2003,8 +2003,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest { net.init(); - INDArray features = Nd4j.rand(new int[] {dataSize, inputSize, seqLen}); - INDArray labels = Nd4j.rand(new int[] {dataSize, 6}); + INDArray features = Nd4j.rand(dataSize, inputSize, seqLen); + INDArray labels = Nd4j.rand(dataSize, 6); INDArray featuresMask = Nd4j.ones(dataSize, seqLen); INDArray labelsMask = Nd4j.ones(dataSize, 6); @@ -2056,8 +2056,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest { soFar += 3*2; INDArray m1b = viewArray.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(soFar, soFar+2)).assign(3); //m1b soFar += 2; - INDArray 
m2w = viewArray.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(soFar, soFar+2*1)).assign(4); //m2w - soFar += 2*1; + INDArray m2w = viewArray.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(soFar, soFar+ 2)).assign(4); //m2w + soFar += 2; INDArray m2b = viewArray.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(soFar, soFar+1)).assign(5); //m2b soFar += 1; @@ -2069,8 +2069,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest { soFar += 3*2; INDArray v1b = viewArray.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(soFar, soFar+2)).assign(9); //v1b soFar += 2; - INDArray v2w = viewArray.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(soFar, soFar+2*1)).assign(10); //v2w - soFar += 2*1; + INDArray v2w = viewArray.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(soFar, soFar+ 2)).assign(10); //v2w + soFar += 2; INDArray v2b = viewArray.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(soFar, soFar+1)).assign(11); //v2b soFar += 1; @@ -2140,8 +2140,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest { int dataSize = 11; - INDArray features = Nd4j.rand(new int[] {dataSize, inputSize}); - INDArray labels = Nd4j.rand(new int[] {dataSize, outputSize}); + INDArray features = Nd4j.rand(dataSize, inputSize); + INDArray labels = Nd4j.rand(dataSize, outputSize); boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(net).inputs(new INDArray[]{features}) .labels(new INDArray[]{labels})); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestSetGetParameters.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestSetGetParameters.java index ec5c47894..0c17238db 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestSetGetParameters.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestSetGetParameters.java @@ -68,8 +68,8 @@ public class 
TestSetGetParameters extends BaseDL4JTest { assertEquals(params, net2.params()); assertEquals(params, net3.params()); - assertFalse(params == net2.params()); //Different objects due to clone - assertTrue(params == net3.params()); //Same object due to clone + assertNotSame(params, net2.params()); //Different objects due to clone + assertSame(params, net3.params()); //Same object due to clone Map paramsMap = net.paramTable(); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestVariableLengthTSCG.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestVariableLengthTSCG.java index a39ac53b5..96e1dcf12 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestVariableLengthTSCG.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/TestVariableLengthTSCG.java @@ -81,14 +81,14 @@ public class TestVariableLengthTSCG extends BaseDL4JTest { ComputationGraph net = new ComputationGraph(conf); net.init(); - INDArray in1 = Nd4j.rand(new int[] {nExamples, 2, 4}); - INDArray in2 = Nd4j.rand(new int[] {nExamples, 2, 5}); + INDArray in1 = Nd4j.rand(nExamples, 2, 4); + INDArray in2 = Nd4j.rand(nExamples, 2, 5); in2.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 3, true)}, in1); assertEquals(in1, in2.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 4))); - INDArray labels1 = Nd4j.rand(new int[] {nExamples, 1, 4}); + INDArray labels1 = Nd4j.rand(nExamples, 1, 4); INDArray labels2 = Nd4j.create(nExamples, 1, 5); labels2.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 3, true)}, labels1); @@ -178,14 +178,14 @@ public class TestVariableLengthTSCG extends BaseDL4JTest { ComputationGraph net = new ComputationGraph(conf); net.init(); - INDArray in1 = Nd4j.rand(new int[] {nExamples, 2, 4}); - INDArray in2 = Nd4j.rand(new int[] {nExamples, 2, 5}); + INDArray in1 = Nd4j.rand(nExamples, 2, 
4); + INDArray in2 = Nd4j.rand(nExamples, 2, 5); in2.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 3, true)}, in1); assertEquals(in1, in2.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 4))); - INDArray labels1 = Nd4j.rand(new int[] {nExamples, 1, 4}); + INDArray labels1 = Nd4j.rand(nExamples, 1, 4); INDArray labels2 = Nd4j.create(nExamples, 1, 5); labels2.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 3, true)}, labels1); @@ -296,7 +296,7 @@ public class TestVariableLengthTSCG extends BaseDL4JTest { } } - INDArray input = Nd4j.rand(new int[] {miniBatch, nIn, tsLength}); + INDArray input = Nd4j.rand(miniBatch, nIn, tsLength); INDArray labels = Nd4j.ones(miniBatch, nOut, tsLength); ComputationGraphConfiguration conf = diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/graphnodes/TestGraphNodes.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/graphnodes/TestGraphNodes.java index de4010554..ba3eb90bb 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/graphnodes/TestGraphNodes.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/graph/graphnodes/TestGraphNodes.java @@ -168,7 +168,7 @@ public class TestGraphNodes extends BaseDL4JTest { assertEquals(Nd4j.zeros(5, 2), backward.get(NDArrayIndex.all(), NDArrayIndex.interval(8, 9, true))); //Test same for CNNs: - in = Nd4j.rand(new int[] {5, 10, 3, 3}); + in = Nd4j.rand(5, 10, 3, 3); subset.setInputs(in); out = subset.doForward(false, LayerWorkspaceMgr.noWorkspaces()); assertEquals(in.get(NDArrayIndex.all(), NDArrayIndex.interval(4, 7, true), NDArrayIndex.all(), @@ -198,7 +198,7 @@ public class TestGraphNodes extends BaseDL4JTest { //First: test without input mask array Nd4j.getRandom().setSeed(12345); - INDArray in = Nd4j.rand(new int[] {3, 5, 6}); + INDArray in = Nd4j.rand(3, 5, 6); INDArray expOut = 
in.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(5)); GraphVertex gv = graph.getVertex("lastTS"); @@ -250,7 +250,7 @@ public class TestGraphNodes extends BaseDL4JTest { graph.init(); INDArray in2d = Nd4j.rand(3, 5); - INDArray in3d = Nd4j.rand(new int[] {3, 2, 7}); + INDArray in3d = Nd4j.rand(3, 2, 7); graph.setInputs(in2d, in3d); @@ -339,9 +339,9 @@ public class TestGraphNodes extends BaseDL4JTest { GraphVertex stack = new StackVertex(null, "", -1, Nd4j.dataType()); //Test stack with variable length + mask arrays - INDArray in0 = Nd4j.rand(new int[] {5, 2, 5}); - INDArray in1 = Nd4j.rand(new int[] {5, 2, 6}); - INDArray in2 = Nd4j.rand(new int[] {5, 2, 7}); + INDArray in0 = Nd4j.rand(5, 2, 5); + INDArray in1 = Nd4j.rand(5, 2, 6); + INDArray in2 = Nd4j.rand(5, 2, 7); INDArray mask0 = Nd4j.ones(5, 5); INDArray mask1 = Nd4j.ones(5, 6); @@ -434,7 +434,7 @@ public class TestGraphNodes extends BaseDL4JTest { //Test same for CNNs: - in = Nd4j.rand(new int[] {15, 10, 3, 3}); + in = Nd4j.rand(15, 10, 3, 3); unstack0.setInputs(in); unstack1.setInputs(in); unstack2.setInputs(in); @@ -533,7 +533,7 @@ public class TestGraphNodes extends BaseDL4JTest { reshapeVertex.setEpsilon(out); INDArray[] backward = reshapeVertex.doBackward(false, LayerWorkspaceMgr.noWorkspaces()).getSecond(); - assertTrue(Arrays.equals(backward[0].shape(), inputShape)); + assertArrayEquals(backward[0].shape(), inputShape); } @Test @@ -591,7 +591,7 @@ public class TestGraphNodes extends BaseDL4JTest { .build(); - INDArray input = Nd4j.rand(new int[]{10, numInputs, 16}); + INDArray input = Nd4j.rand(10, numInputs, 16); INDArray[] out = updatedModel.output(input); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/ActivationLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/ActivationLayerTest.java index 0c7375cfd..14e169767 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/ActivationLayerTest.java 
+++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/ActivationLayerTest.java @@ -86,7 +86,7 @@ public class ActivationLayerTest extends BaseDL4JTest { MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(123) .list() - .layer(0, new DenseLayer.Builder().nIn(28 * 28 * 1).nOut(10).activation(Activation.RELU) + .layer(0, new DenseLayer.Builder().nIn(28 * 28).nOut(10).activation(Activation.RELU) .weightInit(WeightInit.XAVIER).build()) .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder( LossFunctions.LossFunction.MCXENT).weightInit(WeightInit.XAVIER) @@ -102,7 +102,7 @@ public class ActivationLayerTest extends BaseDL4JTest { MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(123) .list() - .layer(0, new DenseLayer.Builder().nIn(28 * 28 * 1).nOut(10).activation(Activation.IDENTITY) + .layer(0, new DenseLayer.Builder().nIn(28 * 28).nOut(10).activation(Activation.IDENTITY) .weightInit(WeightInit.XAVIER).build()) .layer(1, new org.deeplearning4j.nn.conf.layers.ActivationLayer.Builder() .activation(Activation.RELU).build()) @@ -144,7 +144,7 @@ public class ActivationLayerTest extends BaseDL4JTest { int layerSize = 5; int nOut = 3; - INDArray next = Nd4j.rand(new int[] {minibatch, nIn}); + INDArray next = Nd4j.rand(minibatch, nIn); INDArray labels = Nd4j.zeros(minibatch, nOut); for (int i = 0; i < minibatch; i++) { labels.putScalar(i, i % nOut, 1.0); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/CacheModeTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/CacheModeTest.java index d4eea7a49..7b55a4641 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/CacheModeTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/CacheModeTest.java @@ -92,7 
+92,7 @@ public class CacheModeTest extends BaseDL4JTest { MultiLayerNetwork net2 = new MultiLayerNetwork(conf2); net2.init(); - INDArray in = Nd4j.rand(new int[]{3, 3, 10}); + INDArray in = Nd4j.rand(3, 3, 10); INDArray labels = TestUtils.randomOneHotTimeSeries(3, 10, 10); INDArray out1 = net1.output(in); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/DropoutLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/DropoutLayerTest.java index cee20827c..3aa7e37dd 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/DropoutLayerTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/DropoutLayerTest.java @@ -159,7 +159,7 @@ public class DropoutLayerTest extends BaseDL4JTest { MultiLayerConfiguration confIntegrated = new NeuralNetConfiguration.Builder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(123) .list() - .layer(0, new DenseLayer.Builder().nIn(28 * 28 * 1).nOut(10) + .layer(0, new DenseLayer.Builder().nIn(28 * 28).nOut(10) .activation(Activation.RELU).weightInit( WeightInit.XAVIER) .build()) @@ -176,7 +176,7 @@ public class DropoutLayerTest extends BaseDL4JTest { MultiLayerConfiguration confSeparate = new NeuralNetConfiguration.Builder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(123) .list() - .layer(0, new DenseLayer.Builder().nIn(28 * 28 * 1).nOut(10).activation(Activation.RELU) + .layer(0, new DenseLayer.Builder().nIn(28 * 28).nOut(10).activation(Activation.RELU) .weightInit(WeightInit.XAVIER).build()) .layer(1, new DropoutLayer.Builder(0.25).build()) .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/OutputLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/OutputLayerTest.java index fcd509494..232a9a46e 100644 --- 
a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/OutputLayerTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/OutputLayerTest.java @@ -68,7 +68,7 @@ public class OutputLayerTest extends BaseDL4JTest { long numParams = conf.getLayer().initializer().numParams(conf); INDArray params = Nd4j.create(1, numParams); OutputLayer l = (OutputLayer) conf.getLayer().instantiate(conf, - Collections.singletonList(new ScoreIterationListener(1)), 0, params, true, params.dataType()); + Collections.singletonList(new ScoreIterationListener(1)), 0, params, true, params.dataType()); params = l.params(); l.setParams(params); assertEquals(params, l.params()); @@ -221,8 +221,8 @@ public class OutputLayerTest extends BaseDL4JTest { double score = mln.score() * timeSeriesLength; double scoreRNN = mlnRnn.score(); - assertTrue(!Double.isNaN(score)); - assertTrue(!Double.isNaN(scoreRNN)); + assertFalse(Double.isNaN(score)); + assertFalse(Double.isNaN(scoreRNN)); double relError = Math.abs(score - scoreRNN) / (Math.abs(score) + Math.abs(scoreRNN)); System.out.println(relError); @@ -306,7 +306,7 @@ public class OutputLayerTest extends BaseDL4JTest { mln2.setParams(mln.params()); - INDArray in = Nd4j.rand(new int[]{miniBatchSize, nIn, timeSeriesLength}); + INDArray in = Nd4j.rand(miniBatchSize, nIn, timeSeriesLength); INDArray out1 = mln.output(in); INDArray out2 = mln.output(in); @@ -390,7 +390,7 @@ public class OutputLayerTest extends BaseDL4JTest { mln2.setParams(mln.params()); - INDArray in = Nd4j.rand(new int[]{3, 3, 5, 5}); + INDArray in = Nd4j.rand(3, 3, 5, 5); INDArray out1 = mln.output(in); INDArray out2 = mln2.output(in); @@ -412,8 +412,8 @@ public class OutputLayerTest extends BaseDL4JTest { assertEquals(mln.gradient().gradient(), mln2.gradient().gradient()); //Also check computeScoreForExamples - INDArray in2a = Nd4j.rand(new int[]{1, 3, 5, 5}); - INDArray labels2a = Nd4j.rand(new int[]{1, 4, 5, 5}); + INDArray in2a = Nd4j.rand(1, 
3, 5, 5); + INDArray labels2a = Nd4j.rand(1, 4, 5, 5); INDArray in2 = Nd4j.concat(0, in2a, in2a); INDArray labels2 = Nd4j.concat(0, labels2a, labels2a); @@ -483,7 +483,7 @@ public class OutputLayerTest extends BaseDL4JTest { graph2.setParams(graph.params()); - INDArray in = Nd4j.rand(new int[]{3, 3, 5, 5}); + INDArray in = Nd4j.rand(3, 3, 5, 5); INDArray out1 = graph.outputSingle(in); INDArray out2 = graph2.outputSingle(in); @@ -505,8 +505,8 @@ public class OutputLayerTest extends BaseDL4JTest { assertEquals(graph.gradient().gradient(), graph2.gradient().gradient()); //Also check computeScoreForExamples - INDArray in2a = Nd4j.rand(new int[]{1, 3, 5, 5}); - INDArray labels2a = Nd4j.rand(new int[]{1, 4, 5, 5}); + INDArray in2a = Nd4j.rand(1, 3, 5, 5); + INDArray labels2a = Nd4j.rand(1, 4, 5, 5); INDArray in2 = Nd4j.concat(0, in2a, in2a); INDArray labels2 = Nd4j.concat(0, labels2a, labels2a); @@ -540,7 +540,7 @@ public class OutputLayerTest extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - INDArray in = Nd4j.rand(new int[]{2,3,4,5}); + INDArray in = Nd4j.rand(2,3,4,5); INDArray out = net.output(in); double min = out.minNumber().doubleValue(); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/RepeatVectorTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/RepeatVectorTest.java index 2aa98575e..3e526e774 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/RepeatVectorTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/RepeatVectorTest.java @@ -34,12 +34,11 @@ import org.nd4j.common.primitives.Pair; import java.util.Arrays; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; public class RepeatVectorTest extends BaseDL4JTest { - private int REPEAT = 4; + private final int REPEAT = 4; private 
Layer getRepeatVectorLayer() { @@ -55,18 +54,18 @@ public class RepeatVectorTest extends BaseDL4JTest { double[] arr = new double[] {1., 2., 3., 1., 2., 3., 1., 2., 3., 1., 2., 3.}; INDArray expectedOut = Nd4j.create(arr, new long[] {1, 3, REPEAT}, 'f'); - INDArray input = Nd4j.create(new double[] {1., 2., 3.}, new long[] {1, 3}); + INDArray input = Nd4j.create(new double[] {1., 2., 3.}, 1, 3); Layer layer = getRepeatVectorLayer(); INDArray output = layer.activate(input, false, LayerWorkspaceMgr.noWorkspaces()); - assertTrue(Arrays.equals(expectedOut.shape(), output.shape())); + assertArrayEquals(expectedOut.shape(), output.shape()); assertEquals(expectedOut, output); INDArray epsilon = Nd4j.ones(1,3,4); Pair out = layer.backpropGradient(epsilon, LayerWorkspaceMgr.noWorkspaces()); INDArray outEpsilon = out.getSecond(); - INDArray expectedEpsilon = Nd4j.create(new double[] {4., 4., 4.}, new long[] {1, 3}); + INDArray expectedEpsilon = Nd4j.create(new double[] {4., 4., 4.}, 1, 3); assertEquals(expectedEpsilon, outEpsilon); } } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/SeedTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/SeedTest.java index a9b3ee532..4d46d5066 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/SeedTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/SeedTest.java @@ -40,8 +40,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; public class SeedTest extends BaseDL4JTest { - private DataSetIterator irisIter = new IrisDataSetIterator(50, 50); - private DataSet data = irisIter.next(); + private final DataSetIterator irisIter = new IrisDataSetIterator(50, 50); + private final DataSet data = irisIter.next(); @Test diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Convolution3DTest.java 
b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Convolution3DTest.java index 596906d10..d282690bb 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Convolution3DTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Convolution3DTest.java @@ -36,8 +36,7 @@ import org.nd4j.linalg.factory.Nd4j; import java.util.Arrays; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; /** * @author Max Pumperla @@ -45,18 +44,18 @@ import static org.junit.jupiter.api.Assertions.assertTrue; public class Convolution3DTest extends BaseDL4JTest { private int nExamples = 1; - private int nChannelsOut = 1; - private int nChannelsIn = 1; - private int inputDepth = 2 * 2; - private int inputWidth = 28 / 2; - private int inputHeight = 28 / 2; + private final int nChannelsOut = 1; + private final int nChannelsIn = 1; + private final int inputDepth = 2 * 2; + private final int inputWidth = 28 / 2; + private final int inputHeight = 28 / 2; - private int[] kernelSize = new int[]{2, 2, 2}; - private int outputDepth = inputDepth - kernelSize[0] + 1; - private int outputHeight = inputHeight - kernelSize[1] + 1; - private int outputWidth = inputWidth - kernelSize[2] + 1; + private final int[] kernelSize = new int[]{2, 2, 2}; + private final int outputDepth = inputDepth - kernelSize[0] + 1; + private final int outputHeight = inputHeight - kernelSize[1] + 1; + private final int outputWidth = inputWidth - kernelSize[2] + 1; - private INDArray epsilon = Nd4j.ones(nExamples, nChannelsOut, outputDepth, outputHeight, outputWidth); + private final INDArray epsilon = Nd4j.ones(nExamples, nChannelsOut, outputDepth, outputHeight, outputWidth); @Test @@ -65,11 +64,11 @@ public class Convolution3DTest extends BaseDL4JTest { INDArray containedInput = getContainedData(); 
Convolution3DLayer layer = (Convolution3DLayer) getConvolution3DLayer(ConvolutionMode.Same); - assertTrue(layer.convolutionMode == ConvolutionMode.Same); + assertSame(layer.convolutionMode, ConvolutionMode.Same); INDArray containedOutput = layer.activate(containedInput, false, LayerWorkspaceMgr.noWorkspaces()); - assertTrue(Arrays.equals(containedInput.shape(), containedOutput.shape())); + assertArrayEquals(containedInput.shape(), containedOutput.shape()); } @@ -78,13 +77,12 @@ public class Convolution3DTest extends BaseDL4JTest { Convolution3DLayer layer = (Convolution3DLayer) getConvolution3DLayer(ConvolutionMode.Strict); - assertTrue(layer.convolutionMode == ConvolutionMode.Strict); + assertSame(layer.convolutionMode, ConvolutionMode.Strict); INDArray input = getData(); INDArray output = layer.activate(input, false, LayerWorkspaceMgr.noWorkspaces()); - assertTrue(Arrays.equals(new long[]{nExamples, nChannelsOut, outputDepth, outputWidth, outputHeight}, - output.shape())); + assertArrayEquals(new long[]{nExamples, nChannelsOut, outputDepth, outputWidth, outputHeight}, output.shape()); } private Layer getConvolution3DLayer(ConvolutionMode mode) { diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/ConvolutionLayerSetupTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/ConvolutionLayerSetupTest.java index 26fc3a4e3..246dfee5b 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/ConvolutionLayerSetupTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/ConvolutionLayerSetupTest.java @@ -108,7 +108,7 @@ public class ConvolutionLayerSetupTest extends BaseDL4JTest { .setInputType(InputType.convolutional(numRows, numColumns, nChannels)); - DataSet d = new DataSet(Nd4j.rand(new int[]{10, nChannels, numRows, numColumns}), + DataSet d = new DataSet(Nd4j.rand(10, nChannels, numRows, numColumns), 
FeatureUtil.toOutcomeMatrix(new int[] {1, 1, 1, 1, 1, 1, 1, 1, 1, 1}, 6)); MultiLayerNetwork network = new MultiLayerNetwork(builder.build()); network.init(); @@ -137,7 +137,7 @@ public class ConvolutionLayerSetupTest extends BaseDL4JTest { @Test public void testMultiChannel() throws Exception { - INDArray in = Nd4j.rand(new int[] {10, 3, 28, 28}); + INDArray in = Nd4j.rand(10, 3, 28, 28); INDArray labels = Nd4j.rand(10, 2); DataSet next = new DataSet(in, labels); @@ -288,7 +288,7 @@ public class ConvolutionLayerSetupTest extends BaseDL4JTest { .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2}) .build()) .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD) - .nIn(5 * 5 * 1 * 6) //216 + .nIn(5 * 5 * 6) //216 .nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX) .build()) .inputPreProcessor(0, new FeedForwardToCnnPreProcessor(numRows, numColumns, nChannels)) @@ -440,8 +440,8 @@ public class ConvolutionLayerSetupTest extends BaseDL4JTest { network.fit(next); INDArray actualGammaParam = network.getLayer(1).getParam(BatchNormalizationParamInitializer.GAMMA); INDArray actualBetaParam = network.getLayer(1).getParam(BatchNormalizationParamInitializer.BETA); - assertTrue(actualGammaParam != null); - assertTrue(actualBetaParam != null); + assertNotNull(actualGammaParam); + assertNotNull(actualBetaParam); } @Test diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/LocallyConnectedLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/LocallyConnectedLayerTest.java index fa8c88493..e4921b555 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/LocallyConnectedLayerTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/LocallyConnectedLayerTest.java @@ -111,7 +111,7 @@ public class LocallyConnectedLayerTest extends 
BaseDL4JTest { network.init(); INDArray input = Nd4j.ones(10, 3, 8); - INDArray output = network.output(input, false);; + INDArray output = network.output(input, false); for (int i = 0; i < 100; i++) { // TODO: this falls flat for 1000 iterations on my machine output = network.output(input, false); } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SpaceToDepthTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SpaceToDepthTest.java index f69b0041e..0ee4e322f 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SpaceToDepthTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SpaceToDepthTest.java @@ -33,22 +33,21 @@ import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; import java.util.Arrays; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; public class SpaceToDepthTest extends BaseDL4JTest { - private int mb = 1; - private int inDepth = 2; - private int inputWidth = 2; - private int inputHeight = 2; + private final int mb = 1; + private final int inDepth = 2; + private final int inputWidth = 2; + private final int inputHeight = 2; - private int blockSize = 2; - private SpaceToDepthLayer.DataFormat dataFormat = SpaceToDepthLayer.DataFormat.NCHW; + private final int blockSize = 2; + private final SpaceToDepthLayer.DataFormat dataFormat = SpaceToDepthLayer.DataFormat.NCHW; - private int outDepth = inDepth * blockSize * blockSize; - private int outputHeight = inputHeight / blockSize; - private int outputWidth = inputWidth / blockSize; + private final int outDepth = inDepth * blockSize * blockSize; + private final int outputHeight = inputHeight / blockSize; + private final int outputWidth = inputWidth / blockSize; private INDArray getContainedData() { @@ -75,7 +74,7 @@ public 
class SpaceToDepthTest extends BaseDL4JTest { Layer std = getSpaceToDepthLayer(); INDArray containedOutput = std.activate(containedInput, false, LayerWorkspaceMgr.noWorkspaces()); - assertTrue(Arrays.equals(containedExpectedOut.shape(), containedOutput.shape())); + assertArrayEquals(containedExpectedOut.shape(), containedOutput.shape()); assertEquals(containedExpectedOut, containedOutput); } @@ -89,7 +88,7 @@ public class SpaceToDepthTest extends BaseDL4JTest { std.setInput(getContainedData(), LayerWorkspaceMgr.noWorkspaces()); INDArray containedOutput = std.backpropGradient(containedInputEpsilon, LayerWorkspaceMgr.noWorkspaces()).getRight(); - assertTrue(Arrays.equals(containedExpectedOut.shape(), containedOutput.shape())); + assertArrayEquals(containedExpectedOut.shape(), containedOutput.shape()); assertEquals(containedExpectedOut, containedOutput); } } \ No newline at end of file diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SubsamplingLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SubsamplingLayerTest.java index 2fca7643a..75434a4c3 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SubsamplingLayerTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SubsamplingLayerTest.java @@ -54,12 +54,12 @@ import static org.junit.jupiter.api.Assertions.*; public class SubsamplingLayerTest extends BaseDL4JTest { private int nExamples = 1; - private int depth = 20; //channels & nOut - private int nChannelsIn = 1; - private int inputWidth = 28; - private int inputHeight = 28; - private int[] kernelSize = new int[] {2, 2}; - private int[] stride = new int[] {2, 2}; + private final int depth = 20; //channels & nOut + private final int nChannelsIn = 1; + private final int inputWidth = 28; + private final int inputHeight = 28; + private final int[] kernelSize = new int[] {2, 2}; + private final int[] 
stride = new int[] {2, 2}; int featureMapWidth = (inputWidth - kernelSize[0]) / stride[0] + 1; int featureMapHeight = (inputHeight - kernelSize[1]) / stride[0] + 1; @@ -73,18 +73,17 @@ public class SubsamplingLayerTest extends BaseDL4JTest { @Test public void testSubSampleMaxActivate() throws Exception { INDArray containedExpectedOut = - Nd4j.create(new double[] {5., 7., 6., 8., 4., 7., 5., 9.}, new long[] {1, 2, 2, 2}).castTo(Nd4j.defaultFloatingPointType()); + Nd4j.create(new double[] {5., 7., 6., 8., 4., 7., 5., 9.}, 1, 2, 2, 2).castTo(Nd4j.defaultFloatingPointType()); INDArray containedInput = getContainedData(); INDArray input = getData(); Layer layer = getSubsamplingLayer(SubsamplingLayer.PoolingType.MAX); INDArray containedOutput = layer.activate(containedInput, false, LayerWorkspaceMgr.noWorkspaces()); - assertTrue(Arrays.equals(containedExpectedOut.shape(), containedOutput.shape())); + assertArrayEquals(containedExpectedOut.shape(), containedOutput.shape()); assertEquals(containedExpectedOut, containedOutput); INDArray output = layer.activate(input, false, LayerWorkspaceMgr.noWorkspaces()); - assertTrue(Arrays.equals(new long[] {nExamples, nChannelsIn, featureMapWidth, featureMapHeight}, - output.shape())); + assertArrayEquals(new long[]{nExamples, nChannelsIn, featureMapWidth, featureMapHeight}, output.shape()); assertEquals(nChannelsIn, output.size(1), 1e-4); // channels retained } @@ -97,12 +96,11 @@ public class SubsamplingLayerTest extends BaseDL4JTest { Layer layer = getSubsamplingLayer(SubsamplingLayer.PoolingType.AVG); INDArray containedOutput = layer.activate(containedInput, false, LayerWorkspaceMgr.noWorkspaces()); - assertTrue(Arrays.equals(containedExpectedOut.shape(), containedOutput.shape())); + assertArrayEquals(containedExpectedOut.shape(), containedOutput.shape()); assertEquals(containedExpectedOut, containedOutput); INDArray output = layer.activate(input, false, LayerWorkspaceMgr.noWorkspaces()); - assertTrue(Arrays.equals(new long[] 
{nExamples, nChannelsIn, featureMapWidth, featureMapHeight}, - output.shape())); + assertArrayEquals(new long[]{nExamples, nChannelsIn, featureMapWidth, featureMapHeight}, output.shape()); assertEquals(nChannelsIn, output.size(1), 1e-4); // channels retained } @@ -124,7 +122,7 @@ public class SubsamplingLayerTest extends BaseDL4JTest { Pair containedOutput = layer.backpropGradient(expectedContainedEpsilonInput, LayerWorkspaceMgr.noWorkspaces()); assertEquals(expectedContainedEpsilonResult, containedOutput.getSecond()); - assertEquals(null, containedOutput.getFirst().getGradientFor("W")); + assertNull(containedOutput.getFirst().getGradientFor("W")); assertEquals(expectedContainedEpsilonResult.shape().length, containedOutput.getSecond().shape().length); INDArray input2 = getData(); @@ -153,7 +151,7 @@ public class SubsamplingLayerTest extends BaseDL4JTest { Pair containedOutput = layer.backpropGradient(expectedContainedEpsilonInput, LayerWorkspaceMgr.noWorkspaces()); assertEquals(expectedContainedEpsilonResult, containedOutput.getSecond()); - assertEquals(null, containedOutput.getFirst().getGradientFor("W")); + assertNull(containedOutput.getFirst().getGradientFor("W")); assertArrayEquals(expectedContainedEpsilonResult.shape(), containedOutput.getSecond().shape()); } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/TestConvolutionModes.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/TestConvolutionModes.java index 6cc561ceb..35ba6d924 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/TestConvolutionModes.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/TestConvolutionModes.java @@ -68,14 +68,14 @@ public class TestConvolutionModes extends BaseDL4JTest { for (int minibatch : minibatches) { for (int inDepth : inDepths) { - INDArray origData = Nd4j.rand(new int[] {minibatch, inDepth, 9, 9}); + INDArray 
origData = Nd4j.rand(minibatch, inDepth, 9, 9); for (int inSize : inSizes) { for (ConvolutionMode cm : new ConvolutionMode[] {ConvolutionMode.Strict, ConvolutionMode.Truncate}) { - INDArray inputData = Nd4j.rand(new int[] {minibatch, inDepth, inSize, inSize}); + INDArray inputData = Nd4j.rand(minibatch, inDepth, inSize, inSize); inputData.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 9), NDArrayIndex.interval(0, 9)).assign(origData); @@ -147,14 +147,14 @@ public class TestConvolutionModes extends BaseDL4JTest { for (int minibatch : minibatches) { for (int inDepth : inDepths) { - INDArray origData = Nd4j.rand(new int[] {minibatch, inDepth, 9, 9}); + INDArray origData = Nd4j.rand(minibatch, inDepth, 9, 9); for (int inSize : inSizes) { for (ConvolutionMode cm : new ConvolutionMode[] {ConvolutionMode.Strict, ConvolutionMode.Truncate}) { - INDArray inputData = Nd4j.rand(new int[] {minibatch, inDepth, inSize, inSize}); + INDArray inputData = Nd4j.rand(minibatch, inDepth, inSize, inSize); inputData.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 9), NDArrayIndex.interval(0, 9)).assign(origData); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Upsampling1DTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Upsampling1DTest.java index 0504c4fac..277b43c31 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Upsampling1DTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Upsampling1DTest.java @@ -38,8 +38,7 @@ import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; import java.util.Arrays; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; /** * @author Max Pumperla @@ -47,11 +46,11 @@ import static org.junit.jupiter.api.Assertions.assertTrue; 
public class Upsampling1DTest extends BaseDL4JTest { private int nExamples = 1; - private int depth = 20; - private int nChannelsIn = 1; - private int inputLength = 28; - private int size = 2; - private int outputLength = inputLength * size; + private final int depth = 20; + private final int nChannelsIn = 1; + private final int inputLength = 28; + private final int size = 2; + private final int outputLength = inputLength * size; private INDArray epsilon = Nd4j.ones(nExamples, depth, outputLength); @@ -65,12 +64,11 @@ public class Upsampling1DTest extends BaseDL4JTest { Layer layer = getUpsampling1DLayer(); INDArray containedOutput = layer.activate(containedInput, false, LayerWorkspaceMgr.noWorkspaces()); - assertTrue(Arrays.equals(containedExpectedOut.shape(), containedOutput.shape())); + assertArrayEquals(containedExpectedOut.shape(), containedOutput.shape()); assertEquals(containedExpectedOut, containedOutput); INDArray output = layer.activate(input, false, LayerWorkspaceMgr.noWorkspaces()); - assertTrue(Arrays.equals(new long[] {nExamples, nChannelsIn, outputLength}, - output.shape())); + assertArrayEquals(new long[]{nExamples, nChannelsIn, outputLength}, output.shape()); assertEquals(nChannelsIn, output.size(1), 1e-4); } @@ -92,7 +90,7 @@ public class Upsampling1DTest extends BaseDL4JTest { Pair containedOutput = layer.backpropGradient(expectedContainedEpsilonInput, LayerWorkspaceMgr.noWorkspaces()); assertEquals(expectedContainedEpsilonResult, containedOutput.getSecond()); - assertEquals(null, containedOutput.getFirst().getGradientFor("W")); + assertNull(containedOutput.getFirst().getGradientFor("W")); assertEquals(expectedContainedEpsilonResult.shape().length, containedOutput.getSecond().shape().length); INDArray input2 = getData(); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Upsampling2DTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Upsampling2DTest.java index 
a0ee3de55..e1d46f911 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Upsampling2DTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/convolution/Upsampling2DTest.java @@ -46,14 +46,14 @@ import static org.junit.jupiter.api.Assertions.*; public class Upsampling2DTest extends BaseDL4JTest { private int nExamples = 1; - private int depth = 20; - private int nChannelsIn = 1; - private int inputWidth = 28; - private int inputHeight = 28; + private final int depth = 20; + private final int nChannelsIn = 1; + private final int inputWidth = 28; + private final int inputHeight = 28; - private int size = 2; - private int outputWidth = inputWidth * size; - private int outputHeight = inputHeight * size; + private final int size = 2; + private final int outputWidth = inputWidth * size; + private final int outputHeight = inputHeight * size; private INDArray epsilon = Nd4j.ones(nExamples, depth, outputHeight, outputWidth); @@ -68,12 +68,11 @@ public class Upsampling2DTest extends BaseDL4JTest { Layer layer = getUpsamplingLayer(); INDArray containedOutput = layer.activate(containedInput, false, LayerWorkspaceMgr.noWorkspaces()); - assertTrue(Arrays.equals(containedExpectedOut.shape(), containedOutput.shape())); + assertArrayEquals(containedExpectedOut.shape(), containedOutput.shape()); assertEquals(containedExpectedOut, containedOutput); INDArray output = layer.activate(input, false, LayerWorkspaceMgr.noWorkspaces()); - assertTrue(Arrays.equals(new long[] {nExamples, nChannelsIn, outputWidth, outputHeight}, - output.shape())); + assertArrayEquals(new long[]{nExamples, nChannelsIn, outputWidth, outputHeight}, output.shape()); assertEquals(nChannelsIn, output.size(1), 1e-4); } @@ -95,7 +94,7 @@ public class Upsampling2DTest extends BaseDL4JTest { Pair containedOutput = layer.backpropGradient(expectedContainedEpsilonInput, LayerWorkspaceMgr.noWorkspaces()); assertEquals(expectedContainedEpsilonResult, 
containedOutput.getSecond()); - assertEquals(null, containedOutput.getFirst().getGradientFor("W")); + assertNull(containedOutput.getFirst().getGradientFor("W")); assertEquals(expectedContainedEpsilonResult.shape().length, containedOutput.getSecond().shape().length); INDArray input2 = getData(); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/dense/DenseTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/dense/DenseTest.java index 2c4968e52..25c8074a8 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/dense/DenseTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/dense/DenseTest.java @@ -43,9 +43,9 @@ import static org.junit.jupiter.api.Assertions.assertEquals; public class DenseTest extends BaseDL4JTest { - private int numSamples = 150; - private int batchSize = 150; - private DataSetIterator iter = new IrisDataSetIterator(batchSize, numSamples); + private final int numSamples = 150; + private final int batchSize = 150; + private final DataSetIterator iter = new IrisDataSetIterator(batchSize, numSamples); private DataSet data; @Test diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/embedding/EmbeddingLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/embedding/EmbeddingLayerTest.java index 30e221c1a..259a38382 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/embedding/EmbeddingLayerTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/embedding/EmbeddingLayerTest.java @@ -399,7 +399,6 @@ public class EmbeddingLayerTest extends BaseDL4JTest { net2.setParams(net.params().dup()); - ; INDArray inEmbedding = Nd4j.create(batchSize, 1, timeSeriesLength); INDArray inOneHot = Nd4j.create(batchSize, nClassesIn, timeSeriesLength); 
INDArray outLabels = Nd4j.create(batchSize, 4, timeSeriesLength); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/normalization/BatchNormalizationTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/normalization/BatchNormalizationTest.java index 10ca617fe..c4950d3c4 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/normalization/BatchNormalizationTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/normalization/BatchNormalizationTest.java @@ -149,7 +149,7 @@ public class BatchNormalizationTest extends BaseDL4JTest { int nIn = 4; int minibatch = 2; Nd4j.getRandom().setSeed(12345); - INDArray input = Nd4j.rand('c', new int[]{minibatch, nIn}); + INDArray input = Nd4j.rand('c', minibatch, nIn); //TODO: other values for gamma/beta INDArray gamma = Nd4j.ones(1, nIn); @@ -207,7 +207,7 @@ public class BatchNormalizationTest extends BaseDL4JTest { int hw = 15; Nd4j.getRandom().setSeed(12345); - INDArray randInput = Nd4j.rand(new int[]{100, nOut, hw, hw}); + INDArray randInput = Nd4j.rand(100, nOut, hw, hw); INDArray output = l.activate(randInput, true, LayerWorkspaceMgr.noWorkspaces()); assertEquals(4, output.rank()); @@ -288,7 +288,7 @@ public class BatchNormalizationTest extends BaseDL4JTest { int hw = 3; int minibatch = 2; Nd4j.getRandom().setSeed(12345); - INDArray input = Nd4j.rand('c', new int[]{minibatch, nIn, hw, hw}); + INDArray input = Nd4j.rand('c', minibatch, nIn, hw, hw); //TODO: other values for gamma/beta INDArray gamma = Nd4j.ones(1, nIn); @@ -313,7 +313,7 @@ public class BatchNormalizationTest extends BaseDL4JTest { //------------------------------------------------------------- //Check backprop - INDArray epsilon = Nd4j.rand('c', new int[]{minibatch, nIn, hw, hw}); //dL/dy + INDArray epsilon = Nd4j.rand('c', minibatch, nIn, hw, hw); //dL/dy int effectiveMinibatch = minibatch * hw * hw; @@ -388,8 +388,8 @@ public class 
BatchNormalizationTest extends BaseDL4JTest { network.fit(next); INDArray actualGammaParam = network.getLayer(1).getParam(BatchNormalizationParamInitializer.GAMMA); INDArray actualBetaParam = network.getLayer(1).getParam(BatchNormalizationParamInitializer.BETA); - assertTrue(actualGammaParam != null); - assertTrue(actualBetaParam != null); + assertNotNull(actualGammaParam); + assertNotNull(actualBetaParam); } @Test @@ -599,7 +599,7 @@ public class BatchNormalizationTest extends BaseDL4JTest { int minibatch = 32; List list = new ArrayList<>(); for (int i = 0; i < 100; i++) { - list.add(new DataSet(Nd4j.rand(new int[]{minibatch, 3, 5, 5}), Nd4j.rand(minibatch, 10))); + list.add(new DataSet(Nd4j.rand(minibatch, 3, 5, 5), Nd4j.rand(minibatch, 10))); } DataSetIterator iter = new ListDataSetIterator(list); @@ -672,7 +672,7 @@ public class BatchNormalizationTest extends BaseDL4JTest { int minibatch = 32; for (int i = 0; i < 10; i++) { - DataSet ds = new DataSet(Nd4j.rand(new int[]{minibatch, 3, 5, 5}), Nd4j.rand(minibatch, 10)); + DataSet ds = new DataSet(Nd4j.rand(minibatch, 3, 5, 5), Nd4j.rand(minibatch, 10)); net.fit(ds); net2.fit(ds); @@ -743,8 +743,8 @@ public class BatchNormalizationTest extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - INDArray in = Nd4j.rand(new int[]{1, 3, 5}); - INDArray label = Nd4j.rand(new int[]{1, 3, 5}); + INDArray in = Nd4j.rand(1, 3, 5); + INDArray label = Nd4j.rand(1, 3, 5); INDArray out = net.output(in); assertArrayEquals(new long[]{1, 3, 5}, out.shape()); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/normalization/LocalResponseTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/normalization/LocalResponseTest.java index 99fc1e5a3..e876b736b 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/normalization/LocalResponseTest.java +++ 
b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/normalization/LocalResponseTest.java @@ -46,15 +46,14 @@ import org.nd4j.linalg.lossfunctions.LossFunctions; import org.nd4j.common.primitives.Pair; import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; -import static org.junit.jupiter.api.Assertions.assertArrayEquals; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.*; /** * */ public class LocalResponseTest extends BaseDL4JTest { - private INDArray x = Nd4j.create(new double[] {0.88128096, -0.96666986, -0.61832994, 0.26418415, 0.05694608, + private final INDArray x = Nd4j.create(new double[] {0.88128096, -0.96666986, -0.61832994, 0.26418415, 0.05694608, 0.2950289, 0.99222249, 0.24541704, 0.4219842, 0.96430975, 0.19299535, -0.06658337, -0.27603117, 0.24216647, 0.21834095, 0.03863283, -0.82313406, -0.37236378, -0.77667993, 0.66295379, -0.34406275, -0.25924176, 0.26652309, -0.58964926, -0.46907067, 0.34666502, 0.81208313, -0.17042427, -0.22470538, @@ -67,7 +66,7 @@ public class LocalResponseTest extends BaseDL4JTest { -0.31666604, 0.19781154, 0.09908111, 0.64796048, -0.99037546, 0.67919868, 0.43810204}, new int[] {2, 7, 3, 2}); - private INDArray activationsExpected = Nd4j.create(new double[] {0.52397668, -0.57476264, -0.3676528, 0.15707894, + private final INDArray activationsExpected = Nd4j.create(new double[] {0.52397668, -0.57476264, -0.3676528, 0.15707894, 0.03385943, 0.17542371, 0.58992499, 0.14591768, 0.25090647, 0.57335907, 0.11475233, -0.03958985, -0.16411273, 0.14398433, 0.12981956, 0.02297027, -0.48942304, -0.22139823, -0.46177959, 0.39418164, -0.20457059, -0.15413573, 0.15846729, -0.3505919, -0.27889356, 0.20611978, 0.48284137, -0.10133155, @@ -80,7 +79,7 @@ public class LocalResponseTest extends BaseDL4JTest { 0.57277, -0.18827969, 0.1176173, 0.05891332, 0.38526815, -0.58884346, 0.40383074, 0.26048511}, new int[] {2, 7, 3, 2}); - private INDArray epsilon = 
Nd4j.create(new double[] {-0.13515499, 0.96470547, -0.62253004, 0.80172491, -0.97510445, + private final INDArray epsilon = Nd4j.create(new double[] {-0.13515499, 0.96470547, -0.62253004, 0.80172491, -0.97510445, -0.41198033, -0.4790071, 0.07551047, -0.01383764, -0.05797465, 0.21242172, 0.7145375, -0.17809176, -0.11465316, -0.2066526, 0.21950938, 0.4627091, 0.30275798, 0.61443841, 0.75912178, -0.132248, -0.82923287, 0.74962652, -0.88993639, 0.04406403, 0.32096064, -0.46400586, 0.1603231, 0.63007826, @@ -93,7 +92,7 @@ public class LocalResponseTest extends BaseDL4JTest { 0.04847952, -0.82953823, 0.8089835, 0.50185651, -0.88619858, -0.78598201, 0.27489874, 0.63673472}, new int[] {2, 7, 3, 2}); - private INDArray newEpsilonExpected = Nd4j.create(new double[] {-0.08033668, 0.57355404, -0.37014094, 0.47668865, + private final INDArray newEpsilonExpected = Nd4j.create(new double[] {-0.08033668, 0.57355404, -0.37014094, 0.47668865, -0.57978398, -0.24495915, -0.28474802, 0.04490108, -0.00823483, -0.03448687, 0.12630466, 0.42485803, -0.10589627, -0.06816553, -0.12287001, 0.13051508, 0.27510744, 0.18001786, 0.36528736, 0.45133191, -0.07863599, -0.49303374, 0.44571424, -0.52912313, 0.02620371, 0.19082049, -0.27585581, 0.09532529, @@ -133,7 +132,7 @@ public class LocalResponseTest extends BaseDL4JTest { assertEquals(newEpsilonExpected.getDouble(8), containedOutput.getSecond().getDouble(8), 1e-4); assertEquals(newEpsilonExpected.getDouble(20), containedOutput.getSecond().getDouble(20), 1e-4); - assertEquals(null, containedOutput.getFirst().getGradientFor("W")); + assertNull(containedOutput.getFirst().getGradientFor("W")); assertArrayEquals(newEpsilonExpected.shape(), containedOutput.getSecond().shape()); } @@ -182,7 +181,7 @@ public class LocalResponseTest extends BaseDL4JTest { double alpha = 1e-4; double beta = 0.75; - INDArray in = Nd4j.rand(new int[] {minibatch, depth, wh, wh}); + INDArray in = Nd4j.rand(minibatch, depth, wh, wh); INDArray outExp = Nd4j.zeros(minibatch, 
depth, wh, wh); for (int m = 0; m < minibatch; m++) { diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/objdetect/TestYolo2OutputLayer.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/objdetect/TestYolo2OutputLayer.java index 7fb8dc8af..c732ab366 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/objdetect/TestYolo2OutputLayer.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/objdetect/TestYolo2OutputLayer.java @@ -102,7 +102,7 @@ public class TestYolo2OutputLayer extends BaseDL4JTest { org.deeplearning4j.nn.layers.objdetect.Yolo2OutputLayer y2impl = (org.deeplearning4j.nn.layers.objdetect.Yolo2OutputLayer) net.getLayer(1); - INDArray input = Nd4j.rand(new int[]{mb, depth, h, w}); + INDArray input = Nd4j.rand(mb, depth, h, w); INDArray out = y2impl.activate(input, false, LayerWorkspaceMgr.noWorkspaces()); assertNotNull(out); @@ -115,7 +115,7 @@ public class TestYolo2OutputLayer extends BaseDL4JTest { INDArray labels = Nd4j.zeros(mb, labelDepth, h, w); //put 1 object per minibatch, at positions (0,0), (1,1) etc. 
//Positions for label boxes: (1,1) to (2,2), (2,2) to (4,4) etc - labels.putScalar(0, 4 + 0, 0, 0, 1); + labels.putScalar(0, 4, 0, 0, 1); labels.putScalar(1, 4 + 1, 1, 1, 1); labels.putScalar(2, 4 + 2, 2, 2, 1); @@ -190,7 +190,7 @@ public class TestYolo2OutputLayer extends BaseDL4JTest { org.deeplearning4j.nn.layers.objdetect.Yolo2OutputLayer y2impl = (org.deeplearning4j.nn.layers.objdetect.Yolo2OutputLayer) net.getLayer(1); - INDArray input = Nd4j.rand(new int[]{mb, depth, h, w}); + INDArray input = Nd4j.rand(mb, depth, h, w); INDArray out = y2impl.activate(input, false, LayerWorkspaceMgr.noWorkspaces()); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/ocnn/OCNNOutputLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/ocnn/OCNNOutputLayerTest.java index 1c9da8933..e9f76dfc2 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/ocnn/OCNNOutputLayerTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/ocnn/OCNNOutputLayerTest.java @@ -147,7 +147,7 @@ public class OCNNOutputLayerTest extends BaseDL4JTest { System.out.println("Normal probabilities " + normalProbs); System.out.println("Normal raw output " + outputForNormalSamples); - File tmpFile = new File(testDir.getAbsoluteFile(),"tmp-file-" + UUID.randomUUID().toString()); + File tmpFile = new File(testDir.getAbsoluteFile(),"tmp-file-" + UUID.randomUUID()); ModelSerializer.writeModel(network,tmpFile,true); tmpFile.deleteOnExit(); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/pooling/GlobalPoolingMaskingTests.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/pooling/GlobalPoolingMaskingTests.java index f6ef09732..a7f3d1867 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/pooling/GlobalPoolingMaskingTests.java +++ 
b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/pooling/GlobalPoolingMaskingTests.java @@ -74,7 +74,7 @@ public class GlobalPoolingMaskingTests extends BaseDL4JTest { net.init(); Random r = new Random(12345L); - INDArray input = Nd4j.rand(new int[] {miniBatchSize, nIn, timeSeriesLength}).subi(0.5); + INDArray input = Nd4j.rand(miniBatchSize, nIn, timeSeriesLength).subi(0.5); INDArray mask; if (miniBatchSize == 1) { @@ -136,7 +136,7 @@ public class GlobalPoolingMaskingTests extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - INDArray inToBeMasked = Nd4j.rand(new int[] {minibatch, depthIn, height, width}); + INDArray inToBeMasked = Nd4j.rand(minibatch, depthIn, height, width); //Shape for mask: [minibatch, 1, 1, width] INDArray maskArray = Nd4j.create(new double[] {1, 1, 1, 1, 1, 0}, new int[]{1,1,1,width}); @@ -164,7 +164,7 @@ public class GlobalPoolingMaskingTests extends BaseDL4JTest { //Finally: check gradient calc for exceptions net.setLayerMaskArrays(maskArray, null); net.setInput(inToBeMasked); - INDArray labels = Nd4j.create(new double[] {0, 1}, new long[]{1,2}); + INDArray labels = Nd4j.create(new double[] {0, 1}, 1,2); net.setLabels(labels); net.computeGradientAndScore(); @@ -199,7 +199,7 @@ public class GlobalPoolingMaskingTests extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - INDArray inToBeMasked = Nd4j.rand(new int[] {minibatch, depthIn, height, width}); + INDArray inToBeMasked = Nd4j.rand(minibatch, depthIn, height, width); //Shape for mask: [minibatch, width] INDArray maskArray = Nd4j.create(new double[] {1, 1, 1, 1, 1, 0}, new int[]{1,1,height,1}); @@ -227,7 +227,7 @@ public class GlobalPoolingMaskingTests extends BaseDL4JTest { //Finally: check gradient calc for exceptions net.setLayerMaskArrays(maskArray, null); net.setInput(inToBeMasked); - INDArray labels = Nd4j.create(new double[] {0, 1}, new long[]{1,2}); + INDArray labels = Nd4j.create(new 
double[] {0, 1}, 1,2); net.setLabels(labels); net.computeGradientAndScore(); @@ -263,7 +263,7 @@ public class GlobalPoolingMaskingTests extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - INDArray inToBeMasked = Nd4j.rand(new int[] {minibatch, depthIn, height, width}); + INDArray inToBeMasked = Nd4j.rand(minibatch, depthIn, height, width); //Shape for mask: [minibatch, width] INDArray maskArray = Nd4j.create(new double[][] {{1, 1, 1, 1, 1, 1}, {1, 1, 1, 1, 1, 0}, {1, 1, 1, 1, 0, 0}}) @@ -322,7 +322,7 @@ public class GlobalPoolingMaskingTests extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - INDArray inToBeMasked = Nd4j.rand(new int[] {minibatch, depthIn, height, width}); + INDArray inToBeMasked = Nd4j.rand(minibatch, depthIn, height, width); //Shape for mask: [minibatch, 1, height, 1] -> broadcast INDArray maskArray = Nd4j.create(new double[][] {{1, 1, 1, 1, 1}, {1, 1, 1, 1, 0}, {1, 1, 1, 0, 0}}) @@ -381,7 +381,7 @@ public class GlobalPoolingMaskingTests extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - INDArray inToBeMasked = Nd4j.rand(new int[] {minibatch, depthIn, height, width}); + INDArray inToBeMasked = Nd4j.rand(minibatch, depthIn, height, width); //Second example in minibatch: size [3,2] inToBeMasked.get(point(1), NDArrayIndex.all(), NDArrayIndex.interval(3,height), NDArrayIndex.all()).assign(0); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/BidirectionalTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/BidirectionalTest.java index 2c9f0886e..e785b36e5 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/BidirectionalTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/BidirectionalTest.java @@ -68,7 +68,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; @Slf4j 
public class BidirectionalTest extends BaseDL4JTest { - private RNNFormat rnnDataFormat; + private final RNNFormat rnnDataFormat; public BidirectionalTest(RNNFormat rnnDataFormat){ this.rnnDataFormat = rnnDataFormat; @@ -128,9 +128,9 @@ public class BidirectionalTest extends BaseDL4JTest { INDArray in; if (rnnDataFormat == NCW){ - in = Nd4j.rand(new int[]{3, 10, 5}); + in = Nd4j.rand(3, 10, 5); }else{ - in = Nd4j.rand(new int[]{3, 5, 10}); + in = Nd4j.rand(3, 5, 10); } INDArray out1 = net1.output(in); @@ -140,9 +140,9 @@ public class BidirectionalTest extends BaseDL4JTest { INDArray labels; if (rnnDataFormat == NCW){ - labels = Nd4j.rand(new int[]{3, 10, 5}); + labels = Nd4j.rand(3, 10, 5); }else{ - labels = Nd4j.rand(new int[]{3, 5, 10}); + labels = Nd4j.rand(3, 5, 10); } net1.setInput(in); net1.setLabels(labels); @@ -234,14 +234,14 @@ public class BidirectionalTest extends BaseDL4JTest { net2.setParams(net1.params()); //Assuming exact same layout here... - INDArray in = Nd4j.rand(new int[]{3, 10, 5}); + INDArray in = Nd4j.rand(3, 10, 5); INDArray out1 = net1.outputSingle(in); INDArray out2 = net2.outputSingle(in); assertEquals(out1, out2); - INDArray labels = Nd4j.rand(new int[]{3, 10, 5}); + INDArray labels = Nd4j.rand(3, 10, 5); net1.setInput(0,in); net1.setLabels(labels); @@ -261,8 +261,8 @@ public class BidirectionalTest extends BaseDL4JTest { assertEquals(g1.gradient(), g2.gradient()); //Ensure updates are equal: - ComputationGraphUpdater u1 = (ComputationGraphUpdater) net1.getUpdater(); - ComputationGraphUpdater u2 = (ComputationGraphUpdater) net2.getUpdater(); + ComputationGraphUpdater u1 = net1.getUpdater(); + ComputationGraphUpdater u2 = net2.getUpdater(); assertEquals(u1.getUpdaterStateViewArray(), u2.getUpdaterStateViewArray()); u1.update(g1, 0, 0, 3, LayerWorkspaceMgr.noWorkspaces()); u2.update(g2, 0, 0, 3, LayerWorkspaceMgr.noWorkspaces()); diff --git 
a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesBidirectionalLSTMTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesBidirectionalLSTMTest.java index b6f3e7a58..bd1291216 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesBidirectionalLSTMTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesBidirectionalLSTMTest.java @@ -47,8 +47,8 @@ import org.nd4j.common.primitives.Pair; import static org.junit.jupiter.api.Assertions.*; public class GravesBidirectionalLSTMTest extends BaseDL4JTest { - private double score = 0.0; - private RNNFormat rnnDataFormat; + private final double score = 0.0; + private final RNNFormat rnnDataFormat; public GravesBidirectionalLSTMTest(RNNFormat rnnDataFormat){ this.rnnDataFormat = rnnDataFormat; @@ -170,13 +170,13 @@ public class GravesBidirectionalLSTMTest extends BaseDL4JTest { assertNotNull(inWeightGradientB); assertNotNull(recurrentWeightGradientB); - assertArrayEquals(biasGradientF.shape(), new long[] {1, 4 * lstmNHiddenUnits}); - assertArrayEquals(inWeightGradientF.shape(), new long[] {nIn, 4 * lstmNHiddenUnits}); - assertArrayEquals(recurrentWeightGradientF.shape(), new long[] {lstmNHiddenUnits, 4 * lstmNHiddenUnits + 3}); + assertArrayEquals(biasGradientF.shape(), new long[] {1, 4L * lstmNHiddenUnits}); + assertArrayEquals(inWeightGradientF.shape(), new long[] {nIn, 4L * lstmNHiddenUnits}); + assertArrayEquals(recurrentWeightGradientF.shape(), new long[] {lstmNHiddenUnits, 4L * lstmNHiddenUnits + 3}); - assertArrayEquals(biasGradientB.shape(), new long[] {1, 4 * lstmNHiddenUnits}); - assertArrayEquals(inWeightGradientB.shape(), new long[] {nIn, 4 * lstmNHiddenUnits}); - assertArrayEquals(recurrentWeightGradientB.shape(), new long[] {lstmNHiddenUnits, 4 * lstmNHiddenUnits + 3}); + assertArrayEquals(biasGradientB.shape(), new long[] {1, 4L * lstmNHiddenUnits}); + 
assertArrayEquals(inWeightGradientB.shape(), new long[] {nIn, 4L * lstmNHiddenUnits}); + assertArrayEquals(recurrentWeightGradientB.shape(), new long[] {lstmNHiddenUnits, 4L * lstmNHiddenUnits + 3}); assertNotNull(nextEpsilon); if (rnnDataFormat == RNNFormat.NCW) { @@ -212,7 +212,7 @@ public class GravesBidirectionalLSTMTest extends BaseDL4JTest { INDArray params = Nd4j.create(1, numParams); final GravesBidirectionalLSTM lstm = (GravesBidirectionalLSTM) conf.getLayer().instantiate(conf, null, 0, params, true, params.dataType()); - final INDArray input = Nd4j.rand(new int[] {miniBatchSize, nIn, timeSeriesLength}); + final INDArray input = Nd4j.rand(miniBatchSize, nIn, timeSeriesLength); lstm.setInput(input, LayerWorkspaceMgr.noWorkspaces()); @@ -236,7 +236,7 @@ public class GravesBidirectionalLSTMTest extends BaseDL4JTest { for (int i = 0; i < timeSeriesLength; i++) { final INDArray sliceFalse = fwdPassFalse.tensorAlongDimension(i, 1, 0); final INDArray sliceTrue = fwdPassTrue[i]; - assertTrue(sliceFalse.equals(sliceTrue)); + assertEquals(sliceFalse, sliceTrue); } } @@ -273,8 +273,8 @@ public class GravesBidirectionalLSTMTest extends BaseDL4JTest { .instantiate(confBidirectional, null, 0, params, true, params.dataType()); - final INDArray sig = (rnnDataFormat == RNNFormat.NCW)?Nd4j.rand(new int[] {miniBatchSize, nIn, timeSeriesLength}): - Nd4j.rand(new int[] {miniBatchSize, timeSeriesLength, nIn}); + final INDArray sig = (rnnDataFormat == RNNFormat.NCW)?Nd4j.rand(miniBatchSize, nIn, timeSeriesLength): + Nd4j.rand(miniBatchSize, timeSeriesLength, nIn); final INDArray act1 = bidirectionalLSTM.activate(sig, false, LayerWorkspaceMgr.noWorkspaces()); @@ -327,8 +327,8 @@ public class GravesBidirectionalLSTMTest extends BaseDL4JTest { Nd4j.create(1, confForwards.getLayer().initializer().numParams(confForwards))); - final INDArray sig = (rnnDataFormat == RNNFormat.NCW)?Nd4j.rand(new int[] {miniBatchSize, nIn, timeSeriesLength}): - Nd4j.rand(new int[] {miniBatchSize, 
timeSeriesLength, nIn}); + final INDArray sig = (rnnDataFormat == RNNFormat.NCW)?Nd4j.rand(miniBatchSize, nIn, timeSeriesLength): + Nd4j.rand(miniBatchSize, timeSeriesLength, nIn); final INDArray sigb = sig.dup(); if (rnnDataFormat == RNNFormat.NCW) { @@ -389,8 +389,8 @@ public class GravesBidirectionalLSTMTest extends BaseDL4JTest { assertArrayEquals(activation1.data().asFloat(), activation2.data().asFloat(), 1e-5f); - final INDArray randSig = (rnnDataFormat == RNNFormat.NCW)?Nd4j.rand(new int[] {1, layerSize, timeSeriesLength}): - Nd4j.rand(new int[] {1, timeSeriesLength, layerSize}); + final INDArray randSig = (rnnDataFormat == RNNFormat.NCW)?Nd4j.rand(1, layerSize, timeSeriesLength): + Nd4j.rand(1, timeSeriesLength, layerSize); INDArray randSigBackwards = randSig.dup(); if (rnnDataFormat == RNNFormat.NCW){ reverseColumnsInPlace(randSigBackwards.slice(0)); @@ -549,8 +549,8 @@ public class GravesBidirectionalLSTMTest extends BaseDL4JTest { assertEquals(gateAfn, ((org.deeplearning4j.nn.conf.layers.GravesBidirectionalLSTM) net.getLayer(0).conf() .getLayer()).getGateActivationFn().toString()); - INDArray in = Nd4j.rand(new int[] {3, 2, 5}); - INDArray labels = Nd4j.rand(new int[] {3, 2, 5}); + INDArray in = Nd4j.rand(3, 2, 5); + INDArray labels = Nd4j.rand(3, 2, 5); if (rnnDataFormat == RNNFormat.NWC){ in = in.permute(0, 2, 1); labels = labels.permute(0, 2, 1); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesLSTMTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesLSTMTest.java index 80d3af6fe..679066755 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesLSTMTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesLSTMTest.java @@ -131,9 +131,9 @@ public class GravesLSTMTest extends BaseDL4JTest { assertNotNull(inWeightGradient); assertNotNull(recurrentWeightGradient); - 
assertArrayEquals(biasGradient.shape(), new long[] {1, 4 * lstmNHiddenUnits}); - assertArrayEquals(inWeightGradient.shape(), new long[] {nIn, 4 * lstmNHiddenUnits}); - assertArrayEquals(recurrentWeightGradient.shape(), new long[] {lstmNHiddenUnits, 4 * lstmNHiddenUnits + 3}); + assertArrayEquals(biasGradient.shape(), new long[] {1, 4L * lstmNHiddenUnits}); + assertArrayEquals(inWeightGradient.shape(), new long[] {nIn, 4L * lstmNHiddenUnits}); + assertArrayEquals(recurrentWeightGradient.shape(), new long[] {lstmNHiddenUnits, 4L * lstmNHiddenUnits + 3}); assertNotNull(nextEpsilon); assertArrayEquals(nextEpsilon.shape(), new long[] {miniBatchSize, nIn, timeSeriesLength}); @@ -164,7 +164,7 @@ public class GravesLSTMTest extends BaseDL4JTest { val numParams = conf.getLayer().initializer().numParams(conf); INDArray params = Nd4j.create(1, numParams); GravesLSTM lstm = (GravesLSTM) conf.getLayer().instantiate(conf, null, 0, params, true, params.dataType()); - INDArray input = Nd4j.rand(new int[] {miniBatchSize, nIn, timeSeriesLength}); + INDArray input = Nd4j.rand(miniBatchSize, nIn, timeSeriesLength); lstm.setInput(input, LayerWorkspaceMgr.noWorkspaces()); Method actHelper = GravesLSTM.class.getDeclaredMethod("activateHelper", boolean.class, INDArray.class, @@ -189,7 +189,7 @@ public class GravesLSTMTest extends BaseDL4JTest { for (int i = 0; i < timeSeriesLength; i++) { INDArray sliceFalse = fwdPassFalse.tensorAlongDimension(i, 1, 0); INDArray sliceTrue = fwdPassTrue[i]; - assertTrue(sliceFalse.equals(sliceTrue)); + assertEquals(sliceFalse, sliceTrue); } } @@ -210,13 +210,13 @@ public class GravesLSTMTest extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - INDArray in1 = Nd4j.rand(new int[] {1, 2, 4}); - INDArray in2 = Nd4j.rand(new int[] {1, 2, 5}); + INDArray in1 = Nd4j.rand(1, 2, 4); + INDArray in2 = Nd4j.rand(1, 2, 5); in2.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 4)}, in1); 
assertEquals(in1, in2.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 4))); - INDArray labels1 = Nd4j.rand(new int[] {1, 1, 4}); + INDArray labels1 = Nd4j.rand(1, 1, 4); INDArray labels2 = Nd4j.create(1, 1, 5); labels2.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 4)}, labels1); assertEquals(labels1, labels2.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 4))); @@ -271,8 +271,8 @@ public class GravesLSTMTest extends BaseDL4JTest { assertEquals(gateAfn, ((org.deeplearning4j.nn.conf.layers.GravesLSTM) net.getLayer(0).conf().getLayer()) .getGateActivationFn().toString()); - INDArray in = Nd4j.rand(new int[] {3, 2, 5}); - INDArray labels = Nd4j.rand(new int[] {3, 2, 5}); + INDArray in = Nd4j.rand(3, 2, 5); + INDArray labels = Nd4j.rand(3, 2, 5); net.fit(in, labels); } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/MaskZeroLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/MaskZeroLayerTest.java index 1508d4b62..f1fa71ab2 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/MaskZeroLayerTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/MaskZeroLayerTest.java @@ -42,7 +42,7 @@ import java.util.Collections; import static org.junit.jupiter.api.Assertions.assertEquals; public class MaskZeroLayerTest extends BaseDL4JTest { - private RNNFormat rnnDataFormat; + private final RNNFormat rnnDataFormat; public MaskZeroLayerTest(RNNFormat rnnDataFormat){ this.rnnDataFormat = rnnDataFormat; @@ -73,17 +73,17 @@ public class MaskZeroLayerTest extends BaseDL4JTest { .build(); NeuralNetConfiguration conf = new NeuralNetConfiguration(); conf.setLayer(underlying); - INDArray params = Nd4j.zeros(new int[]{1, 16}); + INDArray params = Nd4j.zeros(1, 16); //Set the biases to 1. 
for (int i = 12; i < 16; i++) { params.putScalar(i, 1.0); } - Layer lstm = underlying.instantiate(conf, Collections.emptyList(), 0, params, false, params.dataType()); + Layer lstm = underlying.instantiate(conf, Collections.emptyList(), 0, params, false, params.dataType()); double maskingValue = 0.0; MaskZeroLayer l = new MaskZeroLayer(lstm, maskingValue); - INDArray input = Nd4j.create(Arrays.asList(ex1, ex2), new int[]{2, 2, 3}); + INDArray input = Nd4j.create(Arrays.asList(ex1, ex2), 2, 2, 3); if (rnnDataFormat == RNNFormat.NWC){ input = input.permute(0, 2, 1); } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/TestLastTimeStepLayer.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/TestLastTimeStepLayer.java index 170ab285f..4abcfa768 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/TestLastTimeStepLayer.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/TestLastTimeStepLayer.java @@ -49,7 +49,7 @@ import static org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction.MSE; public class TestLastTimeStepLayer extends BaseDL4JTest { - private RNNFormat rnnDataFormat; + private final RNNFormat rnnDataFormat; public TestLastTimeStepLayer(RNNFormat rnnDataFormat){ this.rnnDataFormat = rnnDataFormat; diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/TestRnnLayers.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/TestRnnLayers.java index b9f850453..b5fd0ac57 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/TestRnnLayers.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/TestRnnLayers.java @@ -48,13 +48,11 @@ import java.util.Arrays; import java.util.List; import java.util.Random; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static 
org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; public class TestRnnLayers extends BaseDL4JTest { - private RNNFormat rnnDataFormat; + private final RNNFormat rnnDataFormat; public TestRnnLayers(RNNFormat rnnDataFormat){ this.rnnDataFormat = rnnDataFormat; @@ -87,13 +85,13 @@ public class TestRnnLayers extends BaseDL4JTest { INDArray rnnInput3d = (rnnDataFormat==RNNFormat.NCW)?Nd4j.create(10,12, 1):Nd4j.create(10, 1, 12); INDArray simpleOut = simpleRnn.rnnTimeStep(rnnInput3d, LayerWorkspaceMgr.noWorkspaces()); - assertTrue(Arrays.equals(simpleOut.shape(), (rnnDataFormat==RNNFormat.NCW)?new long[] {10, 3, 1}:new long[]{10, 1, 3})); + assertArrayEquals(simpleOut.shape(), (rnnDataFormat == RNNFormat.NCW) ? new long[]{10, 3, 1} : new long[]{10, 1, 3}); INDArray rnnInput2d = Nd4j.create(10, 12); try { simpleRnn.rnnTimeStep(rnnInput2d, LayerWorkspaceMgr.noWorkspaces()); } catch (IllegalStateException e) { - assertTrue(e.getMessage().equals("3D input expected to RNN layer expected, got 2")); + assertEquals("3D input expected to RNN layer expected, got 2", e.getMessage()); } org.deeplearning4j.nn.layers.recurrent.LSTM lstm = @@ -101,13 +99,13 @@ public class TestRnnLayers extends BaseDL4JTest { INDArray lstmInput3d = (rnnDataFormat==RNNFormat.NCW)?Nd4j.create(10, 3, 1):Nd4j.create(10, 1, 3); INDArray lstmOut = lstm.rnnTimeStep(lstmInput3d, LayerWorkspaceMgr.noWorkspaces()); - assertTrue(Arrays.equals(lstmOut.shape(), (rnnDataFormat==RNNFormat.NCW)?new long[] {10, 5, 1}:new long[]{10, 1, 5})); + assertArrayEquals(lstmOut.shape(), (rnnDataFormat == RNNFormat.NCW) ? 
new long[]{10, 5, 1} : new long[]{10, 1, 5}); INDArray lstmInput2d = Nd4j.create(10, 3); try { lstm.rnnTimeStep(lstmInput2d, LayerWorkspaceMgr.noWorkspaces()); } catch (IllegalStateException e) { - assertTrue(e.getMessage().equals("3D input expected to RNN layer expected, got 2")); + assertEquals("3D input expected to RNN layer expected, got 2", e.getMessage()); } @@ -178,7 +176,7 @@ public class TestRnnLayers extends BaseDL4JTest { assertEquals(net.params(), netD.params(), s); assertEquals(net.params(), netD2.params(), s); - INDArray f = Nd4j.rand(DataType.FLOAT, new int[]{3, 10, 10}); + INDArray f = Nd4j.rand(DataType.FLOAT, 3, 10, 10); //Output: test mode -> no dropout INDArray out1 = net.output(f); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/TestSimpleRnn.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/TestSimpleRnn.java index 5fc4e8bb1..9d77537c8 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/TestSimpleRnn.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/TestSimpleRnn.java @@ -42,7 +42,7 @@ import static org.nd4j.linalg.indexing.NDArrayIndex.point; public class TestSimpleRnn extends BaseDL4JTest { - private RNNFormat rnnDataFormat; + private final RNNFormat rnnDataFormat; public TestSimpleRnn(RNNFormat rnnDataFormat){ this.rnnDataFormat = rnnDataFormat; diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/TestTimeDistributed.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/TestTimeDistributed.java index 6c9a55ed2..90a05de95 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/TestTimeDistributed.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/TestTimeDistributed.java @@ -49,7 +49,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; 
public class TestTimeDistributed extends BaseDL4JTest { - private RNNFormat rnnDataFormat; + private final RNNFormat rnnDataFormat; public TestTimeDistributed(RNNFormat rnnDataFormat){ this.rnnDataFormat = rnnDataFormat; diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffConv.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffConv.java index d9a331d0b..f0d5d16ce 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffConv.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffConv.java @@ -206,7 +206,7 @@ public class TestSameDiffConv extends BaseDL4JTest { } } - INDArray in = Nd4j.rand(new int[]{minibatch, nIn, imgH, imgW}); + INDArray in = Nd4j.rand(minibatch, nIn, imgH, imgW); INDArray out = net.output(in); INDArray outExp = net2.output(in); @@ -306,7 +306,7 @@ public class TestSameDiffConv extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - INDArray f = Nd4j.rand(new int[]{minibatch, nIn, imgH, imgW}); + INDArray f = Nd4j.rand(minibatch, nIn, imgH, imgW); INDArray l = TestUtils.randomOneHot(minibatch, nOut); log.info("Starting: " + msg); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/testlayers/SameDiffDense.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/testlayers/SameDiffDense.java index e49e6aca6..e84390916 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/testlayers/SameDiffDense.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/testlayers/SameDiffDense.java @@ -85,8 +85,8 @@ public class SameDiffDense extends SameDiffLayer { @Override public void defineParameters(SDLayerParams params) { params.clear(); - params.addWeightParam(DefaultParamInitializer.WEIGHT_KEY, new long[]{nIn, 
nOut}); - params.addBiasParam(DefaultParamInitializer.BIAS_KEY, new long[]{1, nOut}); + params.addWeightParam(DefaultParamInitializer.WEIGHT_KEY, nIn, nOut); + params.addBiasParam(DefaultParamInitializer.BIAS_KEY, 1, nOut); } @Override diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/testlayers/SameDiffMSEOutputLayer.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/testlayers/SameDiffMSEOutputLayer.java index 2e60b8461..41d149b3b 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/testlayers/SameDiffMSEOutputLayer.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/samediff/testlayers/SameDiffMSEOutputLayer.java @@ -35,10 +35,10 @@ import java.util.Map; public class SameDiffMSEOutputLayer extends SameDiffOutputLayer { - private int nIn; - private int nOut; - private Activation activation; - private WeightInit weightInit; + private final int nIn; + private final int nOut; + private final Activation activation; + private final WeightInit weightInit; public SameDiffMSEOutputLayer(int nIn, int nOut, Activation activation, WeightInit weightInit){ this.nIn = nIn; diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/variational/TestReconstructionDistributions.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/variational/TestReconstructionDistributions.java index 934ba63a8..7138a2a42 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/variational/TestReconstructionDistributions.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/variational/TestReconstructionDistributions.java @@ -60,7 +60,7 @@ public class TestReconstructionDistributions extends BaseDL4JTest { INDArray mean = Nd4j.randn(minibatch, inputSize); INDArray logStdevSquared = Nd4j.rand(minibatch, inputSize).subi(0.5); - INDArray distributionParams = 
Nd4j.createUninitialized(new int[] {minibatch, 2 * inputSize}); + INDArray distributionParams = Nd4j.createUninitialized(minibatch, 2 * inputSize); distributionParams.get(NDArrayIndex.all(), NDArrayIndex.interval(0, inputSize)).assign(mean); distributionParams.get(NDArrayIndex.all(), NDArrayIndex.interval(inputSize, 2 * inputSize)) .assign(logStdevSquared); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/variational/TestVAE.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/variational/TestVAE.java index e61614a1b..f535c81fa 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/variational/TestVAE.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/layers/variational/TestVAE.java @@ -206,7 +206,7 @@ public class TestVAE extends BaseDL4JTest { INDArray gArr = grads.get(p); assertArrayEquals(pArr.shape(), gvArr.shape()); - assertTrue(gvArr == gArr); //Should be the exact same object due to view mechanics + assertSame(gvArr, gArr); //Should be the exact same object due to view mechanics } } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/misc/TestNetConversion.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/misc/TestNetConversion.java index cd1ca1a28..fc8312630 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/misc/TestNetConversion.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/misc/TestNetConversion.java @@ -59,8 +59,8 @@ public class TestNetConversion extends BaseDL4JTest { default: throw new RuntimeException(); } - INDArray in = (i <= 1 ? Nd4j.rand(new int[]{8, 3, 10, 10}) : Nd4j.rand(new int[]{8, 5, 10})); - INDArray labels = (i <= 1 ? Nd4j.rand(new int[]{8, 10}) : Nd4j.rand(new int[]{8, 10, 10})); + INDArray in = (i <= 1 ? Nd4j.rand(8, 3, 10, 10) : Nd4j.rand(8, 5, 10)); + INDArray labels = (i <= 1 ? 
Nd4j.rand(8, 10) : Nd4j.rand(8, 10, 10)); ComputationGraph cg = n.toComputationGraph(); @@ -109,7 +109,7 @@ public class TestNetConversion extends BaseDL4JTest { if(train) { for (int i = 0; i < 3; i++) { - INDArray f = Nd4j.rand(new int[]{8, 3, 10, 10}); + INDArray f = Nd4j.rand(8, 3, 10, 10); INDArray l = Nd4j.rand(8, 10); net.fit(f, l); @@ -137,8 +137,8 @@ public class TestNetConversion extends BaseDL4JTest { net.init(); for (int i = 0; i < 3; i++) { - INDArray f = Nd4j.rand(new int[]{8, 5, 10}); - INDArray l = Nd4j.rand(new int[]{8, 10, 10}); + INDArray f = Nd4j.rand(8, 5, 10); + INDArray l = Nd4j.rand(8, 10, 10); net.fit(f, l); } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/misc/WorkspaceTests.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/misc/WorkspaceTests.java index ad57a4688..cf7d31bd5 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/misc/WorkspaceTests.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/misc/WorkspaceTests.java @@ -76,7 +76,7 @@ public class WorkspaceTests extends BaseDL4JTest { c.getConfiguration().setTrainingWorkspaceMode(wm); c.getConfiguration().setInferenceWorkspaceMode(wm); - INDArray f = Nd4j.rand(new int[]{8, 1, 28, 28}); + INDArray f = Nd4j.rand(8, 1, 28, 28); INDArray l = Nd4j.rand(8, 10); c.setInputs(f); c.setLabels(l); @@ -112,7 +112,7 @@ public class WorkspaceTests extends BaseDL4JTest { net2.getLayerWiseConfigurations().setInferenceWorkspaceMode(WorkspaceMode.NONE); net2.getLayerWiseConfigurations().setTrainingWorkspaceMode(WorkspaceMode.NONE); - INDArray in = Nd4j.rand(new int[]{1, 2, 5, 5}); + INDArray in = Nd4j.rand(1, 2, 5, 5); net.output(in); net2.output(in); //Op [add_scalar] X argument uses leaked workspace pointer from workspace [LOOP_EXTERNAL] @@ -175,7 +175,7 @@ public class WorkspaceTests extends BaseDL4JTest { } cg.setInputs(input); - cg.setLabels(Nd4j.rand(new int[]{1, 3, 5})); + cg.setLabels(Nd4j.rand(1, 3, 5)); 
cg.computeGradientAndScore(); } } @@ -207,7 +207,7 @@ public class WorkspaceTests extends BaseDL4JTest { } net.setInput(input); - net.setLabels(Nd4j.rand(new int[]{1, 3, 5})); + net.setLabels(Nd4j.rand(1, 3, 5)); net.computeGradientAndScore(); } } @@ -303,11 +303,11 @@ public class WorkspaceTests extends BaseDL4JTest { net2.init(); for (int j = 0; j < 3; j++) { - net.rnnTimeStep(Nd4j.rand(new int[]{3, 10, 5})); + net.rnnTimeStep(Nd4j.rand(3, 10, 5)); } for (int j = 0; j < 3; j++) { - net2.rnnTimeStep(Nd4j.rand(new int[]{3, 10, 5})); + net2.rnnTimeStep(Nd4j.rand(3, 10, 5)); } } } @@ -384,11 +384,11 @@ public class WorkspaceTests extends BaseDL4JTest { net2.init(); for (int j = 0; j < 3; j++) { - net.fit(Nd4j.rand(new int[]{3, 10, 20}), Nd4j.rand(new int[]{3, 10, 20})); + net.fit(Nd4j.rand(3, 10, 20), Nd4j.rand(3, 10, 20)); } for (int j = 0; j < 3; j++) { - net2.fit(new DataSet(Nd4j.rand(new int[]{3, 10, 20}), Nd4j.rand(new int[]{3, 10, 20}))); + net2.fit(new DataSet(Nd4j.rand(3, 10, 20), Nd4j.rand(3, 10, 20))); } } } @@ -625,7 +625,7 @@ public class WorkspaceTests extends BaseDL4JTest { mlc.setTrainingWorkspaceMode(wm); mlc.setInferenceWorkspaceMode(wm); - INDArray f = Nd4j.rand(new int[]{1, 1, 5, 5}); + INDArray f = Nd4j.rand(1, 1, 5, 5); INDArray l = Nd4j.rand(1, 10); DataSet ds = new DataSet(f,l); @@ -669,7 +669,7 @@ public class WorkspaceTests extends BaseDL4JTest { c.getConfiguration().setTrainingWorkspaceMode(wm); c.getConfiguration().setInferenceWorkspaceMode(wm); - INDArray f = Nd4j.rand(new int[]{8, 1, 28, 28}); + INDArray f = Nd4j.rand(8, 1, 28, 28); INDArray l = Nd4j.rand(8, 10); DataSet ds = new DataSet(f,l); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/mkldnn/ValidateMKLDNN.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/mkldnn/ValidateMKLDNN.java index 0f6337502..695fdb70d 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/mkldnn/ValidateMKLDNN.java +++ 
b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/mkldnn/ValidateMKLDNN.java @@ -110,7 +110,7 @@ public class ValidateMKLDNN extends BaseDL4JTest { String name = pt + ", mb=" + minibatch + ", cm=" + cm + ", kernel=" + Arrays.toString(kernel) + ", stride=" + Arrays.toString(stride); LayerHelperValidationUtil.TestCase tc = LayerHelperValidationUtil.TestCase.builder() .testName(name) - .allowHelpersForClasses(Arrays.>asList(org.deeplearning4j.nn.layers.convolution.subsampling.SubsamplingLayer.class, + .allowHelpersForClasses(Arrays.asList(org.deeplearning4j.nn.layers.convolution.subsampling.SubsamplingLayer.class, org.deeplearning4j.nn.layers.convolution.ConvolutionLayer.class)) .testForward(true) .testScore(true) @@ -179,7 +179,7 @@ public class ValidateMKLDNN extends BaseDL4JTest { netWithout.init(); LayerHelperValidationUtil.TestCase tc = LayerHelperValidationUtil.TestCase.builder() - .allowHelpersForClasses(Collections.>singletonList(org.deeplearning4j.nn.layers.normalization.BatchNormalization.class)) + .allowHelpersForClasses(Collections.singletonList(org.deeplearning4j.nn.layers.normalization.BatchNormalization.class)) .testForward(true) .testScore(true) .testBackward(true) @@ -252,7 +252,7 @@ public class ValidateMKLDNN extends BaseDL4JTest { netWithout.init(); LayerHelperValidationUtil.TestCase tc = LayerHelperValidationUtil.TestCase.builder() - .allowHelpersForClasses(Collections.>singletonList(org.deeplearning4j.nn.layers.normalization.LocalResponseNormalization.class)) + .allowHelpersForClasses(Collections.singletonList(org.deeplearning4j.nn.layers.normalization.LocalResponseNormalization.class)) .testForward(true) .testScore(true) .testBackward(true) diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java index c8e758feb..056f4a43e 100644 --- 
a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java @@ -288,9 +288,9 @@ public class MultiLayerTest extends BaseDL4JTest { log.info("Testing full cycle..."); - List comparableResult = model.feedForward(Nd4j.create(trainingData[0], new long[]{1, trainingData[0].length})); + List comparableResult = model.feedForward(Nd4j.create(trainingData[0], 1, trainingData[0].length)); - INDArray encodeResult = model.activateSelectedLayers(0, 4, Nd4j.create(trainingData[0], new long[]{1, trainingData[0].length})); + INDArray encodeResult = model.activateSelectedLayers(0, 4, Nd4j.create(trainingData[0], 1, trainingData[0].length)); log.info("Compare feedForward results with selectedActivation"); @@ -541,8 +541,8 @@ public class MultiLayerTest extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - INDArray in = Nd4j.create(new double[] {1.0, 2.0, 3.0, 4.0}, new long[]{1, 4}); - INDArray out = Nd4j.create(new double[] {1, 0, 0}, new long[]{1,3}); + INDArray in = Nd4j.create(new double[] {1.0, 2.0, 3.0, 4.0}, 1, 4); + INDArray out = Nd4j.create(new double[] {1, 0, 0}, 1,3); double score = net.score(new DataSet(in, out)); } @@ -599,8 +599,8 @@ public class MultiLayerTest extends BaseDL4JTest { testData.setLabelNames(Arrays.asList("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); String actualLables = testData.getLabelName(0); List prediction = net.predict(testData); - assertTrue(actualLables != null); - assertTrue(prediction.get(0) != null); + assertNotNull(actualLables); + assertNotNull(prediction.get(0)); } @Test @@ -611,7 +611,7 @@ public class MultiLayerTest extends BaseDL4JTest { Environment environment = EnvironmentUtils.buildEnvironment(); environment.setSerialVersionID(EnvironmentUtils.buildCId()); - Task task = TaskUtils.buildTask(Nd4j.create(new double[] {1, 2, 3, 4, 5, 6}, new long[]{1,6})); + Task 
task = TaskUtils.buildTask(Nd4j.create(new double[] {1, 2, 3, 4, 5, 6}, 1,6)); Heartbeat.getInstance().reportEvent(Event.STANDALONE, environment, task); @@ -710,7 +710,7 @@ public class MultiLayerTest extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - INDArray inputWrongDepth = Nd4j.rand(new int[]{miniBatch, 5, height, width}); //Order: examples, channels, height, width + INDArray inputWrongDepth = Nd4j.rand(miniBatch, 5, height, width); //Order: examples, channels, height, width net.feedForward(inputWrongDepth); }); } @@ -1419,7 +1419,7 @@ public class MultiLayerTest extends BaseDL4JTest { INDArray bb1 = ((Yolo2OutputLayer)conf.getConf(1).getLayer()).getBoundingBoxes(); INDArray bb2 = ((Yolo2OutputLayer)conf2.getConf(1).getLayer()).getBoundingBoxes(); - assertFalse(bb1 == bb2); + assertNotSame(bb1, bb2); assertEquals(bb1, bb2); } @@ -1475,8 +1475,8 @@ public class MultiLayerTest extends BaseDL4JTest { soFar += 3*2; INDArray m1b = viewArray.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(soFar, soFar+2)).assign(3); //m1b soFar += 2; - INDArray m2w = viewArray.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(soFar, soFar+2*1)).assign(4); //m2w - soFar += 2*1; + INDArray m2w = viewArray.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(soFar, soFar+ 2)).assign(4); //m2w + soFar += 2; INDArray m2b = viewArray.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(soFar, soFar+1)).assign(5); //m2b soFar += 1; @@ -1488,8 +1488,8 @@ public class MultiLayerTest extends BaseDL4JTest { soFar += 3*2; INDArray v1b = viewArray.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(soFar, soFar+2)).assign(9); //v1b soFar += 2; - INDArray v2w = viewArray.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(soFar, soFar+2*1)).assign(10); //v2w - soFar += 2*1; + INDArray v2w = viewArray.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(soFar, soFar+ 2)).assign(10); //v2w + soFar += 2; 
INDArray v2b = viewArray.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(soFar, soFar+1)).assign(11); //v2b soFar += 1; diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTestRNN.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTestRNN.java index 6f1b3f732..5064e44ab 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTestRNN.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTestRNN.java @@ -86,7 +86,7 @@ public class MultiLayerTestRNN extends BaseDL4JTest { assertTrue(layer instanceof GravesLSTM); Map paramTable = layer.paramTable(); - assertTrue(paramTable.size() == 3); //2 sets of weights, 1 set of biases + assertEquals(3, paramTable.size()); //2 sets of weights, 1 set of biases INDArray recurrentWeights = paramTable.get(GravesLSTMParamInitializer.RECURRENT_WEIGHT_KEY); assertArrayEquals(recurrentWeights.shape(), new long[] {nHiddenUnits, 4 * nHiddenUnits + 3}); //Should be shape: [layerSize,4*layerSize+3] @@ -104,7 +104,7 @@ public class MultiLayerTestRNN extends BaseDL4JTest { assertEquals(nHiddenUnits, count); val nParams = recurrentWeights.length() + inputWeights.length() + biases.length(); - assertTrue(nParams == layer.numParams()); + assertEquals(nParams, layer.numParams()); } @Test @@ -131,7 +131,7 @@ public class MultiLayerTestRNN extends BaseDL4JTest { assertTrue(layer instanceof GravesLSTM); Map paramTable = layer.paramTable(); - assertTrue(paramTable.size() == 3); //2 sets of weights, 1 set of biases + assertEquals(3, paramTable.size()); //2 sets of weights, 1 set of biases int layerNIn = (i == 0 ? 
nIn : nHiddenUnits[i - 1]); @@ -151,7 +151,7 @@ public class MultiLayerTestRNN extends BaseDL4JTest { assertEquals(nHiddenUnits[i], (int)count); val nParams = recurrentWeights.length() + inputWeights.length() + biases.length(); - assertTrue(nParams == layer.numParams()); + assertEquals(nParams, layer.numParams()); } } @@ -181,20 +181,20 @@ public class MultiLayerTestRNN extends BaseDL4JTest { .build(); MultiLayerNetwork mln = new MultiLayerNetwork(conf); - INDArray input = Nd4j.rand(new int[] {3, 5, timeSeriesLength}); + INDArray input = Nd4j.rand(3, 5, timeSeriesLength); List allOutputActivations = mln.feedForward(input, true); INDArray outAct = allOutputActivations.get(3); INDArray outRnnTimeStep = mln.rnnTimeStep(input); - assertTrue(outAct.equals(outRnnTimeStep)); //Should be identical here + assertEquals(outAct, outRnnTimeStep); //Should be identical here Map currStateL0 = mln.rnnGetPreviousState(0); Map currStateL1 = mln.rnnGetPreviousState(1); - assertTrue(currStateL0.size() == 2); - assertTrue(currStateL1.size() == 2); + assertEquals(2, currStateL0.size()); + assertEquals(2, currStateL1.size()); INDArray lastActL0 = currStateL0.get(GravesLSTM.STATE_KEY_PREV_ACTIVATION); INDArray lastMemL0 = currStateL0.get(GravesLSTM.STATE_KEY_PREV_MEMCELL); @@ -205,10 +205,10 @@ public class MultiLayerTestRNN extends BaseDL4JTest { assertTrue(lastActL1 != null && lastMemL1 != null); INDArray expectedLastActL0 = allOutputActivations.get(1).tensorAlongDimension(timeSeriesLength - 1, 1, 0); - assertTrue(expectedLastActL0.equals(lastActL0)); + assertEquals(expectedLastActL0, lastActL0); INDArray expectedLastActL1 = allOutputActivations.get(2).tensorAlongDimension(timeSeriesLength - 1, 1, 0); - assertTrue(expectedLastActL1.equals(lastActL1)); + assertEquals(expectedLastActL1, lastActL1); //Check clearing and setting of state: mln.rnnClearPreviousState(); @@ -216,9 +216,9 @@ public class MultiLayerTestRNN extends BaseDL4JTest { assertTrue(mln.rnnGetPreviousState(1).isEmpty()); 
mln.rnnSetPreviousState(0, currStateL0); - assertTrue(mln.rnnGetPreviousState(0).size() == 2); + assertEquals(2, mln.rnnGetPreviousState(0).size()); mln.rnnSetPreviousState(1, currStateL1); - assertTrue(mln.rnnGetPreviousState(1).size() == 2); + assertEquals(2, mln.rnnGetPreviousState(1).size()); } @Test @@ -278,7 +278,7 @@ public class MultiLayerTestRNN extends BaseDL4JTest { .inputPreProcessor(3, new FeedForwardToRnnPreProcessor()).build(); MultiLayerNetwork mln = new MultiLayerNetwork(conf); - INDArray input = Nd4j.rand(new int[]{3, 5, timeSeriesLength}); + INDArray input = Nd4j.rand(3, 5, timeSeriesLength); List allOutputActivations = mln.feedForward(input, true); INDArray fullOutL0 = allOutputActivations.get(1); @@ -311,7 +311,7 @@ public class MultiLayerTestRNN extends BaseDL4JTest { NDArrayIndex.interval(startTimeRange, endTimeRange)); } if (inLength > 1) - assertTrue(inputSubset.size(2) == inLength); + assertEquals(inputSubset.size(2), inLength); INDArray out = mln.rnnTimeStep(inputSubset); @@ -389,12 +389,12 @@ public class MultiLayerTestRNN extends BaseDL4JTest { //Check same but for input of size [3,5,1]. 
Expect [3,4,1] out mln.rnnClearPreviousState(); for (int i = 0; i < timeSeriesLength; i++) { - INDArray temp = Nd4j.create(new int[] {3, 5, 1}); + INDArray temp = Nd4j.create(3, 5, 1); temp.tensorAlongDimension(0, 1, 0).assign(input3d.tensorAlongDimension(i, 1, 0)); INDArray out3dSlice = mln.rnnTimeStep(temp); assertArrayEquals(out3dSlice.shape(), new long[] {3, 4, 1}); - assertTrue(out3dSlice.tensorAlongDimension(0, 1, 0).equals(out3d.tensorAlongDimension(i, 1, 0))); + assertEquals(out3dSlice.tensorAlongDimension(0, 1, 0), out3d.tensorAlongDimension(i, 1, 0)); } } @@ -460,8 +460,8 @@ public class MultiLayerTestRNN extends BaseDL4JTest { assertEquals(timeSeriesLength, mlnTBPTT.getLayerWiseConfigurations().getTbpttFwdLength()); assertEquals(timeSeriesLength, mlnTBPTT.getLayerWiseConfigurations().getTbpttBackLength()); - INDArray inputData = Nd4j.rand(new int[] {miniBatchSize, nIn, timeSeriesLength}); - INDArray labels = Nd4j.rand(new int[] {miniBatchSize, nOut, timeSeriesLength}); + INDArray inputData = Nd4j.rand(miniBatchSize, nIn, timeSeriesLength); + INDArray labels = Nd4j.rand(miniBatchSize, nOut, timeSeriesLength); mln.setInput(inputData); mln.setLabels(labels); @@ -542,7 +542,7 @@ public class MultiLayerTestRNN extends BaseDL4JTest { MultiLayerNetwork mln = new MultiLayerNetwork(conf); mln.init(); - INDArray inputLong = Nd4j.rand(new int[] {miniBatchSize, nIn, nTimeSlices * timeSeriesLength}); + INDArray inputLong = Nd4j.rand(miniBatchSize, nIn, nTimeSlices * timeSeriesLength); INDArray input = inputLong.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, timeSeriesLength)); @@ -624,8 +624,8 @@ public class MultiLayerTestRNN extends BaseDL4JTest { MultiLayerNetwork mln = new MultiLayerNetwork(conf); mln.init(); - INDArray inputLong = Nd4j.rand(new int[] {miniBatchSize, nIn, nTimeSlices * timeSeriesLength}); - INDArray labelsLong = Nd4j.rand(new int[] {miniBatchSize, nOut, nTimeSlices * timeSeriesLength}); + INDArray inputLong = 
Nd4j.rand(miniBatchSize, nIn, nTimeSlices * timeSeriesLength); + INDArray labelsLong = Nd4j.rand(miniBatchSize, nOut, nTimeSlices * timeSeriesLength); mln.fit(inputLong, labelsLong); } @@ -661,8 +661,8 @@ public class MultiLayerTestRNN extends BaseDL4JTest { MultiLayerNetwork mln = new MultiLayerNetwork(conf); mln.init(); - INDArray features = Nd4j.rand(new int[] {miniBatchSize, nIn, timeSeriesLength}); - INDArray labels = Nd4j.rand(new int[] {miniBatchSize, nOut, timeSeriesLength}); + INDArray features = Nd4j.rand(miniBatchSize, nIn, timeSeriesLength); + INDArray labels = Nd4j.rand(miniBatchSize, nOut, timeSeriesLength); INDArray maskArrayInput = Nd4j.ones(miniBatchSize, timeSeriesLength); INDArray maskArrayOutput = Nd4j.ones(miniBatchSize, timeSeriesLength); @@ -743,8 +743,8 @@ public class MultiLayerTestRNN extends BaseDL4JTest { MultiLayerNetwork mln = new MultiLayerNetwork(conf); mln.init(); - INDArray features = Nd4j.rand(new int[] {miniBatchSize, nIn, timeSeriesLength}); - INDArray labels = Nd4j.rand(new int[] {miniBatchSize, nOut, timeSeriesLength}); + INDArray features = Nd4j.rand(miniBatchSize, nIn, timeSeriesLength); + INDArray labels = Nd4j.rand(miniBatchSize, nOut, timeSeriesLength); INDArray maskArrayInput = Nd4j.ones(miniBatchSize, timeSeriesLength); INDArray maskArrayOutput = Nd4j.ones(miniBatchSize, timeSeriesLength); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/TestMasking.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/TestMasking.java index 420417296..c4c3067a9 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/TestMasking.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/TestMasking.java @@ -105,7 +105,7 @@ public class TestMasking extends BaseDL4JTest { int nIn = 6; int layerSize = 4; - INDArray mask1 = Nd4j.create(new double[] {1, 0, 0, 1, 0}, new long[]{1,5}); + INDArray mask1 = Nd4j.create(new double[] 
{1, 0, 0, 1, 0}, 1,5); INDArray mask3 = Nd4j.create(new double[][] {{1, 1, 1, 1, 1}, {0, 1, 0, 1, 0}, {1, 0, 0, 1, 1}}); INDArray[] labelMasks = new INDArray[] {mask1, mask3}; diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/TestSetGetParameters.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/TestSetGetParameters.java index ff5efa35a..cb9536e3d 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/TestSetGetParameters.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/TestSetGetParameters.java @@ -63,7 +63,7 @@ public class TestSetGetParameters extends BaseDL4JTest { Map initParams2After = net.paramTable(); for (String s : initParams2.keySet()) { - assertTrue(initParams2.get(s).equals(initParams2After.get(s)), "Params differ: " + s); + assertEquals(initParams2.get(s), initParams2After.get(s), "Params differ: " + s); } assertEquals(initParams, initParamsAfter); @@ -100,7 +100,7 @@ public class TestSetGetParameters extends BaseDL4JTest { Map initParams2After = net.paramTable(); for (String s : initParams2.keySet()) { - assertTrue( initParams2.get(s).equals(initParams2After.get(s)), "Params differ: " + s); + assertEquals(initParams2.get(s), initParams2After.get(s), "Params differ: " + s); } assertEquals(initParams, initParamsAfter); @@ -141,8 +141,8 @@ public class TestSetGetParameters extends BaseDL4JTest { assertEquals(params, net2.params()); assertEquals(params, net3.params()); - assertFalse(params == net2.params()); //Different objects due to clone - assertTrue(params == net3.params()); //Same object due to clone + assertNotSame(params, net2.params()); //Different objects due to clone + assertSame(params, net3.params()); //Same object due to clone Map paramsMap = net.paramTable(); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/TestVariableLengthTS.java 
b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/TestVariableLengthTS.java index 5212865f6..5d5daed14 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/TestVariableLengthTS.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/multilayer/TestVariableLengthTS.java @@ -83,14 +83,14 @@ public class TestVariableLengthTS extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - INDArray in1 = Nd4j.rand(new int[] {nExamples, 2, 4}); - INDArray in2 = Nd4j.rand(new int[] {nExamples, 2, 5}); + INDArray in1 = Nd4j.rand(nExamples, 2, 4); + INDArray in2 = Nd4j.rand(nExamples, 2, 5); in2.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 3, true)}, in1); assertEquals(in1, in2.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 4))); - INDArray labels1 = Nd4j.rand(new int[] {nExamples, 1, 4}); + INDArray labels1 = Nd4j.rand(nExamples, 1, 4); INDArray labels2 = Nd4j.create(nExamples, 1, 5); labels2.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 3, true)}, labels1); @@ -176,14 +176,14 @@ public class TestVariableLengthTS extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - INDArray in1 = Nd4j.rand(new int[] {nExamples, 2, 4}); - INDArray in2 = Nd4j.rand(new int[] {nExamples, 2, 5}); + INDArray in1 = Nd4j.rand(nExamples, 2, 4); + INDArray in2 = Nd4j.rand(nExamples, 2, 5); in2.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 3, true)}, in1); assertEquals(in1, in2.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 4))); - INDArray labels1 = Nd4j.rand(new int[] {nExamples, 1, 4}); + INDArray labels1 = Nd4j.rand(nExamples, 1, 4); INDArray labels2 = Nd4j.create(nExamples, 1, 5); labels2.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 3, true)}, 
labels1); @@ -302,7 +302,7 @@ public class TestVariableLengthTS extends BaseDL4JTest { } } - INDArray input = Nd4j.rand(new int[] {miniBatch, nIn, tsLength}); + INDArray input = Nd4j.rand(miniBatch, nIn, tsLength); INDArray labels = Nd4j.ones(miniBatch, nOut, tsLength); MultiLayerConfiguration conf = @@ -366,7 +366,7 @@ public class TestVariableLengthTS extends BaseDL4JTest { } } - INDArray input = Nd4j.rand(new int[] {miniBatch, nIn, tsLength}); + INDArray input = Nd4j.rand(miniBatch, nIn, tsLength); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345L).list() @@ -455,8 +455,8 @@ public class TestVariableLengthTS extends BaseDL4JTest { int tsLength = 5; int minibatch = 3; - INDArray input = Nd4j.rand(new int[] {minibatch, nIn, tsLength}); - INDArray labels = Nd4j.rand(new int[] {minibatch, nOut, tsLength}); + INDArray input = Nd4j.rand(minibatch, nIn, tsLength); + INDArray labels = Nd4j.rand(minibatch, nOut, tsLength); INDArray featuresMask = Nd4j.create(new double[][] {{1, 1, 1, 1, 1}, {1, 1, 1, 1, 0}, {1, 1, 1, 0, 0}}); INDArray labelsMask = featuresMask.dup(); @@ -537,8 +537,8 @@ public class TestVariableLengthTS extends BaseDL4JTest { int tsLength = 5; int minibatch = 3; - INDArray input = Nd4j.rand(new int[] {minibatch, nIn, tsLength}); - INDArray labels = Nd4j.rand(new int[] {minibatch, nOut}); + INDArray input = Nd4j.rand(minibatch, nIn, tsLength); + INDArray labels = Nd4j.rand(minibatch, nOut); INDArray featuresMask = Nd4j.create(new double[][] {{1, 1, 1, 1, 1}, {1, 1, 1, 1, 0}, {1, 1, 1, 0, 0}}); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TestFrozenLayers.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TestFrozenLayers.java index e98680c51..ecda6b48a 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TestFrozenLayers.java +++ 
b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/transferlearning/TestFrozenLayers.java @@ -81,8 +81,8 @@ public class TestFrozenLayers extends BaseDL4JTest { } for( int i=0; i<20; i++ ){ - INDArray f = Nd4j.rand(new int[]{16,1,28,28}); - INDArray l = Nd4j.rand(new int[]{16,10}); + INDArray f = Nd4j.rand(16,1,28,28); + INDArray l = Nd4j.rand(16,10); transfer.fit(f,l); } @@ -133,8 +133,8 @@ public class TestFrozenLayers extends BaseDL4JTest { } for( int i=0; i<20; i++ ){ - INDArray f = Nd4j.rand(new int[]{16,1,28,28}); - INDArray l = Nd4j.rand(new int[]{16,10}); + INDArray f = Nd4j.rand(16,1,28,28); + INDArray l = Nd4j.rand(16,10); transfer.fit(new INDArray[]{f},new INDArray[]{l}); } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/updater/TestUpdaters.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/updater/TestUpdaters.java index d9735fb89..462143897 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/updater/TestUpdaters.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/updater/TestUpdaters.java @@ -340,7 +340,7 @@ public class TestUpdaters extends BaseDL4JTest { actualM[i] = Math.round(actualM[i] * 1e2) / 1e2; } - assertTrue( Arrays.equals(expectedM, actualM), "Wrong weight gradient after first iteration's update"); + assertArrayEquals(expectedM, actualM, "Wrong weight gradient after first iteration's update"); } @@ -592,7 +592,7 @@ public class TestUpdaters extends BaseDL4JTest { Updater updater = net.getUpdater(); assertNotNull(updater); - assertTrue(updater.getClass() == MultiLayerUpdater.class); + assertSame(updater.getClass(), MultiLayerUpdater.class); MultiLayerUpdater mlu = (MultiLayerUpdater) updater; @@ -695,7 +695,7 @@ public class TestUpdaters extends BaseDL4JTest { Updater newUpdater = UpdaterCreator.getUpdater(net); net.setUpdater(newUpdater); - assertTrue(newUpdater == net.getUpdater()); //Should be identical object + assertSame(newUpdater, 
net.getUpdater()); //Should be identical object } @Test @@ -722,7 +722,7 @@ public class TestUpdaters extends BaseDL4JTest { Updater newUpdater = UpdaterCreator.getUpdater(net); net.setUpdater(newUpdater); - assertTrue(newUpdater == net.getUpdater()); //Should be identical object + assertSame(newUpdater, net.getUpdater()); //Should be identical object } @Test diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/updater/custom/TestCustomUpdater.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/updater/custom/TestCustomUpdater.java index 703d56eb2..170c6bdc1 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/updater/custom/TestCustomUpdater.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/updater/custom/TestCustomUpdater.java @@ -103,7 +103,7 @@ public class TestCustomUpdater extends BaseDL4JTest { net2.setLabels(labels); net1.computeGradientAndScore(); - net2.computeGradientAndScore();; + net2.computeGradientAndScore(); assertEquals(net1.getFlattenedGradients(), net2.getFlattenedGradients()); } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/util/TestDataSetConsumer.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/util/TestDataSetConsumer.java index cbc94f1f2..54ea33fe7 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/util/TestDataSetConsumer.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/nn/util/TestDataSetConsumer.java @@ -30,8 +30,8 @@ import java.util.concurrent.atomic.AtomicLong; public class TestDataSetConsumer { private DataSetIterator iterator; - private long delay; - private AtomicLong count = new AtomicLong(0); + private final long delay; + private final AtomicLong count = new AtomicLong(0); protected static final Logger logger = LoggerFactory.getLogger(TestDataSetConsumer.class); public TestDataSetConsumer(long delay) { diff --git 
a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimize/solver/TestOptimizers.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimize/solver/TestOptimizers.java index b17032fdd..5b7bec134 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimize/solver/TestOptimizers.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimize/solver/TestOptimizers.java @@ -65,6 +65,7 @@ import java.util.Collection; import java.util.Collections; import java.util.Map; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; public class TestOptimizers extends BaseDL4JTest { @@ -123,7 +124,7 @@ public class TestOptimizers extends BaseDL4JTest { } double scoreAfter = network.score(ds); scores[i + 1] = scoreAfter; - assertTrue( !Double.isNaN(scoreAfter), "Score is NaN after optimization"); + assertFalse(Double.isNaN(scoreAfter), "Score is NaN after optimization"); assertTrue( scoreAfter <= score, "OA= " + oa + ", before= " + score + ", after= " + scoreAfter); score = scoreAfter; } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimize/solver/accumulation/SmartFancyBlockingQueueTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimize/solver/accumulation/SmartFancyBlockingQueueTest.java index 78dbb6d14..d9b7e86e5 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimize/solver/accumulation/SmartFancyBlockingQueueTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimize/solver/accumulation/SmartFancyBlockingQueueTest.java @@ -50,7 +50,7 @@ public class SmartFancyBlockingQueueTest extends BaseDL4JTest { for (int e = 0; e < 6; e++) { queue.put(Nd4j.create(5, 5).assign(e)); - }; + } assertEquals(6, queue.size()); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimizer/listener/ScoreStatTest.java 
b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimizer/listener/ScoreStatTest.java index 8d3b3751e..a7ac0905b 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimizer/listener/ScoreStatTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimizer/listener/ScoreStatTest.java @@ -34,18 +34,18 @@ public class ScoreStatTest extends BaseDL4JTest { public void testScoreStatSmall() { CollectScoresIterationListener.ScoreStat statTest = new CollectScoresIterationListener.ScoreStat(); for (int i = 0; i < CollectScoresIterationListener.ScoreStat.BUCKET_LENGTH; ++i) { - double score = (double)i; + double score = i; statTest.addScore(i, score); } List indexes = statTest.getIndexes(); List scores = statTest.getScores(); - assertTrue(indexes.size() == 1); - assertTrue(scores.size() == 1); + assertEquals(1, indexes.size()); + assertEquals(1, scores.size()); - assertTrue(indexes.get(0).length == CollectScoresIterationListener.ScoreStat.BUCKET_LENGTH); - assertTrue(scores.get(0).length == CollectScoresIterationListener.ScoreStat.BUCKET_LENGTH); + assertEquals(CollectScoresIterationListener.ScoreStat.BUCKET_LENGTH, indexes.get(0).length); + assertEquals(CollectScoresIterationListener.ScoreStat.BUCKET_LENGTH, scores.get(0).length); assertEquals(indexes.get(0)[indexes.get(0).length-1], CollectScoresIterationListener.ScoreStat.BUCKET_LENGTH-1); assertEquals(scores.get(0)[scores.get(0).length-1], CollectScoresIterationListener.ScoreStat.BUCKET_LENGTH-1, 1e-4); } @@ -109,12 +109,12 @@ public class ScoreStatTest extends BaseDL4JTest { List indexes = statTest.getIndexes(); List scores = statTest.getScores(); - assertTrue(indexes.size() == 2); - assertTrue(scores.size() == 2); + assertEquals(2, indexes.size()); + assertEquals(2, scores.size()); for (int i = 0; i < 5; ++i) { - assertTrue(indexes.get(1)[i] == Integer.MAX_VALUE + i); - assertTrue(scores.get(1)[i] == Integer.MAX_VALUE + i); + assertEquals(indexes.get(1)[i], 
Integer.MAX_VALUE + i); + assertEquals(scores.get(1)[i], Integer.MAX_VALUE + i); } } diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimizer/listener/TestListeners.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimizer/listener/TestListeners.java index 47430c8c3..55b1d39c8 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimizer/listener/TestListeners.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/optimizer/listener/TestListeners.java @@ -268,7 +268,7 @@ public class TestListeners extends BaseDL4JTest { assertEquals(exp, tl.getCalls()); } - private static enum Call { + private enum Call { ITER_DONE, EPOCH_START, EPOCH_END, diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ArrayUtilTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ArrayUtilTest.java index 696faf3f9..edd398223 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ArrayUtilTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ArrayUtilTest.java @@ -27,6 +27,7 @@ import org.nd4j.common.util.ArrayUtil; import java.util.Arrays; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * @@ -37,11 +38,11 @@ public class ArrayUtilTest extends BaseDL4JTest { public void testRange() { int[] range = ArrayUtil.range(0, 2); int[] test = {0, 1}; - assertEquals(true, Arrays.equals(test, range)); + assertTrue(Arrays.equals(test, range)); int[] test2 = {-1, 0}; int[] range2 = ArrayUtil.range(-1, 1); - assertEquals(true, Arrays.equals(test2, range2)); + assertTrue(Arrays.equals(test2, range2)); } @@ -52,16 +53,16 @@ public class ArrayUtilTest extends BaseDL4JTest { int[] fortranStyleStride = {1, 5, 20}; int[] fortranStyleTest = ArrayUtil.calcStridesFortran(shape); int[] cStyleTest = ArrayUtil.calcStrides(shape); - assertEquals(true, Arrays.equals(cStyleStride, 
cStyleTest)); - assertEquals(true, Arrays.equals(fortranStyleStride, fortranStyleTest)); + assertTrue(Arrays.equals(cStyleStride, cStyleTest)); + assertTrue(Arrays.equals(fortranStyleStride, fortranStyleTest)); int[] shape2 = {2, 2}; int[] cStyleStride2 = {2, 1}; int[] fortranStyleStride2 = {1, 2}; int[] cStyleTest2 = ArrayUtil.calcStrides(shape2); int[] fortranStyleTest2 = ArrayUtil.calcStridesFortran(shape2); - assertEquals(true, Arrays.equals(cStyleStride2, cStyleTest2)); - assertEquals(true, Arrays.equals(fortranStyleStride2, fortranStyleTest2)); + assertTrue(Arrays.equals(cStyleStride2, cStyleTest2)); + assertTrue(Arrays.equals(fortranStyleStride2, fortranStyleTest2)); diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ModelGuesserTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ModelGuesserTest.java index 74fbd476a..2ff1c481d 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ModelGuesserTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ModelGuesserTest.java @@ -99,7 +99,7 @@ public class ModelGuesserTest extends BaseDL4JTest { ModelSerializer.writeModel(net, tempFile, true); NormalizerMinMaxScaler normalizer = new NormalizerMinMaxScaler(0, 1); - normalizer.fit(new DataSet(Nd4j.rand(new int[] {2, 2}), Nd4j.rand(new int[] {2, 2}))); + normalizer.fit(new DataSet(Nd4j.rand(2, 2), Nd4j.rand(2, 2))); ModelSerializer.addNormalizerToModel(tempFile, normalizer); Model model = ModelGuesser.loadModelGuess(tempFile.getAbsolutePath()); Normalizer normalizer1 = ModelGuesser.loadNormalizer(tempFile.getAbsolutePath()); @@ -116,7 +116,7 @@ public class ModelGuesserTest extends BaseDL4JTest { File tempFile = new File(testDir, "testNormalizerInPlace.bin"); NormalizerMinMaxScaler normalizer = new NormalizerMinMaxScaler(0, 1); - normalizer.fit(new DataSet(Nd4j.rand(new int[] {2, 2}), Nd4j.rand(new int[] {2, 2}))); + normalizer.fit(new DataSet(Nd4j.rand(2, 2), Nd4j.rand(2, 
2))); ModelSerializer.writeModel(net, tempFile, true,normalizer); Model model = ModelGuesser.loadModelGuess(tempFile.getAbsolutePath()); @@ -135,7 +135,7 @@ public class ModelGuesserTest extends BaseDL4JTest { ModelSerializer.writeModel(net, tempFile, true); NormalizerMinMaxScaler normalizer = new NormalizerMinMaxScaler(0, 1); - normalizer.fit(new DataSet(Nd4j.rand(new int[] {2, 2}), Nd4j.rand(new int[] {2, 2}))); + normalizer.fit(new DataSet(Nd4j.rand(2, 2), Nd4j.rand(2, 2))); ModelSerializer.addNormalizerToModel(tempFile, normalizer); Model model = ModelGuesser.loadModelGuess(tempFile.getAbsolutePath()); try (InputStream inputStream = new FileInputStream(tempFile)) { diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ModelSerializerTest.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ModelSerializerTest.java index d1b1c3e02..610cb0961 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ModelSerializerTest.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/ModelSerializerTest.java @@ -248,7 +248,7 @@ public class ModelSerializerTest extends BaseDL4JTest { NormalizerStandardize restored = ModelSerializer.restoreNormalizerFromInputStream(fis); - assertEquals(null, restored); + assertNull(restored); } @Test diff --git a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/TestUIDProvider.java b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/TestUIDProvider.java index a23f3d513..363b90dd6 100644 --- a/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/TestUIDProvider.java +++ b/cavis-dnn/cavis-dnn-core/src/test/java/org/deeplearning4j/util/TestUIDProvider.java @@ -24,10 +24,7 @@ import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.core.util.UIDProvider; import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import 
static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; public class TestUIDProvider extends BaseDL4JTest { @@ -40,8 +37,8 @@ public class TestUIDProvider extends BaseDL4JTest { assertNotNull(jvmUID); assertNotNull(hardwareUID); - assertTrue(!jvmUID.isEmpty()); - assertTrue(!hardwareUID.isEmpty()); + assertFalse(jvmUID.isEmpty()); + assertFalse(hardwareUID.isEmpty()); assertEquals(jvmUID, UIDProvider.getJVMUID()); assertEquals(hardwareUID, UIDProvider.getHardwareUID()); diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/base/IrisUtils.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/base/IrisUtils.java index 74c2208f0..d48d30586 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/base/IrisUtils.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/base/IrisUtils.java @@ -74,7 +74,7 @@ public class IrisUtils { } for (int i = 0; i < ret.rows(); i++) { - DataSet add = new DataSet(ret.getRow(i, true), Nd4j.create(outcomes[from + i], new long[]{1,3})); + DataSet add = new DataSet(ret.getRow(i, true), Nd4j.create(outcomes[from + i], 1,3)); list.add(add); } return list; diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/fetchers/EmnistDataFetcher.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/fetchers/EmnistDataFetcher.java index 70d974e99..d02749095 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/fetchers/EmnistDataFetcher.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/fetchers/EmnistDataFetcher.java @@ -86,11 +86,7 @@ public class EmnistDataFetcher extends MnistDataFetcher implements DataSetFetche //For some 
inexplicable reason, EMNIST LETTERS set is indexed 1 to 26 (i.e., 1 to nClasses), while everything else // is indexed (0 to nClasses-1) :/ - if (dataSet == EmnistDataSetIterator.Set.LETTERS) { - oneIndexed = true; - } else { - oneIndexed = false; - } + oneIndexed = dataSet == EmnistDataSetIterator.Set.LETTERS; this.fOrder = true; //MNIST is C order, EMNIST is F order } @@ -107,8 +103,6 @@ public class EmnistDataFetcher extends MnistDataFetcher implements DataSetFetche if (!f.exists()) return false; f = new File(EMNIST_ROOT, e.getTestFileLabelsFilename_unzipped()); - if (!f.exists()) - return false; - return true; + return f.exists(); } } diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/fetchers/MnistDataFetcher.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/fetchers/MnistDataFetcher.java index be1dd952e..59deb860f 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/fetchers/MnistDataFetcher.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/fetchers/MnistDataFetcher.java @@ -142,9 +142,7 @@ public class MnistDataFetcher extends BaseDataFetcher { if (!f.exists()) return false; f = new File(MNIST_ROOT, MnistFetcher.TEST_FILE_LABELS_FILENAME_UNZIPPED); - if (!f.exists()) - return false; - return true; + return f.exists(); } private void validateFiles(String[] files, long[] checksums){ diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/mnist/MnistDbFile.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/mnist/MnistDbFile.java index 5fd53de81..30791e129 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/mnist/MnistDbFile.java +++ 
b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/mnist/MnistDbFile.java @@ -26,7 +26,7 @@ import java.io.IOException; import java.io.RandomAccessFile; public abstract class MnistDbFile extends RandomAccessFile { - private int count; + private final int count; /** diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/mnist/MnistImageFile.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/mnist/MnistImageFile.java index 196352e84..2a1a7a2b8 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/mnist/MnistImageFile.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/mnist/MnistImageFile.java @@ -26,8 +26,8 @@ import java.io.IOException; public class MnistImageFile extends MnistDbFile { - private int rows; - private int cols; + private final int rows; + private final int cols; /** * Creates new MNIST database image file ready for reading. 
diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datavec-iterators/src/main/java/org/deeplearning4j/datasets/datavec/RecordReaderMultiDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datavec-iterators/src/main/java/org/deeplearning4j/datasets/datavec/RecordReaderMultiDataSetIterator.java index c0c89f00b..34731a8ac 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datavec-iterators/src/main/java/org/deeplearning4j/datasets/datavec/RecordReaderMultiDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datavec-iterators/src/main/java/org/deeplearning4j/datasets/datavec/RecordReaderMultiDataSetIterator.java @@ -63,13 +63,13 @@ public class RecordReaderMultiDataSetIterator implements MultiDataSetIterator, S EQUAL_LENGTH, ALIGN_START, ALIGN_END } - private int batchSize; - private AlignmentMode alignmentMode; + private final int batchSize; + private final AlignmentMode alignmentMode; private Map recordReaders = new HashMap<>(); private Map sequenceRecordReaders = new HashMap<>(); - private List inputs = new ArrayList<>(); - private List outputs = new ArrayList<>(); + private final List inputs = new ArrayList<>(); + private final List outputs = new ArrayList<>(); @Getter @Setter @@ -775,13 +775,13 @@ public class RecordReaderMultiDataSetIterator implements MultiDataSetIterator, S public static class Builder { - private int batchSize; + private final int batchSize; private AlignmentMode alignmentMode = AlignmentMode.ALIGN_START; - private Map recordReaders = new HashMap<>(); - private Map sequenceRecordReaders = new HashMap<>(); + private final Map recordReaders = new HashMap<>(); + private final Map sequenceRecordReaders = new HashMap<>(); - private List inputs = new ArrayList<>(); - private List outputs = new ArrayList<>(); + private final List inputs = new ArrayList<>(); + private final List outputs = new ArrayList<>(); private boolean timeSeriesRandomOffset = false; private long timeSeriesRandomOffsetSeed = System.currentTimeMillis(); 
diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datavec-iterators/src/main/java/org/deeplearning4j/datasets/datavec/SequenceRecordReaderDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datavec-iterators/src/main/java/org/deeplearning4j/datasets/datavec/SequenceRecordReaderDataSetIterator.java index 4ad4ffb48..dbebd0642 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datavec-iterators/src/main/java/org/deeplearning4j/datasets/datavec/SequenceRecordReaderDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datavec-iterators/src/main/java/org/deeplearning4j/datasets/datavec/SequenceRecordReaderDataSetIterator.java @@ -64,8 +64,8 @@ public class SequenceRecordReaderDataSetIterator implements DataSetIterator { private static final String READER_KEY = "reader"; private static final String READER_KEY_LABEL = "reader_labels"; - private SequenceRecordReader recordReader; - private SequenceRecordReader labelsReader; + private final SequenceRecordReader recordReader; + private final SequenceRecordReader labelsReader; private int miniBatchSize = 10; private final boolean regression; private int labelIndex = -1; @@ -288,7 +288,7 @@ public class SequenceRecordReaderDataSetIterator implements DataSetIterator { fm = RecordReaderDataSetIterator.getOrNull(mds.getFeaturesMaskArrays(), 0); //Per-example masking only on the input -> same for both //Can assume 3d features here - f = Nd4j.createUninitialized(new long[] {f1.size(0), f1.size(1) + f2.size(1), f1.size(2)}); + f = Nd4j.createUninitialized(f1.size(0), f1.size(1) + f2.size(1), f1.size(2)); f.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.interval(0, f1.size(1)), NDArrayIndex.all()}, f1); f.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.interval(f1.size(1), f1.size(1) + f2.size(1)), diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/AbstractDataSetIterator.java 
b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/AbstractDataSetIterator.java index 64959c6f3..fbcb248a0 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/AbstractDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/AbstractDataSetIterator.java @@ -36,7 +36,7 @@ import java.util.concurrent.LinkedBlockingQueue; public abstract class AbstractDataSetIterator implements DataSetIterator { private DataSetPreProcessor preProcessor; - private transient Iterable> iterable; + private final transient Iterable> iterable; private transient Iterator> iterator; private final int batchSize; diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/AsyncShieldDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/AsyncShieldDataSetIterator.java index a8cef412e..d2bbb9ebb 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/AsyncShieldDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/AsyncShieldDataSetIterator.java @@ -29,7 +29,7 @@ import org.nd4j.linalg.dataset.api.iterator.DataSetIterator; import java.util.List; public class AsyncShieldDataSetIterator implements DataSetIterator { - private DataSetIterator backingIterator; + private final DataSetIterator backingIterator; /** * @param iterator Iterator to wrop, to disable asynchronous prefetching for diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/AsyncShieldMultiDataSetIterator.java 
b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/AsyncShieldMultiDataSetIterator.java index 947cc58ac..4129113f9 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/AsyncShieldMultiDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/AsyncShieldMultiDataSetIterator.java @@ -26,7 +26,7 @@ import org.nd4j.linalg.dataset.api.MultiDataSetPreProcessor; import org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator; public class AsyncShieldMultiDataSetIterator implements MultiDataSetIterator { - private MultiDataSetIterator backingIterator; + private final MultiDataSetIterator backingIterator; public AsyncShieldMultiDataSetIterator(@NonNull MultiDataSetIterator iterator) { this.backingIterator = iterator; diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/CombinedMultiDataSetPreProcessor.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/CombinedMultiDataSetPreProcessor.java index 84e748430..d1d1a07b3 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/CombinedMultiDataSetPreProcessor.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/CombinedMultiDataSetPreProcessor.java @@ -43,7 +43,7 @@ public class CombinedMultiDataSetPreProcessor implements MultiDataSetPreProcesso } public static class Builder { - private List preProcessors = new ArrayList<>(); + private final List preProcessors = new ArrayList<>(); public Builder() { diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/CombinedPreProcessor.java 
b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/CombinedPreProcessor.java index ec225b248..d3f1d47a0 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/CombinedPreProcessor.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/CombinedPreProcessor.java @@ -47,7 +47,7 @@ public class CombinedPreProcessor implements DataSetPreProcessor { } public static class Builder { - private List preProcessors = new ArrayList<>(); + private final List preProcessors = new ArrayList<>(); public Builder() { diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/DataSetIteratorSplitter.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/DataSetIteratorSplitter.java index a1b183565..1c20f1bf5 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/DataSetIteratorSplitter.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/DataSetIteratorSplitter.java @@ -231,10 +231,7 @@ public class DataSetIteratorSplitter { } val state = backedIterator.hasNext(); - if (state && counter.get() < numTrain) - return true; - else - return false; + return state && counter.get() < numTrain; } @Override @@ -325,10 +322,7 @@ public class DataSetIteratorSplitter { @Override public boolean hasNext() { val state = backedIterator.hasNext(); - if (state && counter.get() < numTrain + numTest) - return true; - else - return false; + return state && counter.get() < numTrain + numTest; } @Override diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/EarlyTerminationDataSetIterator.java 
b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/EarlyTerminationDataSetIterator.java index c3575fe40..c22f3b0e3 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/EarlyTerminationDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/EarlyTerminationDataSetIterator.java @@ -28,8 +28,8 @@ import java.util.List; public class EarlyTerminationDataSetIterator implements DataSetIterator { - private DataSetIterator underlyingIterator; - private int terminationPoint; + private final DataSetIterator underlyingIterator; + private final int terminationPoint; private int minibatchCount = 0; /** diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/EarlyTerminationMultiDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/EarlyTerminationMultiDataSetIterator.java index 9284a26d2..18814a644 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/EarlyTerminationMultiDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/EarlyTerminationMultiDataSetIterator.java @@ -26,8 +26,8 @@ import org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator; public class EarlyTerminationMultiDataSetIterator implements MultiDataSetIterator { - private MultiDataSetIterator underlyingIterator; - private int terminationPoint; + private final MultiDataSetIterator underlyingIterator; + private final int terminationPoint; private int minibatchCount = 0; /** diff --git 
a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/FileSplitDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/FileSplitDataSetIterator.java index 54f576439..ca059e0b3 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/FileSplitDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/FileSplitDataSetIterator.java @@ -35,10 +35,10 @@ import java.util.concurrent.atomic.AtomicInteger; public class FileSplitDataSetIterator implements DataSetIterator { private DataSetPreProcessor preProcessor; - private List files; - private int numFiles; - private AtomicInteger counter = new AtomicInteger(0); - private FileCallback callback; + private final List files; + private final int numFiles; + private final AtomicInteger counter = new AtomicInteger(0); + private final FileCallback callback; /** * @param files List of files to iterate over diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/MultiDataSetIteratorSplitter.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/MultiDataSetIteratorSplitter.java index 128113c9d..3d0769bac 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/MultiDataSetIteratorSplitter.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/MultiDataSetIteratorSplitter.java @@ -197,10 +197,7 @@ public class MultiDataSetIteratorSplitter { } val state = backedIterator.hasNext(); - if (state && counter.get() < numTrain) - return true; - else - return false; + return state && counter.get() < numTrain; } 
@Override @@ -272,10 +269,7 @@ public class MultiDataSetIteratorSplitter { @Override public boolean hasNext() { val state = backedIterator.hasNext(); - if (state && counter.get() < numTrain + numTest) - return true; - else - return false; + return state && counter.get() < numTrain + numTest; } @Override diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/MultipleEpochsIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/MultipleEpochsIterator.java index 28f69c7bf..85b0c2dd2 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/MultipleEpochsIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/MultipleEpochsIterator.java @@ -231,7 +231,7 @@ public class MultipleEpochsIterator implements DataSetIterator { newEpoch = false; } if (iter == null) - return (epochs < numEpochs) && ((!batchedDS.isEmpty() && batchedDS.size() > batch) || batchedDS.isEmpty()); + return (epochs < numEpochs) && (batchedDS.isEmpty() || batchedDS.size() > batch); else // either there are still epochs to complete or its the first epoch return (epochs < numEpochs) || (iter.hasNext() && (epochs == 0 || epochs == numEpochs)); diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/RandomDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/RandomDataSetIterator.java index 78479e2c2..35cbc5b43 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/RandomDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/RandomDataSetIterator.java 
@@ -29,7 +29,7 @@ public class RandomDataSetIterator extends MultiDataSetWrapperIterator { public RandomMultiDataSetIterator.Values toMdsValues(){ return RandomMultiDataSetIterator.Values.valueOf(this.toString()); } - }; + } /** * @param numMiniBatches Number of minibatches per epoch diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/RandomMultiDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/RandomMultiDataSetIterator.java index 8ce205878..a4a5bbf44 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/RandomMultiDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/RandomMultiDataSetIterator.java @@ -119,9 +119,9 @@ public class RandomMultiDataSetIterator implements MultiDataSetIterator { public static class Builder { - private int numMiniBatches; - private List> features = new ArrayList<>(); - private List> labels = new ArrayList<>(); + private final int numMiniBatches; + private final List> features = new ArrayList<>(); + private final List> labels = new ArrayList<>(); /** * @param numMiniBatches Number of minibatches per epoch diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/ReconstructionDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/ReconstructionDataSetIterator.java index 8fb538d7f..b41008b97 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/ReconstructionDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/ReconstructionDataSetIterator.java @@ -35,7 
+35,7 @@ import java.util.List; */ public class ReconstructionDataSetIterator implements DataSetIterator { - private DataSetIterator iter; + private final DataSetIterator iter; @Getter private DataSetPreProcessor preProcessor; diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/ScrollableDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/ScrollableDataSetIterator.java index 91d9579c9..074297e77 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/ScrollableDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/ScrollableDataSetIterator.java @@ -43,7 +43,7 @@ public class ScrollableDataSetIterator implements DataSetIterator { protected MultiDataSet firstMultiTrain = null; private double ratio; private long totalExamples; - private long itemsPerPart; + private final long itemsPerPart; private long current; @@ -152,10 +152,7 @@ public class ScrollableDataSetIterator implements DataSetIterator { state = backedIterator.hasNext(); if (!state) return false; - if (state && counter.get() < itemsPerPart) - return true; - else - return false; + return state && counter.get() < itemsPerPart; } diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/ScrollableMultiDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/ScrollableMultiDataSetIterator.java index a2ba36f36..b5b4377c1 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/ScrollableMultiDataSetIterator.java +++ 
b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/ScrollableMultiDataSetIterator.java @@ -45,7 +45,7 @@ public class ScrollableMultiDataSetIterator implements MultiDataSetIterator { protected MultiDataSet firstMultiTrain = null; private double ratio; private long totalExamples; - private long itemsPerPart; + private final long itemsPerPart; private long current; public ScrollableMultiDataSetIterator(int num, MultiDataSetIterator backedIterator, AtomicLong counter, @@ -110,10 +110,7 @@ public class ScrollableMultiDataSetIterator implements MultiDataSetIterator { state = backedIterator.hasNext(); if (!state) return false; - if (state && counter.get() < itemsPerPart) - return true; - else - return false; + return state && counter.get() < itemsPerPart; } diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/callbacks/InterleavedDataSetCallback.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/callbacks/InterleavedDataSetCallback.java index 39be1e600..d9e5d2f42 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/callbacks/InterleavedDataSetCallback.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/callbacks/InterleavedDataSetCallback.java @@ -37,13 +37,13 @@ import java.util.concurrent.atomic.AtomicLong; @Slf4j public class InterleavedDataSetCallback implements DataSetCallback { - private List workspaces = new ArrayList<>(); - private int bufferSize; + private final List workspaces = new ArrayList<>(); + private final int bufferSize; private int numWorkspaces; private boolean isInitialized = false; - private AtomicLong counterInput = new AtomicLong(0); + private final AtomicLong counterInput = new AtomicLong(0); public 
InterleavedDataSetCallback(int bufferSize) { this.bufferSize = bufferSize; diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/impl/BenchmarkDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/impl/BenchmarkDataSetIterator.java index 7f4808fd2..03b9c1080 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/impl/BenchmarkDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/impl/BenchmarkDataSetIterator.java @@ -33,10 +33,10 @@ import java.util.concurrent.atomic.AtomicLong; @Slf4j public class BenchmarkDataSetIterator implements DataSetIterator { - private INDArray baseFeatures; - private INDArray baseLabels; - private long limit; - private AtomicLong counter = new AtomicLong(0); + private final INDArray baseFeatures; + private final INDArray baseLabels; + private final long limit; + private final AtomicLong counter = new AtomicLong(0); /** * @param featuresShape Shape of the features data to randomly generate diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/impl/BenchmarkMultiDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/impl/BenchmarkMultiDataSetIterator.java index 60457c297..025309a31 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/impl/BenchmarkMultiDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/impl/BenchmarkMultiDataSetIterator.java @@ -31,10 +31,10 @@ import java.util.concurrent.atomic.AtomicLong; @Slf4j public class BenchmarkMultiDataSetIterator 
implements MultiDataSetIterator { - private INDArray[] baseFeatures; - private INDArray[] baseLabels; - private long limit; - private AtomicLong counter = new AtomicLong(0); + private final INDArray[] baseFeatures; + private final INDArray[] baseLabels; + private final long limit; + private final AtomicLong counter = new AtomicLong(0); public BenchmarkMultiDataSetIterator(int[][] featuresShape, int[] numLabels, int totalIterations) { if (featuresShape.length != numLabels.length) diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/impl/ListDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/impl/ListDataSetIterator.java index ca95b334d..67ebc3a06 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/impl/ListDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/impl/ListDataSetIterator.java @@ -34,7 +34,7 @@ public class ListDataSetIterator implements DataSetIterator { private static final long serialVersionUID = -7569201667767185411L; private int curr = 0; private int batch = 10; - private List list; + private final List list; @Getter private DataSetPreProcessor preProcessor; diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/parallel/BaseParallelDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/parallel/BaseParallelDataSetIterator.java index 38bda75ad..1f1568909 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/parallel/BaseParallelDataSetIterator.java +++ 
b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/parallel/BaseParallelDataSetIterator.java @@ -105,10 +105,7 @@ public abstract class BaseParallelDataSetIterator implements ParallelDataSetIter return true; } case STOP_EVERYONE: { - if (!states.allTrue()) - return false; - - return true; + return states.allTrue(); } default: throw new ND4JIllegalStateException( diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/parallel/FileSplitParallelDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/parallel/FileSplitParallelDataSetIterator.java index 40dd67594..de6fac884 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/parallel/FileSplitParallelDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/parallel/FileSplitParallelDataSetIterator.java @@ -26,7 +26,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.IOFileFilter; import org.apache.commons.io.filefilter.RegexFileFilter; -import org.nd4j.linalg.dataset.AsyncDataSetIterator;; +import org.nd4j.linalg.dataset.AsyncDataSetIterator; import org.deeplearning4j.datasets.iterator.FileSplitDataSetIterator; import org.deeplearning4j.datasets.iterator.callbacks.FileCallback; import org.nd4j.linalg.dataset.DataSet; @@ -43,8 +43,8 @@ import java.util.List; public class FileSplitParallelDataSetIterator extends BaseParallelDataSetIterator { public static final String DEFAULT_PATTERN = "dataset-%d.bin"; - private String pattern; - private int buffer; + private final String pattern; + private final int buffer; protected List asyncIterators = new ArrayList<>(); diff --git 
a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/parallel/JointParallelDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/parallel/JointParallelDataSetIterator.java index 64cd04b89..ef255beb1 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/parallel/JointParallelDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/parallel/JointParallelDataSetIterator.java @@ -23,7 +23,7 @@ package org.deeplearning4j.datasets.iterator.parallel; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; -import org.nd4j.linalg.dataset.AsyncDataSetIterator;; +import org.nd4j.linalg.dataset.AsyncDataSetIterator; import org.nd4j.linalg.dataset.DataSet; import org.nd4j.linalg.dataset.api.iterator.DataSetIterator; import org.nd4j.linalg.dataset.api.iterator.enums.InequalityHandling; @@ -94,10 +94,10 @@ public class JointParallelDataSetIterator extends BaseParallelDataSetIterator { public static class Builder { - private List iterators = new ArrayList<>(); + private final List iterators = new ArrayList<>(); private boolean enforceSingleDevice = true; private int bufferSize = 4; - private InequalityHandling inequalityHandling; + private final InequalityHandling inequalityHandling; public Builder(@NonNull InequalityHandling inequalityHandling) { this.inequalityHandling = inequalityHandling; diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/parallel/MultiBoolean.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/parallel/MultiBoolean.java index 61b58bd56..48dc20752 100644 --- 
a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/parallel/MultiBoolean.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/parallel/MultiBoolean.java @@ -28,7 +28,7 @@ public class MultiBoolean { private final int numEntries; private int holder = 0; private int max = 0; - private boolean oneTime; + private final boolean oneTime; private MultiBoolean timeTracker; public MultiBoolean(int numEntries) { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/Hdf5Archive.java b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/Hdf5Archive.java index bb8cd203f..29dbb8d88 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/Hdf5Archive.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/Hdf5Archive.java @@ -59,8 +59,8 @@ public class Hdf5Archive implements Closeable { } } - private H5File file; - private static DataType dataType = new DataType(PredType.NATIVE_FLOAT()); + private final H5File file; + private static final DataType dataType = new DataType(PredType.NATIVE_FLOAT()); public Hdf5Archive(String archiveFilename) { synchronized (LOCK_OBJECT) { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/KerasModel.java b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/KerasModel.java index 1c001c1fd..d4bf6ba92 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/KerasModel.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/KerasModel.java @@ -276,7 +276,7 @@ public class KerasModel { sameDiffLambdaLayer.setLayerName(newName); KerasLambda kerasLambda = new KerasLambda(configCopy,sameDiffLambdaLayer); 
kerasLambda.layerName = newName; - kerasLambda.setInboundLayerNames(new ArrayList<>(Arrays.asList(input))); + kerasLambda.setInboundLayerNames(new ArrayList<>(Collections.singletonList(input))); layers.put(newName,kerasLambda); int indexOfNewLayer = names.indexOf(input) + 1; updatedOrders.put(indexOfNewLayer,kerasLambda); diff --git a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/TFOpLayer.java b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/TFOpLayer.java index 5d2d04923..8e30f72f2 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/TFOpLayer.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/TFOpLayer.java @@ -43,8 +43,8 @@ import java.util.Map; public class TFOpLayer extends Layer { - private Map nodeDef; - private Map constants; + private final Map nodeDef; + private final Map constants; public TFOpLayer(Map nodeDef, Map constants){ super(); this.nodeDef = nodeDef; diff --git a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/TFOpLayerImpl.java b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/TFOpLayerImpl.java index ab88f2aa5..ba2b98db4 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/TFOpLayerImpl.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/TFOpLayerImpl.java @@ -83,7 +83,7 @@ public class TFOpLayerImpl extends AbstractLayer { Map inputDataTypes = new HashMap<>(); Map constArrays = new HashMap(); this.inputNames = new ArrayList<>(); - List outputNames = Arrays.asList(nodeDef.getName()); + List outputNames = Collections.singletonList(nodeDef.getName()); Map attrMap = nodeDef.getAttrMap(); for (int i = 0; i < nodeDef.getInputCount(); i++){ String inputName = 
nodeDef.getInput(i); @@ -104,7 +104,7 @@ public class TFOpLayerImpl extends AbstractLayer { this.inputNames.add(nodeDef.getInput(i)); } } - String graph = "node{\n" + nodeDef.toString() + "\n}\nversions {\n producer: 22\n}"; + String graph = "node{\n" + nodeDef + "\n}\nversions {\n producer: 22\n}"; for (int i = 0; i < allInputNames.size(); i++){ String inpName = allInputNames.get(i); String dtype = inputDataTypes.get(inpName); diff --git a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activations/KerasPReLU.java b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activations/KerasPReLU.java index 6853ba203..5a4bb1c55 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activations/KerasPReLU.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activations/KerasPReLU.java @@ -102,7 +102,7 @@ public class KerasPReLU extends KerasLayer { int[] intAxes = ArrayUtil.toArray(axesList); axes = new long[intAxes.length]; for (int i = 0; i < intAxes.length; i++) { - axes[i] = (long) intAxes[i]; + axes[i] = intAxes[i]; } } catch (Exception e) { // no shared axes diff --git a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/convolutional/KerasDepthwiseConvolution2D.java b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/convolutional/KerasDepthwiseConvolution2D.java index e14d97bb1..264200686 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/convolutional/KerasDepthwiseConvolution2D.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/convolutional/KerasDepthwiseConvolution2D.java @@ -73,7 +73,7 @@ public class KerasDepthwiseConvolution2D extends 
KerasConvolution { */ public KerasDepthwiseConvolution2D(Map layerConfig) throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { - this(layerConfig, Collections.emptyMap(), true); + this(layerConfig, Collections.emptyMap(), true); } /** diff --git a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasPermute.java b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasPermute.java index bafdcc98e..6c0a9c7c7 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasPermute.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasPermute.java @@ -109,7 +109,7 @@ public class KerasPermute extends KerasLayer { case TENSORFLOW: // account for channels last permutationIndices = new int[] {permutationIndices[2], permutationIndices[0], permutationIndices[1]}; - preprocessor = new PermutePreprocessor(new int[]{1, 3, 2}); + preprocessor = new PermutePreprocessor(1, 3, 2); } } else if (inputType[0] instanceof InputType.InputTypeRecurrent) { if (Arrays.equals(permutationIndices, new int[] {2, 1})) diff --git a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasLSTM.java b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasLSTM.java index a2b30b92b..4e35a6867 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasLSTM.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasLSTM.java @@ -123,7 +123,7 @@ public class KerasLSTM extends KerasLayer { */ public KerasLSTM(Map layerConfig, boolean enforceTrainingConfig) throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { - 
this(layerConfig, enforceTrainingConfig, Collections.emptyMap()); + this(layerConfig, enforceTrainingConfig, Collections.emptyMap()); } diff --git a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasSimpleRnn.java b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasSimpleRnn.java index 60c25fe47..ac2d4c234 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasSimpleRnn.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasSimpleRnn.java @@ -87,7 +87,7 @@ public class KerasSimpleRnn extends KerasLayer { */ public KerasSimpleRnn(Map layerConfig) throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { - this(layerConfig, true, Collections.emptyMap()); + this(layerConfig, true, Collections.emptyMap()); } /** @@ -113,7 +113,7 @@ public class KerasSimpleRnn extends KerasLayer { */ public KerasSimpleRnn(Map layerConfig, boolean enforceTrainingConfig) throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { - this(layerConfig, enforceTrainingConfig, Collections.emptyMap()); + this(layerConfig, enforceTrainingConfig, Collections.emptyMap()); } diff --git a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/wrappers/KerasBidirectional.java b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/wrappers/KerasBidirectional.java index 1042ca244..fa5f5b508 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/wrappers/KerasBidirectional.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/wrappers/KerasBidirectional.java @@ -63,7 +63,7 @@ public class KerasBidirectional extends KerasLayer { */ public 
KerasBidirectional(Map layerConfig) throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { - this(layerConfig, true, Collections.emptyMap()); + this(layerConfig, true, Collections.emptyMap()); } diff --git a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/preprocessors/ReshapePreprocessor.java b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/preprocessors/ReshapePreprocessor.java index 002ce6b57..085db4e37 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/preprocessors/ReshapePreprocessor.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/preprocessors/ReshapePreprocessor.java @@ -89,9 +89,7 @@ public class ReshapePreprocessor extends BaseInputPreProcessor { int shapeLength = shape.length; val miniBatchShape = new long[shapeLength + 1]; miniBatchShape[0] = miniBatchSize; - for (int i = 1; i < miniBatchShape.length; i++) { - miniBatchShape[i] = shape[i - 1]; - } + System.arraycopy(shape, 0, miniBatchShape, 1, miniBatchShape.length - 1); return miniBatchShape; } diff --git a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/utils/KerasModelBuilder.java b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/utils/KerasModelBuilder.java index 92691ddf6..c0392c66a 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/utils/KerasModelBuilder.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/utils/KerasModelBuilder.java @@ -118,7 +118,7 @@ public class KerasModelBuilder implements Cloneable, Closeable { public KerasModelBuilder modelJsonInputStream(InputStream modelJsonInputStream) throws IOException { ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); IOUtils.copy(modelJsonInputStream, 
byteArrayOutputStream); - this.modelJson = new String(byteArrayOutputStream.toByteArray()); + this.modelJson = byteArrayOutputStream.toString(); return this; } @@ -132,7 +132,7 @@ public class KerasModelBuilder implements Cloneable, Closeable { public KerasModelBuilder modelYamlInputStream(InputStream modelYamlInputStream) throws IOException { ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); IOUtils.copy(modelYamlInputStream, byteArrayOutputStream); - this.modelJson = new String(byteArrayOutputStream.toByteArray()); + this.modelJson = byteArrayOutputStream.toString(); return this; } @@ -197,7 +197,7 @@ public class KerasModelBuilder implements Cloneable, Closeable { public KerasModelBuilder trainingJsonInputStream(InputStream trainingJsonInputStream) throws IOException { ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); IOUtils.copy(trainingJsonInputStream, byteArrayOutputStream); - this.trainingJson = new String(byteArrayOutputStream.toByteArray()); + this.trainingJson = byteArrayOutputStream.toString(); return this; } @@ -210,7 +210,7 @@ public class KerasModelBuilder implements Cloneable, Closeable { public KerasModelBuilder trainingYamlInputStream(InputStream trainingYamlInputStream) throws IOException { ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); IOUtils.copy(trainingYamlInputStream, byteArrayOutputStream); - this.trainingYaml = new String(byteArrayOutputStream.toByteArray()); + this.trainingYaml = byteArrayOutputStream.toString(); return this; } diff --git a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/utils/KerasModelUtils.java b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/utils/KerasModelUtils.java index ad11282e5..43f3b244f 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/utils/KerasModelUtils.java +++ 
b/cavis-dnn/cavis-dnn-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/utils/KerasModelUtils.java @@ -170,8 +170,10 @@ public class KerasModelUtils { // check to ensure naming scheme doesn't include forward slash boolean includesSlash = false; for (String layerName : layers.keySet()) { - if (layerName.contains("/")) + if (layerName.contains("/")) { includesSlash = true; + break; + } } synchronized (KerasModelUtils.class) { List layerGroups; diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/MiscTests.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/MiscTests.java index e29503c5f..815b2cf68 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/MiscTests.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/MiscTests.java @@ -111,7 +111,7 @@ public class MiscTests extends BaseDL4JTest { assertFalse(vr0.isValid()); assertEquals("Keras Sequential Model HDF5", vr0.getFormatType()); assertTrue(vr0.getIssues().get(0).contains("exist"), vr0.getIssues().get(0)); - System.out.println(vr0.toString()); + System.out.println(vr0); //Test empty file: File fEmpty = new File(f, "empty.h5"); @@ -121,7 +121,7 @@ public class MiscTests extends BaseDL4JTest { assertEquals("Keras Sequential Model HDF5", vr1.getFormatType()); assertFalse(vr1.isValid()); assertTrue(vr1.getIssues().get(0).contains("empty"), vr1.getIssues().get(0)); - System.out.println(vr1.toString()); + System.out.println(vr1); //Test directory (not zip file) File directory = new File(f, "dir"); @@ -131,7 +131,7 @@ public class MiscTests extends BaseDL4JTest { assertEquals("Keras Sequential Model HDF5", vr2.getFormatType()); assertFalse(vr2.isValid()); assertTrue(vr2.getIssues().get(0).contains("directory"), vr2.getIssues().get(0)); - System.out.println(vr2.toString()); + System.out.println(vr2); //Test Keras HDF5 format: File 
fText = new File(f, "text.txt"); @@ -141,7 +141,7 @@ public class MiscTests extends BaseDL4JTest { assertFalse(vr3.isValid()); String s = vr3.getIssues().get(0); assertTrue( s.contains("Keras") && s.contains("Sequential") && s.contains("corrupt"), s); - System.out.println(vr3.toString()); + System.out.println(vr3); //Test corrupted npy format: File fValid = Resources.asFile("modelimport/keras/examples/mnist_mlp/mnist_mlp_tf_keras_1_model.h5"); @@ -157,7 +157,7 @@ public class MiscTests extends BaseDL4JTest { assertFalse(vr4.isValid()); s = vr4.getIssues().get(0); assertTrue(s.contains("Keras") && s.contains("Sequential") && s.contains("corrupt"), s); - System.out.println(vr4.toString()); + System.out.println(vr4); //Test valid npy format: @@ -166,7 +166,7 @@ public class MiscTests extends BaseDL4JTest { assertTrue(vr5.isValid()); assertNull(vr5.getIssues()); assertNull(vr5.getException()); - System.out.println(vr4.toString()); + System.out.println(vr4); } @Test @@ -180,7 +180,7 @@ public class MiscTests extends BaseDL4JTest { assertFalse(vr0.isValid()); assertEquals("Keras Functional Model HDF5", vr0.getFormatType()); assertTrue( vr0.getIssues().get(0).contains("exist"), vr0.getIssues().get(0)); - System.out.println(vr0.toString()); + System.out.println(vr0); //Test empty file: File fEmpty = new File(f, "empty.h5"); @@ -190,7 +190,7 @@ public class MiscTests extends BaseDL4JTest { assertEquals("Keras Functional Model HDF5", vr1.getFormatType()); assertFalse(vr1.isValid()); assertTrue( vr1.getIssues().get(0).contains("empty"), vr1.getIssues().get(0)); - System.out.println(vr1.toString()); + System.out.println(vr1); //Test directory (not zip file) File directory = new File(f, "dir"); @@ -200,7 +200,7 @@ public class MiscTests extends BaseDL4JTest { assertEquals("Keras Functional Model HDF5", vr2.getFormatType()); assertFalse(vr2.isValid()); assertTrue(vr2.getIssues().get(0).contains("directory"), vr2.getIssues().get(0)); - System.out.println(vr2.toString()); + 
System.out.println(vr2); //Test Keras HDF5 format: File fText = new File(f, "text.txt"); @@ -210,7 +210,7 @@ public class MiscTests extends BaseDL4JTest { assertFalse(vr3.isValid()); String s = vr3.getIssues().get(0); assertTrue(s.contains("Keras") && s.contains("Functional") && s.contains("corrupt"),s); - System.out.println(vr3.toString()); + System.out.println(vr3); //Test corrupted npy format: File fValid = Resources.asFile("modelimport/keras/examples/mnist_mlp/mnist_mlp_tf_keras_1_model.h5"); @@ -226,7 +226,7 @@ public class MiscTests extends BaseDL4JTest { assertFalse(vr4.isValid()); s = vr4.getIssues().get(0); assertTrue(s.contains("Keras") && s.contains("Functional") && s.contains("corrupt"),s); - System.out.println(vr4.toString()); + System.out.println(vr4); //Test valid npy format: @@ -235,6 +235,6 @@ public class MiscTests extends BaseDL4JTest { assertTrue(vr5.isValid()); assertNull(vr5.getIssues()); assertNull(vr5.getException()); - System.out.println(vr4.toString()); + System.out.println(vr4); } } diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/DeepCTRLambdaTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/DeepCTRLambdaTest.java index 5b3fdc77a..1eca7ab48 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/DeepCTRLambdaTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/DeepCTRLambdaTest.java @@ -40,7 +40,7 @@ public class DeepCTRLambdaTest { @Override public SDVariable defineLayer(SameDiff sameDiff, SDVariable layerInput) { - return layerInput.sum("tensors_sum-" + UUID.randomUUID().toString(),false,1); + return layerInput.sum("tensors_sum-" + UUID.randomUUID(),false,1); } @Override @@ -53,7 +53,7 @@ public class DeepCTRLambdaTest { @Override public SDVariable defineLayer(SameDiff sameDiff, SDVariable 
layerInput) { - return layerInput.mul("tensor_square-" + UUID.randomUUID().toString(),layerInput); + return layerInput.mul("tensor_square-" + UUID.randomUUID(),layerInput); } @Override @@ -66,7 +66,7 @@ public class DeepCTRLambdaTest { @Override public SDVariable defineLayer(SameDiff sameDiff, SDVariable layerInput) { - return layerInput.mul("lambda1-" + UUID.randomUUID().toString(),0.5); + return layerInput.mul("lambda1-" + UUID.randomUUID(),0.5); } @Override @@ -80,9 +80,9 @@ public class DeepCTRLambdaTest { @Override public SDVariable defineLayer(SameDiff sameDiff, SDVariable layerInput) { if(this.layerName.equals("concat_embed_2d") || this.layerName.equals("cat_embed_2d_genure_mean")) - return layerInput.mean("mean_pooling-" + UUID.randomUUID().toString(),true,1); + return layerInput.mean("mean_pooling-" + UUID.randomUUID(),true,1); else - return layerInput.mean("mean_pooling-" + UUID.randomUUID().toString(),false,1); + return layerInput.mean("mean_pooling-" + UUID.randomUUID(),false,1); } @Override diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/FullModelComparisons.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/FullModelComparisons.java index 207a8b82f..1120dfbb8 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/FullModelComparisons.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/FullModelComparisons.java @@ -52,7 +52,7 @@ import java.util.Arrays; import java.util.LinkedList; import java.util.List; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; public class FullModelComparisons extends BaseDL4JTest { @@ -90,7 +90,7 @@ public class FullModelComparisons extends BaseDL4JTest { org.deeplearning4j.nn.conf.layers.LSTM firstConf = 
(org.deeplearning4j.nn.conf.layers.LSTM) firstLstm.conf().getLayer(); // "unit_forget_bias": true - assertTrue(firstConf.getForgetGateBiasInit() == 1.0); + assertEquals(1.0, firstConf.getForgetGateBiasInit()); assertTrue(firstConf.getGateActivationFn() instanceof ActivationHardSigmoid); assertTrue(firstConf.getActivationFn() instanceof ActivationTanH); @@ -101,7 +101,7 @@ public class FullModelComparisons extends BaseDL4JTest { // Need to convert from IFCO to CFOI order // INDArray W = firstLstm.getParam("W"); - Assertions.assertTrue(Arrays.equals(W.shape(), new long[]{nIn, 4 * nOut})); + assertArrayEquals(W.shape(), new long[]{nIn, 4 * nOut}); Assertions.assertEquals(W.getDouble(0, 288), -0.30737767, 1e-7); Assertions.assertEquals(W.getDouble(0, 289), -0.5845409, 1e-7); Assertions.assertEquals(W.getDouble(1, 288), -0.44083247, 1e-7); @@ -112,12 +112,12 @@ public class FullModelComparisons extends BaseDL4JTest { INDArray RW = firstLstm.getParam("RW"); - assertTrue(Arrays.equals(RW.shape(), new long[]{nOut, 4 * nOut})); + assertArrayEquals(RW.shape(), new long[]{nOut, 4 * nOut}); Assertions.assertEquals(RW.getDouble(0, 288), 0.15112677, 1e-7); INDArray b = firstLstm.getParam("b"); - assertTrue(Arrays.equals(b.shape(), new long[]{1, 4 * nOut})); + assertArrayEquals(b.shape(), new long[]{1, 4 * nOut}); Assertions.assertEquals(b.getDouble(0, 288), -0.36940336, 1e-7); // Keras I Assertions.assertEquals(b.getDouble(0, 96), 0.6031118, 1e-7); // Keras F Assertions.assertEquals(b.getDouble(0, 192), -0.13569744, 1e-7); // Keras O @@ -128,7 +128,7 @@ public class FullModelComparisons extends BaseDL4JTest { org.deeplearning4j.nn.conf.layers.LSTM secondConf = (org.deeplearning4j.nn.conf.layers.LSTM) secondLstm.conf().getLayer(); // "unit_forget_bias": true - assertTrue(secondConf.getForgetGateBiasInit() == 1.0); + assertEquals(1.0, secondConf.getForgetGateBiasInit()); assertTrue(firstConf.getGateActivationFn() instanceof ActivationHardSigmoid); 
assertTrue(firstConf.getActivationFn() instanceof ActivationTanH); @@ -137,16 +137,16 @@ public class FullModelComparisons extends BaseDL4JTest { nOut = 96; W = secondLstm.getParam("W"); - assertTrue(Arrays.equals(W.shape(), new long[]{nIn, 4 * nOut})); + assertArrayEquals(W.shape(), new long[]{nIn, 4 * nOut}); Assertions.assertEquals(W.getDouble(0, 288), -0.7559755, 1e-7); RW = secondLstm.getParam("RW"); - assertTrue(Arrays.equals(RW.shape(), new long[]{nOut, 4 * nOut})); + assertArrayEquals(RW.shape(), new long[]{nOut, 4 * nOut}); Assertions.assertEquals(RW.getDouble(0, 288), -0.33184892, 1e-7); b = secondLstm.getParam("b"); - assertTrue(Arrays.equals(b.shape(), new long[]{1, 4 * nOut})); + assertArrayEquals(b.shape(), new long[]{1, 4 * nOut}); Assertions.assertEquals(b.getDouble(0, 288), -0.2223678, 1e-7); Assertions.assertEquals(b.getDouble(0, 96), 0.73556226, 1e-7); Assertions.assertEquals(b.getDouble(0, 192), -0.63227624, 1e-7); @@ -167,7 +167,7 @@ public class FullModelComparisons extends BaseDL4JTest { INDArray sequence = dataSet.getFeatures().get(NDArrayIndex.point(0)).transpose(); INDArray bsSequence = sequence.reshape(1, 4, 12); // one batch INDArray pred = model.output(bsSequence); - assertTrue(Arrays.equals(pred.shape(), new long[]{1, 1})); + assertArrayEquals(pred.shape(), new long[]{1, 1}); preds.add(pred.getDouble(0, 0)); } INDArray dl4jPredictions = Nd4j.create(preds); diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/JsonTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/JsonTest.java index 71065896c..4558eccc7 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/JsonTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/JsonTest.java @@ -36,7 +36,7 @@ public class JsonTest extends BaseDL4JTest { public 
void testJsonPreprocessors() throws Exception { InputPreProcessor[] pp = new InputPreProcessor[] { new KerasFlattenRnnPreprocessor(10, 5), - new PermutePreprocessor(new int[]{0,1,2}), + new PermutePreprocessor(0,1,2), new ReshapePreprocessor(new long[]{10,10}, new long[]{100,1}, true, null) }; diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/Keras1ModelConfigurationTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/Keras1ModelConfigurationTest.java index 7c8d8f73d..fc48183e2 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/Keras1ModelConfigurationTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/Keras1ModelConfigurationTest.java @@ -36,7 +36,7 @@ import java.io.InputStream; @Slf4j public class Keras1ModelConfigurationTest extends BaseDL4JTest { - private ClassLoader classLoader = getClass().getClassLoader(); + private final ClassLoader classLoader = getClass().getClassLoader(); @Test public void imdbLstmTfSequentialConfigTest() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/Keras2ModelConfigurationTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/Keras2ModelConfigurationTest.java index a8eab6be6..05f6162f3 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/Keras2ModelConfigurationTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/Keras2ModelConfigurationTest.java @@ -267,7 +267,7 @@ public class Keras2ModelConfigurationTest extends BaseDL4JTest { model.init(); INDArray input = Nd4j.create(DataType.FLOAT, 50, 1500, 500); //NWC format - 
[Minibatch, seqLength, channels] INDArray out = model.output(input); - assertTrue(Arrays.equals(out.shape(), new long[]{50, 64})); + assertArrayEquals(out.shape(), new long[]{50, 64}); } } diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/KerasInitilizationTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/KerasInitilizationTest.java index 31fb10f09..e97a1685e 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/KerasInitilizationTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/KerasInitilizationTest.java @@ -40,15 +40,15 @@ import static org.junit.jupiter.api.Assertions.assertEquals; public class KerasInitilizationTest extends BaseDL4JTest { - private double minValue = -0.2; - private double maxValue = 0.2; - private double mean = 0.0; - private double stdDev = 0.2; - private double value = 42.0; - private double gain = 0.2; + private final double minValue = -0.2; + private final double maxValue = 0.2; + private final double mean = 0.0; + private final double stdDev = 0.2; + private final double value = 42.0; + private final double gain = 0.2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testInitializers() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasCustomLayerTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasCustomLayerTest.java index 06683cd07..67caf1e3b 100644 --- 
a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasCustomLayerTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasCustomLayerTest.java @@ -56,7 +56,7 @@ public class KerasCustomLayerTest extends BaseDL4JTest { // download file if (!cachedKerasFile.exists()) { - log.info("Downloading model to " + cachedKerasFile.toString()); + log.info("Downloading model to " + cachedKerasFile); FileUtils.copyURLToFile(new URL(kerasWeightsAndConfigUrl), cachedKerasFile); cachedKerasFile.deleteOnExit(); } diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasCustomLossTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasCustomLossTest.java index 5d394351a..7a13ab908 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasCustomLossTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasCustomLossTest.java @@ -64,7 +64,7 @@ public class KerasCustomLossTest extends BaseDL4JTest { .enforceTrainingConfig(true).buildSequential().getMultiLayerNetwork(); System.out.println(model.summary()); - INDArray input = Nd4j.create(new int[]{10, 3}); + INDArray input = Nd4j.create(10, 3); model.output(input); } finally { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasLambdaTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasLambdaTest.java index 726de2e1f..592ad2d9c 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasLambdaTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasLambdaTest.java @@ -83,7 +83,7 @@ public class KerasLambdaTest extends BaseDL4JTest { 
.enforceTrainingConfig(false).buildSequential().getMultiLayerNetwork(); System.out.println(model.summary()); - INDArray input = Nd4j.create(new int[]{10, 100}); + INDArray input = Nd4j.create(10, 100); model.output(input); } finally { @@ -105,7 +105,7 @@ public class KerasLambdaTest extends BaseDL4JTest { .enforceTrainingConfig(false).buildModel().getComputationGraph(); System.out.println(model.summary()); - INDArray input = Nd4j.create(new int[]{10, 784}); + INDArray input = Nd4j.create(10, 784); model.output(input); } finally { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasYolo9000PredictTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasYolo9000PredictTest.java index 782923365..a5ab1f512 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasYolo9000PredictTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasYolo9000PredictTest.java @@ -40,7 +40,7 @@ import java.io.File; public class KerasYolo9000PredictTest extends BaseDL4JTest { private static final String DL4J_MODEL_FILE_NAME = "."; - private static ImagePreProcessingScaler IMAGE_PREPROCESSING_SCALER = new ImagePreProcessingScaler(0, 1); + private static final ImagePreProcessingScaler IMAGE_PREPROCESSING_SCALER = new ImagePreProcessingScaler(0, 1); @Test ////@Ignore("Need to manually download file for ylo.") diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasLeakyReLUTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasLeakyReLUTest.java index 91e890cba..b2417dad8 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasLeakyReLUTest.java +++ 
b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasLeakyReLUTest.java @@ -38,8 +38,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; */ public class KerasLeakyReLUTest extends BaseDL4JTest { - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testLeakyReLULayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasPReLUTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasPReLUTest.java index 7405a6007..202e06426 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasPReLUTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasPReLUTest.java @@ -42,8 +42,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; */ public class KerasPReLUTest extends BaseDL4JTest { - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); private final String INIT_KERAS = "glorot_normal"; private final IWeightInit INIT_DL4J = new WeightInitXavier(); diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasThresholdedReLUTest.java 
b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasThresholdedReLUTest.java index 822a140a6..834886ef7 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasThresholdedReLUTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasThresholdedReLUTest.java @@ -38,8 +38,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; */ public class KerasThresholdedReLUTest extends BaseDL4JTest { - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testThresholdedReLULayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasAtrousConvolution1DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasAtrousConvolution1DTest.java index 828d1c4c2..f5e25ea9f 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasAtrousConvolution1DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasAtrousConvolution1DTest.java @@ -58,8 +58,8 @@ public class KerasAtrousConvolution1DTest extends BaseDL4JTest { private final String BORDER_MODE_VALID = "valid"; private final int[] VALID_PADDING = new int[]{0, 0}; - private Integer keras1 = 1; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Integer keras1 = 1; + private final Keras1LayerConfiguration conf1 = new 
Keras1LayerConfiguration(); @Test public void testAtrousConvolution1DLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasAtrousConvolution2DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasAtrousConvolution2DTest.java index a29be581c..f2eebb8f2 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasAtrousConvolution2DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasAtrousConvolution2DTest.java @@ -61,7 +61,7 @@ public class KerasAtrousConvolution2DTest extends BaseDL4JTest { private final String BORDER_MODE_VALID = "valid"; private final int[] VALID_PADDING = new int[]{0, 0}; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); @Test public void testAtrousConvolution2DLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution1DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution1DTest.java index 22a51b1a7..994d3affe 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution1DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution1DTest.java @@ -60,10 +60,10 @@ public class KerasConvolution1DTest extends BaseDL4JTest { private final String BORDER_MODE_VALID = "valid"; private final int[] VALID_PADDING = new int[]{0, 0}; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 
= new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testConvolution1DLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution2DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution2DTest.java index f449c2cae..b92ab0432 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution2DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution2DTest.java @@ -62,10 +62,10 @@ public class KerasConvolution2DTest extends BaseDL4JTest { private final String BORDER_MODE_VALID = "valid"; private final int[] VALID_PADDING = new int[]{0, 0}; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution3DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution3DTest.java index a6c9af9c4..c36b0351d 100644 --- 
a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution3DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution3DTest.java @@ -61,10 +61,10 @@ public class KerasConvolution3DTest extends BaseDL4JTest { private final String BORDER_MODE_VALID = "valid"; private final int[] VALID_PADDING = new int[]{0, 0, 0}; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping1DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping1DTest.java index 9519aa4ac..b3159e54b 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping1DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping1DTest.java @@ -41,8 +41,8 @@ public class KerasCropping1DTest extends BaseDL4JTest { private final String LAYER_NAME = "cropping_1D_layer"; private final int CROPPING = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void 
testCropping1DLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping2DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping2DTest.java index 966690847..e65a59438 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping2DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping2DTest.java @@ -42,8 +42,8 @@ public class KerasCropping2DTest extends BaseDL4JTest { private final String LAYER_NAME = "cropping_2D_layer"; private final int[] CROPPING = new int[]{2, 3}; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testCropping2DLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping3DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping3DTest.java index 7c8f45579..5fe65127a 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping3DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping3DTest.java @@ -42,8 +42,8 @@ public class KerasCropping3DTest extends BaseDL4JTest { private final String LAYER_NAME = "cropping_3D_layer"; private final int[] CROPPING = new int[]{2, 3, 5}; - private Keras1LayerConfiguration conf1 = new 
Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testCropping3DLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasDeconvolution2DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasDeconvolution2DTest.java index 37fafc785..c0db1c47b 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasDeconvolution2DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasDeconvolution2DTest.java @@ -62,10 +62,10 @@ public class KerasDeconvolution2DTest extends BaseDL4JTest { private final String BORDER_MODE_VALID = "valid"; private final int[] VALID_PADDING = new int[]{0, 0}; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasDepthwiseConvolution2DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasDepthwiseConvolution2DTest.java index 1b4b8e7c7..4dc4856c0 100644 --- 
a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasDepthwiseConvolution2DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasDepthwiseConvolution2DTest.java @@ -62,8 +62,8 @@ public class KerasDepthwiseConvolution2DTest extends BaseDL4JTest { private final String BORDER_MODE_VALID = "valid"; private final int[] VALID_PADDING = new int[]{0, 0}; - private Integer keras2 = 2; - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras2 = 2; + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasSeparableConvolution2DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasSeparableConvolution2DTest.java index fb1df4525..54f50a478 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasSeparableConvolution2DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasSeparableConvolution2DTest.java @@ -63,10 +63,10 @@ public class KerasSeparableConvolution2DTest extends BaseDL4JTest { private final String BORDER_MODE_VALID = "valid"; private final int[] VALID_PADDING = new int[]{0, 0}; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git 
a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling1DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling1DTest.java index 4985681cd..394f768c9 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling1DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling1DTest.java @@ -39,12 +39,12 @@ import static org.junit.jupiter.api.Assertions.assertEquals; public class KerasUpsampling1DTest extends BaseDL4JTest { private final String LAYER_NAME = "upsampling_1D_layer"; - private int size = 4; + private final int size = 4; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testUpsampling1DLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling2DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling2DTest.java index eb38f4ec0..f75958315 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling2DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling2DTest.java @@ -41,12 +41,12 @@ import static 
org.junit.jupiter.api.Assertions.assertEquals; public class KerasUpsampling2DTest extends BaseDL4JTest { private final String LAYER_NAME = "upsampling_2D_layer"; - private int[] size = new int[]{2, 2}; + private final int[] size = new int[]{2, 2}; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testUpsampling2DLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling3DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling3DTest.java index 7741785d1..7c82f4907 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling3DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling3DTest.java @@ -41,12 +41,12 @@ import static org.junit.jupiter.api.Assertions.assertEquals; public class KerasUpsampling3DTest extends BaseDL4JTest { private final String LAYER_NAME = "upsampling_3D_layer"; - private int[] size = new int[]{2, 2, 2}; + private final int[] size = new int[]{2, 2, 2}; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + 
private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testUpsampling3DLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding1DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding1DTest.java index 9cfe0bdab..64bc6563f 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding1DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding1DTest.java @@ -38,8 +38,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; */ public class KerasZeroPadding1DTest extends BaseDL4JTest { - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testZeroPadding1DLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding2DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding2DTest.java index 809cb5f0a..203c4b887 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding2DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding2DTest.java @@ -42,8 +42,8 @@ public class KerasZeroPadding2DTest extends BaseDL4JTest { private final String LAYER_NAME = "zero_padding_2D_layer"; private final int[] 
ZERO_PADDING = new int[]{2, 3}; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testZeroPadding2DLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding3DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding3DTest.java index 6ae93473b..cc2c44968 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding3DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding3DTest.java @@ -42,8 +42,8 @@ public class KerasZeroPadding3DTest extends BaseDL4JTest { private final String LAYER_NAME = "zero_padding_3D_layer"; private final int[] ZERO_PADDING = new int[]{2, 3, 4}; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testZeroPadding3DLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasActivationLayer.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasActivationLayer.java index ad73a4c00..1f2496c16 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasActivationLayer.java +++ 
b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasActivationLayer.java @@ -38,10 +38,10 @@ public class KerasActivationLayer extends BaseDL4JTest { private final String ACTIVATION_DL4J = "identity"; private final String LAYER_NAME = "test_layer"; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testActivationLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasDenseTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasDenseTest.java index 2d5c4f864..c9c70e5ff 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasDenseTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasDenseTest.java @@ -41,10 +41,10 @@ import static org.junit.jupiter.api.Assertions.assertEquals; */ public class KerasDenseTest extends BaseDL4JTest { - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); private final String ACTIVATION_KERAS = "linear"; private final String 
ACTIVATION_DL4J = "identity"; diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasDropoutTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasDropoutTest.java index 322955813..afc7506e2 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasDropoutTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasDropoutTest.java @@ -42,10 +42,10 @@ public class KerasDropoutTest extends BaseDL4JTest { private final double DROPOUT_KERAS = 0.3; private final double DROPOUT_DL4J = 1 - DROPOUT_KERAS; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasMaskingTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasMaskingTest.java index f898209ce..8734351f4 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasMaskingTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasMaskingTest.java @@ -39,8 +39,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; public class KerasMaskingTest extends BaseDL4JTest { - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new 
Keras2LayerConfiguration(); + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasPermuteTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasPermuteTest.java index 50efe158a..93df0c5d1 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasPermuteTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasPermuteTest.java @@ -42,10 +42,10 @@ import static org.junit.jupiter.api.Assertions.assertEquals; */ public class KerasPermuteTest extends BaseDL4JTest { - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasRepeatVectorTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasRepeatVectorTest.java index 7390c8bc5..958e2baad 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasRepeatVectorTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasRepeatVectorTest.java @@ -38,12 +38,12 @@ import static org.junit.jupiter.api.Assertions.assertEquals; public class 
KerasRepeatVectorTest extends BaseDL4JTest { String LAYER_NAME = "repeat"; - private int REPEAT = 4; + private final int REPEAT = 4; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasReshapeTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasReshapeTest.java index 6e57fa561..68ca9b55c 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasReshapeTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasReshapeTest.java @@ -43,10 +43,10 @@ import static org.junit.jupiter.api.Assertions.assertEquals; */ public class KerasReshapeTest extends BaseDL4JTest { - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasSpatialDropout2DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasSpatialDropout2DTest.java index 
01d225c19..8234c29b2 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasSpatialDropout2DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasSpatialDropout2DTest.java @@ -42,10 +42,10 @@ public class KerasSpatialDropout2DTest extends BaseDL4JTest { private final double RATE_KERAS = 0.3; private final double RATE_DL4J = 1 - RATE_KERAS; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/embeddings/KerasEmbeddingTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/embeddings/KerasEmbeddingTest.java index d358bd61e..010f890b7 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/embeddings/KerasEmbeddingTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/embeddings/KerasEmbeddingTest.java @@ -43,10 +43,10 @@ public class KerasEmbeddingTest extends BaseDL4JTest { private final String INIT_KERAS = "glorot_normal"; private final int[] INPUT_SHAPE = new int[]{100, 20}; private static final boolean[] MASK_ZERO = new boolean[]{false, true}; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private 
final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testEmbeddingLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/local/KerasLocallyConnected1DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/local/KerasLocallyConnected1DTest.java index 1b2d9dfd7..42afecf32 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/local/KerasLocallyConnected1DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/local/KerasLocallyConnected1DTest.java @@ -58,10 +58,10 @@ public class KerasLocallyConnected1DTest extends BaseDL4JTest { private final String BORDER_MODE_VALID = "valid"; private final int VALID_PADDING = 0; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/local/KerasLocallyConnected2DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/local/KerasLocallyConnected2DTest.java index b703a482b..42981f1b6 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/local/KerasLocallyConnected2DTest.java +++ 
b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/local/KerasLocallyConnected2DTest.java @@ -61,10 +61,10 @@ public class KerasLocallyConnected2DTest extends BaseDL4JTest { private final String BORDER_MODE_VALID = "valid"; private final int[] VALID_PADDING = new int[]{0, 0}; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasAlphaDropoutTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasAlphaDropoutTest.java index fa3a2feae..1f35515bb 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasAlphaDropoutTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasAlphaDropoutTest.java @@ -42,10 +42,10 @@ public class KerasAlphaDropoutTest extends BaseDL4JTest { private final double RATE_KERAS = 0.3; private final double RATE_DL4J = 1 - RATE_KERAS; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git 
a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasGaussianDropoutTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasGaussianDropoutTest.java index e23356da2..eee0f1c8a 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasGaussianDropoutTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasGaussianDropoutTest.java @@ -42,10 +42,10 @@ public class KerasGaussianDropoutTest extends BaseDL4JTest { private final double RATE_KERAS = 0.3; private final double RATE_DL4J = 1 - RATE_KERAS; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasGaussianNoiseTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasGaussianNoiseTest.java index 4eb0042b5..6d8eb994c 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasGaussianNoiseTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasGaussianNoiseTest.java @@ -41,10 +41,10 @@ public class KerasGaussianNoiseTest extends BaseDL4JTest { String LAYER_NAME = "gaussian_noise"; private final double STDDEV = 0.3; - private Integer keras1 = 1; - private Integer keras2 = 2; - private 
Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/normalization/KerasBatchNormalizationTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/normalization/KerasBatchNormalizationTest.java index d8341de8f..ca84e5244 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/normalization/KerasBatchNormalizationTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/normalization/KerasBatchNormalizationTest.java @@ -41,10 +41,10 @@ public class KerasBatchNormalizationTest extends BaseDL4JTest { public static final String PARAM_NAME_BETA = "beta"; private final String LAYER_NAME = "batch_norm_layer"; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling1DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling1DTest.java index 25557e595..d504e626f 100644 --- 
a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling1DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling1DTest.java @@ -47,10 +47,10 @@ public class KerasPooling1DTest extends BaseDL4JTest { private final String BORDER_MODE_VALID = "valid"; private final int[] VALID_PADDING = new int[]{0, 0}; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testPooling1DLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling2DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling2DTest.java index 189cea1da..76aed15c1 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling2DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling2DTest.java @@ -49,10 +49,10 @@ public class KerasPooling2DTest extends BaseDL4JTest { private final String BORDER_MODE_VALID = "valid"; private final int[] VALID_PADDING = new int[]{0, 0}; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration 
conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testPooling2DLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling3DTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling3DTest.java index eefba12b4..44ed404eb 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling3DTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling3DTest.java @@ -49,10 +49,10 @@ public class KerasPooling3DTest extends BaseDL4JTest { private final String BORDER_MODE_VALID = "valid"; private final int[] VALID_PADDING = new int[]{0, 0, 0}; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testPooling3DLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasLSTMTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasLSTMTest.java index b88f3c94c..7ce6bf0b3 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasLSTMTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasLSTMTest.java @@ -61,12 +61,12 @@ public 
class KerasLSTMTest extends BaseDL4JTest { private final double DROPOUT_DL4J = 1 - DROPOUT_KERAS; private final int N_OUT = 13; - private Boolean[] returnSequences = new Boolean[]{true, false}; - private Boolean[] maskZero = new Boolean[]{true, false}; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Boolean[] returnSequences = new Boolean[]{true, false}; + private final Boolean[] maskZero = new Boolean[]{true, false}; + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testLstmLayer() throws Exception { @@ -177,8 +177,8 @@ public class KerasLSTMTest extends BaseDL4JTest { layerConfig.put(conf.getLAYER_FIELD_CONFIG(), config); layerConfig.put(conf.getLAYER_FIELD_KERAS_VERSION(), kerasVersion); layerConfig.put(conf.getLAYER_FIELD_INBOUND_NODES(), - Arrays.asList(Arrays.asList( - Arrays.asList("embedding")))); + Collections.singletonList(Collections.singletonList( + Collections.singletonList("embedding")))); KerasEmbedding embedding = getEmbedding(maskZero); Map previousLayers = Collections.singletonMap("embedding", embedding); diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasSimpleRnnTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasSimpleRnnTest.java index e68627e3e..c8e8287fb 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasSimpleRnnTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasSimpleRnnTest.java @@ -52,11 
+52,11 @@ public class KerasSimpleRnnTest extends BaseDL4JTest { private final double DROPOUT_DL4J = 1 - DROPOUT_KERAS; private final int N_OUT = 13; - private Boolean[] returnSequences = new Boolean[]{true, false}; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Boolean[] returnSequences = new Boolean[]{true, false}; + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testSimpleRnnLayer() throws Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/wrappers/KerasBidirectionalTest.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/wrappers/KerasBidirectionalTest.java index 7073a6cba..1aa8b0a81 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/wrappers/KerasBidirectionalTest.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/wrappers/KerasBidirectionalTest.java @@ -52,10 +52,10 @@ public class KerasBidirectionalTest extends BaseDL4JTest { private final int N_OUT = 13; private final String mode = "sum"; - private Integer keras1 = 1; - private Integer keras2 = 2; - private Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); - private Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); + private final Integer keras1 = 1; + private final Integer keras2 = 2; + private final Keras1LayerConfiguration conf1 = new Keras1LayerConfiguration(); + private final Keras2LayerConfiguration conf2 = new Keras2LayerConfiguration(); @Test public void testLstmLayer() throws 
Exception { diff --git a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/weights/KerasWeightSettingTests.java b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/weights/KerasWeightSettingTests.java index b40eb37c1..a5eb7f3a2 100644 --- a/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/weights/KerasWeightSettingTests.java +++ b/cavis-dnn/cavis-dnn-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/weights/KerasWeightSettingTests.java @@ -211,7 +211,6 @@ public class KerasWeightSettingTests extends BaseDL4JTest { int nOut = 12; int mb = 10; - ; int[] inShape = new int[]{5, 5, 5}; INDArray input = Nd4j.zeros(mb, inShape[0], inShape[1], inShape[2]); INDArray output = model.output(input); @@ -259,7 +258,7 @@ public class KerasWeightSettingTests extends BaseDL4JTest { ComputationGraph model = loadComputationalGraph(modelPath, false); // INDArray input[] = new INDArray[]{Nd4j.zeros(10, 4, 6, 6), Nd4j.zeros(10, 16, 3, 3)}; - INDArray input[] = new INDArray[]{Nd4j.zeros(10, 6, 6, 4), Nd4j.zeros(10, 3, 3, 16)}; + INDArray[] input = new INDArray[]{Nd4j.zeros(10, 6, 6, 4), Nd4j.zeros(10, 3, 3, 16)}; INDArray[] output = model.output(input); log.info(Arrays.toString(output[0].shape())); assertArrayEquals(new long[]{10, 3, 3, 32}, output[0].shape()); diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/bagofwords/vectorizer/BagOfWordsVectorizer.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/bagofwords/vectorizer/BagOfWordsVectorizer.java index 0928ee429..a83386d01 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/bagofwords/vectorizer/BagOfWordsVectorizer.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/bagofwords/vectorizer/BagOfWordsVectorizer.java @@ -42,6 +42,7 @@ import java.io.BufferedReader; import java.io.File; import java.io.InputStream; import java.io.InputStreamReader; 
+import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.List; @@ -62,7 +63,7 @@ public class BagOfWordsVectorizer extends BaseTextVectorizer { @Override public DataSet vectorize(InputStream is, String label) { try { - BufferedReader reader = new BufferedReader(new InputStreamReader(is, "UTF-8")); + BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8)); String line = ""; StringBuilder builder = new StringBuilder(); while ((line = reader.readLine()) != null) { diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/bagofwords/vectorizer/DefaultInputStreamCreator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/bagofwords/vectorizer/DefaultInputStreamCreator.java index b05583f49..a03ebb1a8 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/bagofwords/vectorizer/DefaultInputStreamCreator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/bagofwords/vectorizer/DefaultInputStreamCreator.java @@ -26,7 +26,7 @@ import org.deeplearning4j.text.documentiterator.DocumentIterator; import java.io.InputStream; public class DefaultInputStreamCreator implements InputStreamCreator { - private DocumentIterator iter; + private final DocumentIterator iter; public DefaultInputStreamCreator(DocumentIterator iter) { this.iter = iter; diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/bagofwords/vectorizer/TfidfVectorizer.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/bagofwords/vectorizer/TfidfVectorizer.java index b74e6b953..ba344994b 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/bagofwords/vectorizer/TfidfVectorizer.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/bagofwords/vectorizer/TfidfVectorizer.java @@ -45,6 +45,7 @@ import java.io.BufferedReader; import java.io.File; import java.io.InputStream; import java.io.InputStreamReader; +import 
java.nio.charset.StandardCharsets; import java.util.*; import java.util.concurrent.atomic.AtomicLong; @@ -60,7 +61,7 @@ public class TfidfVectorizer extends BaseTextVectorizer { @Override public DataSet vectorize(InputStream is, String label) { try { - BufferedReader reader = new BufferedReader(new InputStreamReader(is, "UTF-8")); + BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8)); String line = ""; StringBuilder builder = new StringBuilder(); while ((line = reader.readLine()) != null) { diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/iterator/BertIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/iterator/BertIterator.java index a004236e6..0478f1e14 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/iterator/BertIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/iterator/BertIterator.java @@ -671,9 +671,9 @@ public class BertIterator implements MultiDataSetIterator { private int listLength = 0; @Getter - private long[] segIdOnesFrom; + private final long[] segIdOnesFrom; private int cursor = 0; - private SentenceListProcessed sentenceListProcessed; + private final SentenceListProcessed sentenceListProcessed; private SentencePairListProcessed(int listLength) { this.listLength = listLength; @@ -701,14 +701,14 @@ public class BertIterator implements MultiDataSetIterator { } private static class SentenceListProcessed { - private int listLength; + private final int listLength; @Getter @Setter private int maxL; @Getter - private List, String>> tokensAndLabelList; + private final List, String>> tokensAndLabelList; private SentenceListProcessed(int listLength) { this.listLength = listLength; diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/iterator/CnnSentenceDataSetIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/iterator/CnnSentenceDataSetIterator.java index fd5fafe94..ccf669dc5 100644 --- 
a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/iterator/CnnSentenceDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/iterator/CnnSentenceDataSetIterator.java @@ -450,7 +450,7 @@ public class CnnSentenceDataSetIterator implements DataSetIterator { public static class Builder { - private Format format; + private final Format format; private LabeledSentenceProvider sentenceProvider = null; private WordVectors wordVectors; private TokenizerFactory tokenizerFactory = new DefaultTokenizerFactory(); diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/iterator/provider/LabelAwareConverter.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/iterator/provider/LabelAwareConverter.java index 9c0aa2018..555b15bda 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/iterator/provider/LabelAwareConverter.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/iterator/provider/LabelAwareConverter.java @@ -29,8 +29,8 @@ import org.nd4j.common.primitives.Pair; import java.util.List; public class LabelAwareConverter implements LabeledSentenceProvider { - private LabelAwareIterator backingIterator; - private List labels; + private final LabelAwareIterator backingIterator; + private final List labels; public LabelAwareConverter(@NonNull LabelAwareIterator iterator, @NonNull List labels) { this.backingIterator = iterator; diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/BatchItem.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/BatchItem.java index 590e02e88..f775dbf86 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/BatchItem.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/BatchItem.java @@ -29,7 +29,7 @@ public class BatchItem { private int[] 
windowWords; // CBOW only private boolean[] wordStatuses; - private long randomValue; + private final long randomValue; private double alpha; private int windowWordsLength; diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/BatchSequences.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/BatchSequences.java index 93c10bd5e..bce3fbeaa 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/BatchSequences.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/BatchSequences.java @@ -30,7 +30,7 @@ import java.util.concurrent.atomic.AtomicLong; @Slf4j public class BatchSequences { - private int batches; + private final int batches; List> buffer = new ArrayList<>(); @@ -56,7 +56,7 @@ public class BatchSequences { public List> get(int chunkNo) { List> retVal = new ArrayList<>(); - for (int i = 0 + chunkNo * batches; (i < batches + chunkNo * batches) && (i < buffer.size()); ++i) { + for (int i = chunkNo * batches; (i < batches + chunkNo * batches) && (i < buffer.size()); ++i) { BatchItem value = buffer.get(i); retVal.add(value); } diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/CBOW.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/CBOW.java index 80c5357ad..bb1ffd4a4 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/CBOW.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/CBOW.java @@ -103,7 +103,7 @@ public class CBOW implements ElementsLearningAlgorith logger.info("Initializing syn1Neg..."); ((InMemoryLookupTable) lookupTable).setUseHS(configuration.isUseHierarchicSoftmax()); ((InMemoryLookupTable) 
lookupTable).setNegative(configuration.getNegative()); - ((InMemoryLookupTable) lookupTable).resetWeights(false); + lookupTable.resetWeights(false); } } diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/SkipGram.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/SkipGram.java index 4fe3320a7..912cb29a9 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/SkipGram.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/SkipGram.java @@ -118,7 +118,7 @@ public class SkipGram implements ElementsLearningAlgo log.info("Initializing syn1Neg..."); ((InMemoryLookupTable) lookupTable).setUseHS(configuration.isUseHierarchicSoftmax()); ((InMemoryLookupTable) lookupTable).setNegative(configuration.getNegative()); - ((InMemoryLookupTable) lookupTable).resetWeights(false); + lookupTable.resetWeights(false); } } diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/sequence/DM.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/sequence/DM.java index 42ad78579..64dffe9de 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/sequence/DM.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/sequence/DM.java @@ -58,7 +58,7 @@ public class DM implements SequenceLearningAlgorithm< protected INDArray syn0, syn1, syn1Neg, table; - private CBOW cbow = new CBOW<>(); + private final CBOW cbow = new CBOW<>(); @Override public ElementsLearningAlgorithm getElementsLearningAlgorithm() { diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/loader/VectorsConfiguration.java 
b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/loader/VectorsConfiguration.java index c613671db..42273db58 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/loader/VectorsConfiguration.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/loader/VectorsConfiguration.java @@ -29,6 +29,7 @@ import com.fasterxml.jackson.databind.SerializationFeature; import java.io.IOException; import java.io.Serializable; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; @@ -118,7 +119,7 @@ public class VectorsConfiguration implements Serializable { public String toEncodedJson() { Base64 base64 = new Base64(Integer.MAX_VALUE); try { - return base64.encodeAsString(this.toJson().getBytes("UTF-8")); + return base64.encodeAsString(this.toJson().getBytes(StandardCharsets.UTF_8)); } catch (Exception e) { throw new RuntimeException(e); } diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/loader/WordVectorSerializer.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/loader/WordVectorSerializer.java index a2e3fb8c6..e521e2be7 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/loader/WordVectorSerializer.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/loader/WordVectorSerializer.java @@ -338,7 +338,7 @@ public class WordVectorSerializer { if (i < vec.length() - 1) builder.append(" "); } - writer.println(builder.toString()); + writer.println(builder); } } } @@ -530,11 +530,11 @@ public class WordVectorSerializer { try (PrintWriter writer = new PrintWriter(new FileWriter(tempFileFreqs))) { for (int i = 0; i < vectors.getVocab().numWords(); i++) { VocabWord word = vectors.getVocab().elementAtIndex(i); - StringBuilder builder = new StringBuilder(ReadHelper.encodeB64(word.getLabel())).append(" ") - 
.append(word.getElementFrequency()).append(" ") - .append(vectors.getVocab().docAppearedIn(word.getLabel())); + String builder = ReadHelper.encodeB64(word.getLabel()) + " " + + word.getElementFrequency() + " " + + vectors.getVocab().docAppearedIn(word.getLabel()); - writer.println(builder.toString().trim()); + writer.println(builder.trim()); } } @@ -830,7 +830,7 @@ public class WordVectorSerializer { List rows = new ArrayList<>(); while ((line = reader.readLine()) != null) { String[] split = line.split(" "); - double array[] = new double[split.length]; + double[] array = new double[split.length]; for (int i = 0; i < split.length; i++) { array[i] = Double.parseDouble(split[i]); } @@ -904,7 +904,7 @@ public class WordVectorSerializer { List rows = new ArrayList<>(); while ((line = reader.readLine()) != null) { String[] split = line.split(" "); - double array[] = new double[split.length]; + double[] array = new double[split.length]; for (int i = 0; i < split.length; i++) { array[i] = Double.parseDouble(split[i]); } @@ -1055,7 +1055,7 @@ public class WordVectorSerializer { InMemoryLookupTable lookupTable = - (InMemoryLookupTable) new InMemoryLookupTable.Builder() + new InMemoryLookupTable.Builder() .vectorLength(arrays.get(0).columns()).useAdaGrad(false).cache(vocabCache) .build(); Nd4j.clearNans(syn); @@ -1177,7 +1177,7 @@ public class WordVectorSerializer { PrintWriter printWriter = null; try { - printWriter = new PrintWriter(new OutputStreamWriter(new FileOutputStream(path), "UTF-8")); + printWriter = new PrintWriter(new OutputStreamWriter(new FileOutputStream(path), StandardCharsets.UTF_8)); } catch (Exception e) { throw new RuntimeException(e); } @@ -1265,7 +1265,7 @@ public class WordVectorSerializer { INDArray gradient = word.getHistoricalGradient(); if (gradient == null) gradient = Nd4j.zeros(word.getCodes().size()); - double ada[] = new double[gradient.columns()]; + double[] ada = new double[gradient.columns()]; for (int x = 0; x < gradient.columns(); x++) { 
ada[x] = gradient.getDouble(x); } @@ -1356,7 +1356,7 @@ public class WordVectorSerializer { // now, it's time to transfer syn0/syn1/syn1 neg values InMemoryLookupTable lookupTable = - (InMemoryLookupTable) new InMemoryLookupTable.Builder().negative(configuration.getNegative()) + new InMemoryLookupTable.Builder().negative(configuration.getNegative()) .useAdaGrad(configuration.isUseAdaGrad()).lr(configuration.getLearningRate()) .cache(vocabCache).vectorLength(configuration.getLayersSize()).build(); @@ -1409,7 +1409,7 @@ public class WordVectorSerializer { @Deprecated public static void writeWordVectors(@NonNull Word2Vec vec, @NonNull String path) throws IOException { BufferedWriter write = new BufferedWriter( - new OutputStreamWriter(new FileOutputStream(new File(path), false), "UTF-8")); + new OutputStreamWriter(new FileOutputStream(new File(path), false), StandardCharsets.UTF_8)); writeWordVectors(vec, write); @@ -1647,7 +1647,7 @@ public class WordVectorSerializer { lookupTable.setSyn0(syn); - return new Pair<>((InMemoryLookupTable) lookupTable, (VocabCache) cache); + return new Pair<>(lookupTable, cache); } catch (IOException readeTextStreamException) { throw new RuntimeException(readeTextStreamException); } finally { @@ -1741,7 +1741,7 @@ public class WordVectorSerializer { } InMemoryLookupTable lookupTable = - (InMemoryLookupTable) new InMemoryLookupTable.Builder() + new InMemoryLookupTable.Builder() .vectorLength(arrays.get(0).columns()).cache(cache).build(); INDArray syn = Nd4j.vstack(arrays); @@ -1749,7 +1749,7 @@ public class WordVectorSerializer { Nd4j.clearNans(syn); lookupTable.setSyn0(syn); - return fromPair(Pair.makePair((InMemoryLookupTable) lookupTable, (VocabCache) cache)); + return fromPair(Pair.makePair(lookupTable, cache)); } /** @@ -1925,11 +1925,11 @@ public class WordVectorSerializer { VectorsConfiguration configuration = vectors.getConfiguration(); String json = configuration.toJson().trim(); - zipfile.write(json.getBytes("UTF-8")); + 
zipfile.write(json.getBytes(StandardCharsets.UTF_8)); ZipEntry vocab = new ZipEntry(VOCAB_ENTRY); zipfile.putNextEntry(vocab); - zipfile.write(vocabCache.toJson().getBytes("UTF-8")); + zipfile.write(vocabCache.toJson().getBytes(StandardCharsets.UTF_8)); INDArray syn0Data = lookupTable.getSyn0(); ZipEntry syn0 = new ZipEntry(SYN0_ENTRY); @@ -2013,11 +2013,11 @@ public class WordVectorSerializer { byte[] bytes = IOUtils.toByteArray(zipfile); if (name.equals(CONFIG_ENTRY)) { - String content = new String(bytes, "UTF-8"); + String content = new String(bytes, StandardCharsets.UTF_8); configuration = VectorsConfiguration.fromJson(content); continue; } else if (name.equals(VOCAB_ENTRY)) { - String content = new String(bytes, "UTF-8"); + String content = new String(bytes, StandardCharsets.UTF_8); vocabCache = AbstractCache.fromJson(content); continue; } @@ -2068,12 +2068,12 @@ public class WordVectorSerializer { */ public static SequenceVectors readSequenceVectors( @NonNull SequenceElementFactory factory, @NonNull InputStream stream) throws IOException { - BufferedReader reader = new BufferedReader(new InputStreamReader(stream, "UTF-8")); + BufferedReader reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8)); // at first we load vectors configuration String line = reader.readLine(); VectorsConfiguration configuration = - VectorsConfiguration.fromJson(new String(Base64.decodeBase64(line), "UTF-8")); + VectorsConfiguration.fromJson(new String(Base64.decodeBase64(line), StandardCharsets.UTF_8)); AbstractCache vocabCache = new AbstractCache.Builder().build(); @@ -2092,7 +2092,7 @@ public class WordVectorSerializer { reader.close(); - InMemoryLookupTable lookupTable = (InMemoryLookupTable) new InMemoryLookupTable.Builder() + InMemoryLookupTable lookupTable = new InMemoryLookupTable.Builder() .vectorLength(rows.get(0).columns()).cache(vocabCache).build(); // fix: add vocab cache /* @@ -2225,7 +2225,7 @@ public class WordVectorSerializer { Base64 
base64 = new Base64(Integer.MAX_VALUE); try { String json = mapper.writeValueAsString(this); - String output = base64.encodeAsString(json.getBytes("UTF-8")); + String output = base64.encodeAsString(json.getBytes(StandardCharsets.UTF_8)); return output; } catch (Exception e) { throw new RuntimeException(e); @@ -2241,7 +2241,7 @@ public class WordVectorSerializer { protected static ElementPair fromEncodedJson(String encoded) { ObjectMapper mapper = SequenceElement.mapper(); try { - String decoded = new String(Base64.decodeBase64(encoded), "UTF-8"); + String decoded = new String(Base64.decodeBase64(encoded), StandardCharsets.UTF_8); return mapper.readValue(decoded, ElementPair.class); } catch (IOException e) { throw new RuntimeException(e); @@ -2850,8 +2850,8 @@ public class WordVectorSerializer { } protected static class CSVReader implements Reader { - private BufferedReader reader; - private AtomicInteger idxCounter = new AtomicInteger(0); + private final BufferedReader reader; + private final AtomicInteger idxCounter = new AtomicInteger(0); private String nextLine; protected CSVReader(@NonNull File file) { @@ -3202,12 +3202,12 @@ public class WordVectorSerializer { bytes[i] = b; b = dis.readByte(); if (i == 49) { - sb.append(new String(bytes, "UTF-8")); + sb.append(new String(bytes, StandardCharsets.UTF_8)); i = -1; bytes = new byte[MAX_SIZE]; } } - sb.append(new String(bytes, 0, i + 1, "UTF-8")); + sb.append(new String(bytes, 0, i + 1, StandardCharsets.UTF_8)); return sb.toString(); } @@ -3221,7 +3221,7 @@ public class WordVectorSerializer { */ public static String encodeB64(String word) { try { - return B64 + Base64.encodeBase64String(word.getBytes("UTF-8")).replaceAll("(\r|\n)", ""); + return B64 + Base64.encodeBase64String(word.getBytes(StandardCharsets.UTF_8)).replaceAll("(\r|\n)", ""); } catch (Exception e) { throw new RuntimeException(e); } @@ -3238,7 +3238,7 @@ public class WordVectorSerializer { if (word.startsWith(B64)) { String arp = 
word.replaceFirst(B64, ""); try { - return new String(Base64.decodeBase64(arp), "UTF-8"); + return new String(Base64.decodeBase64(arp), StandardCharsets.UTF_8); } catch (Exception e) { throw new RuntimeException(e); } diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/reader/impl/BasicModelUtils.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/reader/impl/BasicModelUtils.java index 39f8a3df0..9f27cea83 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/reader/impl/BasicModelUtils.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/reader/impl/BasicModelUtils.java @@ -110,9 +110,8 @@ public class BasicModelUtils implements ModelUtils @Override public Collection wordsNearest(String label, int n) { - List collection = new ArrayList<>(wordsNearest(Arrays.asList(label), new ArrayList(), n + 1)); - if (collection.contains(label)) - collection.remove(label); + List collection = new ArrayList<>(wordsNearest(Collections.singletonList(label), new ArrayList(), n + 1)); + collection.remove(label); while (collection.size() > n) collection.remove(collection.size() - 1); @@ -147,7 +146,7 @@ public class BasicModelUtils implements ModelUtils } else { String[] split = s.split(" "); List positive = Arrays.asList(split[1], split[2]); - List negative = Arrays.asList(split[0]); + List negative = Collections.singletonList(split[0]); String predicted = split[3]; String w = wordsNearest(positive, negative, 1).iterator().next(); if (predicted.equals(w)) diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/reader/impl/FlatModelUtils.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/reader/impl/FlatModelUtils.java index 4725042e9..53261ced2 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/reader/impl/FlatModelUtils.java +++ 
b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/reader/impl/FlatModelUtils.java @@ -45,8 +45,7 @@ public class FlatModelUtils extends BasicModelUtils wordsNearest(String label, int n) { Collection collection = wordsNearest(lookupTable.vector(label), n); - if (collection.contains(label)) - collection.remove(label); + collection.remove(label); return collection; } diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/reader/impl/TreeModelUtils.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/reader/impl/TreeModelUtils.java index e2707ff0b..e0d354775 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/reader/impl/TreeModelUtils.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/reader/impl/TreeModelUtils.java @@ -66,9 +66,8 @@ public class TreeModelUtils extends BasicModelUtils(); - Collection collection = wordsNearest(Arrays.asList(label), new ArrayList(), n + 1); - if (collection.contains(label)) - collection.remove(label); + Collection collection = wordsNearest(Collections.singletonList(label), new ArrayList(), n + 1); + collection.remove(label); return collection; } diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/wordvectors/WordVectorsImpl.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/wordvectors/WordVectorsImpl.java index fb5156441..a4a9917fb 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/wordvectors/WordVectorsImpl.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/embeddings/wordvectors/WordVectorsImpl.java @@ -227,7 +227,7 @@ public class WordVectorsImpl implements WordVectors { */ @Override public INDArray getWordVectors(@NonNull Collection labels) { - int indexes[] = new int[labels.size()]; + int[] indexes = new int[labels.size()]; int cnt = 0; boolean 
useIndexUnknown = useUnknown && vocab.containsWord(getUNK()); diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/fasttext/FastText.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/fasttext/FastText.java index 6e753860f..6a847bbef 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/fasttext/FastText.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/fasttext/FastText.java @@ -124,7 +124,7 @@ public class FastText implements WordVectors, Serializable { private static class ArgsFactory { - private List args = new ArrayList<>(); + private final List args = new ArrayList<>(); private void add(String label, String value) { args.add(label); diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/node2vec/Node2Vec.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/node2vec/Node2Vec.java index f5777ebb0..1dce091e3 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/node2vec/Node2Vec.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/node2vec/Node2Vec.java @@ -51,7 +51,7 @@ public class Node2Vec extends Seque } public static class Builder extends SequenceVectors.Builder { - private GraphWalker walker; + private final GraphWalker walker; public Builder(@NonNull GraphWalker walker, @NonNull VectorsConfiguration configuration) { this.walker = walker; diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/paragraphvectors/ParagraphVectors.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/paragraphvectors/ParagraphVectors.java index c4558c331..bcd1983ec 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/paragraphvectors/ParagraphVectors.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/paragraphvectors/ParagraphVectors.java @@ -981,14 +981,14 @@ public class ParagraphVectors extends Word2Vec { if 
(docIter instanceof LabelAwareDocumentIterator) this.labelAwareIterator = - new DocumentIteratorConverter((LabelAwareDocumentIterator) docIter, labelsSource); + new DocumentIteratorConverter(docIter, labelsSource); else this.labelAwareIterator = new DocumentIteratorConverter(docIter, labelsSource); } else if (sentenceIterator != null) { // we have SentenceIterator. Mechanics will be the same, as above if (sentenceIterator instanceof LabelAwareSentenceIterator) this.labelAwareIterator = new SentenceIteratorConverter( - (LabelAwareSentenceIterator) sentenceIterator, labelsSource); + sentenceIterator, labelsSource); else this.labelAwareIterator = new SentenceIteratorConverter(sentenceIterator, labelsSource); } else if (labelAwareIterator != null) { diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/SequenceVectors.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/SequenceVectors.java index c6752ae75..cbb20dc05 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/SequenceVectors.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/SequenceVectors.java @@ -1081,9 +1081,9 @@ public class SequenceVectors extends WordVectorsImpl< // private final AtomicLong linesCounter; private final int limitUpper; private final int limitLower; - private AtomicBoolean isRunning = new AtomicBoolean(true); - private AtomicLong nextRandom; - private Collection stopList; + private final AtomicBoolean isRunning = new AtomicBoolean(true); + private final AtomicLong nextRandom; + private final Collection stopList; private static final int DEFAULT_BUFFER_SIZE = 512; @@ -1220,7 +1220,7 @@ public class SequenceVectors extends WordVectorsImpl< .cyclesBeforeInitialization(3) .initialSize(25L * 1024L * 1024L) .build(); - val workspace_id = "sequence_vectors_training_" + java.util.UUID.randomUUID().toString(); + val workspace_id = 
"sequence_vectors_training_" + UUID.randomUUID(); Nd4j.getAffinityManager().getDeviceForCurrentThread(); while (digitizer.hasMoreLines()) { diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/graph/huffman/GraphHuffman.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/graph/huffman/GraphHuffman.java index 7eaea0f30..e95a2cb88 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/graph/huffman/GraphHuffman.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/graph/huffman/GraphHuffman.java @@ -117,7 +117,7 @@ public class GraphHuffman implements BinaryTree { if (value) return (in | 1L << bitNum); //Bit mask |: 00010000 else - return (in & ~(1 << bitNum)); //Bit mask &: 11101111 + return (in & ~(1L << bitNum)); //Bit mask &: 11101111 } private static boolean getBit(long in, int bitNum) { diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/graph/primitives/IGraph.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/graph/primitives/IGraph.java index 29f8e3e08..8f683218a 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/graph/primitives/IGraph.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/graph/primitives/IGraph.java @@ -30,33 +30,33 @@ import java.util.Random; public interface IGraph { /** Number of vertices in the graph */ - public int numVertices(); + int numVertices(); /**Get a vertex in the graph for a given index * @param idx integer index of the vertex to get. 
must be in range 0 to numVertices() * @return vertex */ - public Vertex getVertex(int idx); + Vertex getVertex(int idx); /** Get multiple vertices in the graph * @param indexes the indexes of the vertices to retrieve * @return list of vertices */ - public List> getVertices(int[] indexes); + List> getVertices(int[] indexes); /** Get multiple vertices in the graph, with secified indices * @param from first vertex to get, inclusive * @param to last vertex to get, inclusive * @return list of vertices */ - public List> getVertices(int from, int to); + List> getVertices(int from, int to); /** Add an edge to the graph. */ - public void addEdge(Edge edge); + void addEdge(Edge edge); /** Convenience method for adding an edge (directed or undirected) to graph */ - public void addEdge(int from, int to, E value, boolean directed); + void addEdge(int from, int to, E value, boolean directed); /** Returns a list of edges for a vertex with a given index * For undirected graphs, returns all edges incident on the vertex @@ -64,7 +64,7 @@ public interface IGraph { * @param vertex index of the vertex to * @return list of edges for this vertex */ - public List> getEdgesOut(int vertex); + List> getEdgesOut(int vertex); /** Returns the degree of the vertex.
* For undirected graphs, this is just the degree.
@@ -72,7 +72,7 @@ public interface IGraph { * @param vertex vertex to get degree for * @return vertex degree */ - public int getVertexDegree(int vertex); + int getVertexDegree(int vertex); /** Randomly sample a vertex connected to a given vertex. Sampling is done uniformly at random. * Specifically, returns a random X such that either a directed edge (vertex -> X) exists, @@ -84,7 +84,7 @@ public interface IGraph { * @throws NoEdgesException thrown if the specified vertex has no edges, or no outgoing edges (in the case * of a directed graph). */ - public Vertex getRandomConnectedVertex(int vertex, Random rng) throws NoEdgesException; + Vertex getRandomConnectedVertex(int vertex, Random rng) throws NoEdgesException; /**Get a list of all of the vertices that the specified vertex is connected to
* Specifically, for undirected graphs return list of all X such that (vertex -- X) exists
@@ -92,7 +92,7 @@ public interface IGraph { * @param vertex Index of the vertex * @return list of vertices that the specified vertex is connected to */ - public List> getConnectedVertices(int vertex); + List> getConnectedVertices(int vertex); /**Return an array of indexes of vertices that the specified vertex is connected to.
* Specifically, for undirected graphs return int[] of all X.vertexID() such that (vertex -- X) exists
@@ -101,5 +101,5 @@ public interface IGraph { * @return list of vertices that the specified vertex is connected to * @see #getConnectedVertices(int) */ - public int[] getConnectedVertexIndices(int vertex); + int[] getConnectedVertexIndices(int vertex); } diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/graph/walkers/impl/NearestVertexWalker.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/graph/walkers/impl/NearestVertexWalker.java index e6f245d47..28f6058e6 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/graph/walkers/impl/NearestVertexWalker.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/graph/walkers/impl/NearestVertexWalker.java @@ -46,7 +46,7 @@ public class NearestVertexWalker implements GraphWalk protected Random rng; protected int depth; - private AtomicInteger position = new AtomicInteger(0); + private final AtomicInteger position = new AtomicInteger(0); protected NearestVertexWalker() { @@ -259,7 +259,7 @@ public class NearestVertexWalker implements GraphWalk } protected class VertexComparator implements Comparator> { - private IGraph graph; + private final IGraph graph; public VertexComparator(@NonNull IGraph graph) { this.graph = graph; diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/graph/walkers/impl/PopularityWalker.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/graph/walkers/impl/PopularityWalker.java index 433805d90..58f3e13cb 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/graph/walkers/impl/PopularityWalker.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/graph/walkers/impl/PopularityWalker.java @@ -181,7 +181,7 @@ public class PopularityWalker extends RandomWalker } break; case PROPORTIONAL: { - double norm[] 
= MathArrays.normalizeArray(weights, 1); + double[] norm = MathArrays.normalizeArray(weights, 1); double prob = rng.nextDouble(); double floor = 0.0; for (int b = 0; b < weights.length; b++) { diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/iterators/AbstractSequenceIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/iterators/AbstractSequenceIterator.java index ad93ca7aa..b5b48bfc0 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/iterators/AbstractSequenceIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/iterators/AbstractSequenceIterator.java @@ -30,7 +30,7 @@ import java.util.concurrent.atomic.AtomicInteger; public class AbstractSequenceIterator implements SequenceIterator { - private Iterable> underlyingIterable; + private final Iterable> underlyingIterable; private Iterator> currentIterator; // used to tag each sequence with own Id @@ -71,7 +71,7 @@ public class AbstractSequenceIterator implements Sequ } public static class Builder { - private Iterable> underlyingIterable; + private final Iterable> underlyingIterable; /** * Builds AbstractSequenceIterator on top of Iterable object diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/listeners/ScoreListener.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/listeners/ScoreListener.java index 29b2340cc..3b209dc05 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/listeners/ScoreListener.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/listeners/ScoreListener.java @@ -44,10 +44,7 @@ public class ScoreListener implements VectorsListener @Override public boolean validateEvent(ListenerEvent event, long argument) { - if (event == targetEvent) - return true; - - return false; + 
return event == targetEvent; } @Override diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/listeners/SerializingListener.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/listeners/SerializingListener.java index d6b711ce2..9b031ae76 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/listeners/SerializingListener.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/listeners/SerializingListener.java @@ -41,7 +41,7 @@ public class SerializingListener implements VectorsLi private ListenerEvent targetEvent = ListenerEvent.EPOCH; private int targetFrequency = 100000; - private Semaphore locker = new Semaphore(1); + private final Semaphore locker = new Semaphore(1); protected SerializingListener() {} @@ -60,10 +60,7 @@ public class SerializingListener implements VectorsLi */ locker.acquire(); - if (event == targetEvent && argument % targetFrequency == 0) { - return true; - } else - return false; + return event == targetEvent && argument % targetFrequency == 0; } catch (Exception e) { throw new RuntimeException(e); } finally { @@ -85,9 +82,7 @@ public class SerializingListener implements VectorsLi SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); - StringBuilder builder = new StringBuilder(targetFolder.getAbsolutePath()); - builder.append("/").append(modelPrefix).append("_").append(sdf.format(new Date())).append(".seqvec"); - File targetFile = new File(builder.toString()); + File targetFile = new File(targetFolder.getAbsolutePath() + "/" + modelPrefix + "_" + sdf.format(new Date()) + ".seqvec"); if (useBinarySerialization) { SerializationUtils.saveObject(sequenceVectors, targetFile); @@ -104,7 +99,7 @@ public class SerializingListener implements VectorsLi public static class Builder { private File targetFolder = new File("./"); - private String modelPrefix = "Model_"; + private final String 
modelPrefix = "Model_"; private boolean useBinarySerialization = true; private ListenerEvent targetEvent = ListenerEvent.EPOCH; private int targetFrequency = 100000; diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/sequence/Sequence.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/sequence/Sequence.java index 71de077b1..e80db82db 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/sequence/Sequence.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/sequence/Sequence.java @@ -204,7 +204,7 @@ public class Sequence implements Serializable { Sequence sequence = (Sequence) o; - return elements != null ? elements.equals(sequence.elements) : sequence.elements == null; + return Objects.equals(elements, sequence.elements); } diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/transformers/impl/GraphTransformer.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/transformers/impl/GraphTransformer.java index 8e83448d8..04b64fa17 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/transformers/impl/GraphTransformer.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/transformers/impl/GraphTransformer.java @@ -86,7 +86,7 @@ public class GraphTransformer implements Iterable>() { - private GraphWalker walker = GraphTransformer.this.walker; + private final GraphWalker walker = GraphTransformer.this.walker; @Override public void remove() { diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/transformers/impl/iterables/ParallelTransformerIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/transformers/impl/iterables/ParallelTransformerIterator.java index d68fc6095..2f0064038 100644 --- 
a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/transformers/impl/iterables/ParallelTransformerIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/sequencevectors/transformers/impl/iterables/ParallelTransformerIterator.java @@ -107,8 +107,8 @@ public class ParallelTransformerIterator extends BasicTransformerIterator { private static class CallableTransformer implements Callable> { - private LabelledDocument document; - private SentenceTransformer transformer; + private final LabelledDocument document; + private final SentenceTransformer transformer; public CallableTransformer(LabelledDocument document, SentenceTransformer transformer) { this.transformer = transformer; diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/Huffman.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/Huffman.java index 53a0b34f1..c773722d8 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/Huffman.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/Huffman.java @@ -38,7 +38,7 @@ public class Huffman { public final int MAX_CODE_LENGTH; private volatile boolean buildTrigger = false; - private Logger logger = LoggerFactory.getLogger(Huffman.class); + private final Logger logger = LoggerFactory.getLogger(Huffman.class); public Huffman(Collection words) { this(words, 40); @@ -63,7 +63,7 @@ public class Huffman { }); } - private List words; + private final List words; public void build() { buildTrigger = true; diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/StaticWord2Vec.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/StaticWord2Vec.java index 10c71e58b..46841f364 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/StaticWord2Vec.java +++ 
b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/StaticWord2Vec.java @@ -38,7 +38,7 @@ import java.util.concurrent.ConcurrentHashMap; @Slf4j public class StaticWord2Vec implements WordVectors { - private List> cacheWrtDevice = new ArrayList<>(); + private final List> cacheWrtDevice = new ArrayList<>(); private AbstractStorage storage; private long cachePerDevice = 0L; private VocabCache vocabCache; @@ -380,9 +380,9 @@ public class StaticWord2Vec implements WordVectors { public static class Builder { - private AbstractStorage storage; + private final AbstractStorage storage; private long cachePerDevice = 0L; - private VocabCache vocabCache; + private final VocabCache vocabCache; /** * diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/StreamWork.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/StreamWork.java index 8ab9a7071..e6272597b 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/StreamWork.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/StreamWork.java @@ -25,7 +25,7 @@ import java.io.Serializable; import java.util.concurrent.atomic.AtomicInteger; public class StreamWork implements Serializable { - private InputStreamCreator is; + private final InputStreamCreator is; private AtomicInteger count = new AtomicInteger(0); diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/VocabWork.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/VocabWork.java index ee8ed1ca2..aa4cc5556 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/VocabWork.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/VocabWork.java @@ -22,7 +22,9 @@ package org.deeplearning4j.models.word2vec; import java.io.Serializable; import java.util.Arrays; +import java.util.Collections; import java.util.List; +import 
java.util.Objects; import java.util.concurrent.atomic.AtomicInteger; public class VocabWork implements Serializable { @@ -40,7 +42,7 @@ public class VocabWork implements Serializable { public VocabWork(AtomicInteger count, String work, boolean stem, String label) { - this(count, work, stem, Arrays.asList(label)); + this(count, work, stem, Collections.singletonList(label)); } public VocabWork(AtomicInteger count, String work, boolean stem, List label) { @@ -97,11 +99,11 @@ public class VocabWork implements Serializable { if (stem != vocabWork.stem) return false; - if (count != null ? !count.equals(vocabWork.count) : vocabWork.count != null) + if (!Objects.equals(count, vocabWork.count)) return false; - if (label != null ? !label.equals(vocabWork.label) : vocabWork.label != null) + if (!Objects.equals(label, vocabWork.label)) return false; - return !(work != null ? !work.equals(vocabWork.work) : vocabWork.work != null); + return !(!Objects.equals(work, vocabWork.work)); } diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/iterator/Word2VecDataFetcher.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/iterator/Word2VecDataFetcher.java index 71bd91a75..e7d740dff 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/iterator/Word2VecDataFetcher.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/iterator/Word2VecDataFetcher.java @@ -51,15 +51,15 @@ public class Word2VecDataFetcher implements DataSetFetcher { */ private static final long serialVersionUID = 3245955804749769475L; private transient Iterator files; - private Word2Vec vec; - private static Pattern begin = Pattern.compile("<[A-Z]+>"); - private static Pattern end = Pattern.compile(""); + private final Word2Vec vec; + private static final Pattern begin = Pattern.compile("<[A-Z]+>"); + private static final Pattern end = Pattern.compile(""); private List labels = new ArrayList<>(); 
private int batch; - private List cache = new ArrayList<>(); + private final List cache = new ArrayList<>(); private static final Logger log = LoggerFactory.getLogger(Word2VecDataFetcher.class); private int totalExamples; - private String path; + private final String path; public Word2VecDataFetcher(String path, Word2Vec vec, List labels) { if (vec == null || labels == null || labels.isEmpty()) diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/iterator/Word2VecDataSetIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/iterator/Word2VecDataSetIterator.java index d06f92e7b..99d90793f 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/iterator/Word2VecDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/iterator/Word2VecDataSetIterator.java @@ -42,10 +42,10 @@ import java.util.concurrent.CopyOnWriteArrayList; @Slf4j public class Word2VecDataSetIterator implements DataSetIterator { - private Word2Vec vec; - private LabelAwareSentenceIterator iter; - private List cachedWindow; - private List labels; + private final Word2Vec vec; + private final LabelAwareSentenceIterator iter; + private final List cachedWindow; + private final List labels; private int batch = 10; @Getter private DataSetPreProcessor preProcessor; diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/VocabConstructor.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/VocabConstructor.java index 9c3417878..7cbe51806 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/VocabConstructor.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/VocabConstructor.java @@ -48,7 +48,7 @@ public class VocabConstructor { private boolean useAdaGrad = false; private boolean fetchLabels = false; private int 
limit; - private AtomicLong seqCount = new AtomicLong(0); + private final AtomicLong seqCount = new AtomicLong(0); private InvertedIndex index; private boolean enableScavenger = false; private T unk; @@ -453,7 +453,7 @@ public class VocabConstructor { } public static class Builder { - private List> sources = new ArrayList<>(); + private final List> sources = new ArrayList<>(); private VocabCache cache; private Collection stopWords = new ArrayList<>(); private boolean useAdaGrad = false; @@ -608,7 +608,7 @@ public class VocabConstructor { private final Sequence document; private final AbstractCache targetVocab; private final AtomicLong loopCounter; - private AtomicBoolean done = new AtomicBoolean(false); + private final AtomicBoolean done = new AtomicBoolean(false); public VocabRunnable(@NonNull AbstractCache targetVocab, @NonNull Sequence sequence, @NonNull AtomicLong finalCounter, @NonNull AtomicLong loopCounter) { diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/VocabularyHolder.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/VocabularyHolder.java index 668305d1c..eb4fe89f5 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/VocabularyHolder.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/VocabularyHolder.java @@ -39,7 +39,7 @@ public class VocabularyHolder implements Serializable { private final Map vocabulary = new ConcurrentHashMap<>(); // idxMap marked as transient, since there's no real reason to save this data on serialization - private transient Map idxMap = new ConcurrentHashMap<>(); + private final transient Map idxMap = new ConcurrentHashMap<>(); private int minWordFrequency = 0; private boolean hugeModelExpected = false; private int retentionDelay = 3; @@ -52,11 +52,11 @@ public class VocabularyHolder implements Serializable { private long totalWordOccurrences = 0; // for 
scavenger mechanics we need to know the actual number of words being added - private transient AtomicLong hiddenWordsCounter = new AtomicLong(0); + private final transient AtomicLong hiddenWordsCounter = new AtomicLong(0); - private AtomicInteger totalWordCount = new AtomicInteger(0); + private final AtomicInteger totalWordCount = new AtomicInteger(0); - private Logger logger = LoggerFactory.getLogger(VocabularyHolder.class); + private final Logger logger = LoggerFactory.getLogger(VocabularyHolder.class); private static final int MAX_CODE_LENGTH = 40; @@ -285,7 +285,6 @@ public class VocabularyHolder implements Serializable { && hiddenWordsCounter.incrementAndGet() % scavengerThreshold == 0) activateScavenger(); - return; } } @@ -410,9 +409,9 @@ public class VocabularyHolder implements Serializable { int i; // get vocabulary as sorted list List vocab = this.words(); - int count[] = new int[vocab.size() * 2 + 1]; - int parent_node[] = new int[vocab.size() * 2 + 1]; - byte binary[] = new byte[vocab.size() * 2 + 1]; + int[] count = new int[vocab.size() * 2 + 1]; + int[] parent_node = new int[vocab.size() * 2 + 1]; + byte[] binary = new byte[vocab.size() * 2 + 1]; // at this point vocab is sorted, with descending order for (int a = 0; a < vocab.size(); a++) diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/VocabularyWord.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/VocabularyWord.java index 46c2103af..46d4b2f28 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/VocabularyWord.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/VocabularyWord.java @@ -29,6 +29,7 @@ import com.fasterxml.jackson.databind.SerializationFeature; import java.io.IOException; import java.io.Serializable; +import java.util.Objects; @Data public class VocabularyWord implements Serializable { @@ -84,7 +85,7 @@ public class 
VocabularyWord implements Serializable { VocabularyWord word1 = (VocabularyWord) o; - return word != null ? word.equals(word1.word) : word1.word == null; + return Objects.equals(word, word1.word); } diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/inmemory/AbstractCache.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/inmemory/AbstractCache.java index 7c96a3ae6..641f32f9e 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/inmemory/AbstractCache.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/inmemory/AbstractCache.java @@ -70,14 +70,14 @@ public class AbstractCache implements VocabCache { // we're using for compatibility & failproof reasons: it's easier to store unique labels then abstract objects of unknown size // TODO: wtf this one is doing here? - private List stopWords = new ArrayList<>(); // stop words + private final List stopWords = new ArrayList<>(); // stop words // this variable defines how often scavenger will be activated private int scavengerThreshold = 3000000; // ser private int retentionDelay = 3; // ser // for scavenger mechanics we need to know the actual number of words being added - private transient AtomicLong hiddenWordsCounter = new AtomicLong(0); + private final transient AtomicLong hiddenWordsCounter = new AtomicLong(0); private final AtomicLong totalWordCount = new AtomicLong(0); // ser @@ -180,7 +180,7 @@ public class AbstractCache implements VocabCache { */ public boolean containsElement(T element) { // FIXME: lolwtf - return vocabulary.values().contains(element); + return vocabulary.containsValue(element); } /** diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/inmemory/InMemoryLookupCache.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/inmemory/InMemoryLookupCache.java 
index 349c827fb..96248536f 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/inmemory/InMemoryLookupCache.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/models/word2vec/wordstore/inmemory/InMemoryLookupCache.java @@ -32,6 +32,7 @@ import java.io.InputStream; import java.io.Serializable; import java.util.Collection; import java.util.Map; +import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; @@ -330,9 +331,7 @@ public class InMemoryLookupCache implements VocabCache, Serializable @Override public synchronized boolean addToken(VocabWord word) { - if (null == tokens.put(word.getLabel(), word)) - return true; - return false; + return null == tokens.put(word.getLabel(), word); } @Override @@ -448,11 +447,11 @@ public class InMemoryLookupCache implements VocabCache, Serializable if (numDocs != that.numDocs) return false; - if (wordIndex != null ? !wordIndex.equals(that.wordIndex) : that.wordIndex != null) + if (!Objects.equals(wordIndex, that.wordIndex)) return false; - if (wordFrequencies != null ? !wordFrequencies.equals(that.wordFrequencies) : that.wordFrequencies != null) + if (!Objects.equals(wordFrequencies, that.wordFrequencies)) return false; - if (docFrequencies != null ? 
!docFrequencies.equals(that.docFrequencies) : that.docFrequencies != null) + if (!Objects.equals(docFrequencies, that.docFrequencies)) return false; if (vocabWords().equals(that.vocabWords())) return true; diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/documentiterator/FileDocumentIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/documentiterator/FileDocumentIterator.java index 990f5e629..2f9bc3fef 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/documentiterator/FileDocumentIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/documentiterator/FileDocumentIterator.java @@ -42,7 +42,7 @@ public class FileDocumentIterator implements DocumentIterator { private Iterator iter; private LineIterator lineIterator; - private File rootDir; + private final File rootDir; private static final Logger log = LoggerFactory.getLogger(FileDocumentIterator.class); public FileDocumentIterator(String path) { @@ -116,7 +116,7 @@ public class FileDocumentIterator implements DocumentIterator { if (rootDir.isDirectory()) iter = FileUtils.iterateFiles(rootDir, null, true); else - iter = Arrays.asList(rootDir).iterator(); + iter = Collections.singletonList(rootDir).iterator(); } diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/documentiterator/FilenamesLabelAwareIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/documentiterator/FilenamesLabelAwareIterator.java index 98eba8752..99553ae8c 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/documentiterator/FilenamesLabelAwareIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/documentiterator/FilenamesLabelAwareIterator.java @@ -114,8 +114,8 @@ public class FilenamesLabelAwareIterator implements LabelAwareIterator { public static class Builder { protected List foldersToScan = new ArrayList<>(); - private List fileList = new 
ArrayList<>(); - private List labels = new ArrayList<>(); + private final List fileList = new ArrayList<>(); + private final List labels = new ArrayList<>(); private boolean absPath = false; public Builder() { diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/documentiterator/LabelsSource.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/documentiterator/LabelsSource.java index 8088eadc0..1dc09a26a 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/documentiterator/LabelsSource.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/documentiterator/LabelsSource.java @@ -32,13 +32,13 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; public class LabelsSource implements Serializable { - private AtomicLong counter = new AtomicLong(0); + private final AtomicLong counter = new AtomicLong(0); @Setter private String template; private boolean useFormatter = false; private List labels; private long maxCount = 0; - private Set uniq = Collections.newSetFromMap(new ConcurrentHashMap()); + private final Set uniq = Collections.newSetFromMap(new ConcurrentHashMap()); public LabelsSource() { diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/inputsanitation/InputHomogenization.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/inputsanitation/InputHomogenization.java index 049f0c047..2d34687da 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/inputsanitation/InputHomogenization.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/inputsanitation/InputHomogenization.java @@ -25,7 +25,7 @@ import java.text.Normalizer.Form; import java.util.List; public class InputHomogenization { - private String input; + private final String input; private List ignoreCharactersContaining; private boolean preserveCase; diff --git 
a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/movingwindow/ContextLabelRetriever.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/movingwindow/ContextLabelRetriever.java index 4c5ea7506..7772413b6 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/movingwindow/ContextLabelRetriever.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/movingwindow/ContextLabelRetriever.java @@ -33,8 +33,8 @@ import java.util.List; public class ContextLabelRetriever { - private static String BEGIN_LABEL = "<([A-Za-z]+|\\d+)>"; - private static String END_LABEL = ""; + private static final String BEGIN_LABEL = "<([A-Za-z]+|\\d+)>"; + private static final String END_LABEL = ""; private ContextLabelRetriever() {} @@ -66,7 +66,7 @@ public class ContextLabelRetriever { //no labels; add these as NONE and begin the new label if (!currTokens.isEmpty()) { - tokensWithSameLabel.add(new Pair<>("NONE", (List) new ArrayList<>(currTokens))); + tokensWithSameLabel.add(new Pair<>("NONE", new ArrayList<>(currTokens))); currTokens.clear(); } @@ -85,7 +85,7 @@ public class ContextLabelRetriever { Preconditions.checkState(!endLabel.isEmpty(), "End label is empty!"); Preconditions.checkState(currLabel.equals(endLabel), "Current label begin and end did not match for the parse. 
Was: %s ending with %s", currLabel, endLabel); - tokensWithSameLabel.add(new Pair<>(currLabel, (List) new ArrayList<>(currTokens))); + tokensWithSameLabel.add(new Pair<>(currLabel, new ArrayList<>(currTokens))); currTokens.clear(); //clear out the tokens @@ -96,7 +96,7 @@ public class ContextLabelRetriever { //no labels; add these as NONE and begin the new label if (!currTokens.isEmpty()) { - tokensWithSameLabel.add(new Pair<>("none", (List) new ArrayList<>(currTokens))); + tokensWithSameLabel.add(new Pair<>("none", new ArrayList<>(currTokens))); currTokens.clear(); } diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/movingwindow/Window.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/movingwindow/Window.java index 89244c7ba..739fa0907 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/movingwindow/Window.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/movingwindow/Window.java @@ -37,10 +37,10 @@ public class Window implements Serializable { private String label = "NONE"; private boolean beginLabel; private boolean endLabel; - private int windowSize; + private final int windowSize; private int median; - private static String BEGIN_LABEL = "<([A-Z]+|\\d+)>"; - private static String END_LABEL = ""; + private static final String BEGIN_LABEL = "<([A-Z]+|\\d+)>"; + private static final String END_LABEL = ""; private int begin, end; /** diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/movingwindow/WordConverter.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/movingwindow/WordConverter.java index 5860dbf76..23f59131b 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/movingwindow/WordConverter.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/movingwindow/WordConverter.java @@ -32,7 +32,7 @@ import java.util.List; public class WordConverter { private List sentences = new ArrayList<>(); - 
private Word2Vec vec; + private final Word2Vec vec; private List windows; public WordConverter(List sentences, Word2Vec vec) { diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/AggregatingSentenceIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/AggregatingSentenceIterator.java index d140452dd..a88989050 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/AggregatingSentenceIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/AggregatingSentenceIterator.java @@ -28,9 +28,9 @@ import java.util.List; import java.util.concurrent.atomic.AtomicInteger; public class AggregatingSentenceIterator implements SentenceIterator { - private List backendIterators; + private final List backendIterators; private SentencePreProcessor preProcessor; - private AtomicInteger position = new AtomicInteger(0); + private final AtomicInteger position = new AtomicInteger(0); private AggregatingSentenceIterator(@NonNull List list) { this.backendIterators = list; @@ -82,7 +82,7 @@ public class AggregatingSentenceIterator implements SentenceIterator { } public static class Builder { - private List backendIterators = new ArrayList<>(); + private final List backendIterators = new ArrayList<>(); private SentencePreProcessor preProcessor; public Builder() { diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/BasicLineIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/BasicLineIterator.java index 3da5acfd4..0f9f6b0c5 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/BasicLineIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/BasicLineIterator.java @@ -30,7 +30,7 @@ import java.util.Iterator; public class BasicLineIterator implements SentenceIterator, Iterable 
{ private BufferedReader reader; - private InputStream backendStream; + private final InputStream backendStream; private SentencePreProcessor preProcessor; private boolean internal = false; diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/BasicResultSetIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/BasicResultSetIterator.java index 0be1cf537..feaa357bb 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/BasicResultSetIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/BasicResultSetIterator.java @@ -25,8 +25,8 @@ import java.sql.SQLException; public class BasicResultSetIterator implements SentenceIterator { - private ResultSet rs; - private String columnName; + private final ResultSet rs; + private final String columnName; private SentencePreProcessor preProcessor; diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/CollectionSentenceIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/CollectionSentenceIterator.java index be0a834a7..b3911a407 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/CollectionSentenceIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/CollectionSentenceIterator.java @@ -26,7 +26,7 @@ import java.util.Iterator; public class CollectionSentenceIterator extends BaseSentenceIterator { private Iterator iter; - private Collection coll; + private final Collection coll; public CollectionSentenceIterator(SentencePreProcessor preProcessor, Collection coll) { super(preProcessor); diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/FileSentenceIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/FileSentenceIterator.java index 
31c0dfc6c..98772f160 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/FileSentenceIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/FileSentenceIterator.java @@ -29,6 +29,7 @@ import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.util.Arrays; +import java.util.Collections; import java.util.Iterator; import java.util.Queue; import java.util.zip.GZIPInputStream; @@ -59,7 +60,7 @@ public class FileSentenceIterator extends BaseSentenceIterator { if (file.isDirectory()) fileIterator = FileUtils.iterateFiles(file, null, true); else - fileIterator = Arrays.asList(file).iterator(); + fileIterator = Collections.singletonList(file).iterator(); } public FileSentenceIterator(File dir) { @@ -141,7 +142,7 @@ public class FileSentenceIterator extends BaseSentenceIterator { @Override public void reset() { if (file.isFile()) - fileIterator = Arrays.asList(file).iterator(); + fileIterator = Collections.singletonList(file).iterator(); else fileIterator = FileUtils.iterateFiles(file, null, true); diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/LineSentenceIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/LineSentenceIterator.java index 76ac3f37a..77ea21b1c 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/LineSentenceIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/LineSentenceIterator.java @@ -29,7 +29,7 @@ public class LineSentenceIterator extends BaseSentenceIterator { private InputStream file; private LineIterator iter; - private File f; + private final File f; diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/MutipleEpochsSentenceIterator.java 
b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/MutipleEpochsSentenceIterator.java index 576dd6140..f2eda2b4f 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/MutipleEpochsSentenceIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/MutipleEpochsSentenceIterator.java @@ -25,9 +25,9 @@ import lombok.NonNull; import java.util.concurrent.atomic.AtomicInteger; public class MutipleEpochsSentenceIterator implements SentenceIterator { - private SentenceIterator iterator; - private int numEpochs; - private AtomicInteger counter = new AtomicInteger(0); + private final SentenceIterator iterator; + private final int numEpochs; + private final AtomicInteger counter = new AtomicInteger(0); public MutipleEpochsSentenceIterator(@NonNull SentenceIterator iterator, int numEpochs) { this.numEpochs = numEpochs; diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/PrefetchingSentenceIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/PrefetchingSentenceIterator.java index ef83f32ba..f513d4eab 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/PrefetchingSentenceIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/PrefetchingSentenceIterator.java @@ -60,7 +60,7 @@ public class PrefetchingSentenceIterator implements SentenceIterator { @Override public boolean hasNext() { - return (reader != null) ? 
reader.hasMoreLines() : false; + return reader != null && reader.hasMoreLines(); } @Override @@ -93,7 +93,7 @@ public class PrefetchingSentenceIterator implements SentenceIterator { } public static class Builder { - private SentenceIterator iterator; + private final SentenceIterator iterator; private int fetchSize = 10000; private SentencePreProcessor preProcessor; @@ -123,13 +123,13 @@ public class PrefetchingSentenceIterator implements SentenceIterator { } private class AsyncIteratorReader extends Thread implements Runnable { - private SentenceIterator iterator; - private int fetchSize; - private AtomicBoolean shouldTerminate = new AtomicBoolean(false); - private ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); - private SentencePreProcessor preProcessor; - private AtomicBoolean isRunning = new AtomicBoolean(true); - private ArrayBlockingQueue buffer; + private final SentenceIterator iterator; + private final int fetchSize; + private final AtomicBoolean shouldTerminate = new AtomicBoolean(false); + private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); + private final SentencePreProcessor preProcessor; + private final AtomicBoolean isRunning = new AtomicBoolean(true); + private final ArrayBlockingQueue buffer; public AsyncIteratorReader(@NonNull SentenceIterator iterator, int fetchSize, SentencePreProcessor preProcessor) { diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/StreamLineIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/StreamLineIterator.java index 5bfcff057..6734d9747 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/StreamLineIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/StreamLineIterator.java @@ -35,7 +35,7 @@ import java.util.concurrent.atomic.AtomicBoolean; @Slf4j public class StreamLineIterator implements SentenceIterator { - private 
DocumentIterator iterator; + private final DocumentIterator iterator; private int linesToFetch; private final LinkedBlockingQueue buffer = new LinkedBlockingQueue<>(); private SentencePreProcessor preProcessor; @@ -118,14 +118,14 @@ public class StreamLineIterator implements SentenceIterator { } public static class Builder { - private DocumentIterator iterator; + private final DocumentIterator iterator; private int linesToFetch = 50; private SentencePreProcessor preProcessor; public Builder(@NonNull final InputStream stream) { this(new DocumentIterator() { private final InputStream onlyStream = stream; - private AtomicBoolean isConsumed = new AtomicBoolean(false); + private final AtomicBoolean isConsumed = new AtomicBoolean(false); @Override public boolean hasNext() { diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/SynchronizedSentenceIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/SynchronizedSentenceIterator.java index ceaa3ad4a..aa80f8963 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/SynchronizedSentenceIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/SynchronizedSentenceIterator.java @@ -23,7 +23,7 @@ package org.deeplearning4j.text.sentenceiterator; import lombok.NonNull; public class SynchronizedSentenceIterator implements SentenceIterator { - private SentenceIterator underlyingIterator; + private final SentenceIterator underlyingIterator; public SynchronizedSentenceIterator(@NonNull SentenceIterator iterator) { this.underlyingIterator = iterator; diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/interoperability/SentenceIteratorConverter.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/interoperability/SentenceIteratorConverter.java index 74b7172e6..959edf763 100644 --- 
a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/interoperability/SentenceIteratorConverter.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/interoperability/SentenceIteratorConverter.java @@ -32,8 +32,8 @@ import org.slf4j.LoggerFactory; import java.util.List; public class SentenceIteratorConverter implements LabelAwareIterator { - private SentenceIterator backendIterator; - private LabelsSource generator; + private final SentenceIterator backendIterator; + private final LabelsSource generator; protected static final Logger log = LoggerFactory.getLogger(SentenceIteratorConverter.class); public SentenceIteratorConverter(@NonNull SentenceIterator iterator) { diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/labelaware/LabelAwareFileSentenceIterator.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/labelaware/LabelAwareFileSentenceIterator.java index 38f8124eb..cf329345f 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/labelaware/LabelAwareFileSentenceIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/sentenceiterator/labelaware/LabelAwareFileSentenceIterator.java @@ -25,6 +25,7 @@ import org.deeplearning4j.text.sentenceiterator.SentencePreProcessor; import java.io.File; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class LabelAwareFileSentenceIterator extends FileSentenceIterator implements LabelAwareSentenceIterator { @@ -49,6 +50,6 @@ public class LabelAwareFileSentenceIterator extends FileSentenceIterator impleme @Override public List currentLabels() { - return Arrays.asList(currentFile.getParentFile().getName()); + return Collections.singletonList(currentFile.getParentFile().getName()); } } diff --git 
a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/DefaultStreamTokenizer.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/DefaultStreamTokenizer.java index d7a8a2a1b..4aadef999 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/DefaultStreamTokenizer.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/DefaultStreamTokenizer.java @@ -36,10 +36,10 @@ import java.util.concurrent.atomic.AtomicInteger; */ public class DefaultStreamTokenizer implements Tokenizer { - private StreamTokenizer streamTokenizer; + private final StreamTokenizer streamTokenizer; private TokenPreProcess tokenPreProcess; - private List tokens = new ArrayList<>(); - private AtomicInteger position = new AtomicInteger(0); + private final List tokens = new ArrayList<>(); + private final AtomicInteger position = new AtomicInteger(0); protected static final Logger log = LoggerFactory.getLogger(DefaultStreamTokenizer.class); diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/DefaultTokenizer.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/DefaultTokenizer.java index d504a2872..93d29ed4c 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/DefaultTokenizer.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/DefaultTokenizer.java @@ -34,7 +34,7 @@ public class DefaultTokenizer implements Tokenizer { tokenizer = new StringTokenizer(tokens); } - private StringTokenizer tokenizer; + private final StringTokenizer tokenizer; private TokenPreProcess tokenPreProcess; @Override diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/preprocessor/CompositePreProcessor.java 
b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/preprocessor/CompositePreProcessor.java index 0d861f851..8b3e3ac9d 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/preprocessor/CompositePreProcessor.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/preprocessor/CompositePreProcessor.java @@ -31,7 +31,7 @@ import java.util.List; public class CompositePreProcessor implements TokenPreProcess { - private List preProcessors; + private final List preProcessors; public CompositePreProcessor(@NonNull TokenPreProcess... preProcessors){ Preconditions.checkState(preProcessors.length > 0, "No preprocessors were specified (empty input)"); diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/preprocessor/StringCleaning.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/preprocessor/StringCleaning.java index f6db851b0..d7eaf8543 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/preprocessor/StringCleaning.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/preprocessor/StringCleaning.java @@ -24,7 +24,7 @@ import java.util.regex.Pattern; public class StringCleaning { - private static final Pattern punctPattern = Pattern.compile("[\\d\\.:,\"\'\\(\\)\\[\\]|/?!;]+"); + private static final Pattern punctPattern = Pattern.compile("[\\d\\.:,\"'\\(\\)\\[\\]|/?!;]+"); private StringCleaning() {} diff --git a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizerfactory/NGramTokenizerFactory.java b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizerfactory/NGramTokenizerFactory.java index 6203a7715..e42093108 100644 --- 
a/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizerfactory/NGramTokenizerFactory.java +++ b/cavis-dnn/cavis-dnn-nlp/src/main/java/org/deeplearning4j/text/tokenization/tokenizerfactory/NGramTokenizerFactory.java @@ -33,7 +33,7 @@ public class NGramTokenizerFactory implements TokenizerFactory { private TokenPreProcess preProcess; private Integer minN = 1; private Integer maxN = 1; - private TokenizerFactory tokenizerFactory; + private final TokenizerFactory tokenizerFactory; public NGramTokenizerFactory(TokenizerFactory tokenizerFactory, Integer minN, Integer maxN) { this.tokenizerFactory = tokenizerFactory; diff --git a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/iterator/TestBertIterator.java b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/iterator/TestBertIterator.java index f4303f28e..82e7493ab 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/iterator/TestBertIterator.java +++ b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/iterator/TestBertIterator.java @@ -49,12 +49,12 @@ import static org.junit.jupiter.api.Assertions.*; @Timeout(200) public class TestBertIterator extends BaseDL4JTest { - private static File pathToVocab = Resources.asFile("other/vocab.txt"); - private static Charset c = StandardCharsets.UTF_8; - private static String shortSentence = "I saw a girl with a telescope."; - private static String longSentence = "Donaudampfschifffahrts Kapitänsmützeninnenfuttersaum"; - private static String sentenceA = "Goodnight noises everywhere"; - private static String sentenceB = "Goodnight moon"; + private static final File pathToVocab = Resources.asFile("other/vocab.txt"); + private static final Charset c = StandardCharsets.UTF_8; + private static final String shortSentence = "I saw a girl with a telescope."; + private static final String longSentence = "Donaudampfschifffahrts Kapitänsmützeninnenfuttersaum"; + private static final String sentenceA = "Goodnight noises 
everywhere"; + private static final String sentenceB = "Goodnight moon"; public TestBertIterator() throws IOException { } @@ -534,18 +534,18 @@ public class TestBertIterator extends BaseDL4JTest { @Getter private static class TestSentencePairsHelper { - private List sentencesLeft; - private List sentencesRight; - private List> sentencePairs; - private List> tokenizedSentencesLeft; - private List> tokenizedSentencesRight; - private List labels; + private final List sentencesLeft; + private final List sentencesRight; + private final List> sentencePairs; + private final List> tokenizedSentencesLeft; + private final List> tokenizedSentencesRight; + private final List labels; private int shortL; private int longL; private int sentenceALen; private int sentenceBLen; - private BertWordPieceTokenizerFactory tokenizer; - private CollectionLabeledPairSentenceProvider pairSentenceProvider; + private final BertWordPieceTokenizerFactory tokenizer; + private final CollectionLabeledPairSentenceProvider pairSentenceProvider; private TestSentencePairsHelper() throws IOException { this(3); @@ -596,13 +596,13 @@ public class TestBertIterator extends BaseDL4JTest { @Getter private static class TestSentenceHelper { - private List sentences; - private List> tokenizedSentences; - private List labels; + private final List sentences; + private final List> tokenizedSentences; + private final List labels; private int shortestL = 0; private int longestL = 0; - private BertWordPieceTokenizerFactory tokenizer; - private CollectionLabeledSentenceProvider sentenceProvider; + private final BertWordPieceTokenizerFactory tokenizer; + private final CollectionLabeledSentenceProvider sentenceProvider; private TestSentenceHelper() throws IOException { this(false, 2); diff --git a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/embeddings/inmemory/InMemoryLookupTableTest.java b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/embeddings/inmemory/InMemoryLookupTableTest.java 
index 5faad62ad..30e633075 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/embeddings/inmemory/InMemoryLookupTableTest.java +++ b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/embeddings/inmemory/InMemoryLookupTableTest.java @@ -83,14 +83,14 @@ public class InMemoryLookupTableTest extends BaseDL4JTest { assertEquals(244, cacheSource.numWords()); InMemoryLookupTable mem1 = - (InMemoryLookupTable) new InMemoryLookupTable.Builder().vectorLength(100) - .cache(cacheSource).seed(17).build(); + new InMemoryLookupTable.Builder().vectorLength(100) + .cache(cacheSource).seed(17).build(); mem1.resetWeights(true); InMemoryLookupTable mem2 = - (InMemoryLookupTable) new InMemoryLookupTable.Builder().vectorLength(100) - .cache(cacheSource).seed(15).build(); + new InMemoryLookupTable.Builder().vectorLength(100) + .cache(cacheSource).seed(15).build(); mem2.resetWeights(true); @@ -130,8 +130,8 @@ public class InMemoryLookupTableTest extends BaseDL4JTest { assertEquals(244, cacheSource.numWords()); InMemoryLookupTable mem1 = - (InMemoryLookupTable) new InMemoryLookupTable.Builder().vectorLength(100) - .cache(cacheSource).build(); + new InMemoryLookupTable.Builder().vectorLength(100) + .cache(cacheSource).build(); mem1.resetWeights(true); @@ -160,8 +160,8 @@ public class InMemoryLookupTableTest extends BaseDL4JTest { InMemoryLookupTable mem2 = - (InMemoryLookupTable) new InMemoryLookupTable.Builder().vectorLength(100) - .cache(cacheTarget).seed(18).build(); + new InMemoryLookupTable.Builder().vectorLength(100) + .cache(cacheTarget).seed(18).build(); mem2.resetWeights(true); diff --git a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/fasttext/FastTextTest.java b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/fasttext/FastTextTest.java index 2879fdffe..8b604dfc0 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/fasttext/FastTextTest.java +++ 
b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/fasttext/FastTextTest.java @@ -52,10 +52,10 @@ public class FastTextTest extends BaseDL4JTest { - private File inputFile = Resources.asFile("models/fasttext/data/labeled_data.txt"); - private File supModelFile = Resources.asFile("models/fasttext/supervised.model.bin"); - private File cbowModelFile = Resources.asFile("models/fasttext/cbow.model.bin"); - private File supervisedVectors = Resources.asFile("models/fasttext/supervised.model.vec"); + private final File inputFile = Resources.asFile("models/fasttext/data/labeled_data.txt"); + private final File supModelFile = Resources.asFile("models/fasttext/supervised.model.bin"); + private final File cbowModelFile = Resources.asFile("models/fasttext/cbow.model.bin"); + private final File supervisedVectors = Resources.asFile("models/fasttext/supervised.model.vec"); @TempDir public File testDir; diff --git a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/paragraphvectors/ParagraphVectorsTest.java b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/paragraphvectors/ParagraphVectorsTest.java index 84869467b..eba85a8fc 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/paragraphvectors/ParagraphVectorsTest.java +++ b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/paragraphvectors/ParagraphVectorsTest.java @@ -300,7 +300,7 @@ public class ParagraphVectorsTest extends BaseDL4JTest { SerializationUtils.saveObject(vec, tempFile); - ParagraphVectors vec2 = (ParagraphVectors) SerializationUtils.readObject(tempFile); + ParagraphVectors vec2 = SerializationUtils.readObject(tempFile); INDArray day2 = vec2.getWordVectorMatrix("day").dup(); List labelsBinary = vec2.labelsSource.getLabels(); @@ -985,8 +985,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest { assertNotEquals(null, d2v.getLookupTable()); assertNotEquals(null, d2v.getVocab()); - assertTrue(d2v.getVocab() == w2v.getVocab()); - 
assertTrue(d2v.getLookupTable() == w2v.getLookupTable()); + assertSame(d2v.getVocab(), w2v.getVocab()); + assertSame(d2v.getLookupTable(), w2v.getLookupTable()); String textA = "Donald Trump referred to President Obama as \"your president\" during the first presidential debate on Monday, much to many people’s chagrin on social media. Trump, made the reference after saying that the greatest threat facing the world is nuclear weapons. He then turned to Hillary Clinton and said, \"Not global warming like you think and your President thinks,\" referring to Obama."; @@ -1156,7 +1156,7 @@ public class ParagraphVectorsTest extends BaseDL4JTest { Word2Vec unserialized = null; try { json = paragraphVectors.toJson(); - log.info("{}", json.toString()); + log.info("{}", json); unserialized = ParagraphVectors.fromJson(json); } catch (Exception e) { @@ -1164,12 +1164,12 @@ public class ParagraphVectorsTest extends BaseDL4JTest { fail(); } - assertEquals(cache.totalWordOccurrences(), ((ParagraphVectors) unserialized).getVocab().totalWordOccurrences()); - assertEquals(cache.totalNumberOfDocs(), ((ParagraphVectors) unserialized).getVocab().totalNumberOfDocs()); + assertEquals(cache.totalWordOccurrences(), unserialized.getVocab().totalWordOccurrences()); + assertEquals(cache.totalNumberOfDocs(), unserialized.getVocab().totalNumberOfDocs()); for (int i = 0; i < words.length; ++i) { val cached = cache.wordAtIndex(i); - val restored = ((ParagraphVectors) unserialized).getVocab().wordAtIndex(i); + val restored = unserialized.getVocab().wordAtIndex(i); assertNotNull(cached); assertEquals(cached, restored); } diff --git a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/sequencevectors/SequenceVectorsTest.java b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/sequencevectors/SequenceVectorsTest.java index 02bfcf733..09c621c4d 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/sequencevectors/SequenceVectorsTest.java +++ 
b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/sequencevectors/SequenceVectorsTest.java @@ -472,7 +472,7 @@ public class SequenceVectorsTest extends BaseDL4JTest { @Override public String toString() { return "VocabWord{" + "wordFrequency=" + this.elementFrequency + ", index=" + index + ", codes=" + codes - + ", word='" + String.valueOf(id) + '\'' + ", points=" + points + ", codeLength=" + + ", word='" + id + '\'' + ", points=" + points + ", codeLength=" + codeLength + '}'; } } diff --git a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/sequencevectors/graph/walkers/impl/RandomWalkerTest.java b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/sequencevectors/graph/walkers/impl/RandomWalkerTest.java index 7c150a610..2771d4ae4 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/sequencevectors/graph/walkers/impl/RandomWalkerTest.java +++ b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/sequencevectors/graph/walkers/impl/RandomWalkerTest.java @@ -93,7 +93,7 @@ public class RandomWalkerTest extends BaseDL4JTest { for (int i = 0; i < 10; i++) { Vertex vertex = graph.getVertex(i); - assertEquals(null, vertex.getValue()); + assertNull(vertex.getValue()); assertEquals(i, vertex.vertexID()); } assertEquals(10, graph.numVertices()); @@ -101,7 +101,7 @@ public class RandomWalkerTest extends BaseDL4JTest { @Test public void testGraphTraverseRandom1() throws Exception { - RandomWalker walker = (RandomWalker) new RandomWalker.Builder<>(graph) + RandomWalker walker = new RandomWalker.Builder<>(graph) .setNoEdgeHandling(NoEdgeHandling.SELF_LOOP_ON_DISCONNECTED).setWalkLength(3).build(); int cnt = 0; @@ -123,7 +123,7 @@ public class RandomWalkerTest extends BaseDL4JTest { @Test public void testGraphTraverseRandom2() throws Exception { - RandomWalker walker = (RandomWalker) new RandomWalker.Builder<>(graph) + RandomWalker walker = new RandomWalker.Builder<>(graph) .setSeed(12345) 
.setNoEdgeHandling(NoEdgeHandling.EXCEPTION_ON_DISCONNECTED).setWalkLength(20) .setWalkDirection(WalkDirection.FORWARD_UNIQUE) @@ -148,7 +148,7 @@ public class RandomWalkerTest extends BaseDL4JTest { @Test public void testGraphTraverseRandom3() throws Exception { - RandomWalker walker = (RandomWalker) new RandomWalker.Builder<>(graph) + RandomWalker walker = new RandomWalker.Builder<>(graph) .setNoEdgeHandling(NoEdgeHandling.EXCEPTION_ON_DISCONNECTED).setWalkLength(20) .setWalkDirection(WalkDirection.FORWARD_UNIQUE) .setNoEdgeHandling(NoEdgeHandling.EXCEPTION_ON_DISCONNECTED).build(); @@ -160,17 +160,17 @@ public class RandomWalkerTest extends BaseDL4JTest { } // if cycle passed without exception - something went bad - assertTrue(false); + fail(); } catch (NoEdgesException e) { // this cycle should throw exception } catch (Exception e) { - assertTrue(false); + fail(); } } @Test public void testGraphTraverseRandom4() throws Exception { - RandomWalker walker = (RandomWalker) new RandomWalker.Builder<>(graphBig) + RandomWalker walker = new RandomWalker.Builder<>(graphBig) .setSeed(12345) .setNoEdgeHandling(NoEdgeHandling.EXCEPTION_ON_DISCONNECTED).setWalkLength(20) .setWalkDirection(WalkDirection.FORWARD_UNIQUE) @@ -187,7 +187,7 @@ public class RandomWalkerTest extends BaseDL4JTest { @Test public void testGraphTraverseRandom5() throws Exception { - RandomWalker walker = (RandomWalker) new RandomWalker.Builder<>(graphBig) + RandomWalker walker = new RandomWalker.Builder<>(graphBig) .setWalkLength(20).setWalkDirection(WalkDirection.FORWARD_UNIQUE) .setNoEdgeHandling(NoEdgeHandling.CUTOFF_ON_DISCONNECTED).build(); diff --git a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/sequencevectors/transformers/impl/iterables/ParallelTransformerIteratorTest.java b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/sequencevectors/transformers/impl/iterables/ParallelTransformerIteratorTest.java index ceef572e1..d82844919 100644 --- 
a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/sequencevectors/transformers/impl/iterables/ParallelTransformerIteratorTest.java +++ b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/sequencevectors/transformers/impl/iterables/ParallelTransformerIteratorTest.java @@ -48,7 +48,7 @@ import static org.junit.jupiter.api.Assertions.assertNotEquals; @Slf4j @Timeout(300) public class ParallelTransformerIteratorTest extends BaseDL4JTest { - private TokenizerFactory factory = new DefaultTokenizerFactory(); + private final TokenizerFactory factory = new DefaultTokenizerFactory(); @BeforeEach public void setUp() throws Exception { @@ -165,7 +165,7 @@ public class ParallelTransformerIteratorTest extends BaseDL4JTest { String testString = ""; String[] stringsArray = new String[100]; for (int i = 0; i < 100; ++i) { - testString += Integer.toString(i) + " "; + testString += i + " "; stringsArray[i] = Integer.toString(i); } InputStream inputStream = IOUtils.toInputStream(testString, "UTF-8"); @@ -196,7 +196,7 @@ public class ParallelTransformerIteratorTest extends BaseDL4JTest { String testStrings = ""; for (int i = 0; i < 1000; ++i) { stringsArray[i] = Integer.toString(i); - testStrings += Integer.toString(i) + "\n"; + testStrings += i + "\n"; } InputStream inputStream = IOUtils.toInputStream(testStrings, "UTF-8"); SentenceIterator iterator = new BasicLineIterator(inputStream); diff --git a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/word2vec/iterator/Word2VecDataSetIteratorTest.java b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/word2vec/iterator/Word2VecDataSetIteratorTest.java index 7d806aafb..948a124b3 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/word2vec/iterator/Word2VecDataSetIteratorTest.java +++ b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/word2vec/iterator/Word2VecDataSetIteratorTest.java @@ -93,7 +93,7 @@ public class Word2VecDataSetIteratorTest 
extends BaseDL4JTest { iterator.reset(); return new LabelAwareSentenceIterator() { - private AtomicInteger cnt = new AtomicInteger(0); + private final AtomicInteger cnt = new AtomicInteger(0); @Override public String currentLabel() { diff --git a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/word2vec/wordstore/VocabConstructorTest.java b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/word2vec/wordstore/VocabConstructorTest.java index 33b2715bf..f51337cba 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/word2vec/wordstore/VocabConstructorTest.java +++ b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/word2vec/wordstore/VocabConstructorTest.java @@ -83,8 +83,7 @@ public class VocabConstructorTest extends BaseDL4JTest { continue; cnt++; - if (!set.contains(token)) - set.add(token); + set.add(token); } lines++; @@ -167,7 +166,7 @@ public class VocabConstructorTest extends BaseDL4JTest { public Iterator> iterator() { return new Iterator>() { - private AtomicBoolean switcher = new AtomicBoolean(true); + private final AtomicBoolean switcher = new AtomicBoolean(true); @Override public boolean hasNext() { @@ -216,7 +215,7 @@ public class VocabConstructorTest extends BaseDL4JTest { public Iterator> iterator() { return new Iterator>() { - private AtomicBoolean switcher = new AtomicBoolean(true); + private final AtomicBoolean switcher = new AtomicBoolean(true); @Override public boolean hasNext() { diff --git a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/word2vec/wordstore/inmemory/AbstractCacheTest.java b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/word2vec/wordstore/inmemory/AbstractCacheTest.java index 9cdf38363..56e7e8bce 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/word2vec/wordstore/inmemory/AbstractCacheTest.java +++ 
b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/models/word2vec/wordstore/inmemory/AbstractCacheTest.java @@ -136,7 +136,7 @@ public class AbstractCacheTest extends BaseDL4JTest { AbstractCache unserialized = null; try { json = cache.toJson(); - log.info("{}", json.toString()); + log.info("{}", json); unserialized = AbstractCache.fromJson(json); } @@ -159,7 +159,7 @@ public class AbstractCacheTest extends BaseDL4JTest { public void testUserClassSerialization() { AbstractCache cache = new AbstractCache.Builder().build(); - ExtVocabWord words[] = new ExtVocabWord[3]; + ExtVocabWord[] words = new ExtVocabWord[3]; words[0] = new ExtVocabWord("some", 1100, 1.0, "word"); words[1] = new ExtVocabWord("none", 23214, 2.0, "test"); words[2] = new ExtVocabWord("wwew", 13223, 3.0, "tester"); diff --git a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/BertWordPieceTokenizerTests.java b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/BertWordPieceTokenizerTests.java index d36ab414c..665c0b180 100644 --- a/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/BertWordPieceTokenizerTests.java +++ b/cavis-dnn/cavis-dnn-nlp/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/BertWordPieceTokenizerTests.java @@ -47,8 +47,8 @@ import static org.junit.jupiter.api.Assertions.*; @Timeout(300) public class BertWordPieceTokenizerTests extends BaseDL4JTest { - private File pathToVocab = Resources.asFile("other/vocab.txt"); - private Charset c = StandardCharsets.UTF_8; + private final File pathToVocab = Resources.asFile("other/vocab.txt"); + private final Charset c = StandardCharsets.UTF_8; public BertWordPieceTokenizerTests() throws IOException { } diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-client/src/main/java/org/deeplearning4j/nearestneighbor/client/NearestNeighborsClient.java 
b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-client/src/main/java/org/deeplearning4j/nearestneighbor/client/NearestNeighborsClient.java index 4e185df0e..d134f3000 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-client/src/main/java/org/deeplearning4j/nearestneighbor/client/NearestNeighborsClient.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-client/src/main/java/org/deeplearning4j/nearestneighbor/client/NearestNeighborsClient.java @@ -55,7 +55,7 @@ public class NearestNeighborsClient { // Only one time Unirest.setObjectMapper(new ObjectMapper() { - private com.fasterxml.jackson.databind.ObjectMapper jacksonObjectMapper = + private final com.fasterxml.jackson.databind.ObjectMapper jacksonObjectMapper = new com.fasterxml.jackson.databind.ObjectMapper(); public T readValue(String value, Class valueType) { diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/algorithm/Distance.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/algorithm/Distance.java index 6aff39dbb..c6c091837 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/algorithm/Distance.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/algorithm/Distance.java @@ -25,8 +25,8 @@ public enum Distance { JACCARD("jaccard"), HAMMING("hamming"); - private String functionName; - private Distance(String name) { + private final String functionName; + Distance(String name) { functionName = name; } diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/cluster/CentersHolder.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/cluster/CentersHolder.java index 25542dc8f..9a55db1da 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/cluster/CentersHolder.java 
+++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/cluster/CentersHolder.java @@ -33,7 +33,8 @@ public class CentersHolder { protected transient INDArray distances; protected transient INDArray argMin; - private long rows, cols; + private final long rows; + private final long cols; public CentersHolder(long rows, long cols) { this.rows = rows; @@ -46,7 +47,7 @@ public class CentersHolder { public synchronized void addCenter(INDArray pointView) { if (centers == null) - this.centers = Nd4j.create(pointView.dataType(), new long[] {rows, cols}); + this.centers = Nd4j.create(pointView.dataType(), rows, cols); centers.putRow(index++, pointView); } @@ -56,7 +57,7 @@ public class CentersHolder { distances = Nd4j.create(centers.dataType(), centers.rows()); if (argMin == null) - argMin = Nd4j.createUninitialized(DataType.LONG, new long[0]); + argMin = Nd4j.createUninitialized(DataType.LONG); if (op == null) { op = ClusterUtils.createDistanceFunctionOp(distanceFunction, centers, point.getArray(), 1); @@ -80,7 +81,7 @@ public class CentersHolder { distances = Nd4j.create(centers.dataType(), centers.rows()); if (argMin == null) - argMin = Nd4j.createUninitialized(DataType.LONG, new long[0]); + argMin = Nd4j.createUninitialized(DataType.LONG); if (op == null) { op = ClusterUtils.createDistanceFunctionOp(distanceFunction, centers, point.getArray(), 1); diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/info/ClusterSetInfo.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/info/ClusterSetInfo.java index 1c57bc38a..c55834c1b 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/info/ClusterSetInfo.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/info/ClusterSetInfo.java @@ -33,8 +33,8 @@ public class ClusterSetInfo implements 
Serializable { private Map clustersInfos = new HashMap<>(); private Table distancesBetweenClustersCenters = HashBasedTable.create(); private AtomicInteger pointLocationChange; - private boolean threadSafe; - private boolean inverse; + private final boolean threadSafe; + private final boolean inverse; public ClusterSetInfo(boolean inverse) { this(inverse, false); diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/kdtree/HyperRect.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/kdtree/HyperRect.java index 013263629..bbb68bbec 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/kdtree/HyperRect.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/kdtree/HyperRect.java @@ -29,10 +29,10 @@ import java.io.Serializable; public class HyperRect implements Serializable { //private List points; - private float[] lowerEnds; - private float[] higherEnds; - private INDArray lowerEndsIND; - private INDArray higherEndsIND; + private final float[] lowerEnds; + private final float[] higherEnds; + private final INDArray lowerEndsIND; + private final INDArray higherEndsIND; public HyperRect(float[] lowerEndsIn, float[] higherEndsIn) { this.lowerEnds = new float[lowerEndsIn.length]; diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/kdtree/KDTree.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/kdtree/KDTree.java index 68ccf6281..207ccb39b 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/kdtree/KDTree.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/kdtree/KDTree.java @@ -128,7 +128,7 @@ public class KDTree implements Serializable { // Share this data for recursive calls 
of "knn" private float currentDistance; private INDArray currentPoint; - private INDArray minDistance = Nd4j.scalar(0.f); + private final INDArray minDistance = Nd4j.scalar(0.f); public List> knn(INDArray point, float distance) { diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/lsh/RandomProjectionLSH.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/lsh/RandomProjectionLSH.java index 75e342e78..53307e517 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/lsh/RandomProjectionLSH.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/lsh/RandomProjectionLSH.java @@ -63,14 +63,14 @@ public class RandomProjectionLSH implements LSH { return "cosinedistance"; } - @Getter private int hashLength; + @Getter private final int hashLength; - @Getter private int numTables; + @Getter private final int numTables; - @Getter private int inDimension; + @Getter private final int inDimension; - @Getter private double radius; + @Getter private final double radius; INDArray randomProjection; @@ -190,7 +190,7 @@ public class RandomProjectionLSH implements LSH { INDArray bucketData(INDArray query){ INDArray mask = bucket(query); int nRes = mask.sum(0).getInt(0); - INDArray res = Nd4j.create(new int[] {nRes, inDimension}); + INDArray res = Nd4j.create(nRes, inDimension); int j = 0; for (int i = 0; i < nRes; i++){ while (mask.getInt(j) == 0 && j < mask.length() - 1) { @@ -216,7 +216,7 @@ public class RandomProjectionLSH implements LSH { int accepted = 0; while (accepted < sortedDistances.length() && sortedDistances.getInt(accepted) <= maxRange) accepted +=1; - INDArray res = Nd4j.create(new int[] {accepted, inDimension}); + INDArray res = Nd4j.create(accepted, inDimension); for(int i = 0; i < accepted; i++){ res.putRow(i, bucketData.getRow(shuffleIndexes.getInt(i))); } diff --git 
a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/quadtree/QuadTree.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/quadtree/QuadTree.java index b26ffc636..0dae7e642 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/quadtree/QuadTree.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/quadtree/QuadTree.java @@ -42,9 +42,9 @@ public class QuadTree implements Serializable { private Cell boundary; static final int QT_NO_DIMS = 2; static final int QT_NODE_CAPACITY = 1; - private INDArray buf = Nd4j.create(QT_NO_DIMS); + private final INDArray buf = Nd4j.create(QT_NO_DIMS); private INDArray data, centerOfMass = Nd4j.create(QT_NO_DIMS); - private int[] index = new int[QT_NODE_CAPACITY]; + private final int[] index = new int[QT_NODE_CAPACITY]; /** diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/randomprojection/RPTree.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/randomprojection/RPTree.java index 1360a5c92..6bf1d1faa 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/randomprojection/RPTree.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/randomprojection/RPTree.java @@ -25,6 +25,7 @@ import org.nd4j.common.primitives.Pair; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.concurrent.ExecutorService; @@ -111,15 +112,15 @@ public class RPTree { * @return a list of samples */ public List> queryWithDistances(INDArray query, int numResults) { - return RPUtils.queryAllWithDistances(query,X,Arrays.asList(this),numResults,similarityFunction); + return RPUtils.queryAllWithDistances(query,X, 
Collections.singletonList(this),numResults,similarityFunction); } public INDArray query(INDArray query,int numResults) { - return RPUtils.queryAll(query,X,Arrays.asList(this),numResults,similarityFunction); + return RPUtils.queryAll(query,X, Collections.singletonList(this),numResults,similarityFunction); } public List getCandidates(INDArray target) { - return RPUtils.getCandidates(target,Arrays.asList(this),similarityFunction); + return RPUtils.getCandidates(target, Collections.singletonList(this),similarityFunction); } diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/randomprojection/RPUtils.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/randomprojection/RPUtils.java index aecdae476..184264b9e 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/randomprojection/RPUtils.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/randomprojection/RPUtils.java @@ -36,7 +36,7 @@ import java.util.*; public class RPUtils { - private static ThreadLocal> functionInstances = new ThreadLocal<>(); + private static final ThreadLocal> functionInstances = new ThreadLocal<>(); public static DifferentialFunction getOp(String name, INDArray x, diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/sptree/Cell.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/sptree/Cell.java index 2781f2ce4..6526ff687 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/sptree/Cell.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/sptree/Cell.java @@ -24,7 +24,7 @@ import java.io.Serializable; * @author Adam Gibson */ public class Cell implements Serializable { - private int dimension; + private 
final int dimension; private INDArray corner, width; public Cell(int dimension) { diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/sptree/SpTree.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/sptree/SpTree.java index 1ef6dcaf6..df82c0c31 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/sptree/SpTree.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/sptree/SpTree.java @@ -58,7 +58,7 @@ public class SpTree implements Serializable { private boolean isLeaf = true; private Collection indices; private SpTree[] children; - private static Logger log = LoggerFactory.getLogger(SpTree.class); + private static final Logger log = LoggerFactory.getLogger(SpTree.class); private String similarityFunction = Distance.EUCLIDEAN.toString(); diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/util/MathUtils.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/util/MathUtils.java index 0f657569e..93fbb073a 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/util/MathUtils.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/util/MathUtils.java @@ -144,7 +144,7 @@ public class MathUtils { * * @return the correlation coefficient or r */ - public static double correlation(double[] residuals, double targetAttribute[]) { + public static double correlation(double[] residuals, double[] targetAttribute) { double[] predictedValues = new double[residuals.length]; for (int i = 0; i < predictedValues.length; i++) { predictedValues[i] = targetAttribute[i] - residuals[i]; @@ -1011,7 +1011,7 @@ public class MathUtils { */ public static /*@pure@*/ double roundDouble(double value, int 
afterDecimalPoint) { - double mask = Math.pow(10.0, (double) afterDecimalPoint); + double mask = Math.pow(10.0, afterDecimalPoint); return (double) (Math.round(value * mask)) / mask; }//end roundDouble diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/util/MultiThreadUtils.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/util/MultiThreadUtils.java index 5ca73a1ac..d3ab0536f 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/util/MultiThreadUtils.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/util/MultiThreadUtils.java @@ -24,7 +24,7 @@ import java.util.concurrent.*; public class MultiThreadUtils { - private static Logger log = LoggerFactory.getLogger(MultiThreadUtils.class); + private static final Logger log = LoggerFactory.getLogger(MultiThreadUtils.class); private static ExecutorService instance; diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/vptree/VPTreeFillSearch.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/vptree/VPTreeFillSearch.java index 9dbc75416..df4a08991 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/vptree/VPTreeFillSearch.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/main/java/org/deeplearning4j/clustering/vptree/VPTreeFillSearch.java @@ -35,13 +35,13 @@ import java.util.List; * nearby points by k in a greedy fashion */ public class VPTreeFillSearch { - private VPTree vpTree; - private int k; + private final VPTree vpTree; + private final int k; @Getter private List results; @Getter private List distances; - private INDArray target; + private final INDArray target; public VPTreeFillSearch(VPTree vpTree, int k, INDArray target) { this.vpTree = vpTree; diff 
--git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/kdtree/KDTreeTest.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/kdtree/KDTreeTest.java index 00beb9e71..f0b864d84 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/kdtree/KDTreeTest.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/kdtree/KDTreeTest.java @@ -72,11 +72,11 @@ public class KDTreeTest extends BaseDL4JTest { @Test public void testTree() { KDTree tree = new KDTree(2); - INDArray half = Nd4j.create(new double[] {0.5, 0.5}, new long[]{1,2}).castTo(DataType.FLOAT); - INDArray one = Nd4j.create(new double[] {1, 1}, new long[]{1,2}).castTo(DataType.FLOAT); + INDArray half = Nd4j.create(new double[] {0.5, 0.5}, 1,2).castTo(DataType.FLOAT); + INDArray one = Nd4j.create(new double[] {1, 1}, 1,2).castTo(DataType.FLOAT); tree.insert(half); tree.insert(one); - Pair pair = tree.nn(Nd4j.create(new double[] {0.5, 0.5}, new long[]{1,2}).castTo(DataType.FLOAT)); + Pair pair = tree.nn(Nd4j.create(new double[] {0.5, 0.5}, 1,2).castTo(DataType.FLOAT)); assertEquals(half, pair.getValue()); } diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/kmeans/KMeansTest.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/kmeans/KMeansTest.java index 40683daa9..63c4bcf15 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/kmeans/KMeansTest.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/kmeans/KMeansTest.java @@ -38,7 +38,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; @Timeout(120) public class KMeansTest extends BaseDL4JTest { - private boolean[] useKMeansPlusPlus = {true, false}; + private final 
boolean[] useKMeansPlusPlus = {true, false}; @Test public void testKMeans() { @@ -178,9 +178,9 @@ public class KMeansTest extends BaseDL4JTest { ClusterSet clusterSet = kMeansClustering.applyTo(points); - INDArray row0 = Nd4j.createFromArray(new double[]{16.6575, 18.4850}); - INDArray row1 = Nd4j.createFromArray(new double[]{32.6050, 31.1500}); - INDArray row2 = Nd4j.createFromArray(new double[]{75.9348, 74.1990}); + INDArray row0 = Nd4j.createFromArray(16.6575, 18.4850); + INDArray row1 = Nd4j.createFromArray(32.6050, 31.1500); + INDArray row2 = Nd4j.createFromArray(75.9348, 74.1990); /*List clusters = clusterSet.getClusters(); assertEquals(row0, clusters.get(0).getCenter().getArray()); @@ -211,9 +211,9 @@ public class KMeansTest extends BaseDL4JTest { int rows = 3, cols = 2; CentersHolder ch = new CentersHolder(rows, cols); - INDArray row0 = Nd4j.createFromArray(new double[]{16.4000, 17.1200}); - INDArray row1 = Nd4j.createFromArray(new double[]{45.8000, 54.2300}); - INDArray row2 = Nd4j.createFromArray(new double[]{95.9348, 94.1990}); + INDArray row0 = Nd4j.createFromArray(16.4000, 17.1200); + INDArray row1 = Nd4j.createFromArray(45.8000, 54.2300); + INDArray row2 = Nd4j.createFromArray(95.9348, 94.1990); ch.addCenter(row0); ch.addCenter(row1); diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/sptree/SPTreeTest.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/sptree/SPTreeTest.java index 5973a1f5a..39cb8b10a 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/sptree/SPTreeTest.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/sptree/SPTreeTest.java @@ -62,18 +62,15 @@ public class SPTreeTest extends BaseDL4JTest { 0.4093918718557811, 0.9563909195720572, 0.5994144944480242, 0.8278927844215804, 0.38586830957105667, 0.6201844716257464, 0.7603829079070265, 
0.07875691596842949, 0.08651136699915507, 0.7445210640026082, 0.6547649514127559, 0.3384719042666908, 0.05816723105860,0.6248951423054205, 0.7431868493349041}; INDArray data = Nd4j.createFromArray(aData).reshape(11,5); - INDArray rows = Nd4j.createFromArray(new int[]{ - 0, 9, 18, 27, 36, 45, 54, 63, 72, 81, 90, 99}); - INDArray cols = Nd4j.createFromArray(new int[]{ - 4, 3, 10, 8, 6, 7, 1, 5, 9, 4, 9, 8, 10, 2, 0, 6, 7, 3, 6, 8, 3, 9, 10, 1, 4, 0, 5, 10, 0, 4, 6, 8, 9, 2, 5, 7, 0, 10, 3, 1, 8, 9, 6, 7, 2, 7, 9, 3, 10, 0, 4, 2, 8, 1, 2, 8, 3, 10, 0, 4, 9, 1, 5, 5, 9, 0, 3, 10, 4, 8, 1, 2, 6, 2, 0, 3, 4, 1, 10, 9, 7, 10, 1, 3, 7, 4, 5, 2, 8, 6, 3, 4, 0, 9, 6, 5, 8, 7, 1}); - INDArray vals = Nd4j.createFromArray(new double[] - { 0.6806, 0.1978, 0.1349, 0.0403, 0.0087, 0.0369, 0.0081, 0.0172, 0.0014, 0.0046, 0.0081, 0.3375, 0.2274, 0.0556, 0.0098, 0.0175, 0.0027, 0.0077, 0.0014, 0.0023, 0.0175, 0.6569, 0.1762, 0.0254, 0.0200, 0.0118, 0.0074, 0.0046, 0.0124, 0.0012, 0.1978, 0.0014, 0.0254, 0.7198, 0.0712, 0.0850, 0.0389, 0.0555, 0.0418, 0.0286, 0.6806, 0.3375, 0.0074, 0.0712, 0.2290, 0.0224, 0.0189, 0.0080, 0.0187, 0.0097, 0.0172, 0.0124, 0.0418, 0.7799, 0.0521, 0.0395, 0.0097, 0.0030, 0.0023, 1.706e-5, 0.0087, 0.0027, 0.6569, 0.0850, 0.0080, 0.5562, 0.0173, 0.0015, 1.706e-5, 0.0369, 0.0077, 0.0286, 0.0187, 0.7799, 0.0711, 0.0200, 0.0084, 0.0012, 0.0403, 0.0556, 0.1762, 0.0389, 0.0224, 0.0030, 0.5562, 0.0084, 0.0060, 0.0028, 0.0014, 0.2274, 0.0200, 0.0555, 0.0189, 0.0521, 0.0015, 0.0711, 0.0028, 0.3911, 0.1349, 0.0098, 0.0118, 0.7198, 0.2290, 0.0395, 0.0173, 0.0200, 0.0060, 0.3911}); + INDArray rows = Nd4j.createFromArray(0, 9, 18, 27, 36, 45, 54, 63, 72, 81, 90, 99); + INDArray cols = Nd4j.createFromArray(4, 3, 10, 8, 6, 7, 1, 5, 9, 4, 9, 8, 10, 2, 0, 6, 7, 3, 6, 8, 3, 9, 10, 1, 4, 0, 5, 10, 0, 4, 6, 8, 9, 2, 5, 7, 0, 10, 3, 1, 8, 9, 6, 7, 2, 7, 9, 3, 10, 0, 4, 2, 8, 1, 2, 8, 3, 10, 0, 4, 9, 1, 5, 5, 9, 0, 3, 10, 4, 8, 1, 2, 6, 2, 0, 3, 4, 1, 10, 9, 7, 10, 1, 3, 7, 4, 
5, 2, 8, 6, 3, 4, 0, 9, 6, 5, 8, 7, 1); + INDArray vals = Nd4j.createFromArray(0.6806, 0.1978, 0.1349, 0.0403, 0.0087, 0.0369, 0.0081, 0.0172, 0.0014, 0.0046, 0.0081, 0.3375, 0.2274, 0.0556, 0.0098, 0.0175, 0.0027, 0.0077, 0.0014, 0.0023, 0.0175, 0.6569, 0.1762, 0.0254, 0.0200, 0.0118, 0.0074, 0.0046, 0.0124, 0.0012, 0.1978, 0.0014, 0.0254, 0.7198, 0.0712, 0.0850, 0.0389, 0.0555, 0.0418, 0.0286, 0.6806, 0.3375, 0.0074, 0.0712, 0.2290, 0.0224, 0.0189, 0.0080, 0.0187, 0.0097, 0.0172, 0.0124, 0.0418, 0.7799, 0.0521, 0.0395, 0.0097, 0.0030, 0.0023, 1.706e-5, 0.0087, 0.0027, 0.6569, 0.0850, 0.0080, 0.5562, 0.0173, 0.0015, 1.706e-5, 0.0369, 0.0077, 0.0286, 0.0187, 0.7799, 0.0711, 0.0200, 0.0084, 0.0012, 0.0403, 0.0556, 0.1762, 0.0389, 0.0224, 0.0030, 0.5562, 0.0084, 0.0060, 0.0028, 0.0014, 0.2274, 0.0200, 0.0555, 0.0189, 0.0521, 0.0015, 0.0711, 0.0028, 0.3911, 0.1349, 0.0098, 0.0118, 0.7198, 0.2290, 0.0395, 0.0173, 0.0200, 0.0060, 0.3911); SpTree tree = new SpTree(data); INDArray posF = Nd4j.create(11, 5); /*try (MemoryWorkspace ws = tree.workspace().notifyScopeEntered())*/ { tree.computeEdgeForces(rows, cols, vals, 11, posF); } - INDArray expected = Nd4j.createFromArray(new double[]{ -0.08045664291717945, -0.1010737980370276, 0.01793326162563703, 0.16108447776416351, -0.20679423033936287, -0.15788549368713395, 0.02546624825966788, 0.062309466206907055, -0.165806093080134, 0.15266225270841186, 0.17508365896345726, 0.09588570563583201, 0.34124767300538084, 0.14606666020839956, -0.06786563815470595, -0.09326646571247202, -0.19896040730569928, -0.3618837364446506, 0.13946315445146712, -0.04570186310149667, -0.2473462951783839, -0.41362278505023914, -0.1094083777758208, 0.10705807646770374, 0.24462088260113946, 0.21722270026621748, -0.21799892431326567, -0.08205544003080587, -0.11170161709042685, -0.2674768703060442, 0.03617747284043274, 0.16430316252598698, 0.04552845070022399, 0.2593696744801452, 0.1439989190892037, -0.059339471967457376, 0.05460893792863096, 
-0.0595168036583193, -0.2527693197519917, -0.15850951859835274, -0.2945536856938165, 0.15434659331638875, -0.022910846947667776, 0.23598009757792854, -0.11149279745674007, 0.09670616593772939, 0.11125703954547914, -0.08519984596392606, -0.12779827002328714, 0.23025192887225998, 0.13741473964038722, -0.06193553503816597, -0.08349781586292176, 0.1622156410642145, 0.155975447743472}).reshape(11,5); + INDArray expected = Nd4j.createFromArray(-0.08045664291717945, -0.1010737980370276, 0.01793326162563703, 0.16108447776416351, -0.20679423033936287, -0.15788549368713395, 0.02546624825966788, 0.062309466206907055, -0.165806093080134, 0.15266225270841186, 0.17508365896345726, 0.09588570563583201, 0.34124767300538084, 0.14606666020839956, -0.06786563815470595, -0.09326646571247202, -0.19896040730569928, -0.3618837364446506, 0.13946315445146712, -0.04570186310149667, -0.2473462951783839, -0.41362278505023914, -0.1094083777758208, 0.10705807646770374, 0.24462088260113946, 0.21722270026621748, -0.21799892431326567, -0.08205544003080587, -0.11170161709042685, -0.2674768703060442, 0.03617747284043274, 0.16430316252598698, 0.04552845070022399, 0.2593696744801452, 0.1439989190892037, -0.059339471967457376, 0.05460893792863096, -0.0595168036583193, -0.2527693197519917, -0.15850951859835274, -0.2945536856938165, 0.15434659331638875, -0.022910846947667776, 0.23598009757792854, -0.11149279745674007, 0.09670616593772939, 0.11125703954547914, -0.08519984596392606, -0.12779827002328714, 0.23025192887225998, 0.13741473964038722, -0.06193553503816597, -0.08349781586292176, 0.1622156410642145, 0.155975447743472).reshape(11,5); for (int i = 0; i < 11; ++i) assertArrayEquals(expected.getRow(i).toDoubleVector(), posF.getRow(i).toDoubleVector(), 1e-2); diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/vptree/VPTreeSerializationTests.java 
b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/vptree/VPTreeSerializationTests.java index c4146ebe2..9fa41f77f 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/vptree/VPTreeSerializationTests.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/vptree/VPTreeSerializationTests.java @@ -40,7 +40,7 @@ public class VPTreeSerializationTests extends BaseDL4JTest { @Test public void testSerialization_1() throws Exception { - val points = Nd4j.rand(new int[] {10, 15}); + val points = Nd4j.rand(10, 15); val treeA = new VPTree(points, true, 2); try (val bos = new ByteArrayOutputStream()) { @@ -84,7 +84,7 @@ public class VPTreeSerializationTests extends BaseDL4JTest { @Test public void testNewConstructor_1() { - val points = Nd4j.rand(new int[] {10, 15}); + val points = Nd4j.rand(10, 15); val treeA = new VPTree(points, true, 2); val rows = Nd4j.tear(points, 1); diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/vptree/VpTreeNodeTest.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/vptree/VpTreeNodeTest.java index 99acc67d7..d7f8e0a29 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/vptree/VpTreeNodeTest.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-core/src/test/java/org/deeplearning4j/clustering/vptree/VpTreeNodeTest.java @@ -406,7 +406,7 @@ public class VpTreeNodeTest extends BaseDL4JTest { i = 0; for (DataPoint p : results) sortedResults.putRow(i++, p.getPoint()); - INDArray[] sortedWithIndices = Nd4j.sortWithIndices(sortedResults, dimensionToSort, true);; + INDArray[] sortedWithIndices = Nd4j.sortWithIndices(sortedResults, dimensionToSort, true); sortedResults = sortedWithIndices[1]; assertEquals(trueResults.sumNumber().doubleValue(), 
sortedResults.sumNumber().doubleValue(), 1e-5); } diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-server/src/main/java/org/deeplearning4j/nearestneighbor/server/NearestNeighborsServer.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-server/src/main/java/org/deeplearning4j/nearestneighbor/server/NearestNeighborsServer.java index 93cc48675..661d1f46f 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-server/src/main/java/org/deeplearning4j/nearestneighbor/server/NearestNeighborsServer.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-server/src/main/java/org/deeplearning4j/nearestneighbor/server/NearestNeighborsServer.java @@ -63,15 +63,15 @@ public class NearestNeighborsServer extends AbstractVerticle { private static class RunArgs { @Parameter(names = {"--ndarrayPath"}, arity = 1, required = true) - private String ndarrayPath = null; + private final String ndarrayPath = null; @Parameter(names = {"--labelsPath"}, arity = 1, required = false) - private String labelsPath = null; + private final String labelsPath = null; @Parameter(names = {"--nearestNeighborsPort"}, arity = 1) - private int port = 9000; + private final int port = 9000; @Parameter(names = {"--similarityFunction"}, arity = 1) - private String similarityFunction = "euclidean"; + private final String similarityFunction = "euclidean"; @Parameter(names = {"--invert"}, arity = 1) - private boolean invert = false; + private final boolean invert = false; } private static RunArgs instanceArgs; @@ -93,7 +93,7 @@ public class NearestNeighborsServer extends AbstractVerticle { log.error("Error in NearestNeighboursServer parameters", e); StringBuilder sb = new StringBuilder(); jcmdr.usage(sb); - log.error("Usage: {}", sb.toString()); + log.error("Usage: {}", sb); //User provides invalid input -> print the usage info jcmdr.usage(); @@ -211,12 +211,10 @@ public class NearestNeighborsServer extends AbstractVerticle { rc.response().setStatusCode(HttpResponseStatus.BAD_REQUEST.code()) 
.putHeader("content-type", "application/json") .end(JsonMappers.getMapper().writeValueAsString(results)); - return; } catch (Throwable e) { log.error("Error in POST /knn",e); rc.response().setStatusCode(HttpResponseStatus.INTERNAL_SERVER_ERROR.code()) .end("Error parsing request - " + e.getMessage()); - return; } }); @@ -270,7 +268,6 @@ public class NearestNeighborsServer extends AbstractVerticle { log.error("Error in POST /knnnew",e); rc.response().setStatusCode(HttpResponseStatus.INTERNAL_SERVER_ERROR.code()) .end("Error parsing request - " + e.getMessage()); - return; } }); } diff --git a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-server/src/test/java/org/deeplearning4j/nearestneighbor/server/NearestNeighborTest.java b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-server/src/test/java/org/deeplearning4j/nearestneighbor/server/NearestNeighborTest.java index 22e67ac59..87b1c8c72 100644 --- a/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-server/src/test/java/org/deeplearning4j/nearestneighbor/server/NearestNeighborTest.java +++ b/cavis-dnn/cavis-dnn-nn-parent/cavis-dnn-nn-server/src/test/java/org/deeplearning4j/nearestneighbor/server/NearestNeighborTest.java @@ -93,7 +93,7 @@ public class NearestNeighborTest extends BaseDL4JTest { @Test public void vpTreeTest() throws Exception { - INDArray matrix = Nd4j.rand(new int[] {400,10}); + INDArray matrix = Nd4j.rand(400,10); INDArray rowVector = matrix.getRow(70); INDArray resultArr = Nd4j.zeros(400,1); Executor executor = Executors.newSingleThreadExecutor(); @@ -144,7 +144,7 @@ public class NearestNeighborTest extends BaseDL4JTest { int numNeighbors = 42; INDArray points = Nd4j.rand(numRows, numCols); VPTree tree = new VPTree(points); - INDArray query = Nd4j.rand(new int[] {1, numCols}); + INDArray query = Nd4j.rand(1, numCols); VPTreeFillSearch fillSearch = new VPTreeFillSearch(tree, numNeighbors, query); fillSearch.search(); List results = fillSearch.getResults(); diff --git 
a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/earlystopping/EarlyStoppingConfiguration.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/earlystopping/EarlyStoppingConfiguration.java index d0acb2e06..8f55745ed 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/earlystopping/EarlyStoppingConfiguration.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/earlystopping/EarlyStoppingConfiguration.java @@ -93,7 +93,7 @@ public class EarlyStoppingConfiguration implements Serializable private EarlyStoppingModelSaver modelSaver = new InMemoryModelSaver<>(); private List epochTerminationConditions = new ArrayList<>(); - private List iterationTerminationConditions = new ArrayList<>(); + private final List iterationTerminationConditions = new ArrayList<>(); private boolean saveLastModel = false; private int evaluateEveryNEpochs = 1; private ScoreCalculator scoreCalculator; diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/earlystopping/saver/LocalFileGraphSaver.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/earlystopping/saver/LocalFileGraphSaver.java index 314747866..4b08e401f 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/earlystopping/saver/LocalFileGraphSaver.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/earlystopping/saver/LocalFileGraphSaver.java @@ -34,8 +34,8 @@ public class LocalFileGraphSaver implements EarlyStoppingModelSaver { public enum ROCType {ROC, BINARY, MULTICLASS} - public enum Metric {AUC, AUPRC}; + public enum Metric {AUC, AUPRC} protected final ROCType type; protected final Metric metric; @@ -80,7 +80,7 @@ public class ROCScoreCalculator extends BaseIEvaluationScoreCalculator implements IEarlyStoppingTrainer { - private static Logger log = LoggerFactory.getLogger(BaseEarlyStoppingTrainer.class); + private static final Logger log = LoggerFactory.getLogger(BaseEarlyStoppingTrainer.class); protected T model; @@ -294,7 
+292,7 @@ public abstract class BaseEarlyStoppingTrainer implements IEarl } if (epochTerminate) { log.info("Hit epoch termination condition at epoch {}. Details: {}", epochCount, - termReason.toString()); + termReason); T bestModel; try { bestModel = esConfig.getModelSaver().getBestModel(); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/earlystopping/trainer/EarlyStoppingGraphTrainer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/earlystopping/trainer/EarlyStoppingGraphTrainer.java index d78967448..e0011f535 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/earlystopping/trainer/EarlyStoppingGraphTrainer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/earlystopping/trainer/EarlyStoppingGraphTrainer.java @@ -32,7 +32,7 @@ import org.nd4j.linalg.dataset.api.iterator.DataSetIterator; import org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator; public class EarlyStoppingGraphTrainer extends BaseEarlyStoppingTrainer { //implements IEarlyStoppingTrainer { - private ComputationGraph net; + private final ComputationGraph net; /** * @param esConfig Configuration diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/earlystopping/trainer/EarlyStoppingTrainer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/earlystopping/trainer/EarlyStoppingTrainer.java index c96ad86f9..f4df7a3d4 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/earlystopping/trainer/EarlyStoppingTrainer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/earlystopping/trainer/EarlyStoppingTrainer.java @@ -34,8 +34,8 @@ import org.nd4j.linalg.dataset.api.iterator.DataSetIterator; public class EarlyStoppingTrainer extends BaseEarlyStoppingTrainer { - private MultiLayerNetwork net; - private boolean isMultiEpoch = false; + private final MultiLayerNetwork net; + private final boolean isMultiEpoch = false; public EarlyStoppingTrainer(EarlyStoppingConfiguration 
earlyStoppingConfiguration, diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/eval/BaseEvaluation.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/eval/BaseEvaluation.java index e7318364d..1c784f9b1 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/eval/BaseEvaluation.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/eval/BaseEvaluation.java @@ -42,9 +42,9 @@ import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; public abstract class BaseEvaluation extends org.nd4j.evaluation.BaseEvaluation { @Getter - private static ObjectMapper objectMapper = configureMapper(new ObjectMapper()); + private static final ObjectMapper objectMapper = configureMapper(new ObjectMapper()); @Getter - private static ObjectMapper yamlMapper = configureMapper(new ObjectMapper(new YAMLFactory())); + private static final ObjectMapper yamlMapper = configureMapper(new ObjectMapper(new YAMLFactory())); private static ObjectMapper configureMapper(ObjectMapper ret) { ret.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/ComputationGraphConfiguration.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/ComputationGraphConfiguration.java index efe5b0f60..8fe4b99a3 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/ComputationGraphConfiguration.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/ComputationGraphConfiguration.java @@ -929,13 +929,9 @@ public class ComputationGraphConfiguration implements Serializable, Cloneable { public GraphBuilder removeVertex(String vertexName, boolean removeConnections) { vertices.remove(vertexName); vertexInputs.remove(vertexName); - if (networkInputs.contains(vertexName)) { - networkInputs.remove(vertexName); - } + networkInputs.remove(vertexName); if (removeConnections) { - if (networkOutputs.contains(vertexName)) { - 
networkOutputs.remove(vertexName); - } + networkOutputs.remove(vertexName); Map> newVertexInputs = new LinkedHashMap<>(); for (Map.Entry> entry : this.vertexInputs.entrySet()) { List inputs = entry.getValue(); @@ -954,9 +950,7 @@ public class ComputationGraphConfiguration implements Serializable, Cloneable { } this.vertexInputs = newVertexInputs; - if (inputPreProcessors.containsKey(vertexName)) { - inputPreProcessors.remove(vertexName); - } + inputPreProcessors.remove(vertexName); } return this; } diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/NeuralNetConfiguration.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/NeuralNetConfiguration.java index 5ceb3ea63..69ff898e2 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/NeuralNetConfiguration.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/NeuralNetConfiguration.java @@ -144,8 +144,8 @@ public class NeuralNetConfiguration implements Serializable, Cloneable { */ public static class ListBuilder extends MultiLayerConfiguration.Builder { private int layerCounter = -1; //Used only for .layer(Layer) method - private Map layerwise; - private Builder globalConfig; + private final Map layerwise; + private final Builder globalConfig; // Constructor public ListBuilder(Builder globalConfig, Map layerMap) { diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/constraint/MaxNormConstraint.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/constraint/MaxNormConstraint.java index 8f2994cf5..43fdc4254 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/constraint/MaxNormConstraint.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/constraint/MaxNormConstraint.java @@ -62,7 +62,7 @@ public class MaxNormConstraint extends BaseConstraint { */ public MaxNormConstraint(double maxNorm, int... 
dimensions) { - this(maxNorm, Collections.emptySet(), dimensions); + this(maxNorm, Collections.emptySet(), dimensions); } @Override diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/constraint/MinMaxNormConstraint.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/constraint/MinMaxNormConstraint.java index 895072c39..6449a9abd 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/constraint/MinMaxNormConstraint.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/constraint/MinMaxNormConstraint.java @@ -70,7 +70,7 @@ public class MinMaxNormConstraint extends BaseConstraint { * parameters which have order [depthOut, depthIn, kH, kW] */ public MinMaxNormConstraint(double min, double max, double rate, int... dimensions){ - this(min, max, rate, Collections.emptySet(), dimensions); + this(min, max, rate, Collections.emptySet(), dimensions); } /** diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/constraint/UnitNormConstraint.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/constraint/UnitNormConstraint.java index 8e06315be..a082056a7 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/constraint/UnitNormConstraint.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/constraint/UnitNormConstraint.java @@ -44,7 +44,7 @@ public class UnitNormConstraint extends BaseConstraint { * parameters which have order [depthOut, depthIn, kH, kW] */ public UnitNormConstraint(int... 
dimensions){ - this(Collections.emptySet(), dimensions); + this(Collections.emptySet(), dimensions); } diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/distribution/BinomialDistribution.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/distribution/BinomialDistribution.java index 883b027eb..14c6d368a 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/distribution/BinomialDistribution.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/distribution/BinomialDistribution.java @@ -77,9 +77,7 @@ public class BinomialDistribution extends Distribution { BinomialDistribution other = (BinomialDistribution) obj; if (numberOfTrials != other.numberOfTrials) return false; - if (Double.doubleToLongBits(probabilityOfSuccess) != Double.doubleToLongBits(other.probabilityOfSuccess)) - return false; - return true; + return Double.doubleToLongBits(probabilityOfSuccess) == Double.doubleToLongBits(other.probabilityOfSuccess); } public String toString() { diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/distribution/NormalDistribution.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/distribution/NormalDistribution.java index 566c58f66..1c867a6ff 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/distribution/NormalDistribution.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/distribution/NormalDistribution.java @@ -87,9 +87,7 @@ public class NormalDistribution extends Distribution { NormalDistribution other = (NormalDistribution) obj; if (Double.doubleToLongBits(mean) != Double.doubleToLongBits(other.mean)) return false; - if (Double.doubleToLongBits(std) != Double.doubleToLongBits(other.std)) - return false; - return true; + return Double.doubleToLongBits(std) == Double.doubleToLongBits(other.std); } public String toString() { diff --git 
a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/distribution/serde/LegacyDistributionDeserializer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/distribution/serde/LegacyDistributionDeserializer.java index 88415f1cf..ecf9fee12 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/distribution/serde/LegacyDistributionDeserializer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/distribution/serde/LegacyDistributionDeserializer.java @@ -33,7 +33,7 @@ import java.io.IOException; public class LegacyDistributionDeserializer extends JsonDeserializer { @Override public Distribution deserialize(JsonParser jp, DeserializationContext deserializationContext) - throws IOException, JsonProcessingException { + throws IOException { //Manually parse old format JsonNode node = jp.getCodec().readTree(jp); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/graph/SubsetVertex.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/graph/SubsetVertex.java index 9c98c8b3b..52f0e059d 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/graph/SubsetVertex.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/graph/SubsetVertex.java @@ -63,7 +63,7 @@ public class SubsetVertex extends GraphVertex { @Override public int hashCode() { - return new Integer(from).hashCode() ^ new Integer(to).hashCode(); + return Integer.valueOf(from).hashCode() ^ Integer.valueOf(to).hashCode(); } @Override diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/AbstractLSTM.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/AbstractLSTM.java index 94cfe157f..a974a7f91 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/AbstractLSTM.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/AbstractLSTM.java @@ -84,7 +84,7 @@ 
public abstract class AbstractLSTM extends BaseRecurrentLayer { * @param gateActivationFn Activation function for the LSTM gates */ public T gateActivationFunction(String gateActivationFn) { - return (T) gateActivationFunction(Activation.fromString(gateActivationFn)); + return gateActivationFunction(Activation.fromString(gateActivationFn)); } /** @@ -94,7 +94,7 @@ public abstract class AbstractLSTM extends BaseRecurrentLayer { * @param gateActivationFn Activation function for the LSTM gates */ public T gateActivationFunction(Activation gateActivationFn) { - return (T) gateActivationFunction(gateActivationFn.getActivationFunction()); + return gateActivationFunction(gateActivationFn.getActivationFunction()); } /** diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/CapsuleLayer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/CapsuleLayer.java index 548883015..c6f31faf3 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/CapsuleLayer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/CapsuleLayer.java @@ -150,7 +150,7 @@ public class CapsuleLayer extends SameDiffLayer { public void defineParameters(SDLayerParams params) { params.clear(); params.addWeightParam(WEIGHT_PARAM, - 1, inputCapsules, capsules * capsuleDimensions, inputCapsuleDimensions, 1); + 1, inputCapsules, (long) capsules * capsuleDimensions, inputCapsuleDimensions, 1); if(hasBias){ params.addBiasParam(BIAS_PARAM, @@ -168,7 +168,7 @@ public class CapsuleLayer extends SameDiffLayer { WeightInitUtil.initWeights( inputCapsules * inputCapsuleDimensions, capsules * capsuleDimensions, - new long[]{1, inputCapsules, capsules * capsuleDimensions, + new long[]{1, inputCapsules, (long) capsules * capsuleDimensions, inputCapsuleDimensions, 1}, this.weightInit, null, diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/CenterLossOutputLayer.java 
b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/CenterLossOutputLayer.java index 43cc2e9b0..820d73d5d 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/CenterLossOutputLayer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/CenterLossOutputLayer.java @@ -81,12 +81,10 @@ public class CenterLossOutputLayer extends BaseOutputLayer { @Override public IUpdater getUpdaterByParam(String paramName) { // center loss utilizes alpha directly for this so any updater can be used for other layers - switch (paramName) { - case CenterLossParamInitializer.CENTER_KEY: - return new NoOp(); - default: - return iUpdater; + if (CenterLossParamInitializer.CENTER_KEY.equals(paramName)) { + return new NoOp(); } + return iUpdater; } public double getAlpha() { diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/ConvolutionLayer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/ConvolutionLayer.java index 9276408a9..ae26e62f0 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/ConvolutionLayer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/ConvolutionLayer.java @@ -49,7 +49,7 @@ public class ConvolutionLayer extends FeedForwardLayer { protected boolean hasBias = true; protected ConvolutionMode convolutionMode = ConvolutionMode.Truncate; //Default to truncate here - default for 0.6.0 and earlier networks on JSON deserialization - protected int dilation[] = new int[] {1, 1}; + protected int[] dilation = new int[] {1, 1}; protected int[] kernelSize; // Square filter protected int[] stride; // Default is 2. 
Down-sample by a factor of 2 protected int[] padding; diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/GravesBidirectionalLSTM.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/GravesBidirectionalLSTM.java index 102c0c008..76a943509 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/GravesBidirectionalLSTM.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/GravesBidirectionalLSTM.java @@ -176,7 +176,7 @@ public class GravesBidirectionalLSTM extends BaseRecurrentLayer { */ public Builder helperAllowFallback(boolean allowFallback) { this.setHelperAllowFallback(allowFallback); - return (Builder) this; + return this; } @SuppressWarnings("unchecked") diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/InputTypeUtil.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/InputTypeUtil.java index 417edd8ce..d6015e022 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/InputTypeUtil.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/InputTypeUtil.java @@ -76,8 +76,8 @@ public class InputTypeUtil { return InputType.convolutional(hOut, wOut, outputDepth, i.getFormat()); } - long hOut = sH * (hIn - 1) + kH - 2 * padH; - long wOut = sW * (wIn - 1) + kW - 2 * padW; + long hOut = sH * (hIn - 1) + kH - 2L * padH; + long wOut = sW * (wIn - 1) + kW - 2L * padW; return InputType.convolutional(hOut, wOut, outputDepth, i.getFormat()); } @@ -126,9 +126,9 @@ public class InputTypeUtil { return InputType.convolutional3D(dataFormat, dOut, hOut, wOut, outputDepth); } - long hOut = sH * (hIn - 1) + kH - 2 * padH; - long wOut = sW * (wIn - 1) + kW - 2 * padW; - long dOut = sD * (dIn - 1) + kD - 2 * padD; + long hOut = sH * (hIn - 1) + kH - 2L * padH; + long wOut = sW * (wIn - 1) + kW - 2L * padW; + long dOut = sD * (dIn - 1) + kD - 2L * padD; return 
InputType.convolutional3D(dataFormat, dOut, hOut, wOut, outputDepth); } @@ -179,20 +179,20 @@ public class InputTypeUtil { stride, padding, outputChannels, convolutionMode)); } - if (kH <= 0 || (padH > 0 && kH > inHeight + 2 * padH)) { + if (kH <= 0 || (padH > 0 && kH > inHeight + 2L * padH)) { throw new DL4JInvalidConfigException(getConfigErrorCommonLine(layerIdx, layerName, layerClass, true) + " Invalid input configuration for kernel height. Require 0 < kH <= inHeight + 2*padH; got (kH=" + kH + ", inHeight=" + inHeight + ", padH=" + padH + ")\n" + getConfigErrorCommonLastLine( inputType, kernelSize, stride, padding, outputChannels, convolutionMode)); } - if (kW <= 0 || (padW > 0 && kW > inWidth + 2 * padW)) { + if (kW <= 0 || (padW > 0 && kW > inWidth + 2L * padW)) { throw new DL4JInvalidConfigException(getConfigErrorCommonLine(layerIdx, layerName, layerClass, false) + " Invalid input configuration for kernel width. Require 0 < kW <= inWidth + 2*padW; got (kW=" + kW + ", inWidth=" + inWidth + ", padW=" + padW + ")\n" + getConfigErrorCommonLastLine( inputType, kernelSize, stride, padding, outputChannels, convolutionMode)); } - if (kD <= 0 || (padD > 0 && kD > inDepth + 2 * padD)) { + if (kD <= 0 || (padD > 0 && kD > inDepth + 2L * padD)) { throw new DL4JInvalidConfigException(getConfigErrorCommonLine(layerIdx, layerName, layerClass, false) + " Invalid input configuration for kernel channels. Require 0 < kD <= inDepth + 2*padD; got (kD=" + kD + ", inDepth=" + inDepth + ", padD=" + padD + ")\n" + getConfigErrorCommonLastLine( @@ -200,7 +200,7 @@ public class InputTypeUtil { } //Strict mode: require exactly the right size... 
if (convolutionMode == ConvolutionMode.Strict) { - if ((inHeight - kH + 2 * padH) % sH != 0) { + if ((inHeight - kH + 2L * padH) % sH != 0) { double d = (inHeight - kH + 2 * padH) / ((double) sH) + 1.0; String str = String.format("%.2f", d); int truncated = (int) d; @@ -218,7 +218,7 @@ public class InputTypeUtil { convolutionMode)); } - if ((inWidth - kW + 2 * padW) % sW != 0) { + if ((inWidth - kW + 2L * padW) % sW != 0) { double d = (inWidth - kW + 2 * padW) / ((double) sW) + 1.0; String str = String.format("%.2f", d); int truncated = (int) d; @@ -236,7 +236,7 @@ public class InputTypeUtil { convolutionMode)); } - if ((inDepth - kD + 2 * padD) % sD != 0) { + if ((inDepth - kD + 2L * padD) % sD != 0) { double d = (inDepth - kD + 2 * padD) / ((double) sD) + 1.0; String str = String.format("%.2f", d); int truncated = (int) d; @@ -262,9 +262,9 @@ public class InputTypeUtil { return InputType.convolutional3D(dataFormat, outD, outH, outW, outputChannels); } - long dOut = (inDepth - kD + 2 * padD) / sD + 1; - long hOut = (inHeight - kH + 2 * padH) / sH + 1; - long wOut = (inWidth - kW + 2 * padW) / sW + 1; + long dOut = (inDepth - kD + 2L * padD) / sD + 1; + long hOut = (inHeight - kH + 2L * padH) / sH + 1; + long wOut = (inWidth - kW + 2L * padW) / sW + 1; return InputType.convolutional3D(dOut, hOut, wOut, outputChannels); } @@ -396,7 +396,7 @@ public class InputTypeUtil { convolutionMode)); } //note the padding check > 0 here. This validation fails for padding == 0. Verified on resnet50 - if (kH <= 0 || padH > 0 && (padH > 0 && kH > inHeight + 2 * padH)) { + if (kH <= 0 || padH > 0 && (padH > 0 && kH > inHeight + 2L * padH)) { throw new DL4JInvalidConfigException(getConfigErrorCommonLine(layerIdx, layerName, layerClass, true) + " Invalid input configuration for kernel height. 
Require 0 < kH <= inHeight + 2*padH; got (kH=" + kH + ", inHeight=" + inHeight + ", padH=" + padH + ")\n" + getConfigErrorCommonLastLine( @@ -404,7 +404,7 @@ public class InputTypeUtil { } //note the padding check > 0 here. This validation fails for padding == 0. Verified on resnet50 - if (kW <= 0 || padW > 0 && (padW > 0 && kW > inWidth + 2 * padW)) { + if (kW <= 0 || padW > 0 && (padW > 0 && kW > inWidth + 2L * padW)) { throw new DL4JInvalidConfigException(getConfigErrorCommonLine(layerIdx, layerName, layerClass, false) + " Invalid input configuration for kernel width. Require 0 < kW <= inWidth + 2*padW; got (kW=" + kW + ", inWidth=" + inWidth + ", padW=" + padW + ")\n" + getConfigErrorCommonLastLine( @@ -413,7 +413,7 @@ public class InputTypeUtil { //Strict mode: require exactly the right size... if (convolutionMode == ConvolutionMode.Strict) { - if ((inHeight - kH + 2 * padH) % sH != 0) { + if ((inHeight - kH + 2L * padH) % sH != 0) { double d = (inHeight - kH + 2 * padH) / ((double) sH) + 1.0; String str = String.format("%.2f", d); int truncated = (int) d; @@ -431,7 +431,7 @@ public class InputTypeUtil { } - if ((inWidth - kW + 2 * padW) % sW != 0) { + if ((inWidth - kW + 2L * padW) % sW != 0) { double d = (inWidth - kW + 2 * padW) / ((double) sW) + 1.0; String str = String.format("%.2f", d); int truncated = (int) d; @@ -455,8 +455,8 @@ public class InputTypeUtil { - long hOut = (inHeight - kH + 2 * padH) / sH + 1; - long wOut = (inWidth - kW + 2 * padW) / sW + 1; + long hOut = (inHeight - kH + 2L * padH) / sH + 1; + long wOut = (inWidth - kW + 2L * padW) / sW + 1; return InputType.convolutional(hOut, wOut, outputDepth, format); } @@ -596,15 +596,13 @@ public class InputTypeUtil { case FF: for(int i = 0; i < vertexInputs.length; i++) { if(vertexInputs[i].getType() != maxType) { - switch(vertexInputs[i].getType()) { - case RNN: - InputType.InputTypeRecurrent recurrent = (InputType.InputTypeRecurrent) vertexInputs[i]; - if(recurrent.getTimeSeriesLength() == 1) { 
- vertexInputs[i] = InputType.feedForward(recurrent.getSize()); - } - break; - default: - throw new IllegalArgumentException("Attempted conversion of types and was unable to"); + if (vertexInputs[i].getType() == InputType.Type.RNN) { + InputType.InputTypeRecurrent recurrent = (InputType.InputTypeRecurrent) vertexInputs[i]; + if (recurrent.getTimeSeriesLength() == 1) { + vertexInputs[i] = InputType.feedForward(recurrent.getSize()); + } + } else { + throw new IllegalArgumentException("Attempted conversion of types and was unable to"); } } } @@ -621,14 +619,11 @@ public class InputTypeUtil { } for(int i = 0; i < vertexInputs.length; i++) { if(vertexInputs[i].getType() != maxType) { - switch(vertexInputs[i].getType()) { - case FF: - InputType.InputTypeFeedForward ff = (InputType.InputTypeFeedForward) vertexInputs[i]; - vertexInputs[i] = InputType.recurrent(ff.getSize(),rnnFormat); - break; - default: - throw new IllegalArgumentException("Attempted conversion of types and was unable to"); - + if (vertexInputs[i].getType() == InputType.Type.FF) { + InputType.InputTypeFeedForward ff = (InputType.InputTypeFeedForward) vertexInputs[i]; + vertexInputs[i] = InputType.recurrent(ff.getSize(), rnnFormat); + } else { + throw new IllegalArgumentException("Attempted conversion of types and was unable to"); } } } diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/LocallyConnected2D.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/LocallyConnected2D.java index 724a0c22d..b44055332 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/LocallyConnected2D.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/LocallyConnected2D.java @@ -146,7 +146,7 @@ public class LocallyConnected2D extends SameDiffLayer { @Override public void defineParameters(SDLayerParams params) { params.clear(); - val weightsShape = new long[] {outputSize[0] * outputSize[1], featureDim, nOut}; + 
val weightsShape = new long[] {(long) outputSize[0] * outputSize[1], featureDim, nOut}; params.addWeightParam(ConvolutionParamInitializer.WEIGHT_KEY, weightsShape); if (hasBias) { val biasShape = new long[] {nOut}; diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/PrimaryCapsules.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/PrimaryCapsules.java index 033b96470..2107bdede 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/PrimaryCapsules.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/PrimaryCapsules.java @@ -129,10 +129,10 @@ public class PrimaryCapsules extends SameDiffLayer { public void defineParameters(SDLayerParams params) { params.clear(); params.addWeightParam(WEIGHT_PARAM, - kernelSize[0], kernelSize[1], inputChannels, capsuleDimensions * channels); + kernelSize[0], kernelSize[1], inputChannels, (long) capsuleDimensions * channels); if(hasBias){ - params.addBiasParam(BIAS_PARAM, capsuleDimensions * channels); + params.addBiasParam(BIAS_PARAM, (long) capsuleDimensions * channels); } } @@ -165,7 +165,7 @@ public class PrimaryCapsules extends SameDiffLayer { InputTypeConvolutional out = (InputTypeConvolutional) InputTypeUtil .getOutputTypeCnnLayers(inputType, kernelSize, stride, padding, dilation, convolutionMode, - capsuleDimensions * channels, -1, getLayerName(), PrimaryCapsules.class); + (long) capsuleDimensions * channels, -1, getLayerName(), PrimaryCapsules.class); return InputType.recurrent((int) (out.getChannels() * out.getHeight() * out.getWidth() / capsuleDimensions), capsuleDimensions); @@ -187,7 +187,7 @@ public class PrimaryCapsules extends SameDiffLayer { InputTypeConvolutional out = (InputTypeConvolutional) InputTypeUtil .getOutputTypeCnnLayers(inputType, kernelSize, stride, padding, dilation, convolutionMode, - capsuleDimensions * channels, -1, getLayerName(), PrimaryCapsules.class); + (long) capsuleDimensions * 
channels, -1, getLayerName(), PrimaryCapsules.class); this.capsules = (int) (out.getChannels() * out.getHeight() * out.getWidth() / capsuleDimensions); } diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Subsampling3DLayer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Subsampling3DLayer.java index 2175c58ab..cb643cd7b 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Subsampling3DLayer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Subsampling3DLayer.java @@ -455,7 +455,7 @@ public class Subsampling3DLayer extends NoParamLayer { } public T dilation(int dDepth, int dHeight, int dWidth) { - this.setDilation(new int[] {dDepth, dHeight, dWidth}); + this.setDilation(dDepth, dHeight, dWidth); return (T) this; } diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Upsampling1D.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Upsampling1D.java index 3af2e9d55..6a012ed15 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Upsampling1D.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Upsampling1D.java @@ -128,7 +128,7 @@ public class Upsampling1D extends BaseUpsamplingLayer { */ public Builder size(int size) { - this.setSize(new int[] {size}); + this.setSize(size); return this; } @@ -153,7 +153,7 @@ public class Upsampling1D extends BaseUpsamplingLayer { if(size.length == 2){ if(size[0] == size[1]) { - setSize(new int[]{size[0]}); + setSize(size[0]); return; } else { Preconditions.checkArgument(false, diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Upsampling3D.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Upsampling3D.java index 984926ec1..ef5d832b4 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Upsampling3D.java 
+++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Upsampling3D.java @@ -157,7 +157,7 @@ public class Upsampling3D extends BaseUpsamplingLayer { */ public Builder size(int size) { - this.setSize(new int[] {size, size, size}); + this.setSize(size, size, size); return this; } diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/convolutional/Cropping1D.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/convolutional/Cropping1D.java index ae71c3811..fd2546019 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/convolutional/Cropping1D.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/convolutional/Cropping1D.java @@ -150,7 +150,7 @@ public class Cropping1D extends NoParamLayer { * @param cropBottom Amount of cropping to apply to the bottom of the input activations */ public Builder(int cropTop, int cropBottom) { - this.setCropping(new int[]{cropTop, cropBottom}); + this.setCropping(cropTop, cropBottom); } public Cropping1D build() { diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/convolutional/Cropping2D.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/convolutional/Cropping2D.java index 8ea2ea18e..29aad71bd 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/convolutional/Cropping2D.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/convolutional/Cropping2D.java @@ -185,7 +185,7 @@ public class Cropping2D extends NoParamLayer { * @param cropRight Amount of cropping to apply to the right of the input activations */ public Builder(int cropTop, int cropBottom, int cropLeft, int cropRight) { - this.setCropping(new int[] {cropTop, cropBottom, cropLeft, cropRight}); + this.setCropping(cropTop, cropBottom, cropLeft, cropRight); } public Cropping2D build() { diff --git 
a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/convolutional/Cropping3D.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/convolutional/Cropping3D.java index df3137629..1ab34b17b 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/convolutional/Cropping3D.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/convolutional/Cropping3D.java @@ -167,7 +167,7 @@ public class Cropping3D extends NoParamLayer { * @param cropRightW Amount of cropping to apply to the right of the width dimension */ public Builder(int cropLeftD, int cropRightD, int cropLeftH, int cropRightH, int cropLeftW, int cropRightW) { - this.setCropping(new int[] {cropLeftD, cropRightD, cropLeftH, cropRightH, cropLeftW, cropRightW}); + this.setCropping(cropLeftD, cropRightD, cropLeftH, cropRightH, cropLeftW, cropRightW); } public Cropping3D build() { diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/objdetect/BoundingBoxesDeserializer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/objdetect/BoundingBoxesDeserializer.java index 747a95320..8dac21edc 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/objdetect/BoundingBoxesDeserializer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/objdetect/BoundingBoxesDeserializer.java @@ -33,7 +33,7 @@ import java.io.IOException; public class BoundingBoxesDeserializer extends JsonDeserializer { @Override - public INDArray deserialize(JsonParser jp, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { + public INDArray deserialize(JsonParser jp, DeserializationContext deserializationContext) throws IOException { JsonNode node = jp.getCodec().readTree(jp); if(node.has("dataBuffer")){ //Must be legacy format serialization diff --git 
a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/samediff/SameDiffLambdaVertex.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/samediff/SameDiffLambdaVertex.java index 0e6d6ebb6..d3c10ec2f 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/samediff/SameDiffLambdaVertex.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/samediff/SameDiffLambdaVertex.java @@ -91,8 +91,8 @@ public abstract class SameDiffLambdaVertex extends SameDiffVertex { public class VertexInputs { - private SameDiff sameDiff; - private Map map = new LinkedHashMap<>(); + private final SameDiff sameDiff; + private final Map map = new LinkedHashMap<>(); protected VertexInputs(SameDiff sd) { this.sameDiff = sd; diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/variational/CompositeReconstructionDistribution.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/variational/CompositeReconstructionDistribution.java index 47cfffd43..ca96fb46e 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/variational/CompositeReconstructionDistribution.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/variational/CompositeReconstructionDistribution.java @@ -227,7 +227,7 @@ public class CompositeReconstructionDistribution implements ReconstructionDistri private INDArray randomSample(INDArray preOutDistributionParams, boolean isMean) { int inputSoFar = 0; int paramsSoFar = 0; - INDArray out = Nd4j.createUninitialized(preOutDistributionParams.dataType(), new long[] {preOutDistributionParams.size(0), totalSize}); + INDArray out = Nd4j.createUninitialized(preOutDistributionParams.dataType(), preOutDistributionParams.size(0), totalSize); for (int i = 0; i < distributionSizes.length; i++) { int thisDataSize = distributionSizes[i]; int thisParamsSize = 
reconstructionDistributions[i].distributionInputSize(thisDataSize); @@ -254,8 +254,8 @@ public class CompositeReconstructionDistribution implements ReconstructionDistri public static class Builder { - private List distributionSizes = new ArrayList<>(); - private List reconstructionDistributions = new ArrayList<>(); + private final List distributionSizes = new ArrayList<>(); + private final List reconstructionDistributions = new ArrayList<>(); /** * Add another distribution to the composite distribution. This will add the distribution for the next 'distributionSize' diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/variational/VariationalAutoencoder.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/variational/VariationalAutoencoder.java index 3cc996c4a..ca1f10bd0 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/variational/VariationalAutoencoder.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/variational/VariationalAutoencoder.java @@ -93,10 +93,7 @@ public class VariationalAutoencoder extends BasePretrainNetwork { if (paramName.startsWith(VariationalAutoencoderParamInitializer.PZX_LOGSTD2_PREFIX)) { return true; } - if (paramName.startsWith(VariationalAutoencoderParamInitializer.PXZ_PREFIX)) { - return true; - } - return false; + return paramName.startsWith(VariationalAutoencoderParamInitializer.PXZ_PREFIX); } @Override diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/memory/LayerMemoryReport.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/memory/LayerMemoryReport.java index 28725679b..771df513c 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/memory/LayerMemoryReport.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/memory/LayerMemoryReport.java @@ -180,10 +180,10 @@ public class LayerMemoryReport extends MemoryReport { public 
static class Builder { - private String layerName; - private Class layerType; - private InputType inputType; - private InputType outputType; + private final String layerName; + private final Class layerType; + private final InputType inputType; + private final InputType outputType; //Standard memory (in terms of total ND4J array length) private long parameterSize; diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/preprocessor/FeedForwardToCnn3DPreProcessor.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/preprocessor/FeedForwardToCnn3DPreProcessor.java index 02e8a1544..9d667cc07 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/preprocessor/FeedForwardToCnn3DPreProcessor.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/preprocessor/FeedForwardToCnn3DPreProcessor.java @@ -106,7 +106,7 @@ public class FeedForwardToCnn3DPreProcessor implements InputPreProcessor { epsilons = workspaceMgr.dup(ArrayType.ACTIVATION_GRAD, epsilons, 'c'); if (shape == null || ArrayUtil.prod(shape) != epsilons.length()) { - INDArray ret = epsilons.reshape('c', epsilons.size(0),inputDepth * inputHeight * inputWidth * numChannels); + INDArray ret = epsilons.reshape('c', epsilons.size(0), (long) inputDepth * inputHeight * inputWidth * numChannels); return workspaceMgr.leverageTo(ArrayType.ACTIVATION_GRAD, ret); } diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/serde/BaseNetConfigDeserializer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/serde/BaseNetConfigDeserializer.java index 513b42aa8..abd52c0c3 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/serde/BaseNetConfigDeserializer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/serde/BaseNetConfigDeserializer.java @@ -64,7 +64,7 @@ public abstract class BaseNetConfigDeserializer extends StdDeserializer im @Override public abstract T 
deserialize(JsonParser jp, DeserializationContext ctxt) - throws IOException, JsonProcessingException; + throws IOException; protected boolean requiresIUpdaterFromLegacy(Layer[] layers){ for(Layer l : layers){ diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/serde/JsonMappers.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/serde/JsonMappers.java index c5a8fe912..8097111d6 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/serde/JsonMappers.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/serde/JsonMappers.java @@ -32,8 +32,8 @@ import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; @Slf4j public class JsonMappers { - private static ObjectMapper jsonMapper = new ObjectMapper(); - private static ObjectMapper yamlMapper = new ObjectMapper(new YAMLFactory()); + private static final ObjectMapper jsonMapper = new ObjectMapper(); + private static final ObjectMapper yamlMapper = new ObjectMapper(new YAMLFactory()); private static ObjectMapper legacyMapper; diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/serde/format/DataFormatDeserializer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/serde/format/DataFormatDeserializer.java index d7c3d636a..e9397126a 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/serde/format/DataFormatDeserializer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/serde/format/DataFormatDeserializer.java @@ -32,7 +32,7 @@ import java.io.IOException; public class DataFormatDeserializer extends JsonDeserializer { @Override - public DataFormat deserialize(JsonParser jp, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { + public DataFormat deserialize(JsonParser jp, DeserializationContext deserializationContext) throws IOException { JsonNode node = jp.getCodec().readTree(jp); String text = node.textValue(); 
switch (text){ diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/serde/legacy/LegacyIntArrayDeserializer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/serde/legacy/LegacyIntArrayDeserializer.java index 3bbb5f8f6..804655669 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/serde/legacy/LegacyIntArrayDeserializer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/serde/legacy/LegacyIntArrayDeserializer.java @@ -31,7 +31,7 @@ import java.io.IOException; public class LegacyIntArrayDeserializer extends JsonDeserializer { @Override - public int[] deserialize(JsonParser jp, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { + public int[] deserialize(JsonParser jp, DeserializationContext deserializationContext) throws IOException { JsonNode n = jp.getCodec().readTree(jp); if(n.isArray()){ ArrayNode an = (ArrayNode)n; diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/stepfunctions/DefaultStepFunction.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/stepfunctions/DefaultStepFunction.java index dc32f1232..d3a2c9518 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/stepfunctions/DefaultStepFunction.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/stepfunctions/DefaultStepFunction.java @@ -35,9 +35,7 @@ public class DefaultStepFunction extends StepFunction { return true; if (obj == null) return false; - if (getClass() != obj.getClass()) - return false; - return true; + return getClass() == obj.getClass(); } public String toString() { diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/stepfunctions/GradientStepFunction.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/stepfunctions/GradientStepFunction.java index 2a727535f..4b18a4aeb 100644 --- 
a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/stepfunctions/GradientStepFunction.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/stepfunctions/GradientStepFunction.java @@ -35,9 +35,7 @@ public class GradientStepFunction extends StepFunction { return true; if (obj == null) return false; - if (getClass() != obj.getClass()) - return false; - return true; + return getClass() == obj.getClass(); } public String toString() { diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/stepfunctions/NegativeDefaultStepFunction.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/stepfunctions/NegativeDefaultStepFunction.java index 867ed7b28..7bd42d9e7 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/stepfunctions/NegativeDefaultStepFunction.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/stepfunctions/NegativeDefaultStepFunction.java @@ -35,9 +35,7 @@ public class NegativeDefaultStepFunction extends StepFunction { return true; if (obj == null) return false; - if (getClass() != obj.getClass()) - return false; - return true; + return getClass() == obj.getClass(); } public String toString() { diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/stepfunctions/NegativeGradientStepFunction.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/stepfunctions/NegativeGradientStepFunction.java index a7c1d3648..943aed06f 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/stepfunctions/NegativeGradientStepFunction.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/stepfunctions/NegativeGradientStepFunction.java @@ -35,9 +35,7 @@ public class NegativeGradientStepFunction extends StepFunction { return true; if (obj == null) return false; - if (getClass() != obj.getClass()) - return false; - return true; + return getClass() == obj.getClass(); } public String 
toString() { diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/gradient/DefaultGradient.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/gradient/DefaultGradient.java index 9f8557ccc..23d1651f5 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/gradient/DefaultGradient.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/gradient/DefaultGradient.java @@ -31,7 +31,7 @@ import java.util.Map; public class DefaultGradient implements Gradient { public static final char DEFAULT_FLATTENING_ORDER = 'f'; - private Map gradients = new LinkedHashMap<>(); + private final Map gradients = new LinkedHashMap<>(); private Map flatteningOrders; @Setter private INDArray flattenedGradient; diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/ComputationGraph.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/ComputationGraph.java index 44a838df0..ac8a05be4 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/ComputationGraph.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/ComputationGraph.java @@ -191,12 +191,12 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork { * The number of input arrays to the network. Many networks only have 1 input; however, a ComputationGraph may * have an arbitrary number (>=1) separate input arrays */ - private int numInputArrays; + private final int numInputArrays; /** * The number of output arrays to the network. 
Many networks only have 1 output; however, a ComputationGraph may * have an arbitrary number (>=1) separate output arrays */ - private int numOutputArrays; + private final int numOutputArrays; //Current inputs, labels, input mask arrays and label mask arrays private transient INDArray[] inputs; @@ -2605,7 +2605,7 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork { List outputLayers = configuration.getNetworkOutputs(); for(String s : outputLayers ){ GraphVertex gv = getVertex(s); - if(gv instanceof LayerVertex && ((LayerVertex)gv).getLayer() instanceof IOutputLayer){ + if(gv instanceof LayerVertex && gv.getLayer() instanceof IOutputLayer){ throw new IllegalStateException("Cannot perform backprop with external errors in conjunction with an output layer:" + " output layers cannot use external errors for backprop. Layer name: " + s); } @@ -3923,7 +3923,7 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork { * @return Evaluation object; results of evaluation on all examples in the data set */ public T evaluate(DataSetIterator iterator) { - return (T)evaluate(iterator, (List)null); + return evaluate(iterator, (List)null); } /** @@ -4185,7 +4185,7 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork { @SuppressWarnings("unchecked") @SafeVarargs private final T[] doEvaluationHelper(MultiDataSetIterator iterator, T... 
evaluations) { - Map map = Collections.singletonMap(0, (IEvaluation[])evaluations); + Map map = Collections.singletonMap(0, evaluations); return (T[])doEvaluationHelper(iterator, map).get(0); } @@ -4311,7 +4311,7 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork { configuration.setTrainingWorkspaceMode(cMode); - return (Map) evaluations; + return evaluations; } /** @@ -4385,7 +4385,7 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork { connections = configuration.getVertexInputs().get(currentVertexName).toString(); List inputTypeList = new ArrayList<>(); if (currentVertex.hasLayer()) { - Layer currentLayer = ((LayerVertex) currentVertex).getLayer(); + Layer currentLayer = currentVertex.getLayer(); classNameArr = currentLayer.getClass().getName().split("\\."); className = classNameArr[classNameArr.length - 1]; paramCount = String.format("%,d", currentLayer.numParams()); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/ElementWiseVertex.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/ElementWiseVertex.java index 5d28feb9b..f678fb782 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/ElementWiseVertex.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/ElementWiseVertex.java @@ -51,7 +51,7 @@ public class ElementWiseVertex extends BaseGraphVertex { Add, Subtract, Product, Average, Max } - private Op op; + private final Op op; private int nInForwardPass; public ElementWiseVertex(ComputationGraph graph, String name, int vertexIndex, Op op, DataType dataType) { diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/L2NormalizeVertex.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/L2NormalizeVertex.java index 28f83c16f..0931bdb98 100644 --- 
a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/L2NormalizeVertex.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/L2NormalizeVertex.java @@ -43,8 +43,8 @@ public class L2NormalizeVertex extends BaseGraphVertex { private static final int[] DEFAULT_RANK3_DIMS = new int[] {1, 2}; private static final int[] DEFAULT_RANK4_DIMS = new int[] {1, 2, 3}; - private int[] dimension; - private double eps; + private final int[] dimension; + private final double eps; public L2NormalizeVertex(ComputationGraph graph, String name, int vertexIndex, int[] dimension, double eps, DataType dataType) { this(graph, name, vertexIndex, null, null, dimension, eps, dataType); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/L2Vertex.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/L2Vertex.java index b7db002c5..d839b9872 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/L2Vertex.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/L2Vertex.java @@ -38,7 +38,7 @@ import org.deeplearning4j.nn.workspace.ArrayType; import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; public class L2Vertex extends BaseGraphVertex { - private double eps; + private final double eps; public L2Vertex(ComputationGraph graph, String name, int vertexIndex, double eps, DataType dataType) { this(graph, name, vertexIndex, null, null, eps, dataType); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/LayerVertex.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/LayerVertex.java index a57b24eee..fdd05c390 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/LayerVertex.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/LayerVertex.java @@ -199,11 +199,10 @@ 
public class LayerVertex extends BaseGraphVertex { @Override public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("LayerVertex(id=").append(vertexIndex).append(",name=\"").append(vertexName).append("\",inputs=") - .append(Arrays.toString(inputVertices)).append(",outputs=") - .append(Arrays.toString(outputVertices)).append(")"); - return sb.toString(); + String sb = "LayerVertex(id=" + vertexIndex + ",name=\"" + vertexName + "\",inputs=" + + Arrays.toString(inputVertices) + ",outputs=" + + Arrays.toString(outputVertices) + ")"; + return sb; } @Override @@ -229,9 +228,7 @@ public class LayerVertex extends BaseGraphVertex { } if (!(resolvedLayer instanceof IOutputLayer)) { - if (epsilon == null) { - return false; - } + return epsilon != null; } return true; diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/MergeVertex.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/MergeVertex.java index f1e4a4f8b..1187bbabb 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/MergeVertex.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/MergeVertex.java @@ -44,7 +44,7 @@ public class MergeVertex extends BaseGraphVertex { private long[][] forwardPassShapes; private int fwdPassRank; - private int mergeAxis; + private final int mergeAxis; public MergeVertex(ComputationGraph graph, String name, int vertexIndex, DataType dataType, int mergeAxis) { this(graph, name, vertexIndex, null, null, dataType, mergeAxis); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/PreprocessorVertex.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/PreprocessorVertex.java index b8bedadbb..4586dd3d8 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/PreprocessorVertex.java +++ 
b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/PreprocessorVertex.java @@ -35,7 +35,7 @@ import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; public class PreprocessorVertex extends BaseGraphVertex { @Getter - private InputPreProcessor preProcessor; + private final InputPreProcessor preProcessor; public PreprocessorVertex(ComputationGraph graph, String name, int vertexIndex, InputPreProcessor preProcessor, DataType dataType) { this(graph, name, vertexIndex, null, null, preProcessor, dataType); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/ReshapeVertex.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/ReshapeVertex.java index 4c8bbfc16..5ccc81132 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/ReshapeVertex.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/ReshapeVertex.java @@ -34,9 +34,9 @@ import org.nd4j.common.primitives.Pair; public class ReshapeVertex extends BaseGraphVertex { - private char order; - private int[] newShape; - private int[] maskShape; + private final char order; + private final int[] newShape; + private final int[] maskShape; public ReshapeVertex(ComputationGraph graph, String name, int vertexIndex, char order, int[] newShape, int[] maskShape, DataType dataType) { diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/ScaleVertex.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/ScaleVertex.java index f62a9278d..16863434a 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/ScaleVertex.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/ScaleVertex.java @@ -35,7 +35,7 @@ import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; public class ScaleVertex extends BaseGraphVertex { - private double scaleFactor; + 
private final double scaleFactor; public ScaleVertex(ComputationGraph graph, String name, int vertexIndex, double scaleFactor, DataType dataType) { this(graph, name, vertexIndex, null, null, scaleFactor, dataType); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/ShiftVertex.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/ShiftVertex.java index 82c2e1155..d289c4e75 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/ShiftVertex.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/ShiftVertex.java @@ -35,7 +35,7 @@ import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; public class ShiftVertex extends BaseGraphVertex { - private double shiftFactor; + private final double shiftFactor; public ShiftVertex(ComputationGraph graph, String name, int vertexIndex, double shiftFactor, DataType dataType) { this(graph, name, vertexIndex, null, null, shiftFactor, dataType); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/StackVertex.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/StackVertex.java index 34d7b63e6..6889a0f39 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/StackVertex.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/StackVertex.java @@ -72,9 +72,7 @@ public class StackVertex extends BaseGraphVertex { // create the new shape outShape[0] = nStack * inShape[0]; - for (int i = 1; i < inShape.length; i++) { - outShape[i] = inShape[i]; - } + System.arraycopy(inShape, 1, outShape, 1, inShape.length - 1); boolean variableLengthTS = false; if (inShape.length == 3) { diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/SubsetVertex.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/SubsetVertex.java 
index 50fcf1699..d3271849c 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/SubsetVertex.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/SubsetVertex.java @@ -37,8 +37,8 @@ import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; import java.util.Arrays; public class SubsetVertex extends BaseGraphVertex { - private int from; - private int to; //inclusive + private final int from; + private final int to; //inclusive private long[] forwardShape; public SubsetVertex(ComputationGraph graph, String name, int vertexIndex, int from, int to, DataType dataType) { diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/UnstackVertex.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/UnstackVertex.java index a9c70c27a..c31cd1ae1 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/UnstackVertex.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/UnstackVertex.java @@ -37,9 +37,9 @@ import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; import java.util.Arrays; public class UnstackVertex extends BaseGraphVertex { - private long from; - private int stackSize; - private long forwardShape[]; + private final long from; + private final int stackSize; + private long[] forwardShape; private long step; public UnstackVertex(ComputationGraph graph, String name, int vertexIndex, int from, int stackSize, DataType dataType) { diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/rnn/DuplicateToTimeSeriesVertex.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/rnn/DuplicateToTimeSeriesVertex.java index 85dc8b06b..2bfc6ee97 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/rnn/DuplicateToTimeSeriesVertex.java +++ 
b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/rnn/DuplicateToTimeSeriesVertex.java @@ -37,8 +37,8 @@ import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; public class DuplicateToTimeSeriesVertex extends BaseGraphVertex { - private String inputName; - private int inputVertexIndex; + private final String inputName; + private final int inputVertexIndex; public DuplicateToTimeSeriesVertex(ComputationGraph graph, String name, int vertexIndex, String inputVertexName, DataType dataType) { this(graph, name, vertexIndex, null, null, inputVertexName, dataType); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/rnn/LastTimeStepVertex.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/rnn/LastTimeStepVertex.java index 4eab20e41..0475936d0 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/rnn/LastTimeStepVertex.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/rnn/LastTimeStepVertex.java @@ -38,8 +38,8 @@ import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; public class LastTimeStepVertex extends BaseGraphVertex { - private String inputName; - private int inputIdx; + private final String inputName; + private final int inputIdx; /** Shape of the forward pass activations */ private long[] fwdPassShape; /** Indexes of the time steps that were extracted, for each example */ diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/FrozenLayer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/FrozenLayer.java index 06f3f53b3..1e6c60add 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/FrozenLayer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/FrozenLayer.java @@ -40,7 +40,7 @@ public class FrozenLayer extends BaseWrapperLayer { private boolean logFit = false; private boolean logTestMode = 
false; private boolean logGradient = false; - private Gradient zeroGradient; + private final Gradient zeroGradient; private transient DummyConfig config; public FrozenLayer(Layer insideLayer) { @@ -176,7 +176,6 @@ public class FrozenLayer extends BaseWrapperLayer { if (!training) return; if (logTestMode) { - return; } else { OneTimeLogger.info(log, "Frozen layer instance found! Frozen layers are treated as always in test mode. Warning will only be issued once per instance"); @@ -188,7 +187,6 @@ public class FrozenLayer extends BaseWrapperLayer { if (training.equals(TrainingMode.TEST)) return; if (logTestMode) { - return; } else { OneTimeLogger.info(log, "Frozen layer instance found! Frozen layers are treated as always in test mode. Warning will only be issued once per instance"); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/FrozenLayerWithBackprop.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/FrozenLayerWithBackprop.java index a5bb54857..918a21a4a 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/FrozenLayerWithBackprop.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/FrozenLayerWithBackprop.java @@ -38,7 +38,7 @@ public class FrozenLayerWithBackprop extends BaseWrapperLayer { private boolean logTestMode = false; private boolean logGradient = false; - private Gradient zeroGradient; + private final Gradient zeroGradient; public FrozenLayerWithBackprop(final Layer insideLayer) { super(insideLayer); @@ -144,7 +144,6 @@ public class FrozenLayerWithBackprop extends BaseWrapperLayer { if (!training) return; if (logTestMode) { - return; } else { OneTimeLogger.info(log, "Frozen layer instance found! Frozen layers are treated as always in test mode. 
Warning will only be issued once per instance"); @@ -156,7 +155,6 @@ public class FrozenLayerWithBackprop extends BaseWrapperLayer { if (training.equals(TrainingMode.TEST)) return; if (logTestMode) { - return; } else { OneTimeLogger.info(log, "Frozen layer instance found! Frozen layers are treated as always in test mode. Warning will only be issued once per instance"); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/HelperUtils.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/HelperUtils.java index eb59a2c5f..dfff491e4 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/HelperUtils.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/HelperUtils.java @@ -64,7 +64,7 @@ public class HelperUtils { if("CUDA".equalsIgnoreCase(backend) && cudnnHelperClassName != null && !cudnnHelperClassName.isEmpty()) { if(DL4JClassLoading.loadClassByName(cudnnHelperClassName) != null) { log.debug("Attempting to initialize cudnn helper {}",cudnnHelperClassName); - helperRet = (LayerHelper) DL4JClassLoading.createNewInstance( + helperRet = DL4JClassLoading.createNewInstance( cudnnHelperClassName, (Class) layerHelperSuperClass, new Object[]{arguments}); @@ -76,7 +76,7 @@ public class HelperUtils { ClassLoader classLoader = DL4JClassLoading.getDl4jClassloader(); DL4JClassLoading.setDl4jClassloaderFromClass(layerHelperSuperClass); try { - helperRet = (LayerHelper) DL4JClassLoading.createNewInstance( + helperRet = DL4JClassLoading.createNewInstance( cudnnHelperClassName, (Class) layerHelperSuperClass, arguments); @@ -99,7 +99,7 @@ public class HelperUtils { } } else if("CPU".equalsIgnoreCase(backend) && oneDnnClassName != null && !oneDnnClassName.isEmpty()) { - helperRet = DL4JClassLoading.createNewInstance( + helperRet = DL4JClassLoading.createNewInstance( oneDnnClassName, arguments); log.trace("Created oneDNN helper: {}, layer {}", oneDnnClassName,layerName); diff --git 
a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/RepeatVector.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/RepeatVector.java index 442808357..84dd1fd1f 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/RepeatVector.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/RepeatVector.java @@ -98,13 +98,13 @@ public class RepeatVector extends AbstractLayer { - private int[] cropping; //[padTop, padBottom] + private final int[] cropping; //[padTop, padBottom] public Cropping1DLayer(NeuralNetConfiguration conf, DataType dataType) { super(conf, dataType); @@ -67,7 +67,7 @@ public class Cropping1DLayer extends AbstractLayer { INDArray epsNext = workspaceMgr.create(ArrayType.ACTIVATION_GRAD, dataType, inShape, 'c'); INDArray epsNextSubset = epsNext.get(all(), all(), interval(cropping[0], epsNext.size(2)-cropping[1])); epsNextSubset.assign(epsilon); - return new Pair<>((Gradient) new DefaultGradient(), epsNext); + return new Pair<>(new DefaultGradient(), epsNext); } diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/Cropping2DLayer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/Cropping2DLayer.java index 8e40fc652..3d6beac05 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/Cropping2DLayer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/Cropping2DLayer.java @@ -38,7 +38,7 @@ import static org.nd4j.linalg.indexing.NDArrayIndex.interval; public class Cropping2DLayer extends AbstractLayer { - private int[] cropping; //[padTop, padBottom, padLeft, padRight] + private final int[] cropping; //[padTop, padBottom, padLeft, padRight] public Cropping2DLayer(NeuralNetConfiguration conf, DataType dataType) { super(conf, dataType); @@ -66,7 +66,7 @@ public class Cropping2DLayer extends AbstractLayer((Gradient) new DefaultGradient(), 
epsNext); + return new Pair<>(new DefaultGradient(), epsNext); } diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/Cropping3DLayer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/Cropping3DLayer.java index ea2c5a20a..4dc09217a 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/Cropping3DLayer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/Cropping3DLayer.java @@ -37,7 +37,7 @@ import static org.nd4j.linalg.indexing.NDArrayIndex.interval; public class Cropping3DLayer extends AbstractLayer { - private int[] cropping; //[cropLeftD, cropRightD, cropLeftH, cropRightH, cropLeftW, cropRightW] + private final int[] cropping; //[cropLeftD, cropRightD, cropLeftH, cropRightH, cropLeftW, cropRightW] public Cropping3DLayer(NeuralNetConfiguration conf, DataType dataType) { super(conf, dataType); @@ -65,7 +65,7 @@ public class Cropping3DLayer extends AbstractLayer((Gradient) new DefaultGradient(), epsNext); + return new Pair<>(new DefaultGradient(), epsNext); } diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/ZeroPadding1DLayer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/ZeroPadding1DLayer.java index 6c293c6ab..386c312e6 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/ZeroPadding1DLayer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/ZeroPadding1DLayer.java @@ -36,7 +36,7 @@ import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; public class ZeroPadding1DLayer extends AbstractLayer { - private int[] padding; // [padLeft, padRight] + private final int[] padding; // [padLeft, padRight] public ZeroPadding1DLayer(NeuralNetConfiguration conf, DataType dataType) { super(conf, dataType); @@ -66,7 +66,7 @@ public class ZeroPadding1DLayer extends 
AbstractLayer((Gradient) new DefaultGradient(), workspaceMgr.leverageTo(ArrayType.ACTIVATION_GRAD, epsNext)); + return new Pair<>(new DefaultGradient(), workspaceMgr.leverageTo(ArrayType.ACTIVATION_GRAD, epsNext)); } diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/ZeroPadding3DLayer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/ZeroPadding3DLayer.java index e39d6886b..bffd04288 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/ZeroPadding3DLayer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/ZeroPadding3DLayer.java @@ -36,7 +36,7 @@ import org.nd4j.common.primitives.Pair; public class ZeroPadding3DLayer extends AbstractLayer { - private int[] padding; // [padLeft1, padRight1, padLeft2, padRight2, padLeft3, padRight3] + private final int[] padding; // [padLeft1, padRight1, padLeft2, padRight2, padLeft3, padRight3] public ZeroPadding3DLayer(NeuralNetConfiguration conf, DataType dataType) { super(conf, dataType); @@ -69,7 +69,7 @@ public class ZeroPadding3DLayer extends AbstractLayer((Gradient) new DefaultGradient(), epsNext); + return new Pair<>(new DefaultGradient(), epsNext); } diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/ZeroPaddingLayer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/ZeroPaddingLayer.java index d467474e3..c46167bee 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/ZeroPaddingLayer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/ZeroPaddingLayer.java @@ -80,7 +80,7 @@ public class ZeroPaddingLayer extends AbstractLayer((Gradient) new DefaultGradient(), epsNext); + return new Pair<>(new DefaultGradient(), epsNext); } diff --git 
a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/feedforward/autoencoder/recursive/Tree.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/feedforward/autoencoder/recursive/Tree.java index 48c84c2ea..ef0accc18 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/feedforward/autoencoder/recursive/Tree.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/feedforward/autoencoder/recursive/Tree.java @@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import java.io.Serializable; import java.util.ArrayList; import java.util.List; +import java.util.Objects; public class Tree implements Serializable { @@ -446,23 +447,23 @@ public class Tree implements Serializable { return false; if (goldLabel != tree.goldLabel) return false; - if (headWord != null ? !headWord.equals(tree.headWord) : tree.headWord != null) + if (!Objects.equals(headWord, tree.headWord)) return false; - if (label != null ? !label.equals(tree.label) : tree.label != null) + if (!Objects.equals(label, tree.label)) return false; - if (parse != null ? !parse.equals(tree.parse) : tree.parse != null) + if (!Objects.equals(parse, tree.parse)) return false; - if (prediction != null ? !prediction.equals(tree.prediction) : tree.prediction != null) + if (!Objects.equals(prediction, tree.prediction)) return false; - if (tags != null ? !tags.equals(tree.tags) : tree.tags != null) + if (!Objects.equals(tags, tree.tags)) return false; - if (tokens != null ? !tokens.equals(tree.tokens) : tree.tokens != null) + if (!Objects.equals(tokens, tree.tokens)) return false; - if (type != null ? !type.equals(tree.type) : tree.type != null) + if (!Objects.equals(type, tree.type)) return false; - if (value != null ? !value.equals(tree.value) : tree.value != null) + if (!Objects.equals(value, tree.value)) return false; - return !(vector != null ? 
!vector.equals(tree.vector) : tree.vector != null); + return !(!Objects.equals(vector, tree.vector)); } diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/feedforward/embedding/EmbeddingSequenceLayer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/feedforward/embedding/EmbeddingSequenceLayer.java index 51a988f13..762407264 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/feedforward/embedding/EmbeddingSequenceLayer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/feedforward/embedding/EmbeddingSequenceLayer.java @@ -155,7 +155,7 @@ public class EmbeddingSequenceLayer extends BaseLayer gradientViews; diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/LSTMHelpers.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/LSTMHelpers.java index a99244c13..3ad4f8b0a 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/LSTMHelpers.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/LSTMHelpers.java @@ -178,7 +178,7 @@ public class LSTMHelpers { //initialize prevOutputActivations to zeroes if (prevOutputActivations == null) { - prevOutputActivations = Nd4j.zeros(input.dataType(), new long[] {miniBatchSize, hiddenLayerSize}); + prevOutputActivations = Nd4j.zeros(input.dataType(), miniBatchSize, hiddenLayerSize); } if (helper != null && (layer.helperCountFail == 0 || !isHelperAllowFallback)) { diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/MaskZeroLayer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/MaskZeroLayer.java index 5241d9b41..c591cd18d 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/MaskZeroLayer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/MaskZeroLayer.java @@ 
-37,7 +37,7 @@ import static org.deeplearning4j.nn.conf.RNNFormat.NWC; public class MaskZeroLayer extends BaseWrapperLayer { private static final long serialVersionUID = -7369482676002469854L; - private double maskingValue; + private final double maskingValue; public MaskZeroLayer(@NonNull Layer underlying, double maskingValue){ super(underlying); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/SimpleRnn.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/SimpleRnn.java index 2655739c9..0176ce720 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/SimpleRnn.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/SimpleRnn.java @@ -267,7 +267,7 @@ public class SimpleRnn extends BaseRecurrentLayer 0 || prevStepOut != null) { if(hasLayerNorm()){ - INDArray currRecPreNorm = forBackprop ? recPreNorm.get(all(), all(), point(i)) : workspaceMgr.createUninitialized(ArrayType.FF_WORKING_MEM, currOut.dataType(), currOut.shape(), 'f');; + INDArray currRecPreNorm = forBackprop ? 
recPreNorm.get(all(), all(), point(i)) : workspaceMgr.createUninitialized(ArrayType.FF_WORKING_MEM, currOut.dataType(), currOut.shape(), 'f'); Nd4j.gemm(prevStepOut, rw, currRecPreNorm, false, false, 1.0, 0.0); INDArray recNorm = workspaceMgr.createUninitialized(ArrayType.FF_WORKING_MEM, currOut.dataType(), currOut.shape(), 'f'); Nd4j.getExecutioner().exec(new LayerNorm(currRecPreNorm, gr, recNorm, true, 1)); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/TimeDistributedLayer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/TimeDistributedLayer.java index 9f9d6cb43..9a97f6a4a 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/TimeDistributedLayer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/TimeDistributedLayer.java @@ -34,7 +34,7 @@ import org.nd4j.common.util.ArrayUtil; public class TimeDistributedLayer extends BaseWrapperLayer { - private RNNFormat rnnDataFormat; + private final RNNFormat rnnDataFormat; public TimeDistributedLayer(Layer underlying, RNNFormat rnnDataFormat) { super(underlying); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/util/MaskLayer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/util/MaskLayer.java index f5d1b24cf..984fe67ee 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/util/MaskLayer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/util/MaskLayer.java @@ -35,7 +35,7 @@ import org.deeplearning4j.nn.workspace.ArrayType; import java.util.Arrays; public class MaskLayer extends AbstractLayer { - private Gradient emptyGradient = new DefaultGradient(); + private final Gradient emptyGradient = new DefaultGradient(); public MaskLayer(NeuralNetConfiguration conf, DataType dataType) { super(conf, dataType); diff --git 
a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/variational/VariationalAutoencoder.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/variational/VariationalAutoencoder.java index 5936168be..75df1dfad 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/variational/VariationalAutoencoder.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/variational/VariationalAutoencoder.java @@ -796,7 +796,7 @@ public class VariationalAutoencoder implements Layer { @Override public void setListeners(TrainingListener... listeners) { - setListeners(Arrays.asList(listeners)); + setListeners(Arrays.asList(listeners)); } @Override @@ -828,8 +828,7 @@ public class VariationalAutoencoder implements Layer { return; } - for (TrainingListener listener : listeners) - trainingListeners.add(listener); + Collections.addAll(trainingListeners, listeners); } @Override diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/multilayer/MultiLayerNetwork.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/multilayer/MultiLayerNetwork.java index 4b4a97c2d..f590a1caa 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/multilayer/MultiLayerNetwork.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/multilayer/MultiLayerNetwork.java @@ -99,8 +99,6 @@ import org.nd4j.common.util.OneTimeLogger; import java.io.*; import java.util.*; -; - @Slf4j public class MultiLayerNetwork implements Serializable, Classifier, Layer, NeuralNetwork { @@ -1997,7 +1995,7 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, Neura for (Map.Entry entry : currPair.getFirst().gradientForVariable().entrySet()) { String origName = entry.getKey(); - multiGradientKey = String.valueOf(i) + "_" + origName; + multiGradientKey = i + "_" + origName; gradientList.addLast(new Triple<>(multiGradientKey, entry.getValue(), 
currPair.getFirst().flatteningOrderForVariable(origName))); } @@ -2109,7 +2107,7 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, Neura rnnClearPreviousState(); for (int i = 0; i < nSubsets; i++) { - long startTimeIdx = i * fwdLen; + long startTimeIdx = (long) i * fwdLen; long endTimeIdx = startTimeIdx + fwdLen; if (endTimeIdx > timeSeriesLength) endTimeIdx = timeSeriesLength; diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/params/DepthwiseConvolutionParamInitializer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/params/DepthwiseConvolutionParamInitializer.java index af5ce819d..b9f682818 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/params/DepthwiseConvolutionParamInitializer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/params/DepthwiseConvolutionParamInitializer.java @@ -91,7 +91,7 @@ public class DepthwiseConvolutionParamInitializer implements ParamInitializer { @Override public List weightKeys(Layer layer) { - return Arrays.asList(WEIGHT_KEY); + return Collections.singletonList(WEIGHT_KEY); } @Override diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/params/PReLUParamInitializer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/params/PReLUParamInitializer.java index 05e723eed..d0a93e368 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/params/PReLUParamInitializer.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/params/PReLUParamInitializer.java @@ -39,8 +39,8 @@ import java.util.Map; public class PReLUParamInitializer implements ParamInitializer { public final static String WEIGHT_KEY = "W"; - private long[] weightShape; - private long[] sharedAxes; + private final long[] weightShape; + private final long[] sharedAxes; public PReLUParamInitializer(long[] shape, long[] sharedAxes) { this.weightShape = shape; diff --git 
a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/transferlearning/TransferLearning.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/transferlearning/TransferLearning.java index ec64afe2c..52ae7c891 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/transferlearning/TransferLearning.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/transferlearning/TransferLearning.java @@ -51,28 +51,28 @@ import java.util.*; public class TransferLearning { public static class Builder { - private MultiLayerConfiguration origConf; - private MultiLayerNetwork origModel; + private final MultiLayerConfiguration origConf; + private final MultiLayerNetwork origModel; private MultiLayerNetwork editedModel; private FineTuneConfiguration finetuneConfiguration; private int frozenTill = -1; private int popN = 0; private boolean prepDone = false; - private Set editedLayers = new HashSet<>(); - private Map> editedLayersMap = + private final Set editedLayers = new HashSet<>(); + private final Map> editedLayersMap = new HashMap<>(); - private Map> nInEditedMap = new HashMap<>(); - private List editedParams = new ArrayList<>(); - private List editedConfs = new ArrayList<>(); - private List appendParams = new ArrayList<>(); //these could be new arrays, and views from origParams - private List appendConfs = new ArrayList<>(); + private final Map> nInEditedMap = new HashMap<>(); + private final List editedParams = new ArrayList<>(); + private final List editedConfs = new ArrayList<>(); + private final List appendParams = new ArrayList<>(); //these could be new arrays, and views from origParams + private final List appendConfs = new ArrayList<>(); private Map inputPreProcessors = new HashMap<>(); private InputType inputType; private Boolean validateOutputLayerConfig; - private DataType dataType; + private final DataType dataType; /** * Multilayer Network to tweak for transfer learning @@ -430,9 +430,7 @@ public class TransferLearning 
{ int i = 0; while (i < popN) { Integer layerNum = origModel.getnLayers() - i; - if (inputPreProcessors.containsKey(layerNum)) { - inputPreProcessors.remove(layerNum); - } + inputPreProcessors.remove(layerNum); editedConfs.remove(editedConfs.size() - 1); editedParams.remove(editedParams.size() - 1); i++; @@ -543,7 +541,7 @@ public class TransferLearning { MultiLayerConfiguration conf = new MultiLayerConfiguration.Builder().inputPreProcessors(inputPreProcessors) .setInputType(this.inputType).confs(allConfs) - .validateOutputLayerConfig(validateOutputLayerConfig == null ? true : validateOutputLayerConfig) + .validateOutputLayerConfig(validateOutputLayerConfig == null || validateOutputLayerConfig) .dataType(origConf.getDataType()) .build(); if (finetuneConfiguration != null) { @@ -554,19 +552,19 @@ public class TransferLearning { } public static class GraphBuilder { - private ComputationGraph origGraph; - private ComputationGraphConfiguration origConfig; + private final ComputationGraph origGraph; + private final ComputationGraphConfiguration origConfig; private FineTuneConfiguration fineTuneConfiguration; private ComputationGraphConfiguration.GraphBuilder editedConfigBuilder; private String[] frozenOutputAt; private boolean hasFrozen = false; - private Set editedVertices = new HashSet<>(); + private final Set editedVertices = new HashSet<>(); private WorkspaceMode workspaceMode; private Boolean validateOutputLayerConfig = null; - private Map nInFromNewConfig = new HashMap<>(); + private final Map nInFromNewConfig = new HashMap<>(); /** * Computation Graph to tweak for transfer learning @@ -960,7 +958,7 @@ public class TransferLearning { initBuilderIfReq(); ComputationGraphConfiguration newConfig = editedConfigBuilder - .validateOutputLayerConfig(validateOutputLayerConfig == null ? 
true : validateOutputLayerConfig).build(); + .validateOutputLayerConfig(validateOutputLayerConfig == null || validateOutputLayerConfig).build(); if (this.workspaceMode != null) newConfig.setTrainingWorkspaceMode(workspaceMode); ComputationGraph newGraph = new ComputationGraph(newConfig); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/updater/BaseMultiLayerUpdater.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/updater/BaseMultiLayerUpdater.java index b2d12a620..4f4d1690f 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/updater/BaseMultiLayerUpdater.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/updater/BaseMultiLayerUpdater.java @@ -462,8 +462,7 @@ public abstract class BaseMultiLayerUpdater implements Updater return false; BaseMultiLayerUpdater that = (BaseMultiLayerUpdater) o; - return updaterStateViewArray != null ? updaterStateViewArray.equals(that.updaterStateViewArray) - : that.updaterStateViewArray == null; + return Objects.equals(updaterStateViewArray, that.updaterStateViewArray); } @Override diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/updater/LayerUpdater.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/updater/LayerUpdater.java index 87c791c54..dea50edd9 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/updater/LayerUpdater.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/updater/LayerUpdater.java @@ -47,7 +47,7 @@ public class LayerUpdater extends BaseMultiLayerUpdater { @Override protected Trainable[] getOrderedLayers() { - return new Trainable[] {(Trainable)network}; + return new Trainable[] {network}; } @Override diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/weights/embeddings/WeightInitEmbedding.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/weights/embeddings/WeightInitEmbedding.java index 6e92b2187..4ca7f2635 100644 --- 
a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/weights/embeddings/WeightInitEmbedding.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/weights/embeddings/WeightInitEmbedding.java @@ -32,7 +32,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; @EqualsAndHashCode public class WeightInitEmbedding implements IWeightInit { - private EmbeddingInitializer serializableInit; + private final EmbeddingInitializer serializableInit; private EmbeddingInitializer nonSerializableInit; public WeightInitEmbedding(@NonNull EmbeddingInitializer embeddingInitializer){ diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/workspace/LayerWorkspaceMgr.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/workspace/LayerWorkspaceMgr.java index bf8126ed5..a7b972a2f 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/workspace/LayerWorkspaceMgr.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/workspace/LayerWorkspaceMgr.java @@ -35,12 +35,12 @@ import java.util.*; public class LayerWorkspaceMgr extends BaseWorkspaceMgr { public static String CUDNN_WORKSPACE_KEY = "CUDNN_WORKSPACE"; - private static LayerWorkspaceMgr NO_WS_IMMUTABLE; + private static final LayerWorkspaceMgr NO_WS_IMMUTABLE; static{ Set all = new HashSet<>(); Collections.addAll(all, ArrayType.values()); NO_WS_IMMUTABLE = new LayerWorkspaceMgr( - all, Collections.emptyMap(), Collections.emptyMap()); + all, Collections.emptyMap(), Collections.emptyMap()); } protected Set noLeverageOverride; @@ -136,7 +136,7 @@ public class LayerWorkspaceMgr extends BaseWorkspaceMgr { public static class Builder { - private LayerWorkspaceMgr mgr; + private final LayerWorkspaceMgr mgr; public Builder(){ mgr = new LayerWorkspaceMgr(); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/Solver.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/Solver.java index 9f1a64de9..4cb638c5d 100644 
--- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/Solver.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/Solver.java @@ -91,7 +91,7 @@ public class Solver { public static class Builder { private NeuralNetConfiguration conf; private Model model; - private List listeners = new ArrayList<>(); + private final List listeners = new ArrayList<>(); public Builder configure(NeuralNetConfiguration conf) { this.conf = conf; diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/CheckpointListener.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/CheckpointListener.java index 5871b99a0..4ebf2e050 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/CheckpointListener.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/CheckpointListener.java @@ -39,26 +39,27 @@ import java.util.concurrent.TimeUnit; @Slf4j public class CheckpointListener extends BaseTrainingListener implements Serializable { - private enum KeepMode {ALL, LAST, LAST_AND_EVERY}; + private enum KeepMode {ALL, LAST, LAST_AND_EVERY} + private static final String[] MODEL_TYPES = new String[]{"MultiLayerNetwork", "ComputationGraph", "Model"}; - private File rootDir; - private KeepMode keepMode; - private int keepLast; - private int keepEvery; - private boolean logSaving; - private boolean deleteExisting; + private final File rootDir; + private final KeepMode keepMode; + private final int keepLast; + private final int keepEvery; + private final boolean logSaving; + private final boolean deleteExisting; - private Integer saveEveryNEpochs; - private Integer saveEveryNIterations; - private boolean saveEveryNIterSinceLast; - private Long saveEveryAmount; - private TimeUnit saveEveryUnit; + private final Integer saveEveryNEpochs; + private final Integer saveEveryNIterations; + private final boolean saveEveryNIterSinceLast; + private final Long 
saveEveryAmount; + private final TimeUnit saveEveryUnit; private Long saveEveryMs; - private boolean saveEverySinceLast; + private final boolean saveEverySinceLast; private int lastCheckpointNum = -1; - private File checkpointRecordFile; + private final File checkpointRecordFile; private Checkpoint lastCheckpoint; private long startTime = -1; @@ -151,7 +152,6 @@ public class CheckpointListener extends BaseTrainingListener implements Serializ long lastSaveTime = (lastCheckpoint != null ? lastCheckpoint.getTimestamp() : startTime); if((time - lastSaveTime) >= saveEveryMs){ saveCheckpoint(model); - return; } } else { //Save periodically, regardless of when last model was saved @@ -159,7 +159,6 @@ public class CheckpointListener extends BaseTrainingListener implements Serializ if((time - lastSave) > saveEveryMs){ saveCheckpoint(model); lastSaveEveryMsNoSinceLast = time; - return; } } } @@ -197,7 +196,6 @@ public class CheckpointListener extends BaseTrainingListener implements Serializ //Finally: determine if we should delete some old models... 
if(keepMode == null || keepMode == KeepMode.ALL){ - return; } else if(keepMode == KeepMode.LAST){ List checkpoints = availableCheckpoints(); Iterator iter = checkpoints.iterator(); @@ -490,7 +488,7 @@ public class CheckpointListener extends BaseTrainingListener implements Serializ public static class Builder { - private File rootDir; + private final File rootDir; private KeepMode keepMode; private int keepLast; private int keepEvery; diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/CollectScoresIterationListener.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/CollectScoresIterationListener.java index 3112e559c..51f798e26 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/CollectScoresIterationListener.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/CollectScoresIterationListener.java @@ -27,13 +27,14 @@ import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.List; public class CollectScoresIterationListener extends BaseTrainingListener { - private int frequency; + private final int frequency; private int iterationCount = 0; //private List> scoreVsIter = new ArrayList<>(); @@ -42,8 +43,8 @@ public class CollectScoresIterationListener extends BaseTrainingListener { private int position = 0; private int bucketNumber = 1; - private List indexes; - private List scores; + private final List indexes; + private final List scores; public ScoreStat() { indexes = new ArrayList<>(1); @@ -170,7 +171,7 @@ public class CollectScoresIterationListener extends BaseTrainingListener { sb.append("\n").append(indexes[i]).append(delimiter).append(scores[i]); } } - outputStream.write(sb.toString().getBytes("UTF-8")); + 
outputStream.write(sb.toString().getBytes(StandardCharsets.UTF_8)); } /** diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/PerformanceListener.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/PerformanceListener.java index 23d0e81fe..68402f40e 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/PerformanceListener.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/PerformanceListener.java @@ -50,7 +50,7 @@ public class PerformanceListener extends BaseTrainingListener implements Seriali private transient ThreadLocal> lastGcMs = new ThreadLocal<>(); private transient List gcBeans = null; - private boolean reportScore; + private final boolean reportScore; private boolean reportGC; private boolean reportSample = true; private boolean reportBatch = true; diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/TimeIterationListener.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/TimeIterationListener.java index 0dc166a3f..cc48c216b 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/TimeIterationListener.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/TimeIterationListener.java @@ -32,9 +32,9 @@ import java.util.concurrent.atomic.AtomicLong; @Slf4j public class TimeIterationListener extends BaseTrainingListener implements Serializable { - private long start; - private int iterationCount; - private AtomicLong iterationCounter = new AtomicLong(0); + private final long start; + private final int iterationCount; + private final AtomicLong iterationCounter = new AtomicLong(0); /** * Constructor @@ -52,7 +52,7 @@ public class TimeIterationListener extends BaseTrainingListener implements Seria long remaining = (iterationCount - currentIteration) * elapsed / currentIteration; long minutes = remaining 
/ (1000 * 60); Date date = new Date(start + elapsed + remaining); - log.info("Remaining time : " + minutes + "mn - End expected : " + date.toString()); + log.info("Remaining time : " + minutes + "mn - End expected : " + date); } } diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/solvers/BackTrackLineSearch.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/solvers/BackTrackLineSearch.java index 39c9f8da2..18e64c081 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/solvers/BackTrackLineSearch.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/solvers/BackTrackLineSearch.java @@ -44,9 +44,9 @@ import static org.nd4j.linalg.ops.transforms.Transforms.abs; public class BackTrackLineSearch implements LineOptimizer { private static final Logger log = LoggerFactory.getLogger(BackTrackLineSearch.class); - private Model layer; - private StepFunction stepFunction; - private ConvexOptimizer optimizer; + private final Model layer; + private final StepFunction stepFunction; + private final ConvexOptimizer optimizer; private int maxIterations; double stepMax = 100; private boolean minObjectiveFunction = true; diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/solvers/LBFGS.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/solvers/LBFGS.java index 320b4293a..5760ee337 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/solvers/LBFGS.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/solvers/LBFGS.java @@ -39,7 +39,7 @@ import java.util.LinkedList; */ public class LBFGS extends BaseOptimizer { private static final long serialVersionUID = 9148732140255034888L; - private int m = 4; + private final int m = 4; public LBFGS(NeuralNetConfiguration conf, StepFunction stepFunction, Collection trainingListeners, Model model) { diff --git 
a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/Convolution1DUtils.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/Convolution1DUtils.java index 41a73e577..32c40bdfc 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/Convolution1DUtils.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/Convolution1DUtils.java @@ -141,7 +141,7 @@ public class Convolution1DUtils { if (convolutionMode == ConvolutionMode.Same || convolutionMode == ConvolutionMode.Causal) { return (int) Math.ceil(inH / ((double) strides)); } - return (inH - eKernel + 2 * padding) / strides + 1; + return (inH - eKernel + 2L * padding) / strides + 1; } /** @@ -204,25 +204,24 @@ public class Convolution1DUtils { int truncated = (int) d; int sameSize = (int) Math.ceil(inH / ((double) strides)); - StringBuilder sb = new StringBuilder(); - sb.append("Invalid input data or configuration: Combination of kernel size, " + + String sb = "Invalid input data or configuration: Combination of kernel size, " + "stride and padding are not " + - "valid for given input height, using ConvolutionMode.Strict\n") - .append("ConvolutionMode.Strict requires: output height = (input height - kernelSize + " + - "2*padding)/stride + 1 to be an integer. Got: (") - .append(inH).append(" - ").append(eKernel).append(" + 2*").append(padding).append(")/") - .append(strides).append(" + 1 = ") - .append(str).append("\n").append("See \"Constraints on strides\" at http://cs231n.github." 
+ - "io/convolutional-networks/ and ConvolutionType enumeration Javadoc.\n") - .append("To truncate/crop the input, such that output height = floor(") - .append(str).append(") = ") - .append(truncated).append(", use ConvolutionType.Truncate.\n") - .append("Alternatively use ConvolutionType.Same, which will use padding to give an " + - "output height of ceil(") - .append(inH).append("/").append(strides).append(")=").append(sameSize) - .append(getCommonErrorMsg(inputData, eKernel, strides, padding, dilation)); + "valid for given input height, using ConvolutionMode.Strict\n" + + "ConvolutionMode.Strict requires: output height = (input height - kernelSize + " + + "2*padding)/stride + 1 to be an integer. Got: (" + + inH + " - " + eKernel + " + 2*" + padding + ")/" + + strides + " + 1 = " + + str + "\n" + "See \"Constraints on strides\" at http://cs231n.github." + + "io/convolutional-networks/ and ConvolutionType enumeration Javadoc.\n" + + "To truncate/crop the input, such that output height = floor(" + + str + ") = " + + truncated + ", use ConvolutionType.Truncate.\n" + + "Alternatively use ConvolutionType.Same, which will use padding to give an " + + "output height of ceil(" + + inH + "/" + strides + ")=" + sameSize + + getCommonErrorMsg(inputData, eKernel, strides, padding, dilation); - throw new DL4JInvalidConfigException(sb.toString()); + throw new DL4JInvalidConfigException(sb); } } @@ -254,8 +253,7 @@ public class Convolution1DUtils { */ public static void validateConvolutionModePadding(ConvolutionMode mode, int padding) { if (mode == ConvolutionMode.Same) { - boolean nullPadding = true; - if (padding != 0) nullPadding = false; + boolean nullPadding = padding == 0; if (!nullPadding) throw new IllegalArgumentException("Padding cannot be used when using the `same' convolution mode"); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/Convolution3DUtils.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/Convolution3DUtils.java 
index e7101ad75..28cafe388 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/Convolution3DUtils.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/Convolution3DUtils.java @@ -89,7 +89,7 @@ public class Convolution3DUtils { if (convolutionMode != ConvolutionMode.Same) { for (int i = 0; i < 3; i++) { - if ((eKernel[i] <= 0 || eKernel[i] > inShape[i] + 2 * padding[i])) { + if ((eKernel[i] <= 0 || eKernel[i] > inShape[i] + 2L * padding[i])) { StringBuilder sb = new StringBuilder(); sb.append("Invalid input data or configuration: "); if (atrous) sb.append("effective "); @@ -102,7 +102,7 @@ public class Convolution3DUtils { sb.append("kernel = ").append(eKernel[i]).append(", input ").append(dims[i]).append(" = ") .append(inShape[i]).append(" and padding ").append(dims[i]).append(" = ") .append(padding[i]).append(" which do not satisfy 0 < ") - .append(eKernel[i]).append(" <= ").append(inShape[i] + 2 * padding[i]) + .append(eKernel[i]).append(" <= ").append(inShape[i] + 2L * padding[i]) .append(getCommonErrorMsg(inputDataShape, eKernel, strides, padding, dilation)); throw new DL4JInvalidInputException(sb.toString()); @@ -111,30 +111,29 @@ public class Convolution3DUtils { } if (convolutionMode == ConvolutionMode.Strict) { for (int j = 0; j < 3; j++) { - if ((inShape[j] - eKernel[0] + 2 * padding[0]) % strides[0] != 0) { + if ((inShape[j] - eKernel[0] + 2L * padding[0]) % strides[0] != 0) { double d = (inShape[j] - eKernel[0] + 2 * padding[0]) / ((double) strides[0]) + 1.0; String str = String.format("%.2f", d); int truncated = (int) d; int sameSize = (int) Math.ceil(inShape[j] / ((double) strides[0])); - StringBuilder sb = new StringBuilder(); - sb.append("Invalid input data or configuration: Combination of kernel size, stride and padding ") - .append("are not valid for given input height, using ConvolutionMode.Strict\n") - .append("ConvolutionMode.Strict requires: output height = (input height - kernelSize + ") - .append( 
"2*padding)/stride + 1 to be an integer. Got: (") - .append(inShape[j]).append(" - ").append(eKernel[0]).append(" + 2*") - .append(padding[0]).append(")/").append(strides[0]).append(" + 1 = ") - .append(str).append("\n") - .append("See \"Constraints on strides\" at http://cs231n.github.io/convolutional-networks/ ") - .append("and ConvolutionType enumeration Javadoc.\n") - .append("To truncate/crop the input, such that output height = floor(").append(str) - .append(") = ").append(truncated).append(", use ConvolutionType.Truncate.\n") - .append("Alternatively use ConvolutionType.Same, which will use padding to give ") - .append("an output height of ceil(") - .append(inShape[j]).append("/").append(strides[0]).append(")=").append(sameSize) - .append(getCommonErrorMsg(inputDataShape, eKernel, strides, padding, dilation)); + String sb = "Invalid input data or configuration: Combination of kernel size, stride and padding " + + "are not valid for given input height, using ConvolutionMode.Strict\n" + + "ConvolutionMode.Strict requires: output height = (input height - kernelSize + " + + "2*padding)/stride + 1 to be an integer. 
Got: (" + + inShape[j] + " - " + eKernel[0] + " + 2*" + + padding[0] + ")/" + strides[0] + " + 1 = " + + str + "\n" + + "See \"Constraints on strides\" at http://cs231n.github.io/convolutional-networks/ " + + "and ConvolutionType enumeration Javadoc.\n" + + "To truncate/crop the input, such that output height = floor(" + str + + ") = " + truncated + ", use ConvolutionType.Truncate.\n" + + "Alternatively use ConvolutionType.Same, which will use padding to give " + + "an output height of ceil(" + + inShape[j] + "/" + strides[0] + ")=" + sameSize + + getCommonErrorMsg(inputDataShape, eKernel, strides, padding, dilation); - throw new DL4JInvalidConfigException(sb.toString()); + throw new DL4JInvalidConfigException(sb); } } } diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/ConvolutionUtils.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/ConvolutionUtils.java index 8737c974e..616f1c620 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/ConvolutionUtils.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/ConvolutionUtils.java @@ -143,9 +143,9 @@ public class ConvolutionUtils { return new long[]{hOut, wOut, dOut}; } - long hOut = strides[0] * (hIn - 1) + eKernel[0] - 2 * padding[0]; - long wOut = strides[1] * (wIn - 1) + eKernel[1] - 2 * padding[1]; - long dOut = strides[2] * (dIn - 1) + eKernel[2] - 2 * padding[2]; + long hOut = strides[0] * (hIn - 1) + eKernel[0] - 2L * padding[0]; + long wOut = strides[1] * (wIn - 1) + eKernel[1] - 2L * padding[1]; + long dOut = strides[2] * (dIn - 1) + eKernel[2] - 2L * padding[2]; return new long[]{hOut, wOut, dOut}; } @@ -376,17 +376,16 @@ public class ConvolutionUtils { int truncated = (int) d; int sameSize = (int) Math.ceil(inH / ((double) strides[0])); - StringBuilder sb = new StringBuilder(); - sb.append("Invalid input data or configuration: Combination of kernel size, stride and padding are not valid for given input height, using 
ConvolutionMode.Strict\n") - .append("ConvolutionMode.Strict requires: output height = (input height - kernelSize + 2*padding)/stride + 1 to be an integer. Got: (") - .append(inH).append(" - ").append(eKernel[0]).append(" + 2*").append(padding[0]).append(")/").append(strides[0]).append(" + 1 = ") - .append(str).append("\n").append("See \"Constraints on strides\" at http://cs231n.github.io/convolutional-networks/ and ConvolutionType enumeration Javadoc.\n") - .append("To truncate/crop the input, such that output height = floor(").append(str).append(") = ") - .append(truncated).append(", use ConvolutionType.Truncate.\n") - .append("Alternatively use ConvolutionType.Same, which will use padding to give an output height of ceil(") - .append(inH).append("/").append(strides[0]).append(")=").append(sameSize).append(getCommonErrorMsg(inputData, eKernel, strides, padding, dilation)); + String sb = "Invalid input data or configuration: Combination of kernel size, stride and padding are not valid for given input height, using ConvolutionMode.Strict\n" + + "ConvolutionMode.Strict requires: output height = (input height - kernelSize + 2*padding)/stride + 1 to be an integer. 
Got: (" + + inH + " - " + eKernel[0] + " + 2*" + padding[0] + ")/" + strides[0] + " + 1 = " + + str + "\n" + "See \"Constraints on strides\" at http://cs231n.github.io/convolutional-networks/ and ConvolutionType enumeration Javadoc.\n" + + "To truncate/crop the input, such that output height = floor(" + str + ") = " + + truncated + ", use ConvolutionType.Truncate.\n" + + "Alternatively use ConvolutionType.Same, which will use padding to give an output height of ceil(" + + inH + "/" + strides[0] + ")=" + sameSize + getCommonErrorMsg(inputData, eKernel, strides, padding, dilation); - throw new DL4JInvalidConfigException(sb.toString()); + throw new DL4JInvalidConfigException(sb); } if ((inW - eKernel[1] + 2 * padding[1]) % strides[1] != 0) { @@ -394,19 +393,18 @@ public class ConvolutionUtils { String str = String.format("%.2f", d); int truncated = (int) d; int sameSize = (int) Math.ceil(inW / ((double) strides[1])); - StringBuilder sb = new StringBuilder(); - sb.append("Invalid input data or configuration: Combination of kernel size, stride and padding are not valid for given input width, using ConvolutionMode.Strict\n") - .append("ConvolutionMode.Strict requires: output width = (input - kernelSize + 2*padding)/stride + 1 to be an integer. 
Got: (") - .append(inW).append(" - ").append(eKernel[1]).append(" + 2*").append(padding[1]) - .append(")/").append(strides[1]).append(" + 1 = ").append(str).append("\n") - .append("See \"Constraints on strides\" at http://cs231n.github.io/convolutional-networks/ and ConvolutionType enumeration Javadoc.\n") - .append("To truncate/crop the input, such that output width = floor(").append(str).append(") = ") - .append(truncated).append(", use ConvolutionType.Truncate.\n") - .append("Alternatively use ConvolutionType.Same, which will use padding to give an output width of ceil(") - .append(inW).append("/").append(strides[1]).append(")=").append(sameSize) - .append(getCommonErrorMsg(inputData, eKernel, strides, padding, dilation)); + String sb = "Invalid input data or configuration: Combination of kernel size, stride and padding are not valid for given input width, using ConvolutionMode.Strict\n" + + "ConvolutionMode.Strict requires: output width = (input - kernelSize + 2*padding)/stride + 1 to be an integer. 
Got: (" + + inW + " - " + eKernel[1] + " + 2*" + padding[1] + + ")/" + strides[1] + " + 1 = " + str + "\n" + + "See \"Constraints on strides\" at http://cs231n.github.io/convolutional-networks/ and ConvolutionType enumeration Javadoc.\n" + + "To truncate/crop the input, such that output width = floor(" + str + ") = " + + truncated + ", use ConvolutionType.Truncate.\n" + + "Alternatively use ConvolutionType.Same, which will use padding to give an output width of ceil(" + + inW + "/" + strides[1] + ")=" + sameSize + + getCommonErrorMsg(inputData, eKernel, strides, padding, dilation); throw new DL4JInvalidConfigException( - sb.toString()); + sb); } if (eKernel.length == 3 && (inShape[2] - eKernel[2] + 2 * padding[2]) % strides[2] != 0) { @@ -415,19 +413,18 @@ public class ConvolutionUtils { String str = String.format("%.2f", d); int truncated = (int) d; int sameSize = (int) Math.ceil(inD / ((double) strides[2])); - StringBuilder sb = new StringBuilder(); - sb.append("Invalid input data or configuration: Combination of kernel size, stride and padding are not valid for given input width, using ConvolutionMode.Strict\n") - .append("ConvolutionMode.Strict requires: output channels = (input - kernelSize + 2*padding)/stride + 1 to be an integer. 
Got: (") - .append(inD).append(" - ").append(eKernel[2]).append(" + 2*").append(padding[2]) - .append(")/").append(strides[1]).append(" + 1 = ").append(str).append("\n") - .append("See \"Constraints on strides\" at http://cs231n.github.io/convolutional-networks/ and ConvolutionType enumeration Javadoc.\n") - .append("To truncate/crop the input, such that output width = floor(").append(str).append(") = ") - .append(truncated).append(", use ConvolutionType.Truncate.\n") - .append("Alternatively use ConvolutionType.Same, which will use padding to give an output width of ceil(") - .append(inW).append("/").append(strides[2]).append(")=").append(sameSize) - .append(getCommonErrorMsg(inputData, eKernel, strides, padding, dilation)); + String sb = "Invalid input data or configuration: Combination of kernel size, stride and padding are not valid for given input width, using ConvolutionMode.Strict\n" + + "ConvolutionMode.Strict requires: output channels = (input - kernelSize + 2*padding)/stride + 1 to be an integer. 
Got: (" + + inD + " - " + eKernel[2] + " + 2*" + padding[2] + + ")/" + strides[1] + " + 1 = " + str + "\n" + + "See \"Constraints on strides\" at http://cs231n.github.io/convolutional-networks/ and ConvolutionType enumeration Javadoc.\n" + + "To truncate/crop the input, such that output width = floor(" + str + ") = " + + truncated + ", use ConvolutionType.Truncate.\n" + + "Alternatively use ConvolutionType.Same, which will use padding to give an output width of ceil(" + + inW + "/" + strides[2] + ")=" + sameSize + + getCommonErrorMsg(inputData, eKernel, strides, padding, dilation); throw new DL4JInvalidConfigException( - sb.toString()); + sb); } } @@ -574,7 +571,10 @@ public class ConvolutionUtils { if (mode == ConvolutionMode.Same) { boolean nullPadding = true; for (int i : padding) { - if (i != 0) nullPadding = false; + if (i != 0) { + nullPadding = false; + break; + } } if (!nullPadding) throw new IllegalArgumentException("Padding cannot be used when using the `same' convolution mode"); diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/CrashReportingUtil.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/CrashReportingUtil.java index 9fd95b22d..ac28ced80 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/CrashReportingUtil.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/CrashReportingUtil.java @@ -234,7 +234,7 @@ public class CrashReportingUtil { sb.append(String.format(wsFormat, ws.getId(), (ws.isScopeActive() ? 
"OPEN" : "CLOSED"), fBytes(ws.getCurrentSize()), - String.valueOf(numCycles))).append("\n"); + numCycles)).append("\n"); } } sb.append(fBytes("Workspaces total size", totalWsSize)); @@ -471,7 +471,7 @@ public class CrashReportingUtil { for(Layer layer : layers){ long numParams = layer.numParams(); sb.append(String.format(format, layer.getIndex(), layer.conf().getLayer().getLayerName(), - layer.getClass().getSimpleName(), String.valueOf(numParams), fBytes(numParams * bytesPerElement))).append("\n"); + layer.getClass().getSimpleName(), numParams, fBytes(numParams * bytesPerElement))).append("\n"); } } @@ -515,7 +515,7 @@ public class CrashReportingUtil { } sb.append(String.format(format, idx, layerName, l.getClass().getSimpleName(), h.getClass().getSimpleName(), - fBytes(layerTotal), mem.toString())).append("\n"); + fBytes(layerTotal), mem)).append("\n"); totalHelperMem += layerTotal; } @@ -567,7 +567,7 @@ public class CrashReportingUtil { bytes = 0; } totalActivationBytes += bytes; - sb.append(String.format(format, String.valueOf(i), layers[i].conf().getLayer().getLayerName(), layers[i].getClass().getSimpleName(), + sb.append(String.format(format, i, layers[i].conf().getLayer().getLayerName(), layers[i].getClass().getSimpleName(), inputTypes.get(i), Arrays.toString(shape), (numElements < 0 ? "" : String.valueOf(numElements)), fBytes(bytes))).append("\n"); last = bytes; } @@ -630,7 +630,7 @@ public class CrashReportingUtil { className = gv.getClass().getSimpleName(); } - sb.append(String.format(format, String.valueOf(i), layerName, className, it, + sb.append(String.format(format, i, layerName, className, it, Arrays.toString(shape), (numElements < 0 ? 
"" : String.valueOf(numElements)), fBytes(bytes))).append("\n"); if(!net.getConfiguration().getNetworkOutputs().contains(layerName)){ diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/OutputLayerUtil.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/OutputLayerUtil.java index 8f1ba93e4..08a3d086a 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/OutputLayerUtil.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/OutputLayerUtil.java @@ -151,12 +151,9 @@ public class OutputLayerUtil { public static boolean activationExceedsZeroOneRange(IActivation activation, boolean isLossLayer){ if(OUTSIDE_ZERO_ONE_RANGE.contains(activation.getClass())){ - if(isLossLayer && activation instanceof ActivationIdentity){ - //Note: we're intentionally excluding identity here, for situations like dense(softmax) -> loss(identity) - //However, we might miss a few invalid configs like dense(relu) -> loss(identity) - return false; - } - return true; + //Note: we're intentionally excluding identity here, for situations like dense(softmax) -> loss(identity) + //However, we might miss a few invalid configs like dense(relu) -> loss(identity) + return !isLossLayer || !(activation instanceof ActivationIdentity); } return false; } diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/ValidationUtils.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/ValidationUtils.java index f2dd9b5d2..0e9952989 100644 --- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/ValidationUtils.java +++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/util/ValidationUtils.java @@ -70,8 +70,9 @@ public class ValidationUtils { boolean nonnegative = true; for(int value : data){ - if(value < 0) { + if (value < 0) { nonnegative = false; + break; } } diff --git 
a/cavis-dnn/cavis-dnn-parallelwrapper-parameterserver/src/main/java/org/deeplearning4j/parallelism/parameterserver/ParameterServerTrainerContext.java b/cavis-dnn/cavis-dnn-parallelwrapper-parameterserver/src/main/java/org/deeplearning4j/parallelism/parameterserver/ParameterServerTrainerContext.java index 9e509888c..47d04d303 100644 --- a/cavis-dnn/cavis-dnn-parallelwrapper-parameterserver/src/main/java/org/deeplearning4j/parallelism/parameterserver/ParameterServerTrainerContext.java +++ b/cavis-dnn/cavis-dnn-parallelwrapper-parameterserver/src/main/java/org/deeplearning4j/parallelism/parameterserver/ParameterServerTrainerContext.java @@ -35,10 +35,10 @@ public class ParameterServerTrainerContext implements TrainerContext { private ParameterServerNode parameterServerNode; private MediaDriver mediaDriver; private MediaDriver.Context mediaDriverContext; - private int statusServerPort = 33000; - private int numUpdatesPerEpoch = 1; + private final int statusServerPort = 33000; + private final int numUpdatesPerEpoch = 1; private String[] parameterServerArgs; - private int numWorkers = 1; + private final int numWorkers = 1; /** * Initialize the context @@ -52,7 +52,7 @@ public class ParameterServerTrainerContext implements TrainerContext { mediaDriver = MediaDriver.launchEmbedded(mediaDriverContext); parameterServerNode = new ParameterServerNode(mediaDriver, statusServerPort, numWorkers); if (parameterServerArgs == null) - parameterServerArgs = new String[] {"-m", "true", "-s", "1," + String.valueOf(model.numParams()), "-p", + parameterServerArgs = new String[] {"-m", "true", "-s", "1," + model.numParams(), "-p", "40323", "-h", "localhost", "-id", "11", "-md", mediaDriver.aeronDirectoryName(), "-sh", "localhost", "-sp", String.valueOf(statusServerPort), "-u", String.valueOf(numUpdatesPerEpoch)}; diff --git a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/EarlyStoppingParallelTrainer.java 
b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/EarlyStoppingParallelTrainer.java index 683db198a..e1f8b9273 100644 --- a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/EarlyStoppingParallelTrainer.java +++ b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/EarlyStoppingParallelTrainer.java @@ -58,9 +58,9 @@ public class EarlyStoppingParallelTrainer implements IEarlyStop private ParallelWrapper wrapper; private double bestModelScore = Double.MAX_VALUE; private int bestModelEpoch = -1; - private AtomicDouble latestScore = new AtomicDouble(0.0); - private AtomicBoolean terminate = new AtomicBoolean(false); - private AtomicInteger iterCount = new AtomicInteger(0); + private final AtomicDouble latestScore = new AtomicDouble(0.0); + private final AtomicBoolean terminate = new AtomicBoolean(false); + private final AtomicInteger iterCount = new AtomicInteger(0); protected volatile IterationTerminationCondition terminationReason = null; public EarlyStoppingParallelTrainer(EarlyStoppingConfiguration earlyStoppingConfiguration, T model, @@ -262,7 +262,7 @@ public class EarlyStoppingParallelTrainer implements IEarlyStop } if (epochTerminate) { log.info("Hit epoch termination condition at epoch {}. 
Details: {}", epochCount, - termReason.toString()); + termReason); T bestModel; try { bestModel = esConfig.getModelSaver().getBestModel(); @@ -316,8 +316,8 @@ public class EarlyStoppingParallelTrainer implements IEarlyStop */ private class AveragingTrainingListener extends BaseTrainingListener { private final Logger log = LoggerFactory.getLogger(AveragingTrainingListener.class); - private IterationTerminationCondition terminationReason = null; - private EarlyStoppingParallelTrainer trainer; + private final IterationTerminationCondition terminationReason = null; + private final EarlyStoppingParallelTrainer trainer; /** Default constructor printing every 10 iterations */ public AveragingTrainingListener(EarlyStoppingParallelTrainer trainer) { diff --git a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/ParallelInference.java b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/ParallelInference.java index 67a693dc0..52a28606e 100644 --- a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/ParallelInference.java +++ b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/ParallelInference.java @@ -284,7 +284,7 @@ public class ParallelInference { public static class Builder { - private Model model; + private final Model model; private int workers = DEFAULT_NUM_WORKERS; private int batchLimit = DEFAULT_BATCH_LIMIT; private InferenceMode inferenceMode = DEFAULT_INFERENCE_MODE; @@ -413,16 +413,16 @@ public class ParallelInference { * */ private class InferenceWorker extends Thread implements Runnable { - private BlockingQueue inputQueue; - private AtomicBoolean shouldWork = new AtomicBoolean(true); - private AtomicBoolean isStopped = new AtomicBoolean(false); + private final BlockingQueue inputQueue; + private final AtomicBoolean shouldWork = new AtomicBoolean(true); + private final AtomicBoolean isStopped = new AtomicBoolean(false); private Model 
protoModel; private Model replicatedModel; - private AtomicLong counter = new AtomicLong(0); - private boolean rootDevice; - private int deviceId; + private final AtomicLong counter = new AtomicLong(0); + private final boolean rootDevice; + private final int deviceId; - private ReentrantReadWriteLock modelLock = new ReentrantReadWriteLock(); + private final ReentrantReadWriteLock modelLock = new ReentrantReadWriteLock(); private InferenceWorker(int id, @NonNull Model model, @NonNull BlockingQueue inputQueue, boolean rootDevice, int deviceId) { this.inputQueue = inputQueue; @@ -571,9 +571,9 @@ public class ParallelInference { protected static class ObservablesProvider { - private BlockingQueue targetQueue; - private long nanos; - private int batchLimit; + private final BlockingQueue targetQueue; + private final long nanos; + private final int batchLimit; private volatile BatchedInferenceObservable currentObservable; private final Object locker = new Object(); diff --git a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/ParallelWrapper.java b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/ParallelWrapper.java index 46390ee1d..8da3b5262 100644 --- a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/ParallelWrapper.java +++ b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/ParallelWrapper.java @@ -26,7 +26,7 @@ import org.deeplearning4j.core.storage.StatsStorageRouter; import org.deeplearning4j.core.storage.listener.RoutingIterationListener; import org.deeplearning4j.optimize.solvers.accumulation.EncodingHandler; import org.deeplearning4j.optimize.solvers.accumulation.encoding.threshold.AdaptiveThresholdAlgorithm; -import org.nd4j.linalg.dataset.AsyncDataSetIterator;; +import org.nd4j.linalg.dataset.AsyncDataSetIterator; import org.nd4j.linalg.dataset.AsyncMultiDataSetIterator; import 
org.deeplearning4j.datasets.iterator.DummyBlockDataSetIterator; import org.deeplearning4j.datasets.iterator.DummyBlockMultiDataSetIterator; @@ -932,7 +932,7 @@ public class ParallelWrapper implements AutoCloseable { // memory sie in number of bytes long memorySize = encoderMemory == null || encoderMemory < 0 - ? maxUpdate * 4 * (workers + 3) + ? (long) maxUpdate * 4 * (workers + 3) : encoderMemory; this.accumulator = new EncodedGradientsAccumulator(workers, new EncodingHandler(thresholdAlgorithm, residualPostProcessor, maxUpdate, false), memorySize, workers + 2, Integer.MAX_VALUE, false); diff --git a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/inference/observers/BasicInferenceObserver.java b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/inference/observers/BasicInferenceObserver.java index cf87082b1..559a82f88 100644 --- a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/inference/observers/BasicInferenceObserver.java +++ b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/inference/observers/BasicInferenceObserver.java @@ -29,7 +29,7 @@ import java.util.concurrent.locks.LockSupport; @Slf4j public class BasicInferenceObserver implements Observer { - private AtomicBoolean finished; + private final AtomicBoolean finished; public BasicInferenceObserver() { finished = new AtomicBoolean(false); diff --git a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/inference/observers/BatchedInferenceObservable.java b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/inference/observers/BatchedInferenceObservable.java index 1ae8995d8..5ae162931 100644 --- a/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/inference/observers/BatchedInferenceObservable.java +++ 
b/cavis-dnn/cavis-dnn-parallelwrapper/src/main/java/org/deeplearning4j/parallelism/inference/observers/BatchedInferenceObservable.java @@ -39,18 +39,18 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; @Slf4j public class BatchedInferenceObservable extends BasicInferenceObservable implements InferenceObservable { - private List inputs = new ArrayList<>(); - private List inputMasks = new ArrayList<>(); - private List outputs = new ArrayList<>(); - private AtomicInteger counter = new AtomicInteger(0); - private ThreadLocal position = new ThreadLocal<>(); - private List outputBatchInputArrays = new ArrayList<>(); + private final List inputs = new ArrayList<>(); + private final List inputMasks = new ArrayList<>(); + private final List outputs = new ArrayList<>(); + private final AtomicInteger counter = new AtomicInteger(0); + private final ThreadLocal position = new ThreadLocal<>(); + private final List outputBatchInputArrays = new ArrayList<>(); private final Object locker = new Object(); - private ReentrantReadWriteLock realLocker = new ReentrantReadWriteLock(); - private AtomicBoolean isLocked = new AtomicBoolean(false); - private AtomicBoolean isReadLocked = new AtomicBoolean(false); + private final ReentrantReadWriteLock realLocker = new ReentrantReadWriteLock(); + private final AtomicBoolean isLocked = new AtomicBoolean(false); + private final AtomicBoolean isReadLocked = new AtomicBoolean(false); public BatchedInferenceObservable() { diff --git a/cavis-dnn/cavis-dnn-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/InplaceParallelInferenceTest.java b/cavis-dnn/cavis-dnn-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/InplaceParallelInferenceTest.java index a8db019b4..ecb28ef9b 100644 --- a/cavis-dnn/cavis-dnn-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/InplaceParallelInferenceTest.java +++ b/cavis-dnn/cavis-dnn-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/InplaceParallelInferenceTest.java @@ 
-120,8 +120,8 @@ public class InplaceParallelInferenceTest extends BaseDL4JTest { try { - val result0 = pi.output(new INDArray[]{Nd4j.create(new double[]{1.0, 2.0, 3.0, 4.0, 5.0}, new long[]{1, 5})}, null)[0]; - val result1 = pi.output(new INDArray[]{Nd4j.create(new double[]{1.0, 2.0, 3.0, 4.0, 5.0}, new long[]{1, 5})}, null)[0]; + val result0 = pi.output(new INDArray[]{Nd4j.create(new double[]{1.0, 2.0, 3.0, 4.0, 5.0}, 1, 5)}, null)[0]; + val result1 = pi.output(new INDArray[]{Nd4j.create(new double[]{1.0, 2.0, 3.0, 4.0, 5.0}, 1, 5)}, null)[0]; assertNotNull(result0); assertEquals(result0, result1); @@ -153,8 +153,8 @@ public class InplaceParallelInferenceTest extends BaseDL4JTest { try { - val result0 = pi.output(new INDArray[]{Nd4j.create(new double[]{1.0, 2.0, 3.0, 4.0, 5.0}, new long[]{1, 5})}, null)[0]; - val result1 = pi.output(new INDArray[]{Nd4j.create(new double[]{1.0, 2.0, 3.0, 4.0, 5.0}, new long[]{1, 5})}, null)[0]; + val result0 = pi.output(new INDArray[]{Nd4j.create(new double[]{1.0, 2.0, 3.0, 4.0, 5.0}, 1, 5)}, null)[0]; + val result1 = pi.output(new INDArray[]{Nd4j.create(new double[]{1.0, 2.0, 3.0, 4.0, 5.0}, 1, 5)}, null)[0]; assertNotNull(result0); assertEquals(result0, result1); diff --git a/cavis-dnn/cavis-dnn-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/ParallelInferenceTest.java b/cavis-dnn/cavis-dnn-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/ParallelInferenceTest.java index 6f694286d..3919bfbc7 100644 --- a/cavis-dnn/cavis-dnn-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/ParallelInferenceTest.java +++ b/cavis-dnn/cavis-dnn-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/ParallelInferenceTest.java @@ -233,7 +233,7 @@ public class ParallelInferenceTest extends BaseDL4JTest { assertNotEquals(null, observable1); - assertTrue(observable1 == observable2); + assertSame(observable1, observable2); } @Test @@ -248,7 +248,7 @@ public class ParallelInferenceTest extends BaseDL4JTest { 
assertNotEquals(null, observable1); - assertTrue(observable1 == observable2); + assertSame(observable1, observable2); List> l = observable1.getInputBatches(); assertEquals(1, l.size()); @@ -276,8 +276,8 @@ public class ParallelInferenceTest extends BaseDL4JTest { assertNotEquals(null, observable1); assertNotEquals(null, observable3); - assertTrue(observable1 == observable2); - assertTrue(observable1 != observable3); + assertSame(observable1, observable2); + assertNotSame(observable1, observable3); List> l = observable1.getInputBatches(); assertEquals(1, l.size()); @@ -439,7 +439,7 @@ public class ParallelInferenceTest extends BaseDL4JTest { List arrs = new ArrayList<>(); List exp = new ArrayList<>(); for (int l : tsLengths) { - INDArray in = Nd4j.rand(new int[]{1, nIn, l}); + INDArray in = Nd4j.rand(1, nIn, l); arrs.add(in); INDArray out = net.output(in); exp.add(out); @@ -724,7 +724,7 @@ public class ParallelInferenceTest extends BaseDL4JTest { for (int i = 0; i < nRuns; i++) { int currTSLength = (randomTSLength ? 1 + r.nextInt(tsLength) : tsLength); int currNumEx = 1 + r.nextInt(3); - INDArray inArr = Nd4j.rand(new int[]{currNumEx, nIn, currTSLength}); + INDArray inArr = Nd4j.rand(currNumEx, nIn, currTSLength); in.add(inArr); INDArray inMask = null; @@ -857,7 +857,7 @@ public class ParallelInferenceTest extends BaseDL4JTest { int runs = isIntegrationTests() ? 
100 : 20; for (int i = 0; i < 100; i++) { int currNumEx = 1 + r.nextInt(3); - INDArray inArr = Nd4j.rand(new int[]{currNumEx, nIn}); + INDArray inArr = Nd4j.rand(currNumEx, nIn); in.add(new INDArray[]{inArr}); INDArray[] out = net.output(inArr); diff --git a/cavis-dnn/cavis-dnn-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/main/ParallelWrapperMainTest.java b/cavis-dnn/cavis-dnn-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/main/ParallelWrapperMainTest.java index bf525ac67..315788855 100644 --- a/cavis-dnn/cavis-dnn-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/main/ParallelWrapperMainTest.java +++ b/cavis-dnn/cavis-dnn-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/main/ParallelWrapperMainTest.java @@ -95,9 +95,9 @@ public class ParallelWrapperMainTest extends BaseDL4JTest { tmp.deleteOnExit(); ParallelWrapperMain parallelWrapperMain = new ParallelWrapperMain(); try { - parallelWrapperMain.runMain(new String[]{"--modelPath", tempModel.getAbsolutePath(), + parallelWrapperMain.runMain("--modelPath", tempModel.getAbsolutePath(), "--dataSetIteratorFactoryClazz", MnistDataSetIteratorProviderFactory.class.getName(), - "--modelOutputPath", tmp.getAbsolutePath(), "--uiUrl", "localhost:" + uiPort}); + "--modelOutputPath", tmp.getAbsolutePath(), "--uiUrl", "localhost:" + uiPort); } finally { parallelWrapperMain.stop(); } diff --git a/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonContextManager.java b/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonContextManager.java index 4a4ac3aaa..4eb8846a8 100644 --- a/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonContextManager.java +++ b/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonContextManager.java @@ -29,8 +29,8 @@ import java.util.concurrent.atomic.AtomicBoolean; public class PythonContextManager { - private 
static Set contexts = new HashSet<>(); - private static AtomicBoolean init = new AtomicBoolean(false); + private static final Set contexts = new HashSet<>(); + private static final AtomicBoolean init = new AtomicBoolean(false); private static String currentContext; private static final String MAIN_CONTEXT = "main"; private static final String COLLAPSED_KEY = "__collapsed__"; diff --git a/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonExecutioner.java b/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonExecutioner.java index 40131a237..c05735def 100644 --- a/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonExecutioner.java +++ b/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonExecutioner.java @@ -41,7 +41,7 @@ import static org.bytedeco.cpython.helper.python.Py_SetPath; public class PythonExecutioner { private final static String PYTHON_EXCEPTION_KEY = "__python_exception__"; - private static AtomicBoolean init = new AtomicBoolean(false); + private static final AtomicBoolean init = new AtomicBoolean(false); public final static String DEFAULT_PYTHON_PATH_PROPERTY = "org.eclipse.python4j.path"; public final static String JAVACPP_PYTHON_APPEND_TYPE = "org.eclipse.python4j.path.append"; public final static String DEFAULT_APPEND_TYPE = "before"; diff --git a/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonObject.java b/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonObject.java index bd0893a72..59ae4b224 100644 --- a/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonObject.java +++ b/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonObject.java @@ -91,12 +91,12 @@ public class PythonObject { public PythonObject callWithKwargs(PythonObject kwargs) { if 
(!Python.callable(this)) { - throw new PythonException("Object is not callable: " + toString()); + throw new PythonException("Object is not callable: " + this); } PyObject tuple = PyTuple_New(0); PyObject dict = kwargs.nativePythonObject; if (PyObject_IsInstance(dict, new PyObject(PyDict_Type())) != 1) { - throw new PythonException("Expected kwargs to be dict. Received: " + kwargs.toString()); + throw new PythonException("Expected kwargs to be dict. Received: " + kwargs); } PythonObject ret = new PythonObject(PyObject_Call(nativePythonObject, tuple, dict)); Py_DecRef(tuple); @@ -109,7 +109,7 @@ public class PythonObject { boolean ownsTuple = false; try { if (!Python.callable(this)) { - throw new PythonException("Object is not callable: " + toString()); + throw new PythonException("Object is not callable: " + this); } if (PyObject_IsInstance(args.nativePythonObject, new PyObject(PyTuple_Type())) == 1) { @@ -118,10 +118,10 @@ public class PythonObject { tuple = PyList_AsTuple(args.nativePythonObject); ownsTuple = true; } else { - throw new PythonException("Expected args to be tuple or list. Received: " + args.toString()); + throw new PythonException("Expected args to be tuple or list. Received: " + args); } if (kwargs != null && PyObject_IsInstance(kwargs.nativePythonObject, new PyObject(PyDict_Type())) != 1) { - throw new PythonException("Expected kwargs to be dict. Received: " + kwargs.toString()); + throw new PythonException("Expected kwargs to be dict. Received: " + kwargs); } return new PythonObject(PyObject_Call(nativePythonObject, tuple, kwargs == null ? 
null : kwargs.nativePythonObject)); } finally { @@ -147,7 +147,7 @@ public class PythonObject { PythonGIL.assertThreadSafe(); try (PythonGC pgc = PythonGC.watch()) { if (!Python.callable(this)) { - throw new PythonException("Object is not callable: " + toString()); + throw new PythonException("Object is not callable: " + this); } PythonObject pyArgs; PythonObject pyKwargs; diff --git a/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonProcess.java b/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonProcess.java index 21f22eaf6..d02234f69 100644 --- a/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonProcess.java +++ b/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonProcess.java @@ -28,12 +28,10 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; public class PythonProcess { - private static String pythonExecutable = Loader.load(org.bytedeco.cpython.python.class); + private static final String pythonExecutable = Loader.load(org.bytedeco.cpython.python.class); public static String runAndReturn(String... arguments)throws IOException, InterruptedException{ String[] allArgs = new String[arguments.length + 1]; - for (int i = 0; i < arguments.length; i++){ - allArgs[i + 1] = arguments[i]; - } + System.arraycopy(arguments, 0, allArgs, 1, arguments.length); allArgs[0] = pythonExecutable; ProcessBuilder pb = new ProcessBuilder(allArgs); Process process = pb.start(); @@ -45,9 +43,7 @@ public class PythonProcess { public static void run(String... 
arguments)throws IOException, InterruptedException{ String[] allArgs = new String[arguments.length + 1]; - for (int i = 0; i < arguments.length; i++){ - allArgs[i + 1] = arguments[i]; - } + System.arraycopy(arguments, 0, allArgs, 1, arguments.length); allArgs[0] = pythonExecutable; ProcessBuilder pb = new ProcessBuilder(allArgs); pb.inheritIO().start().waitFor(); diff --git a/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonTypes.java b/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonTypes.java index 9120c82d4..77eb71a25 100644 --- a/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonTypes.java +++ b/cavis-dnn/cavis-dnn-python4j/cavis-python4j-core/src/main/java/org/nd4j/python4j/PythonTypes.java @@ -31,11 +31,11 @@ public class PythonTypes { private static List getPrimitiveTypes() { - return Arrays.asList(STR, INT, FLOAT, BOOL, BYTES); + return Arrays.asList(STR, INT, FLOAT, BOOL, BYTES); } private static List getCollectionTypes() { - return Arrays.asList(LIST, DICT); + return Arrays.asList(LIST, DICT); } private static List getExternalTypes() { @@ -149,7 +149,7 @@ public class PythonTypes { PythonGIL.assertThreadSafe(); long val = PyLong_AsLong(pythonObject.getNativePythonObject()); if (val == -1 && PyErr_Occurred() != null) { - throw new PythonException("Could not convert value to int: " + pythonObject.toString()); + throw new PythonException("Could not convert value to int: " + pythonObject); } return val; } @@ -180,7 +180,7 @@ public class PythonTypes { PythonGIL.assertThreadSafe(); double val = PyFloat_AsDouble(pythonObject.getNativePythonObject()); if (val == -1 && PyErr_Occurred() != null) { - throw new PythonException("Could not convert value to float: " + pythonObject.toString()); + throw new PythonException("Could not convert value to float: " + pythonObject); } return val; } @@ -344,7 +344,7 @@ public class PythonTypes { HashMap ret = new 
HashMap(); PyObject dictType = new PyObject(PyDict_Type()); if (PyObject_IsInstance(pythonObject.getNativePythonObject(), dictType) != 1) { - throw new PythonException("Expected dict, received: " + pythonObject.toString()); + throw new PythonException("Expected dict, received: " + pythonObject); } PyObject keys = PyDict_Keys(pythonObject.getNativePythonObject()); diff --git a/cavis-dnn/cavis-dnn-python4j/cavis-python4j-numpy/src/test/java/PythonNumpyBasicTest.java b/cavis-dnn/cavis-dnn-python4j/cavis-python4j-numpy/src/test/java/PythonNumpyBasicTest.java index 2d9851977..17f1b246b 100644 --- a/cavis-dnn/cavis-dnn-python4j/cavis-python4j-numpy/src/test/java/PythonNumpyBasicTest.java +++ b/cavis-dnn/cavis-dnn-python4j/cavis-python4j-numpy/src/test/java/PythonNumpyBasicTest.java @@ -37,8 +37,8 @@ import java.util.List; @NotThreadSafe ////@RunWith(Parameterized.class) public class PythonNumpyBasicTest { - private DataType dataType; - private long[] shape; + private final DataType dataType; + private final long[] shape; public PythonNumpyBasicTest(DataType dataType, long[] shape, String dummyArg) { this.dataType = dataType; diff --git a/cavis-dnn/cavis-dnn-python4j/cavis-python4j-numpy/src/test/java/PythonNumpyCollectionsTest.java b/cavis-dnn/cavis-dnn-python4j/cavis-python4j-numpy/src/test/java/PythonNumpyCollectionsTest.java index 58c466d13..e3c4fb311 100644 --- a/cavis-dnn/cavis-dnn-python4j/cavis-python4j-numpy/src/test/java/PythonNumpyCollectionsTest.java +++ b/cavis-dnn/cavis-dnn-python4j/cavis-python4j-numpy/src/test/java/PythonNumpyCollectionsTest.java @@ -35,7 +35,7 @@ import java.util.*; @NotThreadSafe ////@RunWith(Parameterized.class) public class PythonNumpyCollectionsTest { - private DataType dataType; + private final DataType dataType; public PythonNumpyCollectionsTest(DataType dataType){ this.dataType = dataType; diff --git a/cavis-dnn/cavis-dnn-python4j/cavis-python4j-numpy/src/test/java/PythonNumpyMultiThreadTest.java 
b/cavis-dnn/cavis-dnn-python4j/cavis-python4j-numpy/src/test/java/PythonNumpyMultiThreadTest.java index c18d0a925..49bf7fd61 100644 --- a/cavis-dnn/cavis-dnn-python4j/cavis-python4j-numpy/src/test/java/PythonNumpyMultiThreadTest.java +++ b/cavis-dnn/cavis-dnn-python4j/cavis-python4j-numpy/src/test/java/PythonNumpyMultiThreadTest.java @@ -35,7 +35,7 @@ import java.util.List; @NotThreadSafe public class PythonNumpyMultiThreadTest { - private DataType dataType; + private final DataType dataType; public PythonNumpyMultiThreadTest(DataType dataType) { this.dataType = dataType; diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/api/stats/StatsCalculationHelper.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/api/stats/StatsCalculationHelper.java index 7e769cbb5..5a9735a8e 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/api/stats/StatsCalculationHelper.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/api/stats/StatsCalculationHelper.java @@ -39,10 +39,10 @@ public class StatsCalculationHelper { private long lastDataSetBefore; private long lastProcessBefore; private long totalExampleCount; - private List dataSetGetTimes = new ArrayList<>(); - private List processMiniBatchTimes = new ArrayList<>(); + private final List dataSetGetTimes = new ArrayList<>(); + private final List processMiniBatchTimes = new ArrayList<>(); - private TimeSource timeSource = TimeSourceProvider.getInstance(); + private final TimeSource timeSource = TimeSourceProvider.getInstance(); public void logMethodStartTime() { methodStartTime = timeSource.currentTimeMillis(); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/api/worker/ExecuteWorkerPathMDSFlatMap.java 
b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/api/worker/ExecuteWorkerPathMDSFlatMap.java index b012f3a0d..1515e4ee3 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/api/worker/ExecuteWorkerPathMDSFlatMap.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/api/worker/ExecuteWorkerPathMDSFlatMap.java @@ -36,7 +36,7 @@ import java.util.List; public class ExecuteWorkerPathMDSFlatMap implements FlatMapFunction, R> { private final FlatMapFunction, R> workerFlatMap; - private MultiDataSetLoader loader; + private final MultiDataSetLoader loader; private final int maxDataSetObjects; private final Broadcast hadoopConfig; diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/data/BatchAndExportDataSetsFunction.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/data/BatchAndExportDataSetsFunction.java index ac9a0a256..6a42c2259 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/data/BatchAndExportDataSetsFunction.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/data/BatchAndExportDataSetsFunction.java @@ -103,7 +103,7 @@ public class BatchAndExportDataSetsFunction implements Function2(countBefore, Collections.emptyList()); + return new Pair<>(countBefore, Collections.emptyList()); } List exportPaths = new ArrayList<>(); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/data/BatchAndExportMultiDataSetsFunction.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/data/BatchAndExportMultiDataSetsFunction.java index b7e30b351..a5f607ee6 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/data/BatchAndExportMultiDataSetsFunction.java 
+++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/data/BatchAndExportMultiDataSetsFunction.java @@ -105,7 +105,7 @@ public class BatchAndExportMultiDataSetsFunction if (tempList.isEmpty() || (numExamples < minibatchSize && !finalExport)) { //No op - return new Pair<>(countBefore, Collections.emptyList()); + return new Pair<>(countBefore, Collections.emptyList()); } List exportPaths = new ArrayList<>(); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/data/shuffle/SplitDataSetExamplesPairFlatMapFunction.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/data/shuffle/SplitDataSetExamplesPairFlatMapFunction.java index f6b12a1eb..b72484095 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/data/shuffle/SplitDataSetExamplesPairFlatMapFunction.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/data/shuffle/SplitDataSetExamplesPairFlatMapFunction.java @@ -34,7 +34,7 @@ import java.util.Random; public class SplitDataSetExamplesPairFlatMapFunction implements PairFlatMapFunction { private transient Random r; - private int maxKeyIndex; + private final int maxKeyIndex; public SplitDataSetExamplesPairFlatMapFunction(int maxKeyIndex) { this.maxKeyIndex = maxKeyIndex; diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/datavec/DataVecByteDataSetFunction.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/datavec/DataVecByteDataSetFunction.java index f8413037b..10ad4847d 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/datavec/DataVecByteDataSetFunction.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/datavec/DataVecByteDataSetFunction.java @@ -44,12 +44,12 @@ import 
java.util.List; public class DataVecByteDataSetFunction implements PairFunction, Double, DataSet> { private int labelIndex = 0; - private int numPossibleLabels; - private int byteFileLen; - private int batchSize; + private final int numPossibleLabels; + private final int byteFileLen; + private final int batchSize; private int numExamples; private boolean regression = false; - private DataSetPreProcessor preProcessor; + private final DataSetPreProcessor preProcessor; public DataVecByteDataSetFunction(int labelIndex, int numPossibleLabels, int batchSize, int byteFileLen) { this(labelIndex, numPossibleLabels, batchSize, byteFileLen, false, null); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/datavec/RDDMiniBatches.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/datavec/RDDMiniBatches.java index 4c0da6832..926051ba1 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/datavec/RDDMiniBatches.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/datavec/RDDMiniBatches.java @@ -31,8 +31,8 @@ import java.util.Iterator; import java.util.List; public class RDDMiniBatches implements Serializable { - private int miniBatches; - private JavaRDD toSplitJava; + private final int miniBatches; + private final JavaRDD toSplitJava; public RDDMiniBatches(int miniBatches, JavaRDD toSplit) { this.miniBatches = miniBatches; diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/datavec/RecordReaderFunction.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/datavec/RecordReaderFunction.java index 8d24bba6a..48ff6d0b0 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/datavec/RecordReaderFunction.java +++ 
b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/datavec/RecordReaderFunction.java @@ -34,10 +34,10 @@ import java.util.ArrayList; import java.util.List; public class RecordReaderFunction implements Function { - private RecordReader recordReader; + private final RecordReader recordReader; private int labelIndex = -1; private int numPossibleLabels = -1; - private WritableConverter converter; + private final WritableConverter converter; public RecordReaderFunction(RecordReader recordReader, int labelIndex, int numPossibleLabels, WritableConverter converter) { diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/BaseSparkEarlyStoppingTrainer.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/BaseSparkEarlyStoppingTrainer.java index 5f1029131..5ed1848b7 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/BaseSparkEarlyStoppingTrainer.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/BaseSparkEarlyStoppingTrainer.java @@ -41,11 +41,11 @@ import java.util.Map; public abstract class BaseSparkEarlyStoppingTrainer implements IEarlyStoppingTrainer { - private static Logger log = LoggerFactory.getLogger(BaseSparkEarlyStoppingTrainer.class); + private static final Logger log = LoggerFactory.getLogger(BaseSparkEarlyStoppingTrainer.class); - private JavaSparkContext sc; + private final JavaSparkContext sc; private final EarlyStoppingConfiguration esConfig; - private T net; + private final T net; private final JavaRDD train; private final JavaRDD trainMulti; private EarlyStoppingListener listener; @@ -206,7 +206,7 @@ public abstract class BaseSparkEarlyStoppingTrainer implements } if (epochTerminate) { log.info("Hit epoch termination condition at epoch {}. 
Details: {}", epochCount, - termReason.toString()); + termReason); T bestModel; try { bestModel = esConfig.getModelSaver().getBestModel(); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/SparkDataSetLossCalculator.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/SparkDataSetLossCalculator.java index be71c408c..fd5590e7f 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/SparkDataSetLossCalculator.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/SparkDataSetLossCalculator.java @@ -30,9 +30,9 @@ import org.nd4j.linalg.dataset.DataSet; public class SparkDataSetLossCalculator implements ScoreCalculator { - private JavaRDD data; - private boolean average; - private SparkContext sc; + private final JavaRDD data; + private final boolean average; + private final SparkContext sc; /**Calculate the score (loss function value) on a given data set (usually a test set) * diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/SparkEarlyStoppingGraphTrainer.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/SparkEarlyStoppingGraphTrainer.java index efdab70aa..fb052d008 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/SparkEarlyStoppingGraphTrainer.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/SparkEarlyStoppingGraphTrainer.java @@ -35,7 +35,7 @@ import org.nd4j.linalg.dataset.api.MultiDataSet; public class SparkEarlyStoppingGraphTrainer extends BaseSparkEarlyStoppingTrainer { - private SparkComputationGraph sparkNet; + private final SparkComputationGraph sparkNet; public 
SparkEarlyStoppingGraphTrainer(SparkContext sc, TrainingMaster trainingMaster, EarlyStoppingConfiguration esConfig, ComputationGraph net, diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/SparkEarlyStoppingTrainer.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/SparkEarlyStoppingTrainer.java index 3e61bd7cd..cab795894 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/SparkEarlyStoppingTrainer.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/SparkEarlyStoppingTrainer.java @@ -34,7 +34,7 @@ import org.nd4j.linalg.dataset.api.MultiDataSet; public class SparkEarlyStoppingTrainer extends BaseSparkEarlyStoppingTrainer { - private SparkDl4jMultiLayer sparkNet; + private final SparkDl4jMultiLayer sparkNet; public SparkEarlyStoppingTrainer(SparkContext sc, TrainingMaster trainingMaster, EarlyStoppingConfiguration esConfig, MultiLayerNetwork net, diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/SparkLossCalculatorComputationGraph.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/SparkLossCalculatorComputationGraph.java index be03c85af..5227aeef7 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/SparkLossCalculatorComputationGraph.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/earlystopping/SparkLossCalculatorComputationGraph.java @@ -30,9 +30,9 @@ import org.nd4j.linalg.dataset.api.MultiDataSet; public class SparkLossCalculatorComputationGraph implements ScoreCalculator { - private JavaRDD data; - private boolean average; - private SparkContext sc; + private final JavaRDD data; + private final boolean 
average; + private final SparkContext sc; /** * Calculate the score (loss function value) on a given data set (usually a test set) diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/SparkListenable.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/SparkListenable.java index 36011825d..6762b7486 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/SparkListenable.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/SparkListenable.java @@ -39,7 +39,7 @@ import java.util.List; public class SparkListenable { protected TrainingMaster trainingMaster; - private List listeners = new ArrayList<>(); + private final List listeners = new ArrayList<>(); /** diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/common/repartition/HashingBalancedPartitioner.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/common/repartition/HashingBalancedPartitioner.java index e2f5814bd..b0f532a54 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/common/repartition/HashingBalancedPartitioner.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/common/repartition/HashingBalancedPartitioner.java @@ -42,11 +42,11 @@ public class HashingBalancedPartitioner extends Partitioner { // avg # red elems per partition : 2.33 // avg # blue elems per partition : 3.33 // partitionWeightsByClass = [[1.714, .429, .857], [0.9, 0.6, 1.5]] - private List> partitionWeightsByClass; + private final List> partitionWeightsByClass; // The cumulative distribution of jump probabilities of extra elements by partition, by class // 0 for partitions that already have enough elements - private List> jumpTable; + private final List> 
jumpTable; private Random r; public HashingBalancedPartitioner(List> partitionWeightsByClass) { @@ -63,7 +63,7 @@ public class HashingBalancedPartitioner extends Partitioner { } this.partitionWeightsByClass = partitionWeightsByClass; // p_(j, i) - List> jumpsByClass = new ArrayList<>();; + List> jumpsByClass = new ArrayList<>(); for (int j = 0; j < numClasses; j++) { Double totalImbalance = 0D; // i_j = sum(max(1 - p_(j, i), 0) , i = 1..numPartitions) for (int i = 0; i < numPartitions; i++) { diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/evaluation/EvaluationRunner.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/evaluation/EvaluationRunner.java index 8550c6e3c..a38322234 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/evaluation/EvaluationRunner.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/evaluation/EvaluationRunner.java @@ -53,14 +53,14 @@ public class EvaluationRunner { } private final AtomicInteger workerCount = new AtomicInteger(0); - private Queue queue = new ConcurrentLinkedQueue<>(); + private final Queue queue = new ConcurrentLinkedQueue<>(); //parameters map for device local parameters for a given broadcast //Note: byte[] doesn't override Object.equals hence this is effectively an *identity* weak hash map, which is what we want here //i.e., DeviceLocal can be GC'd once the Broadcast is no longer referenced anywhere //This approach relies on the fact that a single Broadcast object's *content* will be shared by all of Spark's threads, // even though the Broadcast object itself mayb not be //Also by storing params as a byte[] (i.e., in serialized form), we sidestep a lot of the thread locality issues - private Map paramsMap = new WeakHashMap<>(); + private final Map paramsMap = new WeakHashMap<>(); private EvaluationRunner(){ } diff --git 
a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/SparkComputationGraph.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/SparkComputationGraph.java index 14d08dc99..67b120ddf 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/SparkComputationGraph.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/SparkComputationGraph.java @@ -78,13 +78,13 @@ public class SparkComputationGraph extends SparkListenable { public static final int DEFAULT_ROC_THRESHOLD_STEPS = 32; public static final int DEFAULT_EVAL_SCORE_BATCH_SIZE = 64; public static final int DEFAULT_EVAL_WORKERS = 4; - private transient JavaSparkContext sc; - private ComputationGraphConfiguration conf; + private final transient JavaSparkContext sc; + private final ComputationGraphConfiguration conf; private ComputationGraph network; private double lastScore; private int defaultEvaluationWorkers = DEFAULT_EVAL_WORKERS; - private transient AtomicInteger iterationsCount = new AtomicInteger(0); + private final transient AtomicInteger iterationsCount = new AtomicInteger(0); /** * Instantiate a ComputationGraph instance with the given context, network and training master. 
diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/CGVaeReconstructionErrorWithKeyFunction.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/CGVaeReconstructionErrorWithKeyFunction.java index d8aadc3f1..3fa3312d7 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/CGVaeReconstructionErrorWithKeyFunction.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/CGVaeReconstructionErrorWithKeyFunction.java @@ -44,9 +44,9 @@ public class CGVaeReconstructionErrorWithKeyFunction extends BaseVaeScoreWith @Override public VariationalAutoencoder getVaeLayer() { ComputationGraph network = - new ComputationGraph(ComputationGraphConfiguration.fromJson((String) jsonConfig.getValue())); + new ComputationGraph(ComputationGraphConfiguration.fromJson(jsonConfig.getValue())); network.init(); - INDArray val = ((INDArray) params.value()).unsafeDuplication(); + INDArray val = params.value().unsafeDuplication(); if (val.length() != network.numParams(false)) throw new IllegalStateException( "Network did not have same number of parameters as the broadcasted set parameters"); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/CGVaeReconstructionProbWithKeyFunction.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/CGVaeReconstructionProbWithKeyFunction.java index 57c568239..a71912367 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/CGVaeReconstructionProbWithKeyFunction.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/CGVaeReconstructionProbWithKeyFunction.java @@ -46,9 +46,9 @@ 
public class CGVaeReconstructionProbWithKeyFunction extends BaseVaeReconstruc @Override public VariationalAutoencoder getVaeLayer() { ComputationGraph network = - new ComputationGraph(ComputationGraphConfiguration.fromJson((String) jsonConfig.getValue())); + new ComputationGraph(ComputationGraphConfiguration.fromJson(jsonConfig.getValue())); network.init(); - INDArray val = ((INDArray) params.value()).unsafeDuplication(); + INDArray val = params.value().unsafeDuplication(); if (val.length() != network.numParams(false)) throw new IllegalStateException( "Network did not have same number of parameters as the broadcasted set parameters"); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/ScoreFlatMapFunctionCGDataSet.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/ScoreFlatMapFunctionCGDataSet.java index 7acae9d8f..578165fc7 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/ScoreFlatMapFunctionCGDataSet.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/ScoreFlatMapFunctionCGDataSet.java @@ -41,9 +41,9 @@ import java.util.List; public class ScoreFlatMapFunctionCGDataSet implements FlatMapFunction, Tuple2> { private static final Logger log = LoggerFactory.getLogger(ScoreFlatMapFunctionCGDataSet.class); - private String json; - private Broadcast params; - private int minibatchSize; + private final String json; + private final Broadcast params; + private final int minibatchSize; public ScoreFlatMapFunctionCGDataSet(String json, Broadcast params, int minibatchSize) { diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/ScoreFlatMapFunctionCGMultiDataSet.java 
b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/ScoreFlatMapFunctionCGMultiDataSet.java index 60ba08857..5ea855fbd 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/ScoreFlatMapFunctionCGMultiDataSet.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/ScoreFlatMapFunctionCGMultiDataSet.java @@ -41,9 +41,9 @@ import java.util.List; public class ScoreFlatMapFunctionCGMultiDataSet implements FlatMapFunction, Tuple2> { private static final Logger log = LoggerFactory.getLogger(ScoreFlatMapFunctionCGMultiDataSet.class); - private String json; - private Broadcast params; - private int minibatchSize; + private final String json; + private final Broadcast params; + private final int minibatchSize; public ScoreFlatMapFunctionCGMultiDataSet(String json, Broadcast params, int minibatchSize) { diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/multilayer/SparkDl4jMultiLayer.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/multilayer/SparkDl4jMultiLayer.java index be7780f2f..d8e1c1437 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/multilayer/SparkDl4jMultiLayer.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/multilayer/SparkDl4jMultiLayer.java @@ -79,8 +79,8 @@ public class SparkDl4jMultiLayer extends SparkListenable { public static final int DEFAULT_EVAL_SCORE_BATCH_SIZE = 64; public static final int DEFAULT_ROC_THRESHOLD_STEPS = 32; public static final int DEFAULT_EVAL_WORKERS = 4; - private transient JavaSparkContext sc; - private MultiLayerConfiguration conf; + private final transient JavaSparkContext sc; + private final MultiLayerConfiguration conf; private MultiLayerNetwork 
network; private double lastScore; private int defaultEvaluationWorkers = DEFAULT_EVAL_WORKERS; @@ -157,7 +157,7 @@ public class SparkDl4jMultiLayer extends SparkListenable { } /** - * Set the network that underlies this SparkDl4jMultiLayer instacne + * Set the network that underlies this SparkDl4jMultiLayer instance * * @param network network to set */ diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/multilayer/scoring/VaeReconstructionErrorWithKeyFunction.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/multilayer/scoring/VaeReconstructionErrorWithKeyFunction.java index 3f7c5ba6c..e1c2f760d 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/multilayer/scoring/VaeReconstructionErrorWithKeyFunction.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/multilayer/scoring/VaeReconstructionErrorWithKeyFunction.java @@ -47,9 +47,9 @@ public class VaeReconstructionErrorWithKeyFunction extends BaseVaeScoreWithKe @Override public VariationalAutoencoder getVaeLayer() { MultiLayerNetwork network = - new MultiLayerNetwork(MultiLayerConfiguration.fromJson((String) jsonConfig.getValue())); + new MultiLayerNetwork(MultiLayerConfiguration.fromJson(jsonConfig.getValue())); network.init(); - INDArray val = ((INDArray) params.value()).unsafeDuplication(); + INDArray val = params.value().unsafeDuplication(); if (val.length() != network.numParams(false)) throw new IllegalStateException( "Network did not have same number of parameters as the broadcast set parameters"); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/multilayer/scoring/VaeReconstructionProbWithKeyFunction.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/multilayer/scoring/VaeReconstructionProbWithKeyFunction.java 
index d9dd8a155..12fbbbeb6 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/multilayer/scoring/VaeReconstructionProbWithKeyFunction.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/multilayer/scoring/VaeReconstructionProbWithKeyFunction.java @@ -47,9 +47,9 @@ public class VaeReconstructionProbWithKeyFunction extends BaseVaeReconstructi @Override public VariationalAutoencoder getVaeLayer() { MultiLayerNetwork network = - new MultiLayerNetwork(MultiLayerConfiguration.fromJson((String) jsonConfig.getValue())); + new MultiLayerNetwork(MultiLayerConfiguration.fromJson(jsonConfig.getValue())); network.init(); - INDArray val = ((INDArray) params.value()).unsafeDuplication(); + INDArray val = params.value().unsafeDuplication(); if (val.length() != network.numParams(false)) throw new IllegalStateException( "Network did not have same number of parameters as the broadcast set parameters"); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/paramavg/ParameterAveragingTrainingWorker.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/paramavg/ParameterAveragingTrainingWorker.java index 5030a21b6..87374a584 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/paramavg/ParameterAveragingTrainingWorker.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/paramavg/ParameterAveragingTrainingWorker.java @@ -59,8 +59,8 @@ public class ParameterAveragingTrainingWorker extends BaseTrainingWorker trainingHooks; private final WorkerConfiguration configuration; private ParameterAveragingTrainingWorkerStats.ParameterAveragingTrainingWorkerStatsHelper stats = null; - private Collection trainingListeners; - private StatsStorageRouterProvider listenerRouterProvider; + private final Collection 
trainingListeners; + private final StatsStorageRouterProvider listenerRouterProvider; public ParameterAveragingTrainingWorker(Broadcast broadcast, boolean saveUpdater, WorkerConfiguration configuration, Collection trainingHooks, @@ -172,9 +172,9 @@ public class ParameterAveragingTrainingWorker extends BaseTrainingWorker exportTimes = new ArrayList<>(); //Starts for exporting data - private List countTimes = new ArrayList<>(); - private List broadcastTimes = new ArrayList<>(); - private List repartitionTimes = new ArrayList<>(); - private List fitTimes = new ArrayList<>(); - private List splitTimes = new ArrayList<>(); - private List mapPartitions = new ArrayList<>(); - private List aggregateTimes = new ArrayList<>(); - private List processParamsUpdaterTimes = new ArrayList<>(); + private final List exportTimes = new ArrayList<>(); //Starts for exporting data + private final List countTimes = new ArrayList<>(); + private final List broadcastTimes = new ArrayList<>(); + private final List repartitionTimes = new ArrayList<>(); + private final List fitTimes = new ArrayList<>(); + private final List splitTimes = new ArrayList<>(); + private final List mapPartitions = new ArrayList<>(); + private final List aggregateTimes = new ArrayList<>(); + private final List processParamsUpdaterTimes = new ArrayList<>(); private final TimeSource timeSource = TimeSourceProvider.getInstance(); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/paramavg/stats/ParameterAveragingTrainingWorkerStats.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/paramavg/stats/ParameterAveragingTrainingWorkerStats.java index fce3ec751..35e6ba9f0 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/paramavg/stats/ParameterAveragingTrainingWorkerStats.java +++ 
b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/impl/paramavg/stats/ParameterAveragingTrainingWorkerStats.java @@ -173,7 +173,7 @@ public class ParameterAveragingTrainingWorkerStats implements SparkTrainingStats private long initEndTime; private long lastFitStartTime; //TODO replace with fast int collection (no boxing) - private List fitTimes = new ArrayList<>(); + private final List fitTimes = new ArrayList<>(); private final TimeSource timeSource = TimeSourceProvider.getInstance(); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/iterator/PathSparkDataSetIterator.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/iterator/PathSparkDataSetIterator.java index 2e7c6bad5..a4d06bfba 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/iterator/PathSparkDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/iterator/PathSparkDataSetIterator.java @@ -38,8 +38,8 @@ public class PathSparkDataSetIterator extends BaseDataSetIterator { public static final int BUFFER_SIZE = 4194304; //4 MB private FileSystem fileSystem; - private DataSetLoader dataSetLoader; - private Broadcast hadoopConfig; + private final DataSetLoader dataSetLoader; + private final Broadcast hadoopConfig; public PathSparkDataSetIterator(Iterator iter, DataSetLoader dataSetLoader, Broadcast hadoopConfig) { this.dataSetStreams = null; diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/iterator/SparkADSI.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/iterator/SparkADSI.java index 09ed9973c..f462352d0 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/iterator/SparkADSI.java +++ 
b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/iterator/SparkADSI.java @@ -90,7 +90,7 @@ public class SparkADSI extends AsyncDataSetIterator { this.buffer = queue; this.prefetchSize = queueSize; this.backedIterator = iterator; - this.workspaceId = "SADSI_ITER-" + java.util.UUID.randomUUID().toString(); + this.workspaceId = "SADSI_ITER-" + java.util.UUID.randomUUID(); if (iterator.resetSupported()) this.backedIterator.reset(); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/iterator/SparkAMDSI.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/iterator/SparkAMDSI.java index 128db97a7..ab5a3ee20 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/iterator/SparkAMDSI.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/iterator/SparkAMDSI.java @@ -91,7 +91,7 @@ public class SparkAMDSI extends AsyncMultiDataSetIterator { this.backedIterator = iterator; this.useWorkspaces = useWorkspace; this.prefetchSize = queueSize; - this.workspaceId = "SAMDSI_ITER-" + java.util.UUID.randomUUID().toString(); + this.workspaceId = "SAMDSI_ITER-" + java.util.UUID.randomUUID(); this.deviceId = deviceId; if (iterator.resetSupported()) diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/stats/StatsUtils.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/stats/StatsUtils.java index 867d89795..0e083f31a 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/stats/StatsUtils.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/stats/StatsUtils.java @@ -44,6 +44,7 @@ import java.awt.*; import java.io.BufferedOutputStream; import java.io.IOException; import java.io.OutputStream; +import 
java.nio.charset.StandardCharsets; import java.util.*; import java.util.List; @@ -238,7 +239,7 @@ public class StatsUtils { } String html = StaticPageUtil.renderHTML(components); - outputStream.write(html.getBytes("UTF-8")); + outputStream.write(html.getBytes(StandardCharsets.UTF_8)); } diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/time/NTPTimeSource.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/time/NTPTimeSource.java index 8b6332ba4..b55f560da 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/time/NTPTimeSource.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/time/NTPTimeSource.java @@ -49,7 +49,7 @@ public class NTPTimeSource implements TimeSource { public static final String DEFAULT_NTP_SERVER = "0.pool.ntp.org"; - private static Logger log = LoggerFactory.getLogger(NTPTimeSource.class); + private static final Logger log = LoggerFactory.getLogger(NTPTimeSource.class); private static NTPTimeSource instance; public static synchronized TimeSource getInstance() { diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/util/MLLibUtil.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/util/MLLibUtil.java index dbde9f862..576cda013 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/util/MLLibUtil.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/util/MLLibUtil.java @@ -428,7 +428,7 @@ public class MLLibUtil { // FIXMEL int cast double[] fArr = features.toArray(); - return new DataSet(Nd4j.create(fArr, new long[]{1,fArr.length}), + return new DataSet(Nd4j.create(fArr, 1,fArr.length), FeatureUtil.toOutcomeVector((int) label, (int) numPossibleLabels)); } diff --git 
a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/util/SparkUtils.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/util/SparkUtils.java index 6e88fbfa8..c6d935912 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/util/SparkUtils.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/util/SparkUtils.java @@ -61,6 +61,7 @@ import java.io.*; import java.lang.reflect.Array; import java.net.URI; import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; import java.util.*; @Slf4j @@ -113,7 +114,7 @@ public class SparkUtils { boolean equals; INDArray deserialized; try { - deserialized = (INDArray) si.deserialize(bb, null); + deserialized = si.deserialize(bb, null); //Equals method may fail on malformed INDArrays, hence should be within the try-catch equals = Nd4j.linspace(1, 5, 5).equals(deserialized); } catch (Exception e) { @@ -153,7 +154,7 @@ public class SparkUtils { public static void writeStringToFile(String path, String toWrite, SparkContext sc) throws IOException { FileSystem fileSystem = FileSystem.get(sc.hadoopConfiguration()); try (BufferedOutputStream bos = new BufferedOutputStream(fileSystem.create(new Path(path)))) { - bos.write(toWrite.getBytes("UTF-8")); + bos.write(toWrite.getBytes(StandardCharsets.UTF_8)); } } @@ -177,7 +178,7 @@ public class SparkUtils { FileSystem fileSystem = FileSystem.get(sc.hadoopConfiguration()); try (BufferedInputStream bis = new BufferedInputStream(fileSystem.open(new Path(path)))) { byte[] asBytes = IOUtils.toByteArray(bis); - return new String(asBytes, "UTF-8"); + return new String(asBytes, StandardCharsets.UTF_8); } } diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/util/serde/StorageLevelDeserializer.java 
b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/util/serde/StorageLevelDeserializer.java index cc9490a9a..f5831a6ef 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/util/serde/StorageLevelDeserializer.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/util/serde/StorageLevelDeserializer.java @@ -32,7 +32,7 @@ import java.io.IOException; public class StorageLevelDeserializer extends JsonDeserializer { @Override public StorageLevel deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) - throws IOException, JsonProcessingException { + throws IOException { JsonNode node = jsonParser.getCodec().readTree(jsonParser); String value = node.textValue(); if (value == null || "null".equals(value)) { diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/util/serde/StorageLevelSerializer.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/util/serde/StorageLevelSerializer.java index db02ea278..2b9257df5 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/util/serde/StorageLevelSerializer.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/main/java/org/deeplearning4j/spark/util/serde/StorageLevelSerializer.java @@ -53,7 +53,7 @@ public class StorageLevelSerializer extends JsonSerializer { @Override public void serialize(StorageLevel storageLevel, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) - throws IOException, JsonProcessingException { + throws IOException { //This is a little ugly, but Spark doesn't provide many options here... 
String s = null; if (storageLevel != null) { diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/TestEarlyStoppingSpark.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/TestEarlyStoppingSpark.java index ed8de3623..f4e9f674e 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/TestEarlyStoppingSpark.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/TestEarlyStoppingSpark.java @@ -284,7 +284,7 @@ public class TestEarlyStoppingSpark extends BaseSparkTest { private static class LoggingEarlyStoppingListener implements EarlyStoppingListener { - private static Logger log = LoggerFactory.getLogger(LoggingEarlyStoppingListener.class); + private static final Logger log = LoggerFactory.getLogger(LoggingEarlyStoppingListener.class); private int onStartCallCount = 0; private int onEpochCallCount = 0; private int onCompletionCallCount = 0; diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/TestEarlyStoppingSparkCompGraph.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/TestEarlyStoppingSparkCompGraph.java index 3de17a742..39618055e 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/TestEarlyStoppingSparkCompGraph.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/TestEarlyStoppingSparkCompGraph.java @@ -290,7 +290,7 @@ public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest { private static class LoggingEarlyStoppingListener implements EarlyStoppingListener { - private static Logger log = LoggerFactory.getLogger(LoggingEarlyStoppingListener.class); + private static final Logger log = LoggerFactory.getLogger(LoggingEarlyStoppingListener.class); private int onStartCallCount = 0; private int onEpochCallCount 
= 0; private int onCompletionCallCount = 0; diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/TestKryo.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/TestKryo.java index 48212f814..da5d7822a 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/TestKryo.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/TestKryo.java @@ -53,10 +53,10 @@ public class TestKryo extends BaseSparkKryoTest { private void testSerialization(T in, SerializerInstance si) { ByteBuffer bb = si.serialize(in, null); - T deserialized = (T)si.deserialize(bb, null); + T deserialized = si.deserialize(bb, null); boolean equals = in.equals(deserialized); - assertTrue(equals, in.getClass() + "\t" + in.toString()); + assertTrue(equals, in.getClass() + "\t" + in); } @Test @@ -105,7 +105,7 @@ public class TestKryo extends BaseSparkKryoTest { GraphVertex[] vertices = new GraphVertex[] {new ElementWiseVertex(ElementWiseVertex.Op.Add), new L2NormalizeVertex(), new LayerVertex(null, null), new MergeVertex(), new PoolHelperVertex(), new PreprocessorVertex(new CnnToFeedForwardPreProcessor(28, 28, 1)), - new ReshapeVertex(new int[] {1, 1}), new ScaleVertex(1.0), new ShiftVertex(1.0), + new ReshapeVertex(1, 1), new ScaleVertex(1.0), new ShiftVertex(1.0), new SubsetVertex(1, 1), new UnstackVertex(0, 2), new DuplicateToTimeSeriesVertex("in1"), new LastTimeStepVertex("in1")}; @@ -118,26 +118,26 @@ public class TestKryo extends BaseSparkKryoTest { public void testSerializationEvaluation() { Evaluation e = new Evaluation(); - e.eval(Nd4j.create(new double[] {1, 0, 0}, new long[]{1, 3}), Nd4j.create(new double[] {0.2, 0.5, 0.3}, new long[]{1, 3})); + e.eval(Nd4j.create(new double[] {1, 0, 0}, 1, 3), Nd4j.create(new double[] {0.2, 0.5, 0.3}, 1, 3)); EvaluationBinary eb = new EvaluationBinary(); - eb.eval(Nd4j.create(new double[] {1, 0, 
0}, new long[]{1, 3}), Nd4j.create(new double[] {0.2, 0.6, 0.3}, new long[]{1, 3})); + eb.eval(Nd4j.create(new double[] {1, 0, 0}, 1, 3), Nd4j.create(new double[] {0.2, 0.6, 0.3}, 1, 3)); ROC roc = new ROC(30); - roc.eval(Nd4j.create(new double[] {1}, new long[]{1, 1}), Nd4j.create(new double[] {0.2}, new long[]{1, 1})); + roc.eval(Nd4j.create(new double[] {1}, 1, 1), Nd4j.create(new double[] {0.2}, 1, 1)); ROC roc2 = new ROC(); - roc2.eval(Nd4j.create(new double[] {1}, new long[]{1, 1}), Nd4j.create(new double[] {0.2}, new long[]{1, 1})); + roc2.eval(Nd4j.create(new double[] {1}, 1, 1), Nd4j.create(new double[] {0.2}, 1, 1)); ROCMultiClass rocM = new ROCMultiClass(30); - rocM.eval(Nd4j.create(new double[] {1, 0, 0}, new long[]{1, 3}), Nd4j.create(new double[] {0.2, 0.5, 0.3}, new long[]{1, 3})); + rocM.eval(Nd4j.create(new double[] {1, 0, 0}, 1, 3), Nd4j.create(new double[] {0.2, 0.5, 0.3}, 1, 3)); ROCMultiClass rocM2 = new ROCMultiClass(); - rocM2.eval(Nd4j.create(new double[] {1, 0, 0}, new long[]{1, 3}), Nd4j.create(new double[] {0.2, 0.5, 0.3}, new long[]{1, 3})); + rocM2.eval(Nd4j.create(new double[] {1, 0, 0}, 1, 3), Nd4j.create(new double[] {0.2, 0.5, 0.3}, 1, 3)); ROCBinary rocB = new ROCBinary(30); - rocB.eval(Nd4j.create(new double[] {1, 0, 0}, new long[]{1, 3}), Nd4j.create(new double[] {0.2, 0.6, 0.3}, new long[]{1, 3})); + rocB.eval(Nd4j.create(new double[] {1, 0, 0}, 1, 3), Nd4j.create(new double[] {0.2, 0.6, 0.3}, 1, 3)); ROCBinary rocB2 = new ROCBinary(); - rocB2.eval(Nd4j.create(new double[] {1, 0, 0}, new long[]{1, 3}), Nd4j.create(new double[] {0.2, 0.6, 0.3}, new long[]{1, 3})); + rocB2.eval(Nd4j.create(new double[] {1, 0, 0}, 1, 3), Nd4j.create(new double[] {0.2, 0.6, 0.3}, 1, 3)); RegressionEvaluation re = new RegressionEvaluation(); re.eval(Nd4j.rand(1, 5), Nd4j.rand(1, 5)); @@ -184,16 +184,16 @@ public class TestKryo extends BaseSparkKryoTest { testSerialization(new ConcurrentHashMap<>(m), si); 
testSerialization(Collections.unmodifiableMap(m), si); - testSerialization(Arrays.asList("s"), si); + testSerialization(Collections.singletonList("s"), si); testSerialization(Collections.singleton("s"), si); - testSerialization(Collections.synchronizedList(Arrays.asList("s")), si); + testSerialization(Collections.synchronizedList(Collections.singletonList("s")), si); testSerialization(Collections.emptyList(), si); - testSerialization(new CopyOnWriteArrayList<>(Arrays.asList("s")), si); - testSerialization(Collections.unmodifiableList(Arrays.asList("s")), si); + testSerialization(new CopyOnWriteArrayList<>(Collections.singletonList("s")), si); + testSerialization(Collections.unmodifiableList(Collections.singletonList("s")), si); testSerialization(Collections.singleton("s"), si); - testSerialization(Collections.synchronizedSet(new HashSet<>(Arrays.asList("s"))), si); + testSerialization(Collections.synchronizedSet(new HashSet<>(Collections.singletonList("s"))), si); testSerialization(Collections.emptySet(), si); - testSerialization(Collections.unmodifiableSet(new HashSet<>(Arrays.asList("s"))), si); + testSerialization(Collections.unmodifiableSet(new HashSet<>(Collections.singletonList("s"))), si); } } diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/datavec/MiniBatchTests.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/datavec/MiniBatchTests.java index 43c50fdeb..79e6da95c 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/datavec/MiniBatchTests.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/datavec/MiniBatchTests.java @@ -74,8 +74,8 @@ public class MiniBatchTests extends BaseSparkTest { @Override public Object call(DataSet dataSet) throws Exception { - assertTrue(dataSet.getFeatures().columns() == 150); - assertTrue(dataSet.numExamples() == 30); + assertEquals(150, 
dataSet.getFeatures().columns()); + assertEquals(30, dataSet.numExamples()); return null; } } diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/datavec/TestDataVecDataSetFunctions.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/datavec/TestDataVecDataSetFunctions.java index fad1b4092..bd2e0f389 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/datavec/TestDataVecDataSetFunctions.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/datavec/TestDataVecDataSetFunctions.java @@ -263,7 +263,7 @@ public class TestDataVecDataSetFunctions extends BaseSparkTest { Path p = new File(testDir,"dl4j_testSeqPairFn").toPath(); p.toFile().deleteOnExit(); - String outPath = p.toString() + "/out"; + String outPath = p + "/out"; new File(outPath).deleteOnExit(); toWrite.saveAsNewAPIHadoopFile(outPath, Text.class, BytesPairWritable.class, SequenceFileOutputFormat.class); @@ -540,11 +540,7 @@ public class TestDataVecDataSetFunctions extends BaseSparkTest { if (m1 != null && !m1.equals(Nd4j.ones(m1.shape()))) { return false; } - if (m2 != null && !m2.equals(Nd4j.ones(m2.shape()))) { - return false; - } - - return true; + return m2 == null || m2.equals(Nd4j.ones(m2.shape())); } } diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/graph/TestSparkComputationGraph.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/graph/TestSparkComputationGraph.java index cc6e5f9ec..579effe1a 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/graph/TestSparkComputationGraph.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/graph/TestSparkComputationGraph.java @@ -119,7 +119,7 @@ public class TestSparkComputationGraph 
extends BaseSparkTest { TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0); SparkComputationGraph scg = new SparkComputationGraph(sc, cg, tm); - scg.setListeners(Collections.singleton((TrainingListener) new ScoreIterationListener(5))); + scg.setListeners(Collections.singleton(new ScoreIterationListener(5))); JavaRDD rdd = sc.parallelize(list); scg.fitMultiDataSet(rdd); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/multilayer/TestMiscFunctions.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/multilayer/TestMiscFunctions.java index 550ccc9b2..8b5a8b46c 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/multilayer/TestMiscFunctions.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/multilayer/TestMiscFunctions.java @@ -120,9 +120,9 @@ public class TestMiscFunctions extends BaseSparkTest { net.init(); List ds = Arrays.asList( - new org.nd4j.linalg.dataset.DataSet(Nd4j.rand(new int[]{1, 4, 5}), Nd4j.create(new double[]{1,1,1,0,0})), - new org.nd4j.linalg.dataset.DataSet(Nd4j.rand(new int[]{1, 4, 5}), Nd4j.create(new double[]{1,1,1,1,0})), - new org.nd4j.linalg.dataset.DataSet(Nd4j.rand(new int[]{1, 4, 5}), Nd4j.create(new double[]{1,1,1,1,1})) + new org.nd4j.linalg.dataset.DataSet(Nd4j.rand(1, 4, 5), Nd4j.create(new double[]{1,1,1,0,0})), + new org.nd4j.linalg.dataset.DataSet(Nd4j.rand(1, 4, 5), Nd4j.create(new double[]{1,1,1,1,0})), + new org.nd4j.linalg.dataset.DataSet(Nd4j.rand(1, 4, 5), Nd4j.create(new double[]{1,1,1,1,1})) ); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/multilayer/TestSparkDl4jMultiLayer.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/multilayer/TestSparkDl4jMultiLayer.java index 
9c7f783e0..d2c0d66bc 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/multilayer/TestSparkDl4jMultiLayer.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/multilayer/TestSparkDl4jMultiLayer.java @@ -47,8 +47,7 @@ import org.nd4j.linalg.lossfunctions.LossFunctions; import java.util.ArrayList; import java.util.List; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; @Slf4j public class TestSparkDl4jMultiLayer extends BaseSparkTest { @@ -136,7 +135,7 @@ public class TestSparkDl4jMultiLayer extends BaseSparkTest { tm.deleteTempFiles(sc); assertEquals(10000, evaluation.getNumRowCounter()); //10k test set - assertTrue(!Double.isNaN(evaluation.accuracy())); + assertFalse(Double.isNaN(evaluation.accuracy())); assertTrue(evaluation.accuracy() >= 0.10); assertTrue(evaluation.precision() >= 0.10); assertTrue(evaluation.recall() >= 0.10); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/paramavg/TestCompareParameterAveragingSparkVsSingleMachine.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/paramavg/TestCompareParameterAveragingSparkVsSingleMachine.java index cbe7247bd..050e6279c 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/paramavg/TestCompareParameterAveragingSparkVsSingleMachine.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/paramavg/TestCompareParameterAveragingSparkVsSingleMachine.java @@ -157,7 +157,7 @@ public class TestCompareParameterAveragingSparkVsSingleMachine { Nd4j.getRandom().setSeed(seed); List list = new ArrayList<>(); for (int i = 0; i < totalExamples; i++) { - INDArray f = Nd4j.rand(new int[] {1, 3, 10, 10}); + 
INDArray f = Nd4j.rand(1, 3, 10, 10); INDArray l = Nd4j.rand(1, 10); DataSet ds = new DataSet(f, l); list.add(ds); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/stats/TestTrainingStatsCollection.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/stats/TestTrainingStatsCollection.java index f4939e369..5d33e82c6 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/stats/TestTrainingStatsCollection.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/impl/stats/TestTrainingStatsCollection.java @@ -47,6 +47,7 @@ import org.nd4j.linalg.factory.Nd4j; import java.io.ByteArrayOutputStream; import java.lang.reflect.Field; +import java.nio.charset.StandardCharsets; import java.util.*; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -255,8 +256,7 @@ public class TestTrainingStatsCollection extends BaseSparkTest { ByteArrayOutputStream baos = new ByteArrayOutputStream(); StatsUtils.exportStatsAsHTML(stats, baos); baos.close(); - byte[] bytes = baos.toByteArray(); - String str = new String(bytes, "UTF-8"); + String str = baos.toString(StandardCharsets.UTF_8); // System.out.println(str); } finally { sc.stop(); @@ -294,8 +294,8 @@ public class TestTrainingStatsCollection extends BaseSparkTest { jvmIDs.add(e.getJvmID()); threadIDs.add(e.getThreadID()); } - assertTrue(machineIDs.size() == expNMachineIDs); - assertTrue(jvmIDs.size() == expNumJvmIds); - assertTrue(threadIDs.size() == expNumThreadIds); + assertEquals(machineIDs.size(), expNMachineIDs); + assertEquals(jvmIDs.size(), expNumJvmIds); + assertEquals(threadIDs.size(), expNumThreadIds); } } diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/ui/TestListeners.java 
b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/ui/TestListeners.java index 6f79d7595..aadf69cdd 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/ui/TestListeners.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/ui/TestListeners.java @@ -119,7 +119,7 @@ public class TestListeners extends BaseSparkTest { String widSubstring = wid.substring(0, wid.length() - 1); assertEquals(firstWorkerSubstring, widSubstring); - String counterVal = wid.substring(wid.length() - 1, wid.length()); + String counterVal = wid.substring(wid.length() - 1); int cv = Integer.parseInt(counterVal); assertTrue(0 <= cv && cv < numExecutors()); } diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/util/TestRepartitioning.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/util/TestRepartitioning.java index 77fdff58e..d7f705e55 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/util/TestRepartitioning.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/util/TestRepartitioning.java @@ -39,9 +39,7 @@ import java.util.Arrays; import java.util.List; import java.util.Random; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; @Timeout(300) public class TestRepartitioning extends BaseSparkTest { @@ -61,7 +59,7 @@ public class TestRepartitioning extends BaseSparkTest { rdd = rdd.repartition(200); JavaRDD rdd2 = SparkUtils.repartitionBalanceIfRequired(rdd, Repartition.Always, 100, 10); - assertFalse(rdd == rdd2); //Should be different objects due to repartitioning + assertNotSame(rdd, rdd2); //Should be different 
objects due to repartitioning assertEquals(10, rdd2.partitions().size()); for (int i = 0; i < 10; i++) { @@ -255,7 +253,7 @@ public class TestRepartitioning extends BaseSparkTest { rdd = rdd.repartition(200); JavaRDD rdd2 = SparkUtils.repartitionApproximateBalance(rdd, Repartition.Always, 10); - assertFalse(rdd == rdd2); //Should be different objects due to repartitioning + assertNotSame(rdd, rdd2); //Should be different objects due to repartitioning assertEquals(10, rdd2.partitions().size()); @@ -277,7 +275,7 @@ public class TestRepartitioning extends BaseSparkTest { JavaRDD rdd = sc.parallelize(list); JavaRDD rdd2 = SparkUtils.repartitionApproximateBalance(rdd, Repartition.Always, 100); - assertFalse(rdd == rdd2); //Should be different objects due to repartitioning + assertNotSame(rdd, rdd2); //Should be different objects due to repartitioning assertEquals(100, rdd2.partitions().size()); } diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/util/TestValidation.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/util/TestValidation.java index 21ba9fc23..fe2968ed8 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/util/TestValidation.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-core/src/test/java/org/deeplearning4j/spark/util/TestValidation.java @@ -36,6 +36,7 @@ import org.nd4j.linalg.factory.Nd4j; import java.io.File; import java.util.Arrays; +import java.util.Collections; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -169,8 +170,8 @@ public class TestValidation extends BaseSparkTest { //Add MultiDataSet with incorrect labels shape: new MultiDataSet(Nd4j.create(1,10), Nd4j.create(1,20)).save(f3); - r = SparkDataValidation.validateMultiDataSets(sc, f.toURI().toString(), Arrays.asList(new int[]{-1,10}), - Arrays.asList(new int[]{-1,10})); + r = 
SparkDataValidation.validateMultiDataSets(sc, f.toURI().toString(), Collections.singletonList(new int[]{-1, 10}), + Collections.singletonList(new int[]{-1, 10})); exp = ValidationResult.builder() .countTotal(4) .countTotalValid(3) @@ -183,8 +184,8 @@ public class TestValidation extends BaseSparkTest { //Add a MultiDataSet with incorrect number of feature arrays: new MultiDataSet(new INDArray[]{Nd4j.create(1,10), Nd4j.create(1,10)}, new INDArray[]{Nd4j.create(1,10)}).save(f3); - r = SparkDataValidation.validateMultiDataSets(sc, f.toURI().toString(), Arrays.asList(new int[]{-1,10}), - Arrays.asList(new int[]{-1,10})); + r = SparkDataValidation.validateMultiDataSets(sc, f.toURI().toString(), Collections.singletonList(new int[]{-1, 10}), + Collections.singletonList(new int[]{-1, 10})); exp = ValidationResult.builder() .countTotal(4) .countTotalValid(3) @@ -194,8 +195,8 @@ public class TestValidation extends BaseSparkTest { assertEquals(exp, r); - r = SparkDataValidation.deleteInvalidMultiDataSets(sc, f.toURI().toString(), Arrays.asList(new int[]{-1,10}), - Arrays.asList(new int[]{-1,10})); + r = SparkDataValidation.deleteInvalidMultiDataSets(sc, f.toURI().toString(), Collections.singletonList(new int[]{-1, 10}), + Collections.singletonList(new int[]{-1, 10})); exp.setCountInvalidDeleted(1); assertEquals(exp, r); assertFalse(f3.exists()); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/FirstIterationFunction.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/FirstIterationFunction.java index d8e6f235d..aafb35fcf 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/FirstIterationFunction.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/FirstIterationFunction.java @@ -36,27 +36,27 @@ 
import java.util.concurrent.atomic.AtomicLong; public class FirstIterationFunction implements FlatMapFunction, Long>>, Entry> { - private int ithIteration = 1; - private int vectorLength; - private boolean useAdaGrad; + private final int ithIteration = 1; + private final int vectorLength; + private final boolean useAdaGrad; private int batchSize = 0; - private double negative; - private int window; - private double alpha; - private double minAlpha; - private long totalWordCount; - private long seed; - private int maxExp; - private double[] expTable; - private int iterations; - private Map indexSyn0VecMap; - private Map pointSyn1VecMap; - private AtomicLong nextRandom = new AtomicLong(5); + private final double negative; + private final int window; + private final double alpha; + private final double minAlpha; + private final long totalWordCount; + private final long seed; + private final int maxExp; + private final double[] expTable; + private final int iterations; + private final Map indexSyn0VecMap; + private final Map pointSyn1VecMap; + private final AtomicLong nextRandom = new AtomicLong(5); - private volatile VocabCache vocab; + private final VocabCache vocab; private volatile NegativeHolder negativeHolder; - private AtomicLong cid = new AtomicLong(0); - private AtomicLong aff = new AtomicLong(0); + private final AtomicLong cid = new AtomicLong(0); + private final AtomicLong aff = new AtomicLong(0); @@ -123,7 +123,7 @@ public class FirstIterationFunction implements for (int ithWordInSentence = 0; ithWordInSentence < vocabWordsList.size(); ithWordInSentence++) { // Random value ranging from 0 to window size nextRandom.set(Math.abs(nextRandom.get() * 25214903917L + 11)); - int b = (int) (long) this.nextRandom.get() % window; + int b = (int) this.nextRandom.get() % window; VocabWord currentWord = vocabWordsList.get(ithWordInSentence); if (currentWord != null) { skipGram(ithWordInSentence, vocabWordsList, b, currentSentenceAlpha); @@ -164,7 +164,7 @@ public class 
FirstIterationFunction implements if (indexSyn0VecMap.containsKey(vocab.elementAtIndex(currentWordIndex))) { l1 = indexSyn0VecMap.get(vocab.elementAtIndex(currentWordIndex)); } else { - l1 = getRandomSyn0Vec(vectorLength, (long) currentWordIndex); + l1 = getRandomSyn0Vec(vectorLength, currentWordIndex); } // diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/NegativeHolder.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/NegativeHolder.java index 5b788562b..bade562b0 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/NegativeHolder.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/NegativeHolder.java @@ -32,7 +32,7 @@ import java.io.Serializable; import java.util.concurrent.atomic.AtomicBoolean; public class NegativeHolder implements Serializable { - private static NegativeHolder ourInstance = new NegativeHolder(); + private static final NegativeHolder ourInstance = new NegativeHolder(); public static NegativeHolder getInstance() { return ourInstance; @@ -43,7 +43,7 @@ public class NegativeHolder implements Serializable { @Getter private volatile INDArray table; - private transient AtomicBoolean wasInit = new AtomicBoolean(false); + private final transient AtomicBoolean wasInit = new AtomicBoolean(false); private transient VocabCache vocab; private NegativeHolder() { diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SecondIterationFunction.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SecondIterationFunction.java index 205d54ae0..7907821cb 100644 --- 
a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SecondIterationFunction.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SecondIterationFunction.java @@ -38,27 +38,27 @@ import java.util.concurrent.atomic.AtomicLong; public class SecondIterationFunction implements FlatMapFunction, Long>>, Entry> { - private int ithIteration = 1; - private int vectorLength; - private boolean useAdaGrad; + private final int ithIteration = 1; + private final int vectorLength; + private final boolean useAdaGrad; private int batchSize = 0; - private double negative; - private int window; - private double alpha; - private double minAlpha; - private long totalWordCount; - private long seed; - private int maxExp; - private double[] expTable; - private int iterations; + private final double negative; + private final int window; + private final double alpha; + private final double minAlpha; + private final long totalWordCount; + private final long seed; + private final int maxExp; + private final double[] expTable; + private final int iterations; - private AtomicLong nextRandom = new AtomicLong(5); + private final AtomicLong nextRandom = new AtomicLong(5); - private volatile VocabCache vocab; + private final VocabCache vocab; private transient volatile NegativeHolder negativeHolder; private transient volatile VocabHolder vocabHolder; - private AtomicLong cid = new AtomicLong(0); - private AtomicLong aff = new AtomicLong(0); + private final AtomicLong cid = new AtomicLong(0); + private final AtomicLong aff = new AtomicLong(0); @@ -133,7 +133,7 @@ public class SecondIterationFunction implements FlatMapFunction { - private AtomicLong nextRandom = new AtomicLong(5); + private final AtomicLong nextRandom = new AtomicLong(5); // private static Logger log = LoggerFactory.getLogger(SentenceBatch.class); diff --git 
a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/VocabHolder.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/VocabHolder.java index 1a983c68d..480215b2c 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/VocabHolder.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/VocabHolder.java @@ -35,14 +35,14 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; public class VocabHolder implements Serializable { - private static VocabHolder ourInstance = new VocabHolder(); + private static final VocabHolder ourInstance = new VocabHolder(); - private Map indexSyn0VecMap = new ConcurrentHashMap<>(); - private Map pointSyn1VecMap = new ConcurrentHashMap<>(); - private HashSet workers = new LinkedHashSet<>(); + private final Map indexSyn0VecMap = new ConcurrentHashMap<>(); + private final Map pointSyn1VecMap = new ConcurrentHashMap<>(); + private final HashSet workers = new LinkedHashSet<>(); - private AtomicLong seed = new AtomicLong(0); - private AtomicInteger vectorLength = new AtomicInteger(0); + private final AtomicLong seed = new AtomicLong(0); + private final AtomicInteger vectorLength = new AtomicInteger(0); public static VocabHolder getInstance() { return ourInstance; @@ -56,8 +56,7 @@ public class VocabHolder implements Serializable { } public INDArray getSyn0Vector(Integer wordIndex, VocabCache vocabCache) { - if (!workers.contains(Thread.currentThread().getId())) - workers.add(Thread.currentThread().getId()); + workers.add(Thread.currentThread().getId()); VocabWord word = vocabCache.elementAtIndex(wordIndex); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2Vec.java 
b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2Vec.java index b5146f74d..f52676adc 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2Vec.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2Vec.java @@ -57,10 +57,10 @@ import java.util.concurrent.atomic.AtomicLong; public class Word2Vec extends WordVectorsImpl implements Serializable { private INDArray trainedSyn1; - private static Logger log = LoggerFactory.getLogger(Word2Vec.class); - private int MAX_EXP = 6; + private static final Logger log = LoggerFactory.getLogger(Word2Vec.class); + private final int MAX_EXP = 6; @Getter - private double[] expTable; + private final double[] expTable; @Getter protected VectorsConfiguration configuration; @@ -68,8 +68,8 @@ public class Word2Vec extends WordVectorsImpl implements Serializable private int nGrams = 1; private String tokenizer = "org.deeplearning4j.text.tokenization.tokenizerfactory.DefaultTokenizerFactory"; private String tokenPreprocessor = "org.deeplearning4j.text.tokenization.tokenizer.preprocessor.CommonPreprocessor"; - private boolean removeStop = false; - private long seed = 42L; + private final boolean removeStop = false; + private final long seed = 42L; private boolean useUnknown = false; // Constructor to take InMemoryLookupCache table from an already trained model diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecChange.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecChange.java index 5ce201f0c..eebdb1eb2 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecChange.java +++ 
b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecChange.java @@ -33,7 +33,7 @@ import java.util.*; */ @Deprecated public class Word2VecChange implements Serializable { - private Map> changes = new HashMap<>(); + private final Map> changes = new HashMap<>(); public Word2VecChange(List> counterMap, Word2VecParam param) { Iterator> iter = counterMap.iterator(); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecParam.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecParam.java index 1e7f81133..68c464377 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecParam.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecParam.java @@ -44,7 +44,7 @@ public class Word2VecParam implements Serializable { private double alpha = 0.025; private double minAlpha = 1e-2; private int totalWords = 1; - private static transient final Logger log = LoggerFactory.getLogger(Word2VecPerformer.class); + private static final Logger log = LoggerFactory.getLogger(Word2VecPerformer.class); private int lastChecked = 0; private Broadcast wordCount; private InMemoryLookupTable weights; diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformer.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformer.java index 3b65a353d..6bd786052 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformer.java +++ 
b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformer.java @@ -41,21 +41,21 @@ import java.util.concurrent.atomic.AtomicLong; @Deprecated public class Word2VecPerformer implements VoidFunction, AtomicLong>> { - private static double MAX_EXP = 6; + private static final double MAX_EXP = 6; private boolean useAdaGrad = false; private double negative = 5; private int numWords = 1; private INDArray table; private int window = 5; - private AtomicLong nextRandom = new AtomicLong(5); + private final AtomicLong nextRandom = new AtomicLong(5); private double alpha = 0.025; private double minAlpha = 1e-2; private int totalWords = 1; - private static transient final Logger log = LoggerFactory.getLogger(Word2VecPerformer.class); + private static final Logger log = LoggerFactory.getLogger(Word2VecPerformer.class); private int lastChecked = 0; - private Broadcast wordCount; - private InMemoryLookupTable weights; - private double[] expTable = new double[1000]; + private final Broadcast wordCount; + private final InMemoryLookupTable weights; + private final double[] expTable = new double[1000]; private int vectorLength; @@ -239,7 +239,7 @@ public class Word2VecPerformer implements VoidFunction, Ato double numWordsSoFar = wordCount.getValue().doubleValue(); List sentence = pair.getFirst(); - double alpha2 = Math.max(minAlpha, alpha * (1 - (1.0 * numWordsSoFar / (double) totalWords))); + double alpha2 = Math.max(minAlpha, alpha * (1 - (numWordsSoFar / (double) totalWords))); int totalNewWords = 0; trainSentence(sentence, alpha2); totalNewWords += sentence.size(); @@ -253,7 +253,7 @@ public class Word2VecPerformer implements VoidFunction, Ato log.info("Words so far " + newWords + " out of " + totalWords); } - pair.getSecond().getAndAdd((long) totalNewWords); + pair.getSecond().getAndAdd(totalNewWords); } diff --git 
a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformerVoid.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformerVoid.java index 7bb7c44d8..b8330b064 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformerVoid.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformerVoid.java @@ -64,7 +64,7 @@ public class Word2VecPerformerVoid implements VoidFunction, private double minAlpha = 1e-2; private int totalWords = 1; private int iterations = 5; - private static transient final Logger log = LoggerFactory.getLogger(Word2VecPerformerVoid.class); + private static final Logger log = LoggerFactory.getLogger(Word2VecPerformerVoid.class); private int lastChecked = 0; private Broadcast wordCount; private InMemoryLookupTable weights; @@ -389,7 +389,7 @@ public class Word2VecPerformerVoid implements VoidFunction, double numWordsSoFar = wordCount.getValue().doubleValue(); List sentence = pair.getFirst(); - double alpha2 = Math.max(minAlpha, alpha * (1 - (1.0 * numWordsSoFar / (double) totalWords))); + double alpha2 = Math.max(minAlpha, alpha * (1 - (numWordsSoFar / (double) totalWords))); int totalNewWords = 0; trainSentence(sentence, alpha2); totalNewWords += sentence.size(); @@ -403,7 +403,7 @@ public class Word2VecPerformerVoid implements VoidFunction, log.info("Words so far " + newWords + " out of " + totalWords); } - pair.getSecond().getAndAdd((long) totalNewWords); + pair.getSecond().getAndAdd(totalNewWords); } diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecSetup.java 
b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecSetup.java index 677fb3738..db0e7d5ef 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecSetup.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecSetup.java @@ -29,7 +29,7 @@ import java.util.List; @Deprecated public class Word2VecSetup implements Function, Long>, Word2VecFuncCall> { - private Broadcast param; + private final Broadcast param; public Word2VecSetup(Broadcast param) { this.param = param; diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecVariables.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecVariables.java index 6adcc7d1f..21908df41 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecVariables.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecVariables.java @@ -77,19 +77,19 @@ public class Word2VecVariables { public static T assignVar(String variableName, SparkConf conf, Class clazz) throws Exception { Object ret; if (clazz.equals(Integer.class)) { - ret = conf.getInt(variableName, (Integer) getDefault(variableName)); + ret = conf.getInt(variableName, getDefault(variableName)); } else if (clazz.equals(Double.class)) { - ret = conf.getDouble(variableName, (Double) getDefault(variableName)); + ret = conf.getDouble(variableName, getDefault(variableName)); } else if (clazz.equals(Boolean.class)) { - ret = conf.getBoolean(variableName, (Boolean) getDefault(variableName)); + ret = conf.getBoolean(variableName, getDefault(variableName)); } else if 
(clazz.equals(String.class)) { - ret = conf.get(variableName, (String) getDefault(variableName)); + ret = conf.get(variableName, getDefault(variableName)); } else if (clazz.equals(Long.class)) { - ret = conf.getLong(variableName, (Long) getDefault(variableName)); + ret = conf.getLong(variableName, getDefault(variableName)); } else { throw new Exception("Variable Type not supported. Only boolean, int, double and String supported."); } diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/CountCumSum.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/CountCumSum.java index 4b757ec5f..dcd600e18 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/CountCumSum.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/CountCumSum.java @@ -36,8 +36,8 @@ import java.util.concurrent.atomic.AtomicLong; public class CountCumSum { // Starting variables - private JavaSparkContext sc; - private JavaRDD sentenceCountRDD; + private final JavaSparkContext sc; + private final JavaRDD sentenceCountRDD; // Variables to fill in as we go private JavaRDD foldWithinPartitionRDD; diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/FoldBetweenPartitionFunction.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/FoldBetweenPartitionFunction.java index f332c1f92..a4d54ba9c 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/FoldBetweenPartitionFunction.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/FoldBetweenPartitionFunction.java @@ -33,7 +33,7 @@ import java.util.concurrent.atomic.AtomicLong; * @author jeffreytang */ public class 
FoldBetweenPartitionFunction implements Function2, Iterator> { - private Broadcast> broadcastedMaxPerPartitionCounter; + private final Broadcast> broadcastedMaxPerPartitionCounter; public FoldBetweenPartitionFunction(Broadcast> broadcastedMaxPerPartitionCounter) { this.broadcastedMaxPerPartitionCounter = broadcastedMaxPerPartitionCounter; diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/FoldWithinPartitionFunction.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/FoldWithinPartitionFunction.java index 38910c623..7e4b3e7f8 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/FoldWithinPartitionFunction.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/FoldWithinPartitionFunction.java @@ -38,7 +38,7 @@ public class FoldWithinPartitionFunction implements Function2> maxPerPartitionAcc; + private final CollectionAccumulator> maxPerPartitionAcc; @Override diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/TextPipeline.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/TextPipeline.java index 5fb7b0fbc..49329436b 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/TextPipeline.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/TextPipeline.java @@ -55,7 +55,7 @@ public class TextPipeline { private Broadcast> stopWordBroadCast; // Return values private JavaRDD, AtomicLong>> sentenceWordsCountRDD; - private VocabCache vocabCache = new AbstractCache<>(); + private final VocabCache vocabCache = new AbstractCache<>(); private Broadcast> vocabCacheBroadcast; private JavaRDD> vocabWordListRDD; private 
JavaRDD sentenceCountRDD; diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/TokenizerFunction.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/TokenizerFunction.java index 75b855695..335294d4f 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/TokenizerFunction.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/TokenizerFunction.java @@ -34,8 +34,8 @@ import java.util.List; @SuppressWarnings("unchecked") @Slf4j public class TokenizerFunction implements Function> { - private String tokenizerFactoryClazz; - private String tokenizerPreprocessorClazz; + private final String tokenizerFactoryClazz; + private final String tokenizerPreprocessorClazz; private transient TokenizerFactory tokenizerFactory; private int nGrams = 1; diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/UpdateWordFreqAccumulatorFunction.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/UpdateWordFreqAccumulatorFunction.java index 312677c98..e66067be5 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/UpdateWordFreqAccumulatorFunction.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/main/java/org/deeplearning4j/spark/text/functions/UpdateWordFreqAccumulatorFunction.java @@ -34,8 +34,8 @@ import java.util.concurrent.atomic.AtomicLong; */ public class UpdateWordFreqAccumulatorFunction implements Function, Pair, AtomicLong>> { - private Broadcast> stopWords; - private CollectionAccumulator> wordFreqAcc; + private final Broadcast> stopWords; + private final CollectionAccumulator> wordFreqAcc; public UpdateWordFreqAccumulatorFunction(Broadcast> stopWords, CollectionAccumulator> 
wordFreqAcc) { diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/test/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecTest.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/test/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecTest.java index 4859b91a6..983855af9 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/test/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecTest.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/test/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecTest.java @@ -45,6 +45,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import java.io.File; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import static org.junit.jupiter.api.Assertions.*; @@ -85,7 +86,7 @@ public class Word2VecTest { // .setRemoveStop(false) .tokenizerFactory(t).seed(42L).negative(10).useAdaGrad(false).layerSize(150).windowSize(5) .learningRate(0.025).minLearningRate(0.0001).iterations(1).batchSize(100).minWordFrequency(5) - .stopWords(Arrays.asList("three")).useUnknown(true).build(); + .stopWords(Collections.singletonList("three")).useUnknown(true).build(); word2Vec.train(corpus); diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/test/java/org/deeplearning4j/spark/text/TestFunction.java b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/test/java/org/deeplearning4j/spark/text/TestFunction.java index 618bf0ac7..12f52f26c 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/test/java/org/deeplearning4j/spark/text/TestFunction.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/test/java/org/deeplearning4j/spark/text/TestFunction.java @@ -37,7 +37,7 @@ public class TestFunction implements Function { return a; } - private List lst; + private final List lst; private int a; diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/test/java/org/deeplearning4j/spark/text/TextPipelineTest.java 
b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/test/java/org/deeplearning4j/spark/text/TextPipelineTest.java index 7e4a4944e..e570bce58 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/test/java/org/deeplearning4j/spark/text/TextPipelineTest.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-nlp/src/test/java/org/deeplearning4j/spark/text/TextPipelineTest.java @@ -48,8 +48,7 @@ import scala.Tuple2; import java.util.*; import java.util.concurrent.atomic.AtomicLong; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; /** * @author Jeffrey Tang @@ -206,7 +205,7 @@ public class TextPipelineTest extends BaseSparkTest { pipeline.filterMinWordAddVocab(wordFreqCounter); VocabCache vocabCache = pipeline.getVocabCache(); - assertTrue(vocabCache != null); + assertNotNull(vocabCache); VocabWord redVocab = vocabCache.tokenFor("red"); VocabWord flowerVocab = vocabCache.tokenFor("flowers"); @@ -239,7 +238,7 @@ public class TextPipelineTest extends BaseSparkTest { pipeline.buildVocabCache(); VocabCache vocabCache = pipeline.getVocabCache(); - assertTrue(vocabCache != null); + assertNotNull(vocabCache); log.info("VocabWords: " + vocabCache.words()); assertEquals(5, vocabCache.numWords()); @@ -349,8 +348,8 @@ public class TextPipelineTest extends BaseSparkTest { CountCumSum countCumSum = new CountCumSum(sentenceCountRDD); JavaRDD sentenceCountCumSumRDD = countCumSum.buildCumSum(); List sentenceCountCumSumList = sentenceCountCumSumRDD.collect(); - assertTrue(sentenceCountCumSumList.get(0) == 6L); - assertTrue(sentenceCountCumSumList.get(1) == 9L); + assertEquals(6L, (long) sentenceCountCumSumList.get(0)); + assertEquals(9L, (long) sentenceCountCumSumList.get(1)); sc.stop(); } diff --git a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-parameterserver/src/main/java/org/deeplearning4j/spark/parameterserver/networking/v1/SilentTrainingDriver.java 
b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-parameterserver/src/main/java/org/deeplearning4j/spark/parameterserver/networking/v1/SilentTrainingDriver.java index 64d83910f..1b9ba7539 100644 --- a/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-parameterserver/src/main/java/org/deeplearning4j/spark/parameterserver/networking/v1/SilentTrainingDriver.java +++ b/cavis-dnn/cavis-dnn-spark/cavis-dnn-spark-parameterserver/src/main/java/org/deeplearning4j/spark/parameterserver/networking/v1/SilentTrainingDriver.java @@ -139,7 +139,7 @@ public class SilentTrainingDriver implements TrainingDriver maxIter / 2 || i >= stopLyingIteration)) { diff --git a/cavis-dnn/cavis-dnn-tsne/src/test/java/org/deeplearning4j/plot/Test6058.java b/cavis-dnn/cavis-dnn-tsne/src/test/java/org/deeplearning4j/plot/Test6058.java index de88c6851..280da3878 100644 --- a/cavis-dnn/cavis-dnn-tsne/src/test/java/org/deeplearning4j/plot/Test6058.java +++ b/cavis-dnn/cavis-dnn-tsne/src/test/java/org/deeplearning4j/plot/Test6058.java @@ -52,7 +52,7 @@ public class Test6058 extends BaseDL4JTest { .build(); System.out.println("fit"); - INDArray weights = Nd4j.rand(new int[]{nWords, 100}); + INDArray weights = Nd4j.rand(nWords, 100); weights.getRow(1).assign(0); try { tsne.fit(weights); diff --git a/cavis-native/cavis-native-blas/src/main/java/org/nd4j/nativeblas/BaseNativeNDArrayFactory.java b/cavis-native/cavis-native-blas/src/main/java/org/nd4j/nativeblas/BaseNativeNDArrayFactory.java index 2a544489a..3044db667 100644 --- a/cavis-native/cavis-native-blas/src/main/java/org/nd4j/nativeblas/BaseNativeNDArrayFactory.java +++ b/cavis-native/cavis-native-blas/src/main/java/org/nd4j/nativeblas/BaseNativeNDArrayFactory.java @@ -43,6 +43,7 @@ import java.nio.Buffer; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; @@ -114,8 +115,8 @@ public abstract class BaseNativeNDArrayFactory extends 
BaseNDArrayFactory { DataBuffer data = null; Pointer shapeBufferPointer = nativeOps.shapeBufferForNumpy(pointer); int length = nativeOps.lengthForShapeBufferPointer(shapeBufferPointer); - shapeBufferPointer.capacity(8 * length); - shapeBufferPointer.limit(8 * length); + shapeBufferPointer.capacity(8L * length); + shapeBufferPointer.limit(8L * length); shapeBufferPointer.position(0); @@ -307,8 +308,8 @@ public abstract class BaseNativeNDArrayFactory extends BaseNDArrayFactory { DataBuffer data = null; Pointer shapeBufferPointer = nativeOps.shapeBufferForNumpyHeader(pointer); int length = nativeOps.lengthForShapeBufferPointer(shapeBufferPointer); - shapeBufferPointer.capacity(8 * length); - shapeBufferPointer.limit(8 * length); + shapeBufferPointer.capacity(8L * length); + shapeBufferPointer.limit(8L * length); shapeBufferPointer.position(0); @@ -488,7 +489,7 @@ public abstract class BaseNativeNDArrayFactory extends BaseNDArrayFactory { */ @Override public INDArray createFromNpyFile(File file) { - byte[] pathBytes = file.getAbsolutePath().getBytes(Charset.forName("UTF-8")); + byte[] pathBytes = file.getAbsolutePath().getBytes(StandardCharsets.UTF_8); ByteBuffer directBuffer = ByteBuffer.allocateDirect(pathBytes.length).order(ByteOrder.nativeOrder()); directBuffer.put(pathBytes); ((Buffer) directBuffer).rewind(); @@ -668,7 +669,7 @@ public abstract class BaseNativeNDArrayFactory extends BaseNDArrayFactory { public Map _createFromNpzFile(File file) throws Exception{ // TODO: Fix libnd4j implementation - byte[] pathBytes = file.getAbsolutePath().getBytes(Charset.forName("UTF-8")); + byte[] pathBytes = file.getAbsolutePath().getBytes(StandardCharsets.UTF_8); ByteBuffer directBuffer = ByteBuffer.allocateDirect(pathBytes.length).order(ByteOrder.nativeOrder()); directBuffer.put(pathBytes); ((Buffer) directBuffer).rewind(); @@ -735,7 +736,7 @@ public abstract class BaseNativeNDArrayFactory extends BaseNDArrayFactory { } else{ - throw new Exception("Unsupported data type: " + 
String.valueOf(elemSize)); + throw new Exception("Unsupported data type: " + elemSize); } diff --git a/cavis-native/cavis-native-common/src/main/java/org/nd4j/compression/impl/NoOp.java b/cavis-native/cavis-native-common/src/main/java/org/nd4j/compression/impl/NoOp.java index bd21a9a5e..9be6daf7c 100644 --- a/cavis-native/cavis-native-common/src/main/java/org/nd4j/compression/impl/NoOp.java +++ b/cavis-native/cavis-native-common/src/main/java/org/nd4j/compression/impl/NoOp.java @@ -84,20 +84,20 @@ public class NoOp extends AbstractCompressor { CompressionDescriptor descriptor = new CompressionDescriptor(); descriptor.setCompressionType(getCompressionType()); - descriptor.setOriginalLength(length * elementSize); + descriptor.setOriginalLength((long) length * elementSize); descriptor.setCompressionAlgorithm(getDescriptor()); descriptor.setOriginalElementSize(elementSize); - descriptor.setCompressedLength(length * elementSize); + descriptor.setCompressedLength((long) length * elementSize); descriptor.setNumberOfElements(length); - BytePointer ptr = new BytePointer(length * elementSize); + BytePointer ptr = new BytePointer((long) length * elementSize); val perfD = PerformanceTracker.getInstance().helperStartTransaction(); // this Pointer.memcpy is used intentionally. 
This method operates on host memory ALWAYS - Pointer.memcpy(ptr, srcPointer, length * elementSize); + Pointer.memcpy(ptr, srcPointer, (long) length * elementSize); - PerformanceTracker.getInstance().helperRegisterTransaction(0, perfD, length * elementSize, MemcpyDirection.HOST_TO_HOST); + PerformanceTracker.getInstance().helperRegisterTransaction(0, perfD, (long) length * elementSize, MemcpyDirection.HOST_TO_HOST); CompressedDataBuffer buffer = new CompressedDataBuffer(ptr, descriptor); diff --git a/cavis-native/cavis-native-common/src/main/java/org/nd4j/rng/deallocator/GarbageStateReference.java b/cavis-native/cavis-native-common/src/main/java/org/nd4j/rng/deallocator/GarbageStateReference.java index bbf81f2e7..9ed8a5608 100644 --- a/cavis-native/cavis-native-common/src/main/java/org/nd4j/rng/deallocator/GarbageStateReference.java +++ b/cavis-native/cavis-native-common/src/main/java/org/nd4j/rng/deallocator/GarbageStateReference.java @@ -28,7 +28,7 @@ import java.lang.ref.WeakReference; public class GarbageStateReference extends WeakReference { @Getter - private Pointer statePointer; + private final Pointer statePointer; public GarbageStateReference(NativePack referent, ReferenceQueue queue) { super(referent, queue); diff --git a/cavis-native/cavis-native-common/src/main/java/org/nd4j/rng/deallocator/NativeRandomDeallocator.java b/cavis-native/cavis-native-common/src/main/java/org/nd4j/rng/deallocator/NativeRandomDeallocator.java index 362989583..091fa590e 100644 --- a/cavis-native/cavis-native-common/src/main/java/org/nd4j/rng/deallocator/NativeRandomDeallocator.java +++ b/cavis-native/cavis-native-common/src/main/java/org/nd4j/rng/deallocator/NativeRandomDeallocator.java @@ -38,7 +38,7 @@ public class NativeRandomDeallocator { // we don't really need concurrency here, so 1 queue will be just fine private final ReferenceQueue queue; private final Map referenceMap; - private List deallocatorThreads = new ArrayList<>(); + private final List deallocatorThreads = new 
ArrayList<>(); private NativeRandomDeallocator() { this.queue = new ReferenceQueue<>(); diff --git a/cavis-native/cavis-native-common/src/main/java/org/nd4j/storage/CompressedRamStorage.java b/cavis-native/cavis-native-common/src/main/java/org/nd4j/storage/CompressedRamStorage.java index 73b3f2c66..8c6cd0500 100644 --- a/cavis-native/cavis-native-common/src/main/java/org/nd4j/storage/CompressedRamStorage.java +++ b/cavis-native/cavis-native-common/src/main/java/org/nd4j/storage/CompressedRamStorage.java @@ -35,9 +35,9 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; public class CompressedRamStorage implements AbstractStorage { private NDArrayCompressor compressor = new NoOp(); - private Map compressedEntries = new ConcurrentHashMap<>(); + private final Map compressedEntries = new ConcurrentHashMap<>(); private boolean useInplaceCompression = false; - private ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); + private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); private boolean emulateIsAbsent = false; private CompressedRamStorage() { diff --git a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuNDArrayFactory.java b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuNDArrayFactory.java index 668b4e25b..d216cca3c 100644 --- a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuNDArrayFactory.java +++ b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuNDArrayFactory.java @@ -272,12 +272,12 @@ public class CpuNDArrayFactory extends BaseNativeNDArrayFactory { @Override public INDArray create(double[] data, long[] shape, char ordering) { - return create(data, shape, (Character) ordering); + return create(data, shape, ordering); } @Override public INDArray create(float[] data, long[] shape, char ordering) { - return create(data, shape, (Character) ordering); + return create(data, shape, ordering); } @Override @@ -682,9 
+682,9 @@ public class CpuNDArrayFactory extends BaseNativeNDArrayFactory { val tadManager = Nd4j.getExecutioner().getTADManager(); - val tadBuffers = tadManager.getTADOnlyShapeInfo(source, new int[] {sourceDimension}); + val tadBuffers = tadManager.getTADOnlyShapeInfo(source, sourceDimension); - val zTadBuffers = tadManager.getTADOnlyShapeInfo(ret, new int[] {sourceDimension}); + val zTadBuffers = tadManager.getTADOnlyShapeInfo(ret, sourceDimension); val hostTadShapeInfo = tadBuffers.getFirst().addressPointer(); @@ -970,10 +970,7 @@ public class CpuNDArrayFactory extends BaseNativeNDArrayFactory { source.setData(buffer); - if (buffer instanceof CompressedDataBuffer) - source.markAsCompressed(true); - else - source.markAsCompressed(false); + source.markAsCompressed(buffer instanceof CompressedDataBuffer); return source; } diff --git a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuTADManager.java b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuTADManager.java index f3179af14..adb7438cc 100644 --- a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuTADManager.java +++ b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuTADManager.java @@ -41,8 +41,8 @@ public class CpuTADManager implements TADManager { private Map> cache = new ConcurrentHashMap<>(); private NativeOps nativeOps; private ConstantHandler constantHandler; - private AtomicLong bytes = new AtomicLong(0); - private AtomicInteger counter = new AtomicInteger(0); + private final AtomicLong bytes = new AtomicLong(0); + private final AtomicInteger counter = new AtomicInteger(0); private static final int MAX_ENTRIES = 100; public CpuTADManager() { diff --git a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/DirectShapeInfoProvider.java b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/DirectShapeInfoProvider.java index 3e2cc6619..538caaa7e 100644 
--- a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/DirectShapeInfoProvider.java +++ b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/DirectShapeInfoProvider.java @@ -40,8 +40,8 @@ public class DirectShapeInfoProvider extends BaseShapeInfoProvider { // TODO: to be removed private Map> shapeCache = new ConcurrentHashMap<>(); - private Map> longCache = new ConcurrentHashMap<>(); - private AtomicInteger counter = new AtomicInteger(0); + private final Map> longCache = new ConcurrentHashMap<>(); + private final AtomicInteger counter = new AtomicInteger(0); private static final int MAX_ENTRIES = 1000; public Pair createShapeInformation(long[] shape, long[] stride, long elementWiseStride, char order, DataType dataType) { diff --git a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/blas/CpuLapack.java b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/blas/CpuLapack.java index bf23aec07..16841b11c 100644 --- a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/blas/CpuLapack.java +++ b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/blas/CpuLapack.java @@ -44,7 +44,7 @@ public class CpuLapack extends BaseLapack { if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE) { throw new ND4JArraySizeException(); } - return A.ordering() == 'f' ? (int) A.rows() : (int) A.columns(); + return A.ordering() == 'f' ? 
A.rows() : A.columns(); } //========================= // L U DECOMP @@ -86,7 +86,7 @@ public class CpuLapack extends BaseLapack { // Copy R ( upper part of Q ) into result if( R != null ) { R.assign( A.get( NDArrayIndex.interval( 0, A.columns() ), NDArrayIndex.all() ) ) ; - INDArrayIndex ix[] = new INDArrayIndex[ 2 ] ; + INDArrayIndex[] ix = new INDArrayIndex[ 2 ] ; for( int i=1 ; i strings) { // header size first - long size = (strings.size() + 1) * 8; + long size = (strings.size() + 1) * 8L; for (val s:strings) size += s.length(); diff --git a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/cache/ConstantBuffersCache.java b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/cache/ConstantBuffersCache.java index dd61c6d83..8d00cc5cc 100644 --- a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/cache/ConstantBuffersCache.java +++ b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/cache/ConstantBuffersCache.java @@ -35,8 +35,8 @@ import java.util.concurrent.atomic.AtomicLong; public class ConstantBuffersCache extends BasicConstantHandler { protected Map buffersCache = new ConcurrentHashMap<>(); - private AtomicInteger counter = new AtomicInteger(0); - private AtomicLong bytes = new AtomicLong(0); + private final AtomicInteger counter = new AtomicInteger(0); + private final AtomicLong bytes = new AtomicLong(0); private static final int MAX_ENTRIES = 1000; /** @@ -58,8 +58,8 @@ public class ConstantBuffersCache extends BasicConstantHandler { counter.incrementAndGet(); buffersCache.put(descriptor, buffer); - bytes.addAndGet(array.length * Nd4j.sizeOfDataType(dataType)); - AllocationsTracker.getInstance().markAllocated(AllocationKind.CONSTANT, 0, array.length * Nd4j.sizeOfDataType(dataType)); + bytes.addAndGet((long) array.length * Nd4j.sizeOfDataType(dataType)); + AllocationsTracker.getInstance().markAllocated(AllocationKind.CONSTANT, 0, (long) array.length * 
Nd4j.sizeOfDataType(dataType)); } return buffer; } @@ -78,8 +78,8 @@ public class ConstantBuffersCache extends BasicConstantHandler { counter.incrementAndGet(); buffersCache.put(descriptor, buffer); - bytes.addAndGet(array.length * Nd4j.sizeOfDataType(dataType)); - AllocationsTracker.getInstance().markAllocated(AllocationKind.CONSTANT, 0, array.length * Nd4j.sizeOfDataType(dataType)); + bytes.addAndGet((long) array.length * Nd4j.sizeOfDataType(dataType)); + AllocationsTracker.getInstance().markAllocated(AllocationKind.CONSTANT, 0, (long) array.length * Nd4j.sizeOfDataType(dataType)); } return buffer; } @@ -98,8 +98,8 @@ public class ConstantBuffersCache extends BasicConstantHandler { counter.incrementAndGet(); buffersCache.put(descriptor, buffer); - bytes.addAndGet(array.length * Nd4j.sizeOfDataType(dataType)); - AllocationsTracker.getInstance().markAllocated(AllocationKind.CONSTANT, 0, array.length * Nd4j.sizeOfDataType(dataType)); + bytes.addAndGet((long) array.length * Nd4j.sizeOfDataType(dataType)); + AllocationsTracker.getInstance().markAllocated(AllocationKind.CONSTANT, 0, (long) array.length * Nd4j.sizeOfDataType(dataType)); } return buffer; } @@ -118,8 +118,8 @@ public class ConstantBuffersCache extends BasicConstantHandler { counter.incrementAndGet(); buffersCache.put(descriptor, buffer); - bytes.addAndGet(array.length * Nd4j.sizeOfDataType(dataType)); - AllocationsTracker.getInstance().markAllocated(AllocationKind.CONSTANT, 0, array.length * Nd4j.sizeOfDataType(dataType)); + bytes.addAndGet((long) array.length * Nd4j.sizeOfDataType(dataType)); + AllocationsTracker.getInstance().markAllocated(AllocationKind.CONSTANT, 0, (long) array.length * Nd4j.sizeOfDataType(dataType)); } return buffer; } @@ -138,8 +138,8 @@ public class ConstantBuffersCache extends BasicConstantHandler { counter.incrementAndGet(); buffersCache.put(descriptor, buffer); - bytes.addAndGet(array.length * Nd4j.sizeOfDataType(dataType)); - 
AllocationsTracker.getInstance().markAllocated(AllocationKind.CONSTANT, 0, array.length * Nd4j.sizeOfDataType(dataType)); + bytes.addAndGet((long) array.length * Nd4j.sizeOfDataType(dataType)); + AllocationsTracker.getInstance().markAllocated(AllocationKind.CONSTANT, 0, (long) array.length * Nd4j.sizeOfDataType(dataType)); } return buffer; } diff --git a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/compression/CpuFlexibleThreshold.java b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/compression/CpuFlexibleThreshold.java index e13355cfe..9b43a1414 100644 --- a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/compression/CpuFlexibleThreshold.java +++ b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/compression/CpuFlexibleThreshold.java @@ -77,7 +77,7 @@ public class CpuFlexibleThreshold extends CpuThreshold { pointer.put(3, 0); CompressionDescriptor descriptor = new CompressionDescriptor(); - descriptor.setCompressedLength(compressedLength * 4); // sizeOf(INT) + descriptor.setCompressedLength(compressedLength * 4L); // sizeOf(INT) descriptor.setOriginalLength(originalLength); descriptor.setOriginalElementSize(Nd4j.sizeOfDataType(buffer.dataType())); descriptor.setNumberOfElements(buffer.length()); diff --git a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/compression/CpuThreshold.java b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/compression/CpuThreshold.java index 209747157..96d0ff617 100644 --- a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/compression/CpuThreshold.java +++ b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/compression/CpuThreshold.java @@ -124,7 +124,7 @@ public class CpuThreshold extends AbstractCompressor { pointer.put(3, 0); CompressionDescriptor descriptor = new CompressionDescriptor(); - 
descriptor.setCompressedLength(compressedLength * 4); // sizeOf(INT) + descriptor.setCompressedLength(compressedLength * 4L); // sizeOf(INT) descriptor.setOriginalLength(originalLength); descriptor.setOriginalElementSize(Nd4j.sizeOfDataType(buffer.dataType())); descriptor.setNumberOfElements(buffer.length()); diff --git a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/ops/CpuOpContext.java b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/ops/CpuOpContext.java index 0a922c704..d6ddf49de 100644 --- a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/ops/CpuOpContext.java +++ b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/ops/CpuOpContext.java @@ -40,8 +40,8 @@ import org.nd4j.nativeblas.OpaqueRandomGenerator; public class CpuOpContext extends BaseOpContext implements OpContext, Deallocatable { // we might want to have configurable - private NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps(); - private OpaqueContext context = nativeOps.createGraphContext(1); + private final NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps(); + private final OpaqueContext context = nativeOps.createGraphContext(1); private final transient long id = Nd4j.getDeallocatorService().nextValue(); public CpuOpContext() { @@ -74,7 +74,7 @@ public class CpuOpContext extends BaseOpContext implements OpContext, Deallocata if (arguments.length > 0) { super.setTArguments(arguments); nativeOps.setGraphContextTArguments(context, new DoublePointer(arguments), arguments.length); - }; + } } @Override @@ -86,7 +86,7 @@ public class CpuOpContext extends BaseOpContext implements OpContext, Deallocata args[e] = arguments[e].toInt(); nativeOps.setGraphContextDArguments(context, new IntPointer(args), arguments.length); - }; + } } @Override @@ -150,7 +150,7 @@ public class CpuOpContext extends BaseOpContext implements OpContext, Deallocata @Override public String 
getUniqueId() { - return new String("CTX_" + id); + return "CTX_" + id; } @Override diff --git a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/ops/NativeOpExecutioner.java b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/ops/NativeOpExecutioner.java index 52ab50235..53fc39d7e 100644 --- a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/ops/NativeOpExecutioner.java +++ b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/ops/NativeOpExecutioner.java @@ -76,20 +76,20 @@ import java.util.*; @Slf4j public class NativeOpExecutioner extends DefaultOpExecutioner { private final NativeOpsHolder holder = NativeOpsHolder.getInstance(); - private NativeOps loop = holder.getDeviceNativeOps(); - private ConstantHandler constantHandler = Nd4j.getConstantHandler(); - private CpuTADManager tadManager = new CpuTADManager(); + private final NativeOps loop = holder.getDeviceNativeOps(); + private final ConstantHandler constantHandler = Nd4j.getConstantHandler(); + private final CpuTADManager tadManager = new CpuTADManager(); //thread locals for custom op inputs and outputs to prevent allocations //every time exec(CustomOp) is called - private ThreadLocal> inputShapes = new ThreadLocal<>(); - private ThreadLocal> inputBuffers = new ThreadLocal<>(); - private ThreadLocal> outputShapes = new ThreadLocal<>(); - private ThreadLocal> outputBuffers = new ThreadLocal<>(); - private ThreadLocal> iArgsPointer = new ThreadLocal<>(); - private ThreadLocal> tArgsPointer = new ThreadLocal<>(); - private ThreadLocal> bArgsPointer = new ThreadLocal<>(); - private ThreadLocal> halfArgsPointer = new ThreadLocal<>(); + private final ThreadLocal> inputShapes = new ThreadLocal<>(); + private final ThreadLocal> inputBuffers = new ThreadLocal<>(); + private final ThreadLocal> outputShapes = new ThreadLocal<>(); + private final ThreadLocal> outputBuffers = new ThreadLocal<>(); + private final ThreadLocal> 
iArgsPointer = new ThreadLocal<>(); + private final ThreadLocal> tArgsPointer = new ThreadLocal<>(); + private final ThreadLocal> bArgsPointer = new ThreadLocal<>(); + private final ThreadLocal> halfArgsPointer = new ThreadLocal<>(); protected Map customOps = null; @@ -103,8 +103,8 @@ public class NativeOpExecutioner extends DefaultOpExecutioner { * Instead of allocating new memory chunks for each batch invocation, we reuse them on thread/opNum basis * Since for NativeOpExecutioner all executions are synchronous */ - private ThreadLocal> batchPointers = new ThreadLocal<>(); - private ThreadLocal> memoryBlocks = new ThreadLocal<>(); + private final ThreadLocal> batchPointers = new ThreadLocal<>(); + private final ThreadLocal> memoryBlocks = new ThreadLocal<>(); public NativeOpExecutioner() { tadManager.init(loop, constantHandler); @@ -120,7 +120,7 @@ public class NativeOpExecutioner extends DefaultOpExecutioner { } else { val split = env.toLowerCase().split(","); for (val name:split) { - mklOverrides.put(name, new Boolean(true)); + mklOverrides.put(name, Boolean.TRUE); } } } @@ -298,7 +298,7 @@ public class NativeOpExecutioner extends DefaultOpExecutioner { long xT = x.tensorsAlongDimension(dimension); long yT = y.tensorsAlongDimension(dimension); - ret = Nd4j.create(op.resultType(), new long[]{xT, yT}); + ret = Nd4j.create(op.resultType(), xT, yT); } else { if (y != null) { @@ -354,7 +354,7 @@ public class NativeOpExecutioner extends DefaultOpExecutioner { * and the associated offsets for each {@link INDArray#tensorAlongDimension(int, int...)} * The first item is the shape information. The second one is the offsets. */ - Pair tadBuffers = x.isEmpty() ? Pair.makePair(x.data(), null): tadManager.getTADOnlyShapeInfo(x, dimension); + Pair tadBuffers = x.isEmpty() ? Pair.makePair(x.data(), null): tadManager.getTADOnlyShapeInfo(x, dimension); Pair yTadBuffers = null; /** * Note that we use addresses in libnd4j. 
@@ -1699,7 +1699,7 @@ public class NativeOpExecutioner extends DefaultOpExecutioner { int nOut = opContext != null ? opContext.numOutputArguments() : op.numOutputArguments(); log.error("Failed to calculate output shapes for op {}. Attempted to execute with {} inputs, {} outputs, " + "{} targs, {} iargs, {} bargs and {} dargs. {} - Please see above message (printed out from c++) for a possible cause of error.", - op.opName(), nIn, nOut, nTArgs, nIArgs, nBArgs, nDArgs, sb.toString()); + op.opName(), nIn, nOut, nTArgs, nIArgs, nBArgs, nDArgs, sb); throw t; } @@ -1898,7 +1898,7 @@ public class NativeOpExecutioner extends DefaultOpExecutioner { if (Nd4jCpu.Environment.getInstance().isUseMKLDNN()) { val opName = op.opName(); val state = mklOverrides.get(op); - if (state != null && state == true) { + if (state != null && state) { mklOverride = true; Nd4jCpu.Environment.getInstance().setUseMKLDNN(true); } @@ -1972,7 +1972,7 @@ public class NativeOpExecutioner extends DefaultOpExecutioner { nT + " targs," + nB + " bargs and " + nI + " iargs. 
" + - sb.toString() + + sb + " - Please see above message (printed out from c++) for a possible cause of error."); throw e; } finally { diff --git a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/workspace/CpuWorkspaceDeallocator.java b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/workspace/CpuWorkspaceDeallocator.java index 91f29101d..671c39a93 100644 --- a/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/workspace/CpuWorkspaceDeallocator.java +++ b/cavis-native/cavis-native-cpu/src/main/java/org/nd4j/linalg/cpu/nativecpu/workspace/CpuWorkspaceDeallocator.java @@ -36,10 +36,10 @@ import java.util.Queue; @Slf4j public class CpuWorkspaceDeallocator implements Deallocator { - private PointersPair pointersPair; - private Queue pinnedPointers; - private List externalPointers; - private LocationPolicy location; + private final PointersPair pointersPair; + private final Queue pinnedPointers; + private final List externalPointers; + private final LocationPolicy location; private Pair mmapInfo; public CpuWorkspaceDeallocator(@NonNull CpuWorkspace workspace) { diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/concurrency/DeviceAllocationsTracker.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/concurrency/DeviceAllocationsTracker.java index 907fd8103..870989777 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/concurrency/DeviceAllocationsTracker.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/concurrency/DeviceAllocationsTracker.java @@ -38,7 +38,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; */ @Slf4j public class DeviceAllocationsTracker { - private Configuration configuration; + private final Configuration configuration; private final ReentrantReadWriteLock globalLock = new ReentrantReadWriteLock(); diff --git 
a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/concurrency/RRWLock.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/concurrency/RRWLock.java index 35aa72207..0c298bc83 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/concurrency/RRWLock.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/concurrency/RRWLock.java @@ -32,10 +32,10 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; * @author raver119@gmail.com */ public class RRWLock implements Lock { - private ReentrantReadWriteLock globalLock = new ReentrantReadWriteLock(); - private ReentrantReadWriteLock externalsLock = new ReentrantReadWriteLock(); + private final ReentrantReadWriteLock globalLock = new ReentrantReadWriteLock(); + private final ReentrantReadWriteLock externalsLock = new ReentrantReadWriteLock(); - private Map objectLocks = new ConcurrentHashMap<>(); + private final Map objectLocks = new ConcurrentHashMap<>(); /** diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/AllocationPoint.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/AllocationPoint.java index e07bbf544..97378d94f 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/AllocationPoint.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/AllocationPoint.java @@ -54,7 +54,7 @@ import java.util.concurrent.atomic.AtomicBoolean; public class AllocationPoint { @Getter - private OpaqueDataBuffer ptrDataBuffer; + private final OpaqueDataBuffer ptrDataBuffer; @Getter @Setter @@ -75,14 +75,14 @@ public class AllocationPoint { // thread safety is guaranteed by allocLock private AllocationStatus allocationStatus = AllocationStatus.UNDEFINED; - private transient TimeProvider timeProvider = new OperativeProvider(); + private final transient TimeProvider timeProvider = new 
OperativeProvider(); // corresponding access times in TimeProvider quants - private long accessHostRead = 0L; + private final long accessHostRead = 0L; private long accessDeviceRead = 0L; - private long accessHostWrite = 0L; - private long accessDeviceWrite = 0L; + private final long accessHostWrite = 0L; + private final long accessDeviceWrite = 0L; protected static final NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps(); /* @@ -103,7 +103,7 @@ public class AllocationPoint { */ private volatile int deviceId; - private long bytes; + private final long bytes; public AllocationPoint(@NonNull OpaqueDataBuffer opaqueDataBuffer, long bytes) { ptrDataBuffer = opaqueDataBuffer; @@ -124,7 +124,7 @@ public class AllocationPoint { NativeOpsHolder.getInstance().getDeviceNativeOps().dbSetDeviceId(ptrDataBuffer, deviceId); } - private AtomicBoolean enqueued = new AtomicBoolean(false); + private final AtomicBoolean enqueued = new AtomicBoolean(false); @Getter @Setter diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/AtomicAllocator.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/AtomicAllocator.java index 21ba561f8..e0102fcac 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/AtomicAllocator.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/AtomicAllocator.java @@ -89,18 +89,18 @@ public class AtomicAllocator implements Allocator { @Getter private transient MemoryHandler memoryHandler; - private AtomicLong allocationsCounter = new AtomicLong(0); + private final AtomicLong allocationsCounter = new AtomicLong(0); - private AtomicLong objectsTracker = new AtomicLong(0); + private final AtomicLong objectsTracker = new AtomicLong(0); // we have single tracking point for allocation points, since we're not going to cycle through it any time soon - private Map allocationsMap = new ConcurrentHashMap<>(); + private 
final Map allocationsMap = new ConcurrentHashMap<>(); /* locks for internal resources */ - private ReentrantReadWriteLock globalLock = new ReentrantReadWriteLock(); - private ReentrantReadWriteLock externalsLock = new ReentrantReadWriteLock(); + private final ReentrantReadWriteLock globalLock = new ReentrantReadWriteLock(); + private final ReentrantReadWriteLock externalsLock = new ReentrantReadWriteLock(); /* here we have handles for garbage collector threads @@ -118,8 +118,8 @@ public class AtomicAllocator implements Allocator { private final Ring zeroLong = new LockedRing(30); private final Ring zeroShort = new LockedRing(30); - private ConstantHandler constantHandler = Nd4j.getConstantHandler(); - private AtomicLong useTracker = new AtomicLong(System.currentTimeMillis()); + private final ConstantHandler constantHandler = Nd4j.getConstantHandler(); + private final AtomicLong useTracker = new AtomicLong(System.currentTimeMillis()); public static AtomicAllocator getInstance() { if (INSTANCE == null) @@ -139,7 +139,7 @@ public class AtomicAllocator implements Allocator { /*initDeviceCollectors(); initHostCollectors();*/ - this.protector = ConstantProtector.getInstance(); + protector = ConstantProtector.getInstance(); } @@ -607,7 +607,7 @@ public class AtomicAllocator implements Allocator { //elementsDropped.incrementAndGet(); //continue; throw new UnsupportedOperationException("Pew-pew"); - } ; + } } else { elementsSurvived.incrementAndGet(); } @@ -777,7 +777,7 @@ public class AtomicAllocator implements Allocator { if (memoryHandler.getAllocatedHostMemory() < (configuration.getMaximumZeroAllocation() * 0.25) && (memoryHandler.getAllocatedHostObjects(bucketId) < 5000) && lastCheck > System.currentTimeMillis() - 30000) { - ; // i don't want deallocation to be fired on lower thresholds. just no sense locking stuff + // i don't want deallocation to be fired on lower thresholds. 
just no sense locking stuff //log.debug("Skipping zero GC round: ["+zeroUseCounter.get()+"/" +zeroAllocations.get(threadId).size() + "]"); } else { seekUnusedZero(bucketId, aggressiveness); diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/CudaDeallocator.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/CudaDeallocator.java index 72848face..81a411f85 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/CudaDeallocator.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/CudaDeallocator.java @@ -28,7 +28,7 @@ import org.nd4j.nativeblas.OpaqueDataBuffer; @Slf4j public class CudaDeallocator implements Deallocator { - private OpaqueDataBuffer opaqueDataBuffer; + private final OpaqueDataBuffer opaqueDataBuffer; public CudaDeallocator(@NonNull BaseCudaDataBuffer buffer) { opaqueDataBuffer = buffer.getOpaqueDataBuffer(); diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/MemoryTracker.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/MemoryTracker.java index 4db458499..bacf78bbc 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/MemoryTracker.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/MemoryTracker.java @@ -30,13 +30,13 @@ import java.util.concurrent.atomic.AtomicLong; @Slf4j public class MemoryTracker { - private List allocatedPerDevice = new ArrayList<>(); - private List cachedPerDevice = new ArrayList<>(); - private List totalPerDevice = new ArrayList<>(); - private List freePerDevice = new ArrayList<>(); - private List workspacesPerDevice = new ArrayList<>(); - private AtomicLong cachedHost = new AtomicLong(0); - private AtomicLong allocatedHost = new AtomicLong(0); + private final List allocatedPerDevice = new ArrayList<>(); + private final List cachedPerDevice = new 
ArrayList<>(); + private final List totalPerDevice = new ArrayList<>(); + private final List freePerDevice = new ArrayList<>(); + private final List workspacesPerDevice = new ArrayList<>(); + private final AtomicLong cachedHost = new AtomicLong(0); + private final AtomicLong allocatedHost = new AtomicLong(0); private final static MemoryTracker INSTANCE = new MemoryTracker(); public MemoryTracker() { diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/NestedPoint.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/NestedPoint.java index 118c3628b..208e87ae9 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/NestedPoint.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/NestedPoint.java @@ -29,6 +29,7 @@ import org.nd4j.jita.allocator.enums.AllocationStatus; import org.nd4j.jita.allocator.time.RateTimer; import org.nd4j.jita.allocator.time.impl.BinaryTimer; +import java.util.Objects; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; @@ -47,9 +48,9 @@ public class NestedPoint { private AtomicState accessState; private AtomicLong accessTime; @Getter - private RateTimer timerShort = new BinaryTimer(10, TimeUnit.SECONDS); + private final RateTimer timerShort = new BinaryTimer(10, TimeUnit.SECONDS); @Getter - private RateTimer timerLong = new BinaryTimer(60, TimeUnit.SECONDS); + private final RateTimer timerLong = new BinaryTimer(60, TimeUnit.SECONDS); // by default memory is UNDEFINED, and depends on parent memory chunk for now @@ -57,7 +58,7 @@ public class NestedPoint { @Setter private AllocationStatus nestedStatus = AllocationStatus.UNDEFINED; - private AtomicLong counter = new AtomicLong(0); + private final AtomicLong counter = new AtomicLong(0); public NestedPoint(@NonNull AllocationShape shape) { this.shape = shape; @@ -94,7 +95,7 @@ public class NestedPoint { NestedPoint that = 
(NestedPoint) o; - return shape != null ? shape.equals(that.shape) : that.shape == null; + return Objects.equals(shape, that.shape); } diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/tad/DeviceTADManager.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/tad/DeviceTADManager.java index 133a2044e..cd0efcad8 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/tad/DeviceTADManager.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/tad/DeviceTADManager.java @@ -41,7 +41,7 @@ import java.util.concurrent.Semaphore; @Slf4j public class DeviceTADManager extends BasicTADManager { protected List>> tadCache = new ArrayList<>(); - private Semaphore lock = new Semaphore(1); + private final Semaphore lock = new Semaphore(1); public DeviceTADManager() { int numDevices = Nd4j.getAffinityManager().getNumberOfDevices(); diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/time/impl/BinaryTimer.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/time/impl/BinaryTimer.java index f71b46bf8..281135cb7 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/time/impl/BinaryTimer.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/time/impl/BinaryTimer.java @@ -32,8 +32,8 @@ import java.util.concurrent.atomic.AtomicLong; * @author raver119@gmail.com */ public class BinaryTimer implements RateTimer { - private AtomicLong timer; - private long timeframeMilliseconds; + private final AtomicLong timer; + private final long timeframeMilliseconds; public BinaryTimer(long timeframe, TimeUnit timeUnit) { timer = new AtomicLong(System.currentTimeMillis()); @@ -80,10 +80,6 @@ public class BinaryTimer implements RateTimer { protected boolean isAlive() { long currentTime = System.currentTimeMillis(); - if (currentTime - timer.get() > timeframeMilliseconds) 
{ - return false; - } - - return true; + return currentTime - timer.get() <= timeframeMilliseconds; } } diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/time/impl/SimpleTimer.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/time/impl/SimpleTimer.java index 1089ffdff..b89aec51d 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/time/impl/SimpleTimer.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/time/impl/SimpleTimer.java @@ -91,7 +91,6 @@ public class SimpleTimer implements RateTimer { buckets[x] = 0; } else { // do nothing here probably - ; } } diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/time/providers/OperativeProvider.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/time/providers/OperativeProvider.java index 001796fae..e6dacf8c7 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/time/providers/OperativeProvider.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/time/providers/OperativeProvider.java @@ -28,7 +28,7 @@ import java.util.concurrent.atomic.AtomicLong; * @author raver119@gmail.com */ public class OperativeProvider implements TimeProvider { - private AtomicLong time = new AtomicLong(0); + private final AtomicLong time = new AtomicLong(0); /** diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/time/rings/LockedRing.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/time/rings/LockedRing.java index 86e6195f9..bf0f5efa5 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/time/rings/LockedRing.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/time/rings/LockedRing.java @@ -33,7 +33,7 @@ public class LockedRing implements Ring { private final float[] ring; private 
final AtomicInteger position = new AtomicInteger(0); - private ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); + private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); /** * Builds new BasicRing with specified number of elements stored diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/utils/AllocationUtils.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/utils/AllocationUtils.java index e8f137506..afbb06e46 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/utils/AllocationUtils.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/utils/AllocationUtils.java @@ -92,7 +92,7 @@ public class AllocationUtils { public static DataBuffer getPointersBuffer(long[] pointers) { CudaDoubleDataBuffer tempX = new CudaDoubleDataBuffer(pointers.length); - AtomicAllocator.getInstance().memcpyBlocking(tempX, new LongPointer(pointers), pointers.length * 8, 0); + AtomicAllocator.getInstance().memcpyBlocking(tempX, new LongPointer(pointers), pointers.length * 8L, 0); return tempX; } } diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/concurrency/CudaAffinityManager.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/concurrency/CudaAffinityManager.java index 5e90a12a6..5e8377e84 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/concurrency/CudaAffinityManager.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/concurrency/CudaAffinityManager.java @@ -47,11 +47,11 @@ import java.util.concurrent.atomic.AtomicInteger; */ @Slf4j public class CudaAffinityManager extends BasicAffinityManager { - private Map affinityMap = new ConcurrentHashMap<>(); - private AtomicInteger devPtr = new AtomicInteger(0); - private ThreadLocal affiliated = new ThreadLocal<>(); + private final Map affinityMap = new ConcurrentHashMap<>(); + private final AtomicInteger 
devPtr = new AtomicInteger(0); + private final ThreadLocal affiliated = new ThreadLocal<>(); - private AtomicInteger numberOfDevices = new AtomicInteger(-1); + private final AtomicInteger numberOfDevices = new AtomicInteger(-1); public CudaAffinityManager() { super(); diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/concurrency/EventsProvider.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/concurrency/EventsProvider.java index 4e5ee96a8..d1354765f 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/concurrency/EventsProvider.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/concurrency/EventsProvider.java @@ -35,9 +35,9 @@ import java.util.concurrent.atomic.AtomicLong; */ @Deprecated public class EventsProvider { - private List> queue = new ArrayList<>(); - private AtomicLong newCounter = new AtomicLong(0); - private AtomicLong cacheCounter = new AtomicLong(0); + private final List> queue = new ArrayList<>(); + private final AtomicLong newCounter = new AtomicLong(0); + private final AtomicLong cacheCounter = new AtomicLong(0); public EventsProvider() { int numDev = Nd4j.getAffinityManager().getNumberOfDevices(); diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/conf/Configuration.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/conf/Configuration.java index f25c90698..b20c23d4c 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/conf/Configuration.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/conf/Configuration.java @@ -89,13 +89,13 @@ public class Configuration implements Serializable { * Minimal number of activations for relocation threshold */ @Getter - private int minimumRelocationThreshold = 5; + private final int minimumRelocationThreshold = 5; /** * Minimal guaranteed TTL for memory chunk */ @Getter - private long minimumTTLMilliseconds = 10 * 1000L; + private final long 
minimumTTLMilliseconds = 10 * 1000L; /** * Number of buckets/garbage collectors for host memory @@ -108,18 +108,18 @@ public class Configuration implements Serializable { */ @Deprecated @Getter - private Aggressiveness hostDeallocAggressiveness = Aggressiveness.REASONABLE; + private final Aggressiveness hostDeallocAggressiveness = Aggressiveness.REASONABLE; @Deprecated @Getter - private Aggressiveness gpuDeallocAggressiveness = Aggressiveness.REASONABLE; + private final Aggressiveness gpuDeallocAggressiveness = Aggressiveness.REASONABLE; /** * Allocation aggressiveness */ @Deprecated @Getter - private Aggressiveness gpuAllocAggressiveness = Aggressiveness.REASONABLE; + private final Aggressiveness gpuAllocAggressiveness = Aggressiveness.REASONABLE; /** @@ -157,10 +157,10 @@ public class Configuration implements Serializable { private long maximumSingleDeviceAllocation = 1024 * 1024 * 1024L; @Getter - private List availableDevices = new ArrayList<>(); + private final List availableDevices = new ArrayList<>(); @Getter - private List bannedDevices = new ArrayList<>(); + private final List bannedDevices = new ArrayList<>(); @Getter private int maximumGridSize = 4096; diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/conf/CudaEnvironment.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/conf/CudaEnvironment.java index 69f600cd5..802e631cb 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/conf/CudaEnvironment.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/conf/CudaEnvironment.java @@ -37,7 +37,7 @@ import java.util.concurrent.ConcurrentHashMap; public class CudaEnvironment { private static final CudaEnvironment INSTANCE = new CudaEnvironment(); private static volatile Configuration configuration; - private static Map arch = new ConcurrentHashMap<>(); + private static final Map arch = new ConcurrentHashMap<>(); private CudaEnvironment() { configuration = new Configuration(); @@ 
-67,7 +67,7 @@ public class CudaEnvironment { if (!arch.containsKey(deviceId)) { int major = NativeOpsHolder.getInstance().getDeviceNativeOps().getDeviceMajor(deviceId); int minor = NativeOpsHolder.getInstance().getDeviceNativeOps().getDeviceMinor(deviceId); - Integer cc = Integer.parseInt(new String("" + major + minor)); + Integer cc = Integer.parseInt("" + major + minor); arch.put(deviceId, cc); return cc; } diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/constant/ConstantProtector.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/constant/ConstantProtector.java index 635b4d3dd..2a36e3f6d 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/constant/ConstantProtector.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/constant/ConstantProtector.java @@ -38,13 +38,13 @@ import java.util.concurrent.CopyOnWriteArrayList; * @author raver119@gmail.com */ public class ConstantProtector { - private static ConstantProtector ourInstance = new ConstantProtector(); + private static final ConstantProtector ourInstance = new ConstantProtector(); public static ConstantProtector getInstance() { return ourInstance; } - private List protectorLegacy = new CopyOnWriteArrayList<>(); + private final List protectorLegacy = new CopyOnWriteArrayList<>(); private List> protector = new CopyOnWriteArrayList<>(); private List>> deviceCache = new ArrayList<>(); diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/constant/ProtectedCudaConstantHandler.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/constant/ProtectedCudaConstantHandler.java index 239fa6a8e..c43171f3d 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/constant/ProtectedCudaConstantHandler.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/constant/ProtectedCudaConstantHandler.java @@ -55,7 +55,7 @@ import java.util.concurrent.atomic.AtomicLong; */ 
@Slf4j public class ProtectedCudaConstantHandler implements ConstantHandler { - private static ProtectedCudaConstantHandler ourInstance = new ProtectedCudaConstantHandler(); + private static final ProtectedCudaConstantHandler ourInstance = new ProtectedCudaConstantHandler(); protected Map constantOffsets = new HashMap<>(); protected Map deviceLocks = new ConcurrentHashMap<>(); diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/constant/ProtectedCudaShapeInfoProvider.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/constant/ProtectedCudaShapeInfoProvider.java index e225e68c8..a1eac1e5f 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/constant/ProtectedCudaShapeInfoProvider.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/constant/ProtectedCudaShapeInfoProvider.java @@ -41,14 +41,14 @@ public class ProtectedCudaShapeInfoProvider extends BaseShapeInfoProvider { private AtomicAllocator allocator; - private AtomicLong cacheHit = new AtomicLong(1); - private AtomicLong cacheMiss = new AtomicLong(1); + private final AtomicLong cacheHit = new AtomicLong(1); + private final AtomicLong cacheMiss = new AtomicLong(1); - private Semaphore lock = new Semaphore(1); + private final Semaphore lock = new Semaphore(1); protected static final ConstantProtector protector = ConstantProtector.getInstance(); - private static ProtectedCudaShapeInfoProvider ourInstance = new ProtectedCudaShapeInfoProvider(); + private static final ProtectedCudaShapeInfoProvider ourInstance = new ProtectedCudaShapeInfoProvider(); private ProtectedCudaShapeInfoProvider() { diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/handler/impl/CudaZeroHandler.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/handler/impl/CudaZeroHandler.java index abc3aa5f0..a36f1d4c1 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/handler/impl/CudaZeroHandler.java +++ 
b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/handler/impl/CudaZeroHandler.java @@ -92,9 +92,9 @@ public class CudaZeroHandler implements MemoryHandler { // tracker for thread->device affinity protected Map devicesAffinity = new ConcurrentHashMap<>(); - private ReentrantReadWriteLock deviceLock = new ReentrantReadWriteLock(); + private final ReentrantReadWriteLock deviceLock = new ReentrantReadWriteLock(); - private AtomicInteger devPtr = new AtomicInteger(0); + private final AtomicInteger devPtr = new AtomicInteger(0); private final AtomicBoolean wasInitialised = new AtomicBoolean(false); @@ -127,7 +127,7 @@ public class CudaZeroHandler implements MemoryHandler { private final Map> zeroAllocations = new ConcurrentHashMap<>(); - private AtomicLong zeroCounter = new AtomicLong(0); + private final AtomicLong zeroCounter = new AtomicLong(0); protected NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps(); @@ -137,13 +137,10 @@ public class CudaZeroHandler implements MemoryHandler { this.INITIAL_LOCATION = configuration.getFirstMemory(); - switch (configuration.getExecutionModel()) { - case SEQUENTIAL: { - this.flowController = new GridFlowController(); - } - break; - default: - throw new RuntimeException("Unknown ExecutionModel: [" + configuration.getExecutionModel() + "]"); + if (configuration.getExecutionModel() == Configuration.ExecutionModel.SEQUENTIAL) { + this.flowController = new GridFlowController(); + } else { + throw new RuntimeException("Unknown ExecutionModel: [" + configuration.getExecutionModel() + "]"); } int numDevices = NativeOpsHolder.getInstance().getDeviceNativeOps().getAvailableDevices(); @@ -165,9 +162,9 @@ public class CudaZeroHandler implements MemoryHandler { */ @Override public void init(@NonNull Configuration configuration, @NonNull Allocator allocator) { - this.configuration = configuration; + CudaZeroHandler.configuration = configuration; - this.deviceMemoryTracker = new 
DeviceAllocationsTracker(this.configuration); + this.deviceMemoryTracker = new DeviceAllocationsTracker(CudaZeroHandler.configuration); this.flowController.init(allocator); } diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/workspace/CudaWorkspace.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/workspace/CudaWorkspace.java index 62a02fd12..39dad7bd4 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/workspace/CudaWorkspace.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/workspace/CudaWorkspace.java @@ -391,8 +391,8 @@ public class CudaWorkspace extends Nd4jWorkspace { @Override protected void resetWorkspace() { - if (currentSize.get() < 1) - return; + if (currentSize.get() < 1) { + } /* diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/workspace/CudaWorkspaceDeallocator.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/workspace/CudaWorkspaceDeallocator.java index 4c7b15450..806986fc7 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/workspace/CudaWorkspaceDeallocator.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/workspace/CudaWorkspaceDeallocator.java @@ -36,9 +36,9 @@ import java.util.Queue; */ @Slf4j public class CudaWorkspaceDeallocator implements Deallocator { - private PointersPair pointersPair; - private Queue pinnedPointers; - private List externalPointers; + private final PointersPair pointersPair; + private final Queue pinnedPointers; + private final List externalPointers; public CudaWorkspaceDeallocator(@NonNull CudaWorkspace workspace) { this.pointersPair = workspace.workspace(); diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/CublasPointer.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/CublasPointer.java index 911348239..289e79416 100644 --- 
a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/CublasPointer.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/CublasPointer.java @@ -44,9 +44,9 @@ public class CublasPointer implements AutoCloseable { private Pointer devicePointer; private Pointer hostPointer; @Getter - private boolean closed = false; + private final boolean closed = false; private INDArray arr; - private CudaContext cudaContext; + private final CudaContext cudaContext; private boolean resultPointer = false; @@ -161,9 +161,7 @@ public class CublasPointer implements AutoCloseable { @Override public String toString() { - StringBuffer sb = new StringBuffer(); - sb.append("NativePointer: [" + devicePointer.address() + "]"); - return sb.toString(); + return "NativePointer: [" + devicePointer.address() + "]"; } diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/JCublasNDArrayFactory.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/JCublasNDArrayFactory.java index 88c08ceaa..95339aa30 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/JCublasNDArrayFactory.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/JCublasNDArrayFactory.java @@ -523,14 +523,14 @@ public class JCublasNDArrayFactory extends BaseNativeNDArrayFactory { context.getOldStream(), allocator.getDeviceIdPointer()); val tempIndexes = new CudaLongDataBuffer(indexes.length); - AtomicAllocator.getInstance().memcpyBlocking(tempIndexes, new LongPointer(ArrayUtil.toLongArray(indexes)), indexes.length * 8, 0); + AtomicAllocator.getInstance().memcpyBlocking(tempIndexes, new LongPointer(ArrayUtil.toLongArray(indexes)), indexes.length * 8L, 0); Pointer pIndex = AtomicAllocator.getInstance().getPointer(tempIndexes, context); TADManager tadManager = Nd4j.getExecutioner().getTADManager(); - Pair tadBuffers = tadManager.getTADOnlyShapeInfo(source, new 
int[]{sourceDimension}); - Pair zTadBuffers = tadManager.getTADOnlyShapeInfo(ret, new int[]{sourceDimension}); + Pair tadBuffers = tadManager.getTADOnlyShapeInfo(source, sourceDimension); + Pair zTadBuffers = tadManager.getTADOnlyShapeInfo(ret, sourceDimension); Pointer tadShapeInfo = AtomicAllocator.getInstance().getPointer(tadBuffers.getFirst(), context); Pointer zTadShapeInfo = AtomicAllocator.getInstance().getPointer(zTadBuffers.getFirst(), context); @@ -598,7 +598,7 @@ public class JCublasNDArrayFactory extends BaseNativeNDArrayFactory { CudaDoubleDataBuffer tempX = new CudaDoubleDataBuffer(arrays.length); - allocator.memcpyBlocking(tempX, new LongPointer(xPointers), xPointers.length * 8, 0); + allocator.memcpyBlocking(tempX, new LongPointer(xPointers), xPointers.length * 8L, 0); PointerPointer x = new PointerPointer(AtomicAllocator.getInstance().getPointer(tempX, context)); @@ -707,7 +707,7 @@ public class JCublasNDArrayFactory extends BaseNativeNDArrayFactory { CudaDoubleDataBuffer tempX = new CudaDoubleDataBuffer(arrays.length); - allocator.memcpyBlocking(tempX, new LongPointer(xPointers), xPointers.length * 8, 0); + allocator.memcpyBlocking(tempX, new LongPointer(xPointers), xPointers.length * 8L, 0); PointerPointer x = new PointerPointer(AtomicAllocator.getInstance().getPointer(tempX, context)); @@ -930,10 +930,10 @@ public class JCublasNDArrayFactory extends BaseNativeNDArrayFactory { val tempTAD = new CudaDoubleDataBuffer(arrays.size()); val tempOffsets = new CudaDoubleDataBuffer(arrays.size()); - AtomicAllocator.getInstance().memcpyBlocking(tempX, new LongPointer(xPointers), xPointers.length * 8, 0); - AtomicAllocator.getInstance().memcpyBlocking(tempShapes, new LongPointer(xShapes), xPointers.length * 8, 0); - AtomicAllocator.getInstance().memcpyBlocking(tempTAD, new LongPointer(tadShapes), xPointers.length * 8, 0); - AtomicAllocator.getInstance().memcpyBlocking(tempOffsets, new LongPointer(tadOffsets), xPointers.length * 8, 0); + 
AtomicAllocator.getInstance().memcpyBlocking(tempX, new LongPointer(xPointers), xPointers.length * 8L, 0); + AtomicAllocator.getInstance().memcpyBlocking(tempShapes, new LongPointer(xShapes), xPointers.length * 8L, 0); + AtomicAllocator.getInstance().memcpyBlocking(tempTAD, new LongPointer(tadShapes), xPointers.length * 8L, 0); + AtomicAllocator.getInstance().memcpyBlocking(tempOffsets, new LongPointer(tadOffsets), xPointers.length * 8L, 0); nativeOps.shuffle(extras, null, @@ -1078,10 +1078,7 @@ public class JCublasNDArrayFactory extends BaseNativeNDArrayFactory { DataBuffer buffer = convertDataEx(typeSrc, source.data(), typeDst); source.setData(buffer); - if (buffer instanceof CompressedDataBuffer) - source.markAsCompressed(true); - else - source.markAsCompressed(false); + source.markAsCompressed(buffer instanceof CompressedDataBuffer); return source; } @@ -1307,7 +1304,7 @@ public class JCublasNDArrayFactory extends BaseNativeNDArrayFactory { CudaDoubleDataBuffer tempX = new CudaDoubleDataBuffer(numTads); - AtomicAllocator.getInstance().memcpyBlocking(tempX, new LongPointer(xPointers), xPointers.length * 8, 0); + AtomicAllocator.getInstance().memcpyBlocking(tempX, new LongPointer(xPointers), xPointers.length * 8L, 0); PointerPointer extraz = new PointerPointer(null, // not used context.getOldStream(), AtomicAllocator.getInstance().getDeviceIdPointer()); diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLapack.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLapack.java index 912d2c388..c581f5696 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLapack.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLapack.java @@ -64,8 +64,8 @@ import static org.bytedeco.cuda.global.cusolver.*; @Slf4j public class JcublasLapack extends BaseLapack { - private NativeOps nativeOps = 
NativeOpsHolder.getInstance().getDeviceNativeOps(); - private Allocator allocator = AtomicAllocator.getInstance(); + private final NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps(); + private final Allocator allocator = AtomicAllocator.getInstance(); @Override public void sgetrf(int M, int N, INDArray A, INDArray IPIV, INDArray INFO) { @@ -109,7 +109,7 @@ public class JcublasLapack extends BaseLapack { int worksize = worksizeBuffer.getInt(0); // Now allocate memory for the workspace, the permutation matrix and a return code - Pointer workspace = new Workspace(worksize * Nd4j.sizeOfDataType()); + Pointer workspace = new Workspace((long) worksize * Nd4j.sizeOfDataType()); // Do the actual LU decomp stat = cusolverDnSgetrf(solverDn, M, N, (FloatPointer) xAPointer.getDevicePointer(), M, @@ -176,7 +176,7 @@ public class JcublasLapack extends BaseLapack { int worksize = worksizeBuffer.getInt(0); // Now allocate memory for the workspace, the permutation matrix and a return code - val workspace = new Workspace(worksize * Nd4j.sizeOfDataType()); + val workspace = new Workspace((long) worksize * Nd4j.sizeOfDataType()); // Do the actual LU decomp stat = cusolverDnDgetrf(solverDn, M, N, (DoublePointer) xAPointer.getDevicePointer(), M, @@ -250,7 +250,7 @@ public class JcublasLapack extends BaseLapack { } int worksize = worksizeBuffer.getInt(0); // Now allocate memory for the workspace, the permutation matrix and a return code - Pointer workspace = new Workspace(worksize * Nd4j.sizeOfDataType()); + Pointer workspace = new Workspace((long) worksize * Nd4j.sizeOfDataType()); // Do the actual QR decomp stat = cusolverDnSgeqrf(solverDn, M, N, @@ -275,7 +275,7 @@ public class JcublasLapack extends BaseLapack { if (r != null) { r.assign(a.get(NDArrayIndex.interval(0, a.columns()), NDArrayIndex.all())); - INDArrayIndex ix[] = new INDArrayIndex[2]; + INDArrayIndex[] ix = new INDArrayIndex[2]; for (int i = 1; i < Math.min(a.rows(), a.columns()); i++) { ix[0] = 
NDArrayIndex.point(i); ix[1] = NDArrayIndex.interval(0, i); @@ -289,7 +289,7 @@ public class JcublasLapack extends BaseLapack { (IntPointer) worksizeBuffer.addressPointer() ); worksize = worksizeBuffer.getInt(0); - workspace = new Workspace(worksize * Nd4j.sizeOfDataType()); + workspace = new Workspace((long) worksize * Nd4j.sizeOfDataType()); stat = cusolverDnSorgqr(solverDn, M, N, N, (FloatPointer) xAPointer.getDevicePointer(), M, @@ -365,7 +365,7 @@ public class JcublasLapack extends BaseLapack { } int worksize = worksizeBuffer.getInt(0); // Now allocate memory for the workspace, the permutation matrix and a return code - Pointer workspace = new Workspace(worksize * Nd4j.sizeOfDataType()); + Pointer workspace = new Workspace((long) worksize * Nd4j.sizeOfDataType()); // Do the actual QR decomp stat = cusolverDnDgeqrf(solverDn, M, N, @@ -390,7 +390,7 @@ public class JcublasLapack extends BaseLapack { if (r != null) { r.assign(a.get(NDArrayIndex.interval(0, a.columns()), NDArrayIndex.all())); - INDArrayIndex ix[] = new INDArrayIndex[2]; + INDArrayIndex[] ix = new INDArrayIndex[2]; for (int i = 1; i < Math.min(a.rows(), a.columns()); i++) { ix[0] = NDArrayIndex.point(i); ix[1] = NDArrayIndex.interval(0, i); @@ -403,7 +403,7 @@ public class JcublasLapack extends BaseLapack { (IntPointer) worksizeBuffer.addressPointer() ); worksize = worksizeBuffer.getInt(0); - workspace = new Workspace(worksize * Nd4j.sizeOfDataType()); + workspace = new Workspace((long) worksize * Nd4j.sizeOfDataType()); stat = cusolverDnDorgqr(solverDn, M, N, N, (DoublePointer) xAPointer.getDevicePointer(), M, @@ -476,7 +476,7 @@ public class JcublasLapack extends BaseLapack { int worksize = worksizeBuffer.getInt(0); // Now allocate memory for the workspace, the permutation matrix and a return code - Pointer workspace = new Workspace(worksize * Nd4j.sizeOfDataType()); + Pointer workspace = new Workspace((long) worksize * Nd4j.sizeOfDataType()); // Do the actual decomp stat = 
cusolverDnSpotrf(solverDn, uplo, N, @@ -498,14 +498,14 @@ public class JcublasLapack extends BaseLapack { if (uplo == cublas.CUBLAS_FILL_MODE_UPPER ) { A.assign(A.transpose()); - INDArrayIndex ix[] = new INDArrayIndex[2]; + INDArrayIndex[] ix = new INDArrayIndex[2]; for (int i = 1; i < Math.min(A.rows(), A.columns()); i++) { ix[0] = NDArrayIndex.point(i); ix[1] = NDArrayIndex.interval(0, i); A.put(ix, 0); } } else { - INDArrayIndex ix[] = new INDArrayIndex[2]; + INDArrayIndex[] ix = new INDArrayIndex[2]; for (int i = 0; i < Math.min(A.rows(), A.columns() - 1); i++) { ix[0] = NDArrayIndex.point(i); ix[1] = NDArrayIndex.interval(i + 1, A.columns()); @@ -562,7 +562,7 @@ public class JcublasLapack extends BaseLapack { int worksize = worksizeBuffer.getInt(0); // Now allocate memory for the workspace, the permutation matrix and a return code - Pointer workspace = new Workspace(worksize * Nd4j.sizeOfDataType(DataType.DOUBLE)); + Pointer workspace = new Workspace((long) worksize * Nd4j.sizeOfDataType(DataType.DOUBLE)); // Do the actual decomp stat = cusolverDnDpotrf(solverDn, uplo, N, @@ -584,14 +584,14 @@ public class JcublasLapack extends BaseLapack { if (uplo == cublas.CUBLAS_FILL_MODE_UPPER ) { A.assign(A.transpose()); - INDArrayIndex ix[] = new INDArrayIndex[2]; + INDArrayIndex[] ix = new INDArrayIndex[2]; for (int i = 1; i < Math.min(A.rows(), A.columns()); i++) { ix[0] = NDArrayIndex.point(i); ix[1] = NDArrayIndex.interval(0, i); A.put(ix, 0); } } else { - INDArrayIndex ix[] = new INDArrayIndex[2]; + INDArrayIndex[] ix = new INDArrayIndex[2]; for (int i = 0; i < Math.min(A.rows(), A.columns() - 1); i++) { ix[0] = NDArrayIndex.point(i); ix[1] = NDArrayIndex.interval(i + 1, A.columns()); @@ -691,7 +691,7 @@ public class JcublasLapack extends BaseLapack { } int worksize = worksizeBuffer.getInt(0); - Pointer workspace = new Workspace(worksize * Nd4j.sizeOfDataType()); + Pointer workspace = new Workspace((long) worksize * Nd4j.sizeOfDataType()); DataBuffer rwork = 
Nd4j.getDataBufferFactory().createFloat((M < N ? M : N) - 1); // Do the actual decomp @@ -803,7 +803,7 @@ public class JcublasLapack extends BaseLapack { int worksize = worksizeBuffer.getInt(0); // Now allocate memory for the workspace, the non-converging row buffer and a return code - Pointer workspace = new Workspace(worksize * Nd4j.sizeOfDataType()); + Pointer workspace = new Workspace((long) worksize * Nd4j.sizeOfDataType()); DataBuffer rwork = Nd4j.getDataBufferFactory().createDouble((M < N ? M : N) - 1); // Do the actual decomp @@ -858,7 +858,7 @@ public class JcublasLapack extends BaseLapack { if (A.rows() > Integer.MAX_VALUE) { throw new RuntimeException("Rows overflow"); } - int M = (int) A.rows(); + int M = A.rows(); if (Nd4j.getExecutioner() instanceof GridExecutioner) ((GridExecutioner) Nd4j.getExecutioner()).flushQueue(); @@ -892,7 +892,7 @@ public class JcublasLapack extends BaseLapack { int worksize = worksizeBuffer.getInt(0); // allocate memory for the workspace, the non-converging row buffer and a return code - val workspace = new Workspace(worksize * 4); //4 = float width + val workspace = new Workspace(worksize * 4L); //4 = float width INDArray INFO = Nd4j.createArrayFromShapeBuffer(Nd4j.getDataBufferFactory().createInt(1), Nd4j.getShapeInfoProvider().createShapeInformation(new long[]{1, 1}, A.dataType())); @@ -936,7 +936,7 @@ public class JcublasLapack extends BaseLapack { throw new RuntimeException("Rows overflow"); } - int M = (int) A.rows(); + int M = A.rows(); if (Nd4j.getExecutioner() instanceof GridExecutioner) ((GridExecutioner) Nd4j.getExecutioner()).flushQueue(); @@ -970,7 +970,7 @@ public class JcublasLapack extends BaseLapack { int worksize = worksizeBuffer.getInt(0); // allocate memory for the workspace, the non-converging row buffer and a return code - Pointer workspace = new Workspace(worksize * 8); //8 = double width + Pointer workspace = new Workspace(worksize * 8L); //8 = double width INDArray INFO = 
Nd4j.createArrayFromShapeBuffer(Nd4j.getDataBufferFactory().createInt(1), Nd4j.getShapeInfoProvider().createShapeInformation(new long[]{1, 1}, A.dataType())); diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLevel1.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLevel1.java index e20a9f1d4..5ace8e798 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLevel1.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLevel1.java @@ -59,9 +59,9 @@ import static org.bytedeco.cuda.global.cublas.*; */ @Slf4j public class JcublasLevel1 extends BaseLevel1 { - private Allocator allocator = AtomicAllocator.getInstance(); - private Nd4jBlas nd4jBlas = (Nd4jBlas) Nd4j.factory().blas(); - private NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps(); + private final Allocator allocator = AtomicAllocator.getInstance(); + private final Nd4jBlas nd4jBlas = (Nd4jBlas) Nd4j.factory().blas(); + private final NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps(); @Override protected float sdsdot(long N, float alpha, INDArray X, int incX, INDArray Y, int incY) { @@ -403,7 +403,7 @@ public class JcublasLevel1 extends BaseLevel1 { // cublasHandle_t handle = ctx.getCublasHandle(); - ((CudaExecutioner) Nd4j.getExecutioner()).exec(new Axpy(X, Y, Y, alpha)); + Nd4j.getExecutioner().exec(new Axpy(X, Y, Y, alpha)); OpExecutionerUtil.checkForAny(Y); } diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLevel2.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLevel2.java index ef6a5a567..a2fa256a6 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLevel2.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLevel2.java 
@@ -49,9 +49,9 @@ import static org.nd4j.linalg.jcublas.blas.CudaBlas.convertTranspose; */ @Slf4j public class JcublasLevel2 extends BaseLevel2 { - private Allocator allocator = AtomicAllocator.getInstance(); - private Nd4jBlas nd4jBlas = (Nd4jBlas) Nd4j.factory().blas(); - private NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps(); + private final Allocator allocator = AtomicAllocator.getInstance(); + private final Nd4jBlas nd4jBlas = (Nd4jBlas) Nd4j.factory().blas(); + private final NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps(); @Override protected void sgemv(char order, char TransA, int M, int N, float alpha, INDArray A, int lda, INDArray X, int incX, diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLevel3.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLevel3.java index 69338ed7b..1543bee72 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLevel3.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLevel3.java @@ -75,9 +75,9 @@ import static org.nd4j.linalg.jcublas.blas.CudaBlas.convertUplo; */ @Slf4j public class JcublasLevel3 extends BaseLevel3 { - private Allocator allocator = AtomicAllocator.getInstance(); - private Nd4jBlas nd4jBlas = (Nd4jBlas) Nd4j.factory().blas(); - private NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps(); + private final Allocator allocator = AtomicAllocator.getInstance(); + private final Nd4jBlas nd4jBlas = (Nd4jBlas) Nd4j.factory().blas(); + private final NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps(); @Override protected void hgemm(char Order, char TransA, char TransB, int M, int N, int K, float alpha, INDArray A, int lda, @@ -114,9 +114,9 @@ public class JcublasLevel3 extends BaseLevel3 { // CUDA_R_16F == 2 for CUDA 8 // CUBLAS_DATA_HALF == 2 for CUDA 7.5 
cublasSgemmEx(new cublasContext(handle), convertTranspose(TransA), convertTranspose(TransB), M, N, K, - new FloatPointer(alpha), (ShortPointer) cAPointer.getDevicePointer(), 2, lda, - (ShortPointer) cBPointer.getDevicePointer(), 2, ldb, new FloatPointer(beta), - (ShortPointer) cCPointer.getDevicePointer(), 2, ldc); + new FloatPointer(alpha), cAPointer.getDevicePointer(), 2, lda, + cBPointer.getDevicePointer(), 2, ldb, new FloatPointer(beta), + cCPointer.getDevicePointer(), 2, ldc); } diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/buffer/BaseCudaDataBuffer.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/buffer/BaseCudaDataBuffer.java index 3f97c6818..6b4793704 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/buffer/BaseCudaDataBuffer.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/buffer/BaseCudaDataBuffer.java @@ -83,7 +83,7 @@ public abstract class BaseCudaDataBuffer extends BaseDataBuffer implements JCuda @Getter protected transient volatile AllocationPoint allocationPoint; - private static AtomicAllocator allocator = AtomicAllocator.getInstance(); + private static final AtomicAllocator allocator = AtomicAllocator.getInstance(); @@ -1366,10 +1366,7 @@ public abstract class BaseCudaDataBuffer extends BaseDataBuffer implements JCuda public boolean equals(Object o) { if (o == null) return false; - if (this == o) - return true; - - return false; + return this == o; } @Override diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/buffer/CudaUtf8Buffer.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/buffer/CudaUtf8Buffer.java index 096646312..f5908afba 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/buffer/CudaUtf8Buffer.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/buffer/CudaUtf8Buffer.java @@ 
-35,6 +35,7 @@ import org.nd4j.linalg.api.memory.MemoryWorkspace; import java.io.UnsupportedEncodingException; import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; @@ -190,11 +191,7 @@ public class CudaUtf8Buffer extends BaseCudaDataBuffer { bytes[e] = dataPointer.get(idx); } - try { - return new String(bytes, "UTF-8"); - } catch (UnsupportedEncodingException e) { - throw new RuntimeException(e); - } + return new String(bytes, StandardCharsets.UTF_8); } @Override @@ -219,7 +216,7 @@ public class CudaUtf8Buffer extends BaseCudaDataBuffer { private static long stringBufferRequiredLength(@NonNull Collection strings) { // header size first - long size = (strings.size() + 1) * 8; + long size = (strings.size() + 1) * 8L; for (val s:strings) size += s.length(); diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/context/CudaContext.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/context/CudaContext.java index 7be9e08c1..b7cb87275 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/context/CudaContext.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/context/CudaContext.java @@ -67,7 +67,7 @@ public class CudaContext { @Builder.Default private int deviceId = -1; - private transient final static NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps(); + private final static NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps(); @Override public String toString() { diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/ops/executioner/CudaExecutioner.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/ops/executioner/CudaExecutioner.java index 2cc5077e4..612dfdda8 100644 --- 
a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/ops/executioner/CudaExecutioner.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/ops/executioner/CudaExecutioner.java @@ -477,7 +477,7 @@ public class CudaExecutioner extends DefaultOpExecutioner { val yT = op.y().tensorsAlongDimension(dimension); // we intentionally want to set it to 0.0 - ret = Nd4j.createUninitialized(dtype, new long[] {xT, yT}); + ret = Nd4j.createUninitialized(dtype, xT, yT); } else { if (op.y() != null) { //2 options here: either pairwise, equal sizes - OR every X TAD vs. entirety of Y @@ -823,7 +823,7 @@ public class CudaExecutioner extends DefaultOpExecutioner { Pointer hostYShapeInfo = y == null ? null : AddressRetriever.retrieveHostPointer(y.shapeInfoDataBuffer()); Pointer hostZShapeInfo = z == null ? null : AddressRetriever.retrieveHostPointer(z.shapeInfoDataBuffer()); - int fdimension[] = dimension; + int[] fdimension = dimension; if (fdimension == null) fdimension = new int[] {0}; @@ -940,7 +940,7 @@ public class CudaExecutioner extends DefaultOpExecutioner { Pointer devTadShapeInfo = AtomicAllocator.getInstance().getPointer(tadBuffers.getFirst(), context); DataBuffer offsets = x.isEmpty() ? null : tadBuffers.getSecond(); - Pointer devTadOffsets = offsets == null ? null : AtomicAllocator.getInstance().getPointer((DataBuffer) offsets, context); + Pointer devTadOffsets = offsets == null ? null : AtomicAllocator.getInstance().getPointer(offsets, context); Pointer xShapeInfo = AtomicAllocator.getInstance().getPointer(x.shapeInfoDataBuffer(), context); @@ -1337,7 +1337,7 @@ public class CudaExecutioner extends DefaultOpExecutioner { Pointer dimensionHostPointer = null; Pointer retPointer = null; Pointer retHostShape = null; - int dimension[] = null; + int[] dimension = null; Pointer hostXShapeInfo = x == null ? null : AddressRetriever.retrieveHostPointer(x.shapeInfoDataBuffer()); Pointer hostYShapeInfo = y == null ? 
null : AddressRetriever.retrieveHostPointer(y.shapeInfoDataBuffer()); @@ -1742,7 +1742,7 @@ public class CudaExecutioner extends DefaultOpExecutioner { return Collections.emptyList(); } - val inputBuffers = new PointerPointer<>(nIn * 2); + val inputBuffers = new PointerPointer<>(nIn * 2L); val inputShapes = new PointerPointer<>(nIn); val inputArgs = opContext != null ? opContext.getInputArrays() : op.inputArguments(); @@ -1934,8 +1934,8 @@ public class CudaExecutioner extends DefaultOpExecutioner { Nd4j.getExecutioner().commit(); - val ptrBuffers = new PointerPointer(map.size() * 2); - val ptrShapes = new PointerPointer(map.size() * 2); + val ptrBuffers = new PointerPointer(map.size() * 2L); + val ptrShapes = new PointerPointer(map.size() * 2L); val ptrIndices = new IntPointer(map.size()); int cnt = 0; @@ -1980,7 +1980,7 @@ public class CudaExecutioner extends DefaultOpExecutioner { val order = Shape.order(jshape); val array = Nd4j.create(shapeOf, stridesOf, 0, order); - Pointer.memcpy(AtomicAllocator.getInstance().getHostPointer(array), buffer, ArrayUtil.prod(shapeOf) * Nd4j.sizeOfDataType()); + Pointer.memcpy(AtomicAllocator.getInstance().getHostPointer(array), buffer, (long) ArrayUtil.prod(shapeOf) * Nd4j.sizeOfDataType()); //AtomicAllocator.getInstance().getAllocationPoint(array).tickHostWrite(); if (1 > 0) throw new UnsupportedOperationException("Pew-pew"); diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/ops/executioner/CudaGridExecutioner.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/ops/executioner/CudaGridExecutioner.java index 2787e7282..6f2343fcd 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/ops/executioner/CudaGridExecutioner.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/ops/executioner/CudaGridExecutioner.java @@ -78,20 +78,20 @@ public class CudaGridExecutioner extends CudaExecutioner implements GridExecutio 
//private List> deviceQueues = new ArrayList<>(); // last op - private ThreadLocal lastOp = new ThreadLocal<>(); + private final ThreadLocal lastOp = new ThreadLocal<>(); // private ThreadLocal extraz = new ThreadLocal<>(); - private ThreadLocal> deviceQueues = new ThreadLocal<>(); + private final ThreadLocal> deviceQueues = new ThreadLocal<>(); - private ThreadLocal opCounter = new ThreadLocal<>(); + private final ThreadLocal opCounter = new ThreadLocal<>(); - private AtomicLong metaCounter = new AtomicLong(0); - private AtomicLong execCounter = new AtomicLong(0); + private final AtomicLong metaCounter = new AtomicLong(0); + private final AtomicLong execCounter = new AtomicLong(0); - private List watchdog = new CopyOnWriteArrayList<>(); + private final List watchdog = new CopyOnWriteArrayList<>(); - private List> aggregates = new ArrayList<>(); + private final List> aggregates = new ArrayList<>(); - private AtomicBoolean experimental = new AtomicBoolean(false); + private final AtomicBoolean experimental = new AtomicBoolean(false); public CudaGridExecutioner() { // extraz.set(new PointerPointer(10)); @@ -125,9 +125,9 @@ public class CudaGridExecutioner extends CudaExecutioner implements GridExecutio invokeWatchdog(op); if (op instanceof ReduceOp) { - exec((ReduceOp) op, new int[] {Integer.MAX_VALUE}); + exec((ReduceOp) op, Integer.MAX_VALUE); } else if (op instanceof IndexAccumulation) { - exec((IndexAccumulation) op, new int[] {Integer.MAX_VALUE}); + exec((IndexAccumulation) op, Integer.MAX_VALUE); } else if (op instanceof ScalarOp || op instanceof TransformOp) { // the only entry place for TADless ops processAsGridOp(op); @@ -188,12 +188,8 @@ public class CudaGridExecutioner extends CudaExecutioner implements GridExecutio return true; } - if (opX == pointer.address()) { - //logger.error("op.X matched: {}", pointer.address()); - return true; - } - - return false; + //logger.error("op.X matched: {}", pointer.address()); + return opX == pointer.address(); } @@ 
-207,17 +203,11 @@ public class CudaGridExecutioner extends CudaExecutioner implements GridExecutio long opY = op.y() == null ? 0 : AtomicAllocator.getInstance().getHostPointer(op.y()).address(); - if (opZ == pointer.address() || opY == pointer.address() || opX == pointer.address()) - return true; - - return false; + return opZ == pointer.address() || opY == pointer.address() || opX == pointer.address(); } protected boolean compareArrays(INDArray array, Op op) { - if (op.x() == array || op.y() == array || op.z() == array) - return true; - - return false; + return op.x() == array || op.y() == array || op.z() == array; } /** @@ -476,10 +466,7 @@ public class CudaGridExecutioner extends CudaExecutioner implements GridExecutio * @return */ protected boolean isMatchingZX(Op opA, Op opB) { - if (opA.x() == opB.x() && opA.z() == opB.z() && opA.x() == opB.z()) - return true; - - return false; + return opA.x() == opB.x() && opA.z() == opB.z() && opA.x() == opB.z(); } /** @@ -490,10 +477,7 @@ public class CudaGridExecutioner extends CudaExecutioner implements GridExecutio * @return */ protected boolean isMatchingZXY(Op opA, Op opB) { - if (opA.z() == opB.x() || opA.z() == opB.y()) - return true; - - return false; + return opA.z() == opB.x() || opA.z() == opB.y(); } protected GridPointers pointerizeOp(OpDescriptor descriptor) { @@ -694,7 +678,7 @@ public class CudaGridExecutioner extends CudaExecutioner implements GridExecutio // So, that's scalar. 
We'll have to flush queue flushQueue(); - buildZ(op, new int[] {Integer.MAX_VALUE}); + buildZ(op, Integer.MAX_VALUE); super.invoke(op, null, new int[] {Integer.MAX_VALUE}); } else { buildZ(op, dimension); diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/ops/executioner/CudaOpContext.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/ops/executioner/CudaOpContext.java index c14b9c7eb..3ba143e36 100644 --- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/ops/executioner/CudaOpContext.java +++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/ops/executioner/CudaOpContext.java @@ -46,8 +46,8 @@ import org.nd4j.nativeblas.OpaqueRandomGenerator; */ public class CudaOpContext extends BaseOpContext implements OpContext, Deallocatable { // we might want to have configurable - private NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps(); - private OpaqueContext context = nativeOps.createGraphContext(1); + private final NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps(); + private final OpaqueContext context = nativeOps.createGraphContext(1); private final transient long id = Nd4j.getDeallocatorService().nextValue(); public CudaOpContext() { @@ -92,7 +92,7 @@ public class CudaOpContext extends BaseOpContext implements OpContext, Deallocat args[e] = arguments[e].toInt(); nativeOps.setGraphContextDArguments(context, new IntPointer(args), arguments.length); - }; + } } @Override @@ -161,7 +161,7 @@ public class CudaOpContext extends BaseOpContext implements OpContext, Deallocat @Override public String getUniqueId() { - return new String("CTX_" + id); + return "CTX_" + id; } @Override diff --git a/cavis-native/cavis-native-lib/build.gradle b/cavis-native/cavis-native-lib/build.gradle index fab153c54..8ecbeafc1 100644 --- a/cavis-native/cavis-native-lib/build.gradle +++ b/cavis-native/cavis-native-lib/build.gradle @@ -202,7 
+202,7 @@ tasks.withType(org.bytedeco.gradle.javacpp.BuildTask) { // Disable the standard javacpp generated tasks and use own // versions below. This allows to build for each variant [javacppBuildParser, javacppBuildCommand, javacppCompileJava, javacppBuildCompiler].each { - it.enabled false; + it.enabled false } chipList.each { thisChip -> diff --git a/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/AeronNDArrayPublisher.java b/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/AeronNDArrayPublisher.java index 9e8d1660f..8a1bfaaee 100644 --- a/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/AeronNDArrayPublisher.java +++ b/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/AeronNDArrayPublisher.java @@ -105,7 +105,7 @@ public class AeronNDArrayPublisher implements AutoCloseable { publication = aeron.addPublication(channel, streamId); log.info("Created publication on channel " + channel + " and stream " + streamId); } catch (DriverTimeoutException e) { - Thread.sleep(1000 * (connectionTries + 1)); + Thread.sleep(1000L * (connectionTries + 1)); log.warn("Failed to connect due to driver time out on channel " + channel + " and stream " + streamId + "...retrying in " + connectionTries + " seconds"); connectionTries++; diff --git a/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/AeronUtil.java b/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/AeronUtil.java index 97e83a5aa..be3a29781 100644 --- a/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/AeronUtil.java +++ b/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/AeronUtil.java @@ -135,8 +135,8 @@ public class AeronUtil { final byte[] data = new byte[length]; buffer.getBytes(offset, data); - System.out.println(String.format("Message to stream %d from session %d (%d@%d) <<%s>>", streamId, - header.sessionId(), length, offset, new String(data))); + System.out.printf("Message to stream %d from session %d (%d@%d) <<%s>>%n", 
streamId, + header.sessionId(), length, offset, new String(data)); }; } @@ -165,8 +165,8 @@ public class AeronUtil { */ public static void printRate(final double messagesPerSec, final double bytesPerSec, final long totalMessages, final long totalBytes) { - System.out.println(String.format("%.02g msgs/sec, %.02g bytes/sec, totals %d messages %d MB", messagesPerSec, - bytesPerSec, totalMessages, totalBytes / (1024 * 1024))); + System.out.printf("%.02g msgs/sec, %.02g bytes/sec, totals %d messages %d MB%n", messagesPerSec, + bytesPerSec, totalMessages, totalBytes / (1024 * 1024)); } /** @@ -176,8 +176,8 @@ public class AeronUtil { */ public static void printAvailableImage(final Image image) { final Subscription subscription = image.subscription(); - System.out.println(String.format("Available image on %s streamId=%d sessionId=%d from %s", - subscription.channel(), subscription.streamId(), image.sessionId(), image.sourceIdentity())); + System.out.printf("Available image on %s streamId=%d sessionId=%d from %s%n", + subscription.channel(), subscription.streamId(), image.sessionId(), image.sourceIdentity()); } /** @@ -187,8 +187,8 @@ public class AeronUtil { */ public static void printUnavailableImage(final Image image) { final Subscription subscription = image.subscription(); - System.out.println(String.format("Unavailable image on %s streamId=%d sessionId=%d", subscription.channel(), - subscription.streamId(), image.sessionId())); + System.out.printf("Unavailable image on %s streamId=%d sessionId=%d%n", subscription.channel(), + subscription.streamId(), image.sessionId()); } private static final AtomicInteger conductorCount = new AtomicInteger(); diff --git a/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/NDArrayFragmentHandler.java b/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/NDArrayFragmentHandler.java index 59afadde0..0495e4877 100644 --- a/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/NDArrayFragmentHandler.java +++ 
b/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/NDArrayFragmentHandler.java @@ -40,8 +40,8 @@ import java.nio.ByteOrder; */ @Slf4j public class NDArrayFragmentHandler implements FragmentHandler { - private NDArrayCallback ndArrayCallback; - private ChunkAccumulator chunkAccumulator = new InMemoryChunkAccumulator(); + private final NDArrayCallback ndArrayCallback; + private final ChunkAccumulator chunkAccumulator = new InMemoryChunkAccumulator(); public NDArrayFragmentHandler(NDArrayCallback ndArrayCallback) { this.ndArrayCallback = ndArrayCallback; diff --git a/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/NDArrayMessage.java b/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/NDArrayMessage.java index 3658a8006..c73f9c3bb 100644 --- a/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/NDArrayMessage.java +++ b/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/NDArrayMessage.java @@ -256,7 +256,7 @@ public class NDArrayMessage implements Serializable { String messageId = UUID.randomUUID().toString(); for (int i = 0; i < ret.length; i++) { //data: only grab a chunk of the data - ByteBuffer view = (ByteBuffer) wholeBuffer.byteBuffer().asReadOnlyBuffer().position(i * chunkSize); + ByteBuffer view = wholeBuffer.byteBuffer().asReadOnlyBuffer().position(i * chunkSize); view.limit(Math.min(i * chunkSize + chunkSize, wholeBuffer.capacity())); view.order(ByteOrder.nativeOrder()); view = view.slice(); diff --git a/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/chunk/InMemoryChunkAccumulator.java b/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/chunk/InMemoryChunkAccumulator.java index b725d3c04..963413423 100644 --- a/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/chunk/InMemoryChunkAccumulator.java +++ b/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/chunk/InMemoryChunkAccumulator.java @@ -30,7 +30,7 @@ import java.util.Map; @Slf4j public class 
InMemoryChunkAccumulator implements ChunkAccumulator { - private Map> chunks = Maps.newConcurrentMap(); + private final Map> chunks = Maps.newConcurrentMap(); /** * Returns the number of chunks diff --git a/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ndarrayholder/InMemoryNDArrayHolder.java b/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ndarrayholder/InMemoryNDArrayHolder.java index 20b082819..21ea8a465 100644 --- a/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ndarrayholder/InMemoryNDArrayHolder.java +++ b/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ndarrayholder/InMemoryNDArrayHolder.java @@ -36,8 +36,8 @@ import java.util.concurrent.atomic.AtomicReference; @NoArgsConstructor public class InMemoryNDArrayHolder implements NDArrayHolder { - private AtomicReference arr = new AtomicReference<>(); - private AtomicInteger totalUpdates = new AtomicInteger(0); + private final AtomicReference arr = new AtomicReference<>(); + private final AtomicInteger totalUpdates = new AtomicInteger(0); public InMemoryNDArrayHolder(int[] shape) { diff --git a/cavis-nd4j/cavis-nd4j-aeron/src/test/java/org/nd4j/aeron/ipc/LargeNdArrayIpcTest.java b/cavis-nd4j/cavis-nd4j-aeron/src/test/java/org/nd4j/aeron/ipc/LargeNdArrayIpcTest.java index de0f74356..477dd1e1d 100644 --- a/cavis-nd4j/cavis-nd4j-aeron/src/test/java/org/nd4j/aeron/ipc/LargeNdArrayIpcTest.java +++ b/cavis-nd4j/cavis-nd4j-aeron/src/test/java/org/nd4j/aeron/ipc/LargeNdArrayIpcTest.java @@ -41,9 +41,9 @@ import static org.junit.jupiter.api.Assertions.assertFalse; public class LargeNdArrayIpcTest extends BaseND4JTest { private MediaDriver mediaDriver; private Aeron.Context ctx; - private String channel = "aeron:udp?endpoint=localhost:" + (40123 + new java.util.Random().nextInt(130)); - private int streamId = 10; - private int length = (int) 1e7; + private final String channel = "aeron:udp?endpoint=localhost:" + (40123 + new java.util.Random().nextInt(130)); + private final int 
streamId = 10; + private final int length = (int) 1e7; @Override public long getTimeoutMilliseconds() { diff --git a/cavis-nd4j/cavis-nd4j-aeron/src/test/java/org/nd4j/aeron/ipc/NdArrayIpcTest.java b/cavis-nd4j/cavis-nd4j-aeron/src/test/java/org/nd4j/aeron/ipc/NdArrayIpcTest.java index 999e11281..e0c680952 100644 --- a/cavis-nd4j/cavis-nd4j-aeron/src/test/java/org/nd4j/aeron/ipc/NdArrayIpcTest.java +++ b/cavis-nd4j/cavis-nd4j-aeron/src/test/java/org/nd4j/aeron/ipc/NdArrayIpcTest.java @@ -37,11 +37,11 @@ import java.util.concurrent.atomic.AtomicBoolean; @Timeout(120) public class NdArrayIpcTest extends BaseND4JTest { private MediaDriver mediaDriver; - private static Logger log = LoggerFactory.getLogger(NdArrayIpcTest.class); + private static final Logger log = LoggerFactory.getLogger(NdArrayIpcTest.class); private Aeron.Context ctx; - private String channel = "aeron:udp?endpoint=localhost:" + (40132 + new java.util.Random().nextInt(3000)); - private int streamId = 10; - private int length = (int) 1e7; + private final String channel = "aeron:udp?endpoint=localhost:" + (40132 + new java.util.Random().nextInt(3000)); + private final int streamId = 10; + private final int length = (int) 1e7; @Override public long getTimeoutMilliseconds() { diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/base/Preconditions.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/base/Preconditions.java index c8bc3966d..c709d29b8 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/base/Preconditions.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/base/Preconditions.java @@ -687,12 +687,12 @@ public final class Preconditions { } else { if(nextCustom < 0 || (nextIdx > 0 && nextIdx < nextCustom)){ //%s tag - sb.append(message.substring(indexOfStart, nextIdx)) + sb.append(message, indexOfStart, nextIdx) .append(formatArg(args[i])); indexOfStart = nextIdx + 2; } else { //Custom tag - 
sb.append(message.substring(indexOfStart, nextCustom)); + sb.append(message, indexOfStart, nextCustom); String s = FORMATTERS.get(nextCustomTag).format(nextCustomTag, args[i]); sb.append(s); indexOfStart = nextCustom + nextCustomTag.length(); diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/CompactHeapStringList.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/CompactHeapStringList.java index b7a25f248..dec86b34b 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/CompactHeapStringList.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/CompactHeapStringList.java @@ -286,7 +286,7 @@ public class CompactHeapStringList implements List { while (e1.hasNext() && e2.hasNext()) { String o1 = e1.next(); Object o2 = e2.next(); - if (!(o1 == null ? o2 == null : o1.equals(o2))) + if (!(Objects.equals(o1, o2))) return false; } return !(e1.hasNext() || e2.hasNext()); diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/IntArrayKeyMap.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/IntArrayKeyMap.java index 2ed1b154d..84730c572 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/IntArrayKeyMap.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/IntArrayKeyMap.java @@ -28,7 +28,7 @@ import java.util.*; public class IntArrayKeyMap implements Map { - private Map map = new LinkedHashMap<>(); + private final Map map = new LinkedHashMap<>(); @Override public int size() { @@ -120,7 +120,7 @@ public class IntArrayKeyMap implements Map { public static class IntArray implements Comparable { @Getter - private int[] backingArray; + private final int[] backingArray; public IntArray(int[] backingArray) { Preconditions.checkNotNull(backingArray,"Backing array must not be null!"); diff --git 
a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/IntArrayKeySet.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/IntArrayKeySet.java index 1a8893cda..b1db74f72 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/IntArrayKeySet.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/IntArrayKeySet.java @@ -23,7 +23,7 @@ package org.nd4j.common.collection; import java.util.*; public class IntArrayKeySet implements Set { - private Set set = new LinkedHashSet<>(); + private final Set set = new LinkedHashSet<>(); @Override public int size() { return set.size(); diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/MultiDimensionalMap.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/MultiDimensionalMap.java index a88871152..03ec92701 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/MultiDimensionalMap.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/MultiDimensionalMap.java @@ -361,7 +361,7 @@ public class MultiDimensionalMap implements Serializable { MultiDimensionalMap that = (MultiDimensionalMap) o; - return !(backedMap != null ? 
!backedMap.equals(that.backedMap) : that.backedMap != null); + return !(!Objects.equals(backedMap, that.backedMap)); } diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/MultiDimensionalSet.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/MultiDimensionalSet.java index c5712d3eb..d16c190cb 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/MultiDimensionalSet.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/collection/MultiDimensionalSet.java @@ -28,7 +28,7 @@ import java.util.concurrent.ConcurrentSkipListSet; public class MultiDimensionalSet implements Set> { - private Set> backedSet; + private final Set> backedSet; public MultiDimensionalSet(Set> backedSet) { this.backedSet = backedSet; diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/holder/ObjectMapperHolder.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/holder/ObjectMapperHolder.java index 0cd5166a1..9df59f5f7 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/holder/ObjectMapperHolder.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/holder/ObjectMapperHolder.java @@ -26,7 +26,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; public class ObjectMapperHolder { - private static ObjectMapper objectMapper = getMapper(); + private static final ObjectMapper objectMapper = getMapper(); private ObjectMapperHolder() {} diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/AbstractFileResolvingResource.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/AbstractFileResolvingResource.java index 69728a1c3..d15d109d3 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/AbstractFileResolvingResource.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/AbstractFileResolvingResource.java @@ -124,7 +124,7 @@ public abstract class 
AbstractFileResolvingResource extends AbstractResource { ((HttpURLConnection) con).setRequestMethod("HEAD"); } - return (long) con.getContentLength(); + return con.getContentLength(); } } diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/AbstractResource.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/AbstractResource.java index a6595a0e3..cf7ac3f38 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/AbstractResource.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/AbstractResource.java @@ -79,8 +79,7 @@ public abstract class AbstractResource implements Resource { long size = 0L; int read; - for (byte[] buf = new byte[255]; (read = is.read(buf)) != -1; size += (long) read) { - ; + for (byte[] buf = new byte[255]; (read = is.read(buf)) != -1; size += read) { } long var6 = size; @@ -89,7 +88,6 @@ public abstract class AbstractResource implements Resource { try { is.close(); } catch (IOException var14) { - ; } } diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/ClassPathResource.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/ClassPathResource.java index cf3d45944..0ef4cde64 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/ClassPathResource.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/ClassPathResource.java @@ -365,7 +365,7 @@ public class ClassPathResource extends AbstractFileResolvingResource { private ZipFile zipFile; private ZipEntry entry; private InputStream stream; - private String resourceName; + private final String resourceName; public GetStreamFromZip(URL url, String resourceName) { this.url = url; diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/CollectionUtils.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/CollectionUtils.java index 268c6fac0..9d224b63f 100644 --- 
a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/CollectionUtils.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/CollectionUtils.java @@ -50,10 +50,7 @@ public abstract class CollectionUtils { Object[] arr$ = arr; int len$ = arr.length; - for (int i$ = 0; i$ < len$; ++i$) { - Object elem = arr$[i$]; - collection.add(elem); - } + collection.addAll(Arrays.asList(arr$).subList(0, len$)); } } @@ -157,7 +154,7 @@ public abstract class CollectionUtils { } public static T findValueOfType(Collection collection, Class type) { - if (isEmpty((Collection) collection)) { + if (isEmpty(collection)) { return null; } else { Object value = null; @@ -179,7 +176,7 @@ public abstract class CollectionUtils { } public static Object findValueOfType(Collection collection, Class[] types) { - if (!isEmpty((Collection) collection) && !ObjectUtils.isEmpty(types)) { + if (!isEmpty(collection) && !ObjectUtils.isEmpty(types)) { Class[] arr$ = types; int len$ = types.length; @@ -260,7 +257,7 @@ public abstract class CollectionUtils { } public static MultiValueMap unmodifiableMultiValueMap(MultiValueMap map) { - Assert.notNull(map, "\'map\' must not be null"); + Assert.notNull(map, "'map' must not be null"); LinkedHashMap result = new LinkedHashMap(map.size()); Iterator unmodifiableMap = map.entrySet().iterator(); @@ -278,7 +275,7 @@ public abstract class CollectionUtils { private final Map> map; public MultiValueMapAdapter(Map> map) { - Assert.notNull(map, "\'map\' must not be null"); + Assert.notNull(map, "'map' must not be null"); this.map = map; } @@ -374,7 +371,7 @@ public abstract class CollectionUtils { } public boolean equals(Object other) { - return this == other ? 
true : this.map.equals(other); + return this == other || this.map.equals(other); } public int hashCode() { @@ -387,7 +384,7 @@ public abstract class CollectionUtils { } private static class EnumerationIterator implements Iterator { - private Enumeration enumeration; + private final Enumeration enumeration; public EnumerationIterator(Enumeration enumeration) { this.enumeration = enumeration; diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/ObjectUtils.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/ObjectUtils.java index e1dcf32e9..43f6db46b 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/ObjectUtils.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/ObjectUtils.java @@ -122,7 +122,7 @@ public abstract class ObjectUtils { } throw new IllegalArgumentException(String.format("constant [%s] does not exist in enum opType %s", - new Object[] {constant, enumValues.getClass().getComponentType().getName()})); + constant, enumValues.getClass().getComponentType().getName())); } public static A[] addObjectToArray(A[] array, O obj) { @@ -479,7 +479,7 @@ public abstract class ObjectUtils { sb.append(", "); } - sb.append(String.valueOf(array[i])); + sb.append(array[i]); } sb.append("}"); @@ -557,7 +557,7 @@ public abstract class ObjectUtils { sb.append(", "); } - sb.append("\'").append(array[i]).append("\'"); + sb.append("'").append(array[i]).append("'"); } sb.append("}"); diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/ReflectionUtils.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/ReflectionUtils.java index 2a70e13d5..2332fcecc 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/ReflectionUtils.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/ReflectionUtils.java @@ -289,7 +289,7 @@ public abstract class ReflectionUtils { mc.doWith(superIfc); } catch (IllegalAccessException var9) { throw new 
IllegalStateException( - "Shouldn\'t be illegal to access method \'" + superIfc.getName() + "\': " + var9); + "Shouldn't be illegal to access method '" + superIfc.getName() + "': " + var9); } } } @@ -374,7 +374,7 @@ public abstract class ReflectionUtils { fc.doWith(field); } catch (IllegalAccessException var10) { throw new IllegalStateException( - "Shouldn\'t be illegal to access field \'" + field.getName() + "\': " + var10); + "Shouldn't be illegal to access field '" + field.getName() + "': " + var10); } } } diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/StringUtils.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/StringUtils.java index 9f4fecbec..264f76cf5 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/StringUtils.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/StringUtils.java @@ -242,7 +242,7 @@ public abstract class StringUtils { int index = inString.indexOf(oldPattern); for (int patLen = oldPattern.length(); index >= 0; index = inString.indexOf(oldPattern, pos)) { - sb.append(inString.substring(pos, index)); + sb.append(inString, pos, index); sb.append(newPattern); pos = index + patLen; } @@ -276,7 +276,7 @@ public abstract class StringUtils { } public static String quote(String str) { - return str != null ? "\'" + str + "\'" : null; + return str != null ? 
"'" + str + "'" : null; } public static Object quoteIfString(Object obj) { @@ -536,10 +536,7 @@ public abstract class StringUtils { String[] arr$ = array; int len$ = array.length; - for (int i$ = 0; i$ < len$; ++i$) { - String element = arr$[i$]; - set.add(element); - } + set.addAll(Arrays.asList(arr$).subList(0, len$)); return toStringArray(set); } @@ -656,10 +653,7 @@ public abstract class StringUtils { String[] arr$ = tokens; int len$ = tokens.length; - for (int i$ = 0; i$ < len$; ++i$) { - String token = arr$[i$]; - set.add(token); - } + set.addAll(Arrays.asList(arr$).subList(0, len$)); return set; } diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/VfsUtils.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/VfsUtils.java index 2255c8176..502859a73 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/VfsUtils.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/io/VfsUtils.java @@ -43,14 +43,14 @@ public abstract class VfsUtils { private static Method VFS_METHOD_GET_ROOT_URL = null; private static Method VFS_METHOD_GET_ROOT_URI = null; private static Method VIRTUAL_FILE_METHOD_EXISTS = null; - private static Method VIRTUAL_FILE_METHOD_GET_INPUT_STREAM; - private static Method VIRTUAL_FILE_METHOD_GET_SIZE; - private static Method VIRTUAL_FILE_METHOD_GET_LAST_MODIFIED; - private static Method VIRTUAL_FILE_METHOD_TO_URL; - private static Method VIRTUAL_FILE_METHOD_TO_URI; - private static Method VIRTUAL_FILE_METHOD_GET_NAME; - private static Method VIRTUAL_FILE_METHOD_GET_PATH_NAME; - private static Method VIRTUAL_FILE_METHOD_GET_CHILD; + private static final Method VIRTUAL_FILE_METHOD_GET_INPUT_STREAM; + private static final Method VIRTUAL_FILE_METHOD_GET_SIZE; + private static final Method VIRTUAL_FILE_METHOD_GET_LAST_MODIFIED; + private static final Method VIRTUAL_FILE_METHOD_TO_URL; + private static final Method VIRTUAL_FILE_METHOD_TO_URI; + private static final Method 
VIRTUAL_FILE_METHOD_GET_NAME; + private static final Method VIRTUAL_FILE_METHOD_GET_PATH_NAME; + private static final Method VIRTUAL_FILE_METHOD_GET_CHILD; protected static Class VIRTUAL_FILE_VISITOR_INTERFACE; protected static Method VIRTUAL_FILE_METHOD_VISIT; private static Method VFS_UTILS_METHOD_IS_NESTED_FILE = null; @@ -122,11 +122,11 @@ public abstract class VfsUtils { } static Object getRelative(URL url) throws IOException { - return invokeVfsMethod(VFS_METHOD_GET_ROOT_URL, null, new Object[] {url}); + return invokeVfsMethod(VFS_METHOD_GET_ROOT_URL, null, url); } static Object getChild(Object vfsResource, String path) throws IOException { - return invokeVfsMethod(VIRTUAL_FILE_METHOD_GET_CHILD, vfsResource, new Object[] {path}); + return invokeVfsMethod(VIRTUAL_FILE_METHOD_GET_CHILD, vfsResource, path); } static File getFile(Object vfsResource) throws IOException { @@ -148,11 +148,11 @@ public abstract class VfsUtils { } static Object getRoot(URI url) throws IOException { - return invokeVfsMethod(VFS_METHOD_GET_ROOT_URI, null, new Object[] {url}); + return invokeVfsMethod(VFS_METHOD_GET_ROOT_URI, null, url); } protected static Object getRoot(URL url) throws IOException { - return invokeVfsMethod(VFS_METHOD_GET_ROOT_URL, null, new Object[] {url}); + return invokeVfsMethod(VFS_METHOD_GET_ROOT_URL, null, url); } protected static Object doGetVisitorAttribute() { @@ -195,8 +195,8 @@ public abstract class VfsUtils { try { String ex = VfsUtils.VFS_VER.V3.equals(version) ? 
"getChild" : "getRoot"; - VFS_METHOD_GET_ROOT_URL = ReflectionUtils.findMethod(vfsClass, ex, new Class[] {URL.class}); - VFS_METHOD_GET_ROOT_URI = ReflectionUtils.findMethod(vfsClass, ex, new Class[] {URI.class}); + VFS_METHOD_GET_ROOT_URL = ReflectionUtils.findMethod(vfsClass, ex, URL.class); + VFS_METHOD_GET_ROOT_URI = ReflectionUtils.findMethod(vfsClass, ex, URI.class); Class virtualFile = loader.loadClass(pkg + "VirtualFile"); VIRTUAL_FILE_METHOD_EXISTS = ReflectionUtils.findMethod(virtualFile, "exists"); VIRTUAL_FILE_METHOD_GET_INPUT_STREAM = ReflectionUtils.findMethod(virtualFile, "openStream"); @@ -208,15 +208,15 @@ public abstract class VfsUtils { VIRTUAL_FILE_METHOD_GET_PATH_NAME = ReflectionUtils.findMethod(virtualFile, "getPathName"); GET_PHYSICAL_FILE = ReflectionUtils.findMethod(virtualFile, "getPhysicalFile"); ex = VfsUtils.VFS_VER.V3.equals(version) ? "getChild" : "findChild"; - VIRTUAL_FILE_METHOD_GET_CHILD = ReflectionUtils.findMethod(virtualFile, ex, new Class[] {String.class}); + VIRTUAL_FILE_METHOD_GET_CHILD = ReflectionUtils.findMethod(virtualFile, ex, String.class); Class utilsClass = loader.loadClass(pkg + "VFSUtils"); VFS_UTILS_METHOD_GET_COMPATIBLE_URI = - ReflectionUtils.findMethod(utilsClass, "getCompatibleURI", new Class[] {virtualFile}); + ReflectionUtils.findMethod(utilsClass, "getCompatibleURI", virtualFile); VFS_UTILS_METHOD_IS_NESTED_FILE = - ReflectionUtils.findMethod(utilsClass, "isNestedFile", new Class[] {virtualFile}); + ReflectionUtils.findMethod(utilsClass, "isNestedFile", virtualFile); VIRTUAL_FILE_VISITOR_INTERFACE = loader.loadClass(pkg + "VirtualFileVisitor"); VIRTUAL_FILE_METHOD_VISIT = ReflectionUtils.findMethod(virtualFile, "visit", - new Class[] {VIRTUAL_FILE_VISITOR_INTERFACE}); + VIRTUAL_FILE_VISITOR_INTERFACE); Class visitorAttributesClass = loader.loadClass(pkg + "VisitorAttributes"); VISITOR_ATTRIBUTES_FIELD_RECURSE = ReflectionUtils.findField(visitorAttributesClass, "RECURSE"); } catch (ClassNotFoundException 
var7) { @@ -224,9 +224,9 @@ public abstract class VfsUtils { } } - private static enum VFS_VER { + private enum VFS_VER { V2, V3; - private VFS_VER() {} + VFS_VER() {} } } diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/CounterMap.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/CounterMap.java index 1cc6758e6..597513300 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/CounterMap.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/CounterMap.java @@ -192,7 +192,7 @@ public class CounterMap implements Serializable{ public Iterator> getIterator() { return new Iterator>() { - Iterator outerIt; + final Iterator outerIt; Iterator innerIt; F curKey; diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/serde/JsonDeserializerAtomicBoolean.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/serde/JsonDeserializerAtomicBoolean.java index 6c807feea..13a9e523a 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/serde/JsonDeserializerAtomicBoolean.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/serde/JsonDeserializerAtomicBoolean.java @@ -31,7 +31,7 @@ import java.io.IOException; public class JsonDeserializerAtomicBoolean extends JsonDeserializer { @Override - public AtomicBoolean deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { + public AtomicBoolean deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException { JsonNode node = jsonParser.getCodec().readTree(jsonParser); boolean value = node.asBoolean(); return new AtomicBoolean(value); diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/serde/JsonDeserializerAtomicDouble.java 
b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/serde/JsonDeserializerAtomicDouble.java index d777b0072..2b152e750 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/serde/JsonDeserializerAtomicDouble.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/serde/JsonDeserializerAtomicDouble.java @@ -31,7 +31,7 @@ import java.io.IOException; public class JsonDeserializerAtomicDouble extends JsonDeserializer { @Override - public AtomicDouble deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { + public AtomicDouble deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException { JsonNode node = jsonParser.getCodec().readTree(jsonParser); double value = node.asDouble(); return new AtomicDouble(value); diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/serde/JsonSerializerAtomicBoolean.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/serde/JsonSerializerAtomicBoolean.java index c10f1bc95..e2d51b105 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/serde/JsonSerializerAtomicBoolean.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/serde/JsonSerializerAtomicBoolean.java @@ -30,7 +30,7 @@ import java.io.IOException; public class JsonSerializerAtomicBoolean extends JsonSerializer { @Override - public void serialize(AtomicBoolean atomicDouble, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException, JsonProcessingException { + public void serialize(AtomicBoolean atomicDouble, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException { jsonGenerator.writeBoolean(atomicDouble.get()); } } diff --git 
a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/serde/JsonSerializerAtomicDouble.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/serde/JsonSerializerAtomicDouble.java index 1f9041ccd..9e00819d4 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/serde/JsonSerializerAtomicDouble.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/primitives/serde/JsonSerializerAtomicDouble.java @@ -30,7 +30,7 @@ import java.io.IOException; public class JsonSerializerAtomicDouble extends JsonSerializer { @Override - public void serialize(AtomicDouble atomicDouble, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException, JsonProcessingException { + public void serialize(AtomicDouble atomicDouble, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException { jsonGenerator.writeNumber(atomicDouble.doubleValue()); } } diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/resources/Resources.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/resources/Resources.java index f8fa974f4..aec97ba3e 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/resources/Resources.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/resources/Resources.java @@ -31,7 +31,7 @@ import java.util.*; @Slf4j public class Resources { - private static Resources INSTANCE = new Resources(); + private static final Resources INSTANCE = new Resources(); protected final List resolvers; @@ -123,7 +123,7 @@ public class Resources { } throw new IllegalStateException("Cannot resolve resource (not found): none of " + resolvers.size() + - " resolvers can resolve resource \"" + resourcePath + "\" - available resolvers: " + resolvers.toString()); + " resolvers can resolve resource \"" + resourcePath + "\" - available resolvers: " + resolvers); } public InputStream getAsStream(String resourcePath) { @@ 
-135,7 +135,7 @@ public class Resources { } throw new IllegalStateException("Cannot resolve resource (not found): none of " + resolvers.size() + - " resolvers can resolve resource \"" + resourcePath + "\" - available resolvers: " + resolvers.toString()); + " resolvers can resolve resource \"" + resourcePath + "\" - available resolvers: " + resolvers); } public void copyDir(String directoryPath, File destinationDir) { diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/resources/strumpf/ResourceFile.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/resources/strumpf/ResourceFile.java index 0141be02f..8bdeae89c 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/resources/strumpf/ResourceFile.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/resources/strumpf/ResourceFile.java @@ -118,10 +118,7 @@ public class ResourceFile { Preconditions.checkState(expSha256 != null, "Expected JSON property %s was not found in resource reference file %s", sha256Property, filePath); String actualSha256 = sha256(file); - if (!expSha256.equals(actualSha256)) { - return false; - } - return true; + return expSha256.equals(actualSha256); } /** diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/resources/strumpf/StrumpfResolver.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/resources/strumpf/StrumpfResolver.java index 54ff89459..ba879f740 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/resources/strumpf/StrumpfResolver.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/resources/strumpf/StrumpfResolver.java @@ -94,11 +94,7 @@ public class StrumpfResolver implements Resolver { } cpr = new ClassPathResource(resourcePath); - if (cpr.exists()) { - return true; - } - - return false; + return cpr.exists(); } @Override @@ -116,11 +112,7 @@ public class StrumpfResolver implements Resolver { //Second: Check classpath ClassPathResource cpr = 
new ClassPathResource(dirPath); - if (cpr.exists()) { - return true; - } - - return false; + return cpr.exists(); } @Override diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/tools/BTools.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/tools/BTools.java index 7e4d06b49..d22b22998 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/tools/BTools.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/tools/BTools.java @@ -272,10 +272,10 @@ public class BTools { // String FormatS = ""; if ( LeadingChar == '0' ) { - FormatS = "%" + LeadingChar + Integer.toString( CharsCount ) + "d"; + FormatS = "%" + LeadingChar + CharsCount + "d"; } else { - FormatS = "%" + Integer.toString( CharsCount ) + "d"; + FormatS = "%" + CharsCount + "d"; } // Result = String.format( FormatS, Value ); diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/tools/SIS.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/tools/SIS.java index b10296fcc..a2ee4f925 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/tools/SIS.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/tools/SIS.java @@ -33,7 +33,7 @@ import java.time.format.DateTimeFormatter; public class SIS { // System Informations Saving // - private String baseModuleCode = "SIS"; + private final String baseModuleCode = "SIS"; private String moduleCode = "?"; // private PrintStream out; diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/ArchiveUtils.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/ArchiveUtils.java index 317c5a23d..cd682f3b2 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/ArchiveUtils.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/ArchiveUtils.java @@ -80,7 +80,7 @@ public class ArchiveUtils { new File(dest).mkdirs(); FileInputStream fin = new FileInputStream(target); int BUFFER 
= 2048; - byte data[] = new byte[BUFFER]; + byte[] data = new byte[BUFFER]; if (file.endsWith(".zip") || file.endsWith(".jar")) { try(ZipInputStream zis = new ZipInputStream(fin)) { @@ -152,7 +152,7 @@ public class ArchiveUtils { else { int count; try(FileOutputStream fos = new FileOutputStream(dest + File.separator + entry.getName()); - BufferedOutputStream destStream = new BufferedOutputStream(fos, BUFFER);) { + BufferedOutputStream destStream = new BufferedOutputStream(fos, BUFFER)) { while ((count = tarIn.read(data, 0, BUFFER)) != -1) { destStream.write(data, 0, count); } diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/ArrayUtil.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/ArrayUtil.java index 8a30f0e48..13780f3a6 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/ArrayUtil.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/ArrayUtil.java @@ -295,7 +295,7 @@ public class ArrayUtil { public static long[] toLongs(byte[] data) { val ret = new long[data.length]; for (int i = 0; i < ret.length; i++) { - ret[i] = (long) data[i]; + ret[i] = data[i]; } return ret; } @@ -311,7 +311,7 @@ public class ArrayUtil { public static long[] toLongs(short[] data) { val ret = new long[data.length]; for (int i = 0; i < ret.length; i++) { - ret[i] = (long) data[i]; + ret[i] = data[i]; } return ret; } @@ -319,7 +319,7 @@ public class ArrayUtil { public static long[] toLongs(int[] data) { val ret = new long[data.length]; for (int i = 0; i < ret.length; i++) { - ret[i] = (long) data[i]; + ret[i] = data[i]; } return ret; } @@ -1105,7 +1105,7 @@ public class ArrayUtil { public static double[] toDoubles(int[] ints) { double[] ret = new double[ints.length]; for (int i = 0; i < ints.length; i++) - ret[i] = (double) ints[i]; + ret[i] = ints[i]; return ret; } @@ -1119,7 +1119,7 @@ public class ArrayUtil { public static double[] toDoubles(float[] ints) { double[] ret = new 
double[ints.length]; for (int i = 0; i < ints.length; i++) - ret[i] = (double) ints[i]; + ret[i] = ints[i]; return ret; } diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/Index.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/Index.java index cc64e145d..ff91a9a4e 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/Index.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/Index.java @@ -23,14 +23,15 @@ package org.nd4j.common.util; import java.io.Serializable; import java.util.Map; +import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; @SuppressWarnings({"rawtypes", "unchecked"}) public class Index implements Serializable { private static final long serialVersionUID = 1160629777026141078L; - private Map objects = new ConcurrentHashMap<>(); - private Map indexes = new ConcurrentHashMap<>(); + private final Map objects = new ConcurrentHashMap<>(); + private final Map indexes = new ConcurrentHashMap<>(); public synchronized boolean add(Object o, int idx) { if (o instanceof String && o.toString().isEmpty()) { @@ -103,9 +104,9 @@ public class Index implements Serializable { Index index = (Index) o; - if (objects != null ? !objects.equals(index.objects) : index.objects != null) + if (!Objects.equals(objects, index.objects)) return false; - return !(indexes != null ? 
!indexes.equals(index.indexes) : index.indexes != null); + return !(!Objects.equals(indexes, index.indexes)); } diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/MathUtils.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/MathUtils.java index 58d72eace..6e249ffbd 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/MathUtils.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/MathUtils.java @@ -163,7 +163,7 @@ public class MathUtils { * @param targetAttribute target attribute vector * @return the correlation coefficient or r */ - public static double correlation(double[] residuals, double targetAttribute[]) { + public static double correlation(double[] residuals, double[] targetAttribute) { double[] predictedValues = new double[residuals.length]; for (int i = 0; i < predictedValues.length; i++) { predictedValues[i] = targetAttribute[i] - residuals[i]; @@ -1042,7 +1042,7 @@ public class MathUtils { */ public static /*@pure@*/ double roundDouble(double value, int afterDecimalPoint) { - double mask = Math.pow(10.0, (double) afterDecimalPoint); + double mask = Math.pow(10.0, afterDecimalPoint); return (double) (Math.round(value * mask)) / mask; }//end roundDouble diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/Rational.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/Rational.java index 404874016..e9914479c 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/Rational.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/Rational.java @@ -234,10 +234,10 @@ class Rational implements Cloneable { public Rational pow(BigInteger exponent) throws NumberFormatException { /* test for overflow */ if (exponent.compareTo(MAX_INT) == 1) { - throw new NumberFormatException("Exponent " + exponent.toString() + " too large."); + throw new NumberFormatException("Exponent " + exponent + " too 
large."); } if (exponent.compareTo(MIN_INT) == -1) { - throw new NumberFormatException("Exponent " + exponent.toString() + " too small."); + throw new NumberFormatException("Exponent " + exponent + " too small."); } /* promote to the simpler interface above */ return pow(exponent.intValue()); diff --git a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/SynchronizedTable.java b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/SynchronizedTable.java index ace0bf5f1..37c16114e 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/SynchronizedTable.java +++ b/cavis-nd4j/cavis-nd4j-common/src/main/java/org/nd4j/common/util/SynchronizedTable.java @@ -27,7 +27,7 @@ import java.util.Map; import java.util.Set; public class SynchronizedTable implements Table { - private Table wrapped; + private final Table wrapped; public SynchronizedTable(Table wrapped) { this.wrapped = wrapped; diff --git a/cavis-nd4j/cavis-nd4j-common/src/test/java/org/nd4j/common/function/FunctionalUtilsTest.java b/cavis-nd4j/cavis-nd4j-common/src/test/java/org/nd4j/common/function/FunctionalUtilsTest.java index b4c86b2e9..b08be4f36 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/test/java/org/nd4j/common/function/FunctionalUtilsTest.java +++ b/cavis-nd4j/cavis-nd4j-common/src/test/java/org/nd4j/common/function/FunctionalUtilsTest.java @@ -45,9 +45,9 @@ public class FunctionalUtilsTest { //[(fish,([],[alex])), (dog,([adam],[steve])), (cat,([adam],[alice]))] Map,List>> assertion = new HashMap<>(); - assertion.put("cat",Pair.of(Arrays.asList("adam"),Arrays.asList("alice"))); - assertion.put("dog",Pair.of(Arrays.asList("adam"),Arrays.asList("steve"))); - assertion.put("fish",Pair.of(Collections.emptyList(),Arrays.asList("alex"))); + assertion.put("cat",Pair.of(Collections.singletonList("adam"), Collections.singletonList("alice"))); + assertion.put("dog",Pair.of(Collections.singletonList("adam"), Collections.singletonList("steve"))); + 
assertion.put("fish",Pair.of(Collections.emptyList(), Collections.singletonList("alex"))); Map, List>> cogroup = FunctionalUtils.cogroup(leftMap, rightMap); assertEquals(assertion,cogroup); diff --git a/cavis-nd4j/cavis-nd4j-common/src/test/java/org/nd4j/common/loader/TestFileBatch.java b/cavis-nd4j/cavis-nd4j-common/src/test/java/org/nd4j/common/loader/TestFileBatch.java index b3c924919..b215da12a 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/test/java/org/nd4j/common/loader/TestFileBatch.java +++ b/cavis-nd4j/cavis-nd4j-common/src/test/java/org/nd4j/common/loader/TestFileBatch.java @@ -83,7 +83,7 @@ public class TestFileBatch { //Check that it is indeed a valid zip file: - File f = new File(FileUtils.getTempDirectoryPath()+"/"+UUID.randomUUID().toString()); + File f = new File(FileUtils.getTempDirectoryPath()+"/"+ UUID.randomUUID()); f.delete(); fb.writeAsZip(f); diff --git a/cavis-nd4j/cavis-nd4j-common/src/test/java/org/nd4j/common/tools/InfoValuesTest.java b/cavis-nd4j/cavis-nd4j-common/src/test/java/org/nd4j/common/tools/InfoValuesTest.java index ee40a1089..992cff871 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/test/java/org/nd4j/common/tools/InfoValuesTest.java +++ b/cavis-nd4j/cavis-nd4j-common/src/test/java/org/nd4j/common/tools/InfoValuesTest.java @@ -27,9 +27,9 @@ import static org.junit.jupiter.api.Assertions.*; public class InfoValuesTest { // - private String[] t1_titleA = { "T0", "T1", "T2", "T3", "T4", "T5" }; + private final String[] t1_titleA = { "T0", "T1", "T2", "T3", "T4", "T5" }; // - private String[] t2_titleA = { "", "T1", "T2" }; + private final String[] t2_titleA = { "", "T1", "T2" }; // @Test diff --git a/cavis-nd4j/cavis-nd4j-common/src/test/java/org/nd4j/common/tools/SISTest.java b/cavis-nd4j/cavis-nd4j-common/src/test/java/org/nd4j/common/tools/SISTest.java index e89fdd324..95b625c32 100644 --- a/cavis-nd4j/cavis-nd4j-common/src/test/java/org/nd4j/common/tools/SISTest.java +++ 
b/cavis-nd4j/cavis-nd4j-common/src/test/java/org/nd4j/common/tools/SISTest.java @@ -50,7 +50,7 @@ public class SISTest { // assertEquals( 33, fFName.length() ); assertEquals( "Z", fFName.substring( 0, 1 ) ); - assertEquals( "_Test_ABC.txt", fFName.substring( fFName.length() - 13, fFName.length() ) ); + assertEquals( "_Test_ABC.txt", fFName.substring( fFName.length() - 13) ); // assertEquals( "", fFName ); // assertEquals( "", tmpFld.getRoot().getAbsolutePath() ); // diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-client/src/test/java/org/nd4j/parameterserver/background/BackgroundDaemonStarter.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-client/src/test/java/org/nd4j/parameterserver/background/BackgroundDaemonStarter.java index 7a582400a..67c79803b 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-client/src/test/java/org/nd4j/parameterserver/background/BackgroundDaemonStarter.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-client/src/test/java/org/nd4j/parameterserver/background/BackgroundDaemonStarter.java @@ -47,7 +47,7 @@ public class BackgroundDaemonStarter { * @throws InterruptedException */ public static int startSlave(int parameterLength, String masterUrl, String mediaDriverDirectory) throws Exception { - return exec(ParameterServerSubscriber.class, mediaDriverDirectory, "-s", "1," + String.valueOf(parameterLength), + return exec(ParameterServerSubscriber.class, mediaDriverDirectory, "-s", "1," + parameterLength, "-p", "40126", "-h", "localhost", "-id", "10", "-pm", masterUrl, "-sp", "9500", "--updatesPerEpoch", "1"); } @@ -96,7 +96,7 @@ public class BackgroundDaemonStarter { */ public static int startMaster(int parameterLength, String mediaDriverDirectory) throws Exception { return exec(ParameterServerSubscriber.class, mediaDriverDirectory, "-m", "true", "-s", - "1," + String.valueOf(parameterLength), "-p", "40123", "-h", "localhost", 
"-id", "11", "-sp", + "1," + parameterLength, "-p", "40123", "-h", "localhost", "-id", "11", "-sp", "9200", "--updatesPerEpoch", "1"); } diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-client/src/test/java/org/nd4j/parameterserver/background/RemoteParameterServerClientTests.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-client/src/test/java/org/nd4j/parameterserver/background/RemoteParameterServerClientTests.java index 005443fe3..8568637b4 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-client/src/test/java/org/nd4j/parameterserver/background/RemoteParameterServerClientTests.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-client/src/test/java/org/nd4j/parameterserver/background/RemoteParameterServerClientTests.java @@ -43,11 +43,11 @@ import static org.junit.jupiter.api.Assertions.assertEquals; @Slf4j public class RemoteParameterServerClientTests extends BaseND4JTest { - private int parameterLength = 1000; + private final int parameterLength = 1000; private Aeron.Context ctx; private MediaDriver mediaDriver; - private AtomicInteger masterStatus = new AtomicInteger(0); - private AtomicInteger slaveStatus = new AtomicInteger(0); + private final AtomicInteger masterStatus = new AtomicInteger(0); + private final AtomicInteger slaveStatus = new AtomicInteger(0); private Aeron aeron; @BeforeEach diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-client/src/test/java/org/nd4j/parameterserver/client/ParameterServerClientPartialTest.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-client/src/test/java/org/nd4j/parameterserver/client/ParameterServerClientPartialTest.java index b57618211..2309aae84 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-client/src/test/java/org/nd4j/parameterserver/client/ParameterServerClientPartialTest.java +++ 
b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-client/src/test/java/org/nd4j/parameterserver/client/ParameterServerClientPartialTest.java @@ -46,7 +46,7 @@ public class ParameterServerClientPartialTest extends BaseND4JTest { private static MediaDriver mediaDriver; private static Aeron.Context ctx; private static ParameterServerSubscriber masterNode, slaveNode; - private int[] shape = {2, 2}; + private final int[] shape = {2, 2}; private static Aeron aeron; @BeforeAll @@ -74,7 +74,7 @@ public class ParameterServerClientPartialTest extends BaseND4JTest { assertEquals("localhost", masterNode.getHost()); assertEquals(11, masterNode.getStreamId()); assertEquals(12, masterNode.getResponder().getStreamId()); - assertEquals(masterNode.getMasterArray(), Nd4j.create(new int[] {2, 2})); + assertEquals(masterNode.getMasterArray(), Nd4j.create(2, 2)); slaveNode = new ParameterServerSubscriber(mediaDriver); slaveNode.setAeron(aeron); @@ -127,7 +127,7 @@ public class ParameterServerClientPartialTest extends BaseND4JTest { Thread.sleep(30000); ParameterServerListener listener = (ParameterServerListener) masterNode.getCallback(); assertEquals(1, listener.getUpdater().numUpdates()); - INDArray assertion = Nd4j.create(new int[] {2, 2}); + INDArray assertion = Nd4j.create(2, 2); assertion.getColumn(0).addi(1.0); assertEquals(assertion, listener.getUpdater().ndArrayHolder().get()); INDArray arr = client.getArray(); diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-client/src/test/java/org/nd4j/parameterserver/client/ParameterServerClientTest.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-client/src/test/java/org/nd4j/parameterserver/client/ParameterServerClientTest.java index 985c77ec8..5492fb0f4 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-client/src/test/java/org/nd4j/parameterserver/client/ParameterServerClientTest.java +++ 
b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-client/src/test/java/org/nd4j/parameterserver/client/ParameterServerClientTest.java @@ -41,10 +41,10 @@ import static org.junit.jupiter.api.Assertions.assertTrue; public class ParameterServerClientTest extends BaseND4JTest { private static MediaDriver mediaDriver; - private static Logger log = LoggerFactory.getLogger(ParameterServerClientTest.class); + private static final Logger log = LoggerFactory.getLogger(ParameterServerClientTest.class); private static Aeron aeron; private static ParameterServerSubscriber masterNode, slaveNode; - private static int parameterLength = 1000; + private static final int parameterLength = 1000; @BeforeAll public static void beforeClass() throws Exception { @@ -54,7 +54,7 @@ public class ParameterServerClientTest extends BaseND4JTest { masterNode = new ParameterServerSubscriber(mediaDriver); masterNode.setAeron(aeron); int masterPort = 40323 + new java.util.Random().nextInt(3000); - masterNode.run(new String[] {"-m", "true", "-s", "1," + String.valueOf(parameterLength), "-p", + masterNode.run(new String[] {"-m", "true", "-s", "1," + parameterLength, "-p", String.valueOf(masterPort), "-h", "localhost", "-id", "11", "-md", mediaDriver.aeronDirectoryName(), "-sp", "33000", "-u", String.valueOf(1)}); diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-core/src/main/java/org/nd4j/parameterserver/updater/SoftSyncParameterUpdater.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-core/src/main/java/org/nd4j/parameterserver/updater/SoftSyncParameterUpdater.java index 59e19ab10..5ba28b6bc 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-core/src/main/java/org/nd4j/parameterserver/updater/SoftSyncParameterUpdater.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-core/src/main/java/org/nd4j/parameterserver/updater/SoftSyncParameterUpdater.java @@ -32,7 +32,7 @@ public 
class SoftSyncParameterUpdater extends BaseParameterUpdater { //s is the number of updates private int s; private int currentVersion; - private int accumulatedUpdates = 0; + private final int accumulatedUpdates = 0; private double scalingFactor; diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-core/src/main/java/org/nd4j/parameterserver/updater/SynchronousParameterUpdater.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-core/src/main/java/org/nd4j/parameterserver/updater/SynchronousParameterUpdater.java index 9ebf5bcbd..12635f433 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-core/src/main/java/org/nd4j/parameterserver/updater/SynchronousParameterUpdater.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-core/src/main/java/org/nd4j/parameterserver/updater/SynchronousParameterUpdater.java @@ -33,7 +33,7 @@ import java.util.Map; public class SynchronousParameterUpdater extends BaseParameterUpdater { private int workers = Runtime.getRuntime().availableProcessors(); - private static ObjectMapper objectMapper = new ObjectMapper(); + private static final ObjectMapper objectMapper = new ObjectMapper(); /** * Returns the number of required diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-core/src/main/java/org/nd4j/parameterserver/updater/storage/InMemoryUpdateStorage.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-core/src/main/java/org/nd4j/parameterserver/updater/storage/InMemoryUpdateStorage.java index 73202e0d2..7c4a81eb0 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-core/src/main/java/org/nd4j/parameterserver/updater/storage/InMemoryUpdateStorage.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-core/src/main/java/org/nd4j/parameterserver/updater/storage/InMemoryUpdateStorage.java @@ -28,7 +28,7 @@ import 
java.util.concurrent.CopyOnWriteArrayList; public class InMemoryUpdateStorage extends BaseUpdateStorage { - private List updates = new CopyOnWriteArrayList<>(); + private final List updates = new CopyOnWriteArrayList<>(); /** * Add an ndarray to the storage diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-core/src/main/java/org/nd4j/parameterserver/updater/storage/NoUpdateStorage.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-core/src/main/java/org/nd4j/parameterserver/updater/storage/NoUpdateStorage.java index a44db1a40..16ec168fc 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-core/src/main/java/org/nd4j/parameterserver/updater/storage/NoUpdateStorage.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-core/src/main/java/org/nd4j/parameterserver/updater/storage/NoUpdateStorage.java @@ -27,7 +27,7 @@ import java.util.concurrent.atomic.AtomicInteger; @Slf4j public class NoUpdateStorage extends BaseUpdateStorage { - private AtomicInteger updateCount = new AtomicInteger(0); + private final AtomicInteger updateCount = new AtomicInteger(0); /** * Add an ndarray to the storage diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/logic/RetransmissionHandler.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/logic/RetransmissionHandler.java index 59db0f670..af46e8a7e 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/logic/RetransmissionHandler.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/logic/RetransmissionHandler.java @@ -27,7 +27,7 @@ import org.nd4j.parameterserver.distributed.transport.Transport; @Deprecated public 
interface RetransmissionHandler { - public enum TransmissionStatus { + enum TransmissionStatus { MESSAGE_SENT, NOT_CONNECTED, BACKPRESSURE, ADMIN_ACTION, } diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/logic/completion/FrameCompletionHandler.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/logic/completion/FrameCompletionHandler.java index 6f12029b3..9ba7014ae 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/logic/completion/FrameCompletionHandler.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/logic/completion/FrameCompletionHandler.java @@ -32,7 +32,7 @@ import java.util.concurrent.atomic.AtomicInteger; @Deprecated public class FrameCompletionHandler { - private Map frames = new ConcurrentHashMap<>(); + private final Map frames = new ConcurrentHashMap<>(); public boolean isTrackingFrame(RequestDescriptor descriptor) { return frames.containsKey(descriptor); @@ -104,12 +104,12 @@ public class FrameCompletionHandler { public static class FrameDescriptor { @Getter - private long frameOriginatorId; + private final long frameOriginatorId; // messageId within frame, and it's state - private Map states = new ConcurrentHashMap<>(); - private AtomicInteger messages = new AtomicInteger(0); - private AtomicInteger finished = new AtomicInteger(0); + private final Map states = new ConcurrentHashMap<>(); + private final AtomicInteger messages = new AtomicInteger(0); + private final AtomicInteger finished = new AtomicInteger(0); public FrameDescriptor(long frameOriginatorId) { diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/logic/storage/BaseStorage.java 
b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/logic/storage/BaseStorage.java index 846a689d1..25d74566f 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/logic/storage/BaseStorage.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/logic/storage/BaseStorage.java @@ -31,7 +31,7 @@ import java.util.concurrent.ConcurrentHashMap; @Deprecated public abstract class BaseStorage implements Storage { - private ConcurrentHashMap storage = new ConcurrentHashMap<>(); + private final ConcurrentHashMap storage = new ConcurrentHashMap<>(); @Override diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/messages/Frame.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/messages/Frame.java index c6f4f8134..42313290b 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/messages/Frame.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/messages/Frame.java @@ -217,7 +217,7 @@ public class Frame implements Serializable, Iterable< //log.info("Firing message {}; originator: {}; frameId: {}; taskId: {}", message.getClass().getSimpleName(), message.getOriginatorId(), message.getFrameId(), message.getTaskId()); message.processMessage(); } - } ; + } } @Override diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/messages/intercom/DistributedCbowDotMessage.java 
b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/messages/intercom/DistributedCbowDotMessage.java index 258738a61..5df923c91 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/messages/intercom/DistributedCbowDotMessage.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/messages/intercom/DistributedCbowDotMessage.java @@ -87,7 +87,7 @@ public class DistributedCbowDotMessage extends BaseVoidMessage implements Distri CbowRequestMessage cbrm = new CbowRequestMessage(rowsA, rowsB, w1, codes, negSamples, alpha, 119); if (negSamples > 0) { // unfortunately we have to get copy of negSamples here - int negatives[] = Arrays.copyOfRange(rowsB, codes.length, rowsB.length); + int[] negatives = Arrays.copyOfRange(rowsB, codes.length, rowsB.length); cbrm.setNegatives(negatives); } cbrm.setFrameId(-119L); diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/messages/intercom/DistributedSgDotMessage.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/messages/intercom/DistributedSgDotMessage.java index 003cb6d15..0d3ec17f7 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/messages/intercom/DistributedSgDotMessage.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/messages/intercom/DistributedSgDotMessage.java @@ -84,7 +84,7 @@ public class DistributedSgDotMessage extends BaseVoidMessage implements Distribu SkipGramRequestMessage sgrm = new SkipGramRequestMessage(w1, w2, rowsB, codes, negSamples, alpha, 119); if 
(negSamples > 0) { // unfortunately we have to get copy of negSamples here - int negatives[] = Arrays.copyOfRange(rowsB, codes.length, rowsB.length); + int[] negatives = Arrays.copyOfRange(rowsB, codes.length, rowsB.length); sgrm.setNegatives(negatives); } sgrm.setTaskId(this.taskId); diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/training/impl/CbowTrainer.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/training/impl/CbowTrainer.java index 78aa40bd3..b6eaf3e44 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/training/impl/CbowTrainer.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/training/impl/CbowTrainer.java @@ -57,11 +57,11 @@ public class CbowTrainer extends BaseTrainer { chains.put(RequestDescriptor.createDescriptor(message.getOriginatorId(), message.getTaskId()), chain); - int row_syn1[] = message.getSyn1rows(); + int[] row_syn1 = message.getSyn1rows(); if (message.getNegSamples() > 0) { - int rows = (int) storage.getArray(WordVectorStorage.SYN_0).rows(); - int tempArray[] = new int[message.getNegSamples() + 1]; + int rows = storage.getArray(WordVectorStorage.SYN_0).rows(); + int[] tempArray = new int[message.getNegSamples() + 1]; tempArray[0] = message.getW1(); for (int e = 1; e < message.getNegSamples() + 1; e++) { @@ -118,7 +118,7 @@ public class CbowTrainer extends BaseTrainer { + "]; taskId: [" + aggregation.getTaskId() + "]"); } - chain.addElement((DotAggregation) aggregation); + chain.addElement(aggregation); finishTraining(aggregation.getOriginatorId(), aggregation.getTaskId()); } diff --git 
a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/training/impl/SkipGramTrainer.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/training/impl/SkipGramTrainer.java index add4c0867..8805ee65b 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/training/impl/SkipGramTrainer.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/training/impl/SkipGramTrainer.java @@ -70,13 +70,13 @@ public class SkipGramTrainer extends BaseTrainer { // we assume this is HS round //if (message.getPoints() != null && message.getPoints().length > 0) { - int row_syn0[] = new int[0]; //replicate(message.getW2(), message.getPoints().length); + int[] row_syn0 = new int[0]; //replicate(message.getW2(), message.getPoints().length); - int row_syn1[] = message.getPoints(); + int[] row_syn1 = message.getPoints(); if (message.getNegSamples() > 0) { - int rows = (int) storage.getArray(WordVectorStorage.SYN_0).rows(); - int tempArray[] = new int[message.getNegSamples() + 1]; + int rows = storage.getArray(WordVectorStorage.SYN_0).rows(); + int[] tempArray = new int[message.getNegSamples() + 1]; tempArray[0] = message.getW1(); for (int e = 1; e < message.getNegSamples() + 1; e++) { @@ -156,7 +156,7 @@ public class SkipGramTrainer extends BaseTrainer { + "]; taskId: [" + aggregation.getTaskId() + "]"); } - chain.addElement((DotAggregation) aggregation); + chain.addElement(aggregation); finishTraining(aggregation.getOriginatorId(), aggregation.getTaskId()); } diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/transport/BaseTransport.java 
b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/transport/BaseTransport.java index bad3a3fb4..222ff3546 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/transport/BaseTransport.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/transport/BaseTransport.java @@ -279,7 +279,7 @@ public abstract class BaseTransport implements Transport { byte[] data = new byte[length]; buffer.getBytes(offset, data); - MeaningfulMessage message = (MeaningfulMessage) VoidMessage.fromBytes(data); + MeaningfulMessage message = VoidMessage.fromBytes(data); completed.put(message.getTaskId(), message); } @@ -412,7 +412,7 @@ public abstract class BaseTransport implements Transport { } break; default: - throw new IllegalStateException("Unknown thread model: [" + threading.toString() + "]"); + throw new IllegalStateException("Unknown thread model: [" + threading + "]"); } } diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/transport/RoutedTransport.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/transport/RoutedTransport.java index 7ee969015..8f49c7e7a 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/transport/RoutedTransport.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/transport/RoutedTransport.java @@ -562,7 +562,7 @@ public class RoutedTransport extends BaseTransport { completed.put(message.getTaskId(), msg); } else if (message instanceof RequestMessage) { try { - messages.put((RequestMessage) message); + 
messages.put(message); } catch (InterruptedException e) { // do nothing } catch (Exception e) { @@ -570,7 +570,7 @@ public class RoutedTransport extends BaseTransport { } } else if (message instanceof DistributedMessage) { try { - messages.put((DistributedMessage) message); + messages.put(message); } catch (InterruptedException e) { // do nothing } catch (Exception e) { @@ -578,7 +578,7 @@ public class RoutedTransport extends BaseTransport { } } else if (message instanceof TrainingMessage) { try { - messages.put((TrainingMessage) message); + messages.put(message); } catch (InterruptedException e) { // do nothing } catch (Exception e) { @@ -586,7 +586,7 @@ public class RoutedTransport extends BaseTransport { } } else if (message instanceof VoidAggregation) { try { - messages.put((VoidAggregation) message); + messages.put(message); } catch (InterruptedException e) { // do nothing } catch (Exception e) { @@ -594,7 +594,7 @@ public class RoutedTransport extends BaseTransport { } } else if (message instanceof Frame) { try { - messages.put((Frame) message); + messages.put(message); } catch (InterruptedException e) { // do nothing } catch (Exception e) { @@ -664,8 +664,8 @@ public class RoutedTransport extends BaseTransport { public static class RemoteConnectionBuilder { - private Object locker = new Object(); - private AtomicBoolean activated = new AtomicBoolean(); + private final Object locker = new Object(); + private final AtomicBoolean activated = new AtomicBoolean(); } } diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/util/NetworkInformation.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/util/NetworkInformation.java index 4937478eb..e55dc4b1c 100644 --- 
a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/util/NetworkInformation.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/util/NetworkInformation.java @@ -27,6 +27,7 @@ import lombok.NonNull; import java.io.Serializable; import java.util.ArrayList; import java.util.List; +import java.util.Objects; @NoArgsConstructor @Data @@ -49,7 +50,7 @@ public class NetworkInformation implements Serializable { NetworkInformation that = (NetworkInformation) o; - return ipAddresses != null ? ipAddresses.equals(that.ipAddresses) : that.ipAddresses == null; + return Objects.equals(ipAddresses, that.ipAddresses); } @Override diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/ModelParameterServer.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/ModelParameterServer.java index 0f1fc902c..fb6768334 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/ModelParameterServer.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/ModelParameterServer.java @@ -62,7 +62,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; */ @Slf4j public final class ModelParameterServer { - protected static final ModelParameterServer INSTANCE = new ModelParameterServer(); + private static final ModelParameterServer INSTANCE = new ModelParameterServer(); @Getter private Transport transport; @@ -79,33 +79,33 @@ public final class ModelParameterServer { private final BlockingQueue updatesQueue = new LinkedBlockingQueue<>(4096); // subsribers that are connected to actual model - protected final List 
updatesSubscribers = new CopyOnWriteArrayList<>(); - protected final List> modelParamsSubsribers = new CopyOnWriteArrayList<>(); - protected final List> updaterParamsSubscribers = new CopyOnWriteArrayList<>(); + private final List updatesSubscribers = new CopyOnWriteArrayList<>(); + private final List> modelParamsSubsribers = new CopyOnWriteArrayList<>(); + private final List> updaterParamsSubscribers = new CopyOnWriteArrayList<>(); private boolean masterMode; - protected VoidConfiguration configuration; + private VoidConfiguration configuration; // this flag is true once mps is launched private final AtomicBoolean launchLock = new AtomicBoolean(false); private final AtomicBoolean stopLock = new AtomicBoolean(false); // this queue is used as temporary storage for updates received during restart event. - protected BlockingQueue updatesBacklog = new LinkedBlockingQueue<>(); + private BlockingQueue updatesBacklog = new LinkedBlockingQueue<>(); // these two fields only used at master node, to store latest updater copy - protected final Atomic updaterParameters = new Atomic<>(); - protected final ReentrantReadWriteLock updaterParamsLock = new ReentrantReadWriteLock(); - protected final AtomicBoolean gotFinalState = new AtomicBoolean(false); + private final Atomic updaterParameters = new Atomic<>(); + private final ReentrantReadWriteLock updaterParamsLock = new ReentrantReadWriteLock(); + private final AtomicBoolean gotFinalState = new AtomicBoolean(false); private Disposable disposable; - private AtomicInteger iterationNumber = new AtomicInteger(0); - private AtomicInteger epochNumber = new AtomicInteger(0); + private final AtomicInteger iterationNumber = new AtomicInteger(0); + private final AtomicInteger epochNumber = new AtomicInteger(0); - protected ModelParameterServer() { + private ModelParameterServer() { // } @@ -118,7 +118,7 @@ public final class ModelParameterServer { * * @param transport */ - protected ModelParameterServer(@NonNull Transport transport) { + 
private ModelParameterServer(@NonNull Transport transport) { this(transport, false); } @@ -128,7 +128,7 @@ public final class ModelParameterServer { * @param transport * @param isMasterNode */ - protected ModelParameterServer(@NonNull Transport transport, boolean isMasterNode) { + ModelParameterServer(@NonNull Transport transport, boolean isMasterNode) { this(VoidConfiguration.builder().portSupplier(new StaticPortSupplier(40123)).streamId(119).build(), transport, isMasterNode); } diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/chunks/impl/FileChunksTracker.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/chunks/impl/FileChunksTracker.java index 18ff83fb3..07545a2d2 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/chunks/impl/FileChunksTracker.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/chunks/impl/FileChunksTracker.java @@ -43,9 +43,9 @@ public class FileChunksTracker implements ChunksTracker map = new ConcurrentHashMap<>(); + private final Map map = new ConcurrentHashMap<>(); - private File holder; + private final File holder; private final long size; @@ -87,7 +87,7 @@ public class FileChunksTracker implements ChunksTracker implements ChunksTrack private final int numChunks; - private Map map = new ConcurrentHashMap<>(); + private final Map map = new ConcurrentHashMap<>(); private final byte[] buffer; diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/messages/pairs/params/UpdaterParametersMessage.java 
b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/messages/pairs/params/UpdaterParametersMessage.java index bb59958d2..82b92b810 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/messages/pairs/params/UpdaterParametersMessage.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/messages/pairs/params/UpdaterParametersMessage.java @@ -34,7 +34,7 @@ public final class UpdaterParametersMessage extends BaseINDArrayMessage implemen @Getter @Setter - protected boolean finalState = false; + private boolean finalState = false; public UpdaterParametersMessage(@NonNull String messageId, INDArray payload) { super(messageId, payload); diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/transport/impl/BaseTransport.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/transport/impl/BaseTransport.java index 3cc941171..81b01e915 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/transport/impl/BaseTransport.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/transport/impl/BaseTransport.java @@ -332,7 +332,7 @@ public abstract class BaseTransport implements Transport { if (!isLoopedNode(n, originatorId, relayId)) { sendMessage(voidMessage, n.getId()); } - }; + } } } @@ -637,7 +637,7 @@ public abstract class BaseTransport implements Transport { * @param */ public static class MessageFlow implements Consumer, Publisher { - private List> subscribers = new CopyOnWriteArrayList<>(); + private final List> 
subscribers = new CopyOnWriteArrayList<>(); @Override public void accept(T voidMessage) throws Exception { diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/transport/impl/DummyTransport.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/transport/impl/DummyTransport.java index 4ed3fdb4b..508485244 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/transport/impl/DummyTransport.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/transport/impl/DummyTransport.java @@ -141,8 +141,8 @@ public class DummyTransport extends BaseTransport { * This class is written to mimic network connectivity locally */ public static class Connector { - private Map transports = new ConcurrentHashMap<>(); - private ThreadPoolExecutor executorService = (ThreadPoolExecutor) Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors(), new ThreadFactory() { + private final Map transports = new ConcurrentHashMap<>(); + private final ThreadPoolExecutor executorService = (ThreadPoolExecutor) Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors(), new ThreadFactory() { @Override public Thread newThread(@NonNull Runnable r) { val t = Executors.defaultThreadFactory().newThread(r); diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/util/MeshOrganizer.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/util/MeshOrganizer.java index 3adaec329..7b0108fe8 100644 --- 
a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/util/MeshOrganizer.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/v2/util/MeshOrganizer.java @@ -51,10 +51,10 @@ public class MeshOrganizer implements Serializable { // just shortcut to the root node of the tree @Getter(AccessLevel.PUBLIC) - private Node rootNode = new Node(true); + private final Node rootNode = new Node(true); // SortedSet, with sort by number of downstreams - private transient List sortedNodes = new ArrayList<>(); + private final transient List sortedNodes = new ArrayList<>(); // flattened map of the tree, ID -> Node private transient Map nodeMap = new HashMap<>(); @@ -325,7 +325,7 @@ public class MeshOrganizer implements Serializable { * @return */ protected long flatSize() { - return (long) nodeMap.size(); + return nodeMap.size(); } /** @@ -476,7 +476,7 @@ public class MeshOrganizer implements Serializable { val distance = distanceFromRoot(); for (val d: downstream) - if (d.numberOfDescendants() < MeshOrganizer.MAX_DOWNSTREAMS * (MeshOrganizer.MAX_DEPTH - distance)) + if (d.numberOfDescendants() < (long) MeshOrganizer.MAX_DOWNSTREAMS * (MeshOrganizer.MAX_DEPTH - distance)) return d.pushDownstreamNode(node); return addDownstreamNode(node); diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/VoidParameterServerStressTest.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/VoidParameterServerStressTest.java index 8f95c5ff9..66bc47f81 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/VoidParameterServerStressTest.java +++ 
b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/VoidParameterServerStressTest.java @@ -279,7 +279,7 @@ public class VoidParameterServerStressTest extends BaseND4JTest { log.info("p50: {} us", newTimes.get(newTimes.size() / 2) / 1000); - parameterServer.shutdown();; + parameterServer.shutdown(); for (VoidParameterServer server : shards) { server.shutdown(); @@ -492,7 +492,7 @@ public class VoidParameterServerStressTest extends BaseND4JTest { @Test @Timeout(60) public void testPerformanceUnicast3() throws Exception { VoidConfiguration voidConfiguration = VoidConfiguration.builder().numberOfShards(1) - .shardAddresses(Arrays.asList("127.0.0.1:49823")).build(); + .shardAddresses(Collections.singletonList("127.0.0.1:49823")).build(); voidConfiguration.setUnicastControllerPort(49823); Transport transport = new RoutedTransport(); @@ -538,7 +538,7 @@ public class VoidParameterServerStressTest extends BaseND4JTest { @Test @Timeout(60) public void testPerformanceUnicast4() throws Exception { VoidConfiguration voidConfiguration = VoidConfiguration.builder().numberOfShards(1) - .shardAddresses(Arrays.asList("127.0.0.1:49823")).build(); + .shardAddresses(Collections.singletonList("127.0.0.1:49823")).build(); voidConfiguration.setUnicastControllerPort(49823); Transport transport = new RoutedTransport(); @@ -635,7 +635,7 @@ public class VoidParameterServerStressTest extends BaseND4JTest { protected static CbowRequestMessage getCRM() { int w1 = RandomUtils.nextInt(0, NUM_WORDS); - int syn0[] = new int[5]; + int[] syn0 = new int[5]; for (int e = 0; e < syn0.length; e++) { syn0[e] = RandomUtils.nextInt(0, NUM_WORDS); diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/VoidParameterServerTest.java 
b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/VoidParameterServerTest.java index c29f7d6e3..5b716db55 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/VoidParameterServerTest.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/VoidParameterServerTest.java @@ -48,6 +48,7 @@ import org.nd4j.parameterserver.distributed.transport.Transport; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -67,7 +68,7 @@ public class VoidParameterServerTest extends BaseND4JTest { if (localIPs == null) { localIPs = new ArrayList<>(VoidParameterServer.getLocalAddresses()); - badIPs = Arrays.asList("127.0.0.1"); + badIPs = Collections.singletonList("127.0.0.1"); } } @@ -277,8 +278,8 @@ public class VoidParameterServerTest extends BaseND4JTest { * Now we're checking how data storage was initialized */ - assertEquals(null, shards[t].getNegTable()); - assertEquals(null, shards[t].getSyn1()); + assertNull(shards[t].getNegTable()); + assertNull(shards[t].getSyn1()); assertNotEquals(null, shards[t].getExpTable()); @@ -302,7 +303,7 @@ public class VoidParameterServerTest extends BaseND4JTest { // now we assign each row to something for (int t = 0; t < threads.length; t++) { - shards[t].handleMessage(new DistributedAssignMessage(WordVectorStorage.SYN_0, 1, (double) t)); + shards[t].handleMessage(new DistributedAssignMessage(WordVectorStorage.SYN_0, 1, t)); assertEquals(Nd4j.create(message.getColumnsPerShard()).assign((double) t), shards[t].getSyn0().getRow(1)); } @@ -342,8 +343,8 @@ public class VoidParameterServerTest extends BaseND4JTest { } // and at this moment, Shard_0 should contain 
aggregated vector for us - assertEquals(true, shards[0].clipboard.isTracking(0L, 1L)); - assertEquals(true, shards[0].clipboard.isReady(0L, 1L)); + assertTrue(shards[0].clipboard.isTracking(0L, 1L)); + assertTrue(shards[0].clipboard.isReady(0L, 1L)); INDArray jointVector = shards[0].clipboard.nextCandidate().getAccumulatedResult(); @@ -385,7 +386,7 @@ public class VoidParameterServerTest extends BaseND4JTest { // at this moment ot should be caclulated everywhere exp = Nd4j.create(new double[] {0.0, 30.0, 120.0}); for (int t = 0; t < threads.length; t++) { - assertEquals(true, shards[t].clipboard.isReady(0L, 2L)); + assertTrue(shards[t].clipboard.isReady(0L, 2L)); DotAggregation dot = (DotAggregation) shards[t].clipboard.unpin(0L, 2L); INDArray aggregated = dot.getAccumulatedResult(); assertEquals(exp, aggregated); diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/logic/ClipboardTest.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/logic/ClipboardTest.java index 48b024e38..73bb74a1e 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/logic/ClipboardTest.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/logic/ClipboardTest.java @@ -66,7 +66,7 @@ public class ClipboardTest extends BaseND4JTest { clipboard.pin(aggregation); } - assertEquals(false, clipboard.hasCandidates()); + assertFalse(clipboard.hasCandidates()); assertEquals(0, clipboard.getNumberOfCompleteStacks()); assertEquals(100, clipboard.getNumberOfPinnedStacks()); } @@ -98,7 +98,7 @@ public class ClipboardTest extends BaseND4JTest { assertEquals(0, aggregation.getMissingChunks()); - assertEquals(true, clipboard.hasCandidates()); + assertTrue(clipboard.hasCandidates()); 
assertEquals(1, clipboard.getNumberOfCompleteStacks()); } diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/logic/FrameCompletionHandlerTest.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/logic/FrameCompletionHandlerTest.java index 16638a32f..b398f61a6 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/logic/FrameCompletionHandlerTest.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/logic/FrameCompletionHandlerTest.java @@ -66,7 +66,7 @@ public class FrameCompletionHandlerTest extends BaseND4JTest { for (Long originator : originators) { for (Long frame : frames) { - assertEquals(true, handler.isCompleted(originator, frame)); + assertTrue(handler.isCompleted(originator, frame)); } } } diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/logic/routing/InterleavedRouterTest.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/logic/routing/InterleavedRouterTest.java index 2890ed7d2..c39fb6afd 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/logic/routing/InterleavedRouterTest.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/logic/routing/InterleavedRouterTest.java @@ -86,7 +86,7 @@ public class InterleavedRouterTest extends BaseND4JTest { InterleavedRouter router = new InterleavedRouter(); router.init(configuration, transport); - int w1[] = new int[] {512, 345, 486, 212}; + int[] w1 = new int[] {512, 345, 486, 
212}; for (int i = 0; i < w1.length; i++) { SkipGramRequestMessage message = new SkipGramRequestMessage(w1[i], 1, new int[] {1, 2, 3}, diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/messages/VoidMessageTest.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/messages/VoidMessageTest.java index 23b94d76f..7d283148e 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/messages/VoidMessageTest.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/messages/VoidMessageTest.java @@ -51,7 +51,7 @@ public class VoidMessageTest extends BaseND4JTest { byte[] bytes = message.asBytes(); - SkipGramRequestMessage restored = (SkipGramRequestMessage) VoidMessage.fromBytes(bytes); + SkipGramRequestMessage restored = VoidMessage.fromBytes(bytes); assertNotEquals(null, restored); diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/messages/aggregations/VoidAggregationTest.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/messages/aggregations/VoidAggregationTest.java index 4456c6d04..d3a5f56f6 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/messages/aggregations/VoidAggregationTest.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/messages/aggregations/VoidAggregationTest.java @@ -124,7 +124,7 @@ public class VoidAggregationTest extends BaseND4JTest { } INDArray result = aggregation.getAccumulatedResult(); - assertEquals(true, 
result.isScalar()); + assertTrue(result.isScalar()); assertEquals(exp, result.getDouble(0), 1e-5); } diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/transport/RoutedTransportTest.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/transport/RoutedTransportTest.java index f977912ec..51640d444 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/transport/RoutedTransportTest.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/transport/RoutedTransportTest.java @@ -110,7 +110,7 @@ public class RoutedTransportTest extends BaseND4JTest { for (int t = 1; t < transports.length; t++) { message = transports[t].messages.poll(1, TimeUnit.SECONDS); - assertEquals(null, message); + assertNull(message); } diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/util/NetworkOrganizerTest.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/util/NetworkOrganizerTest.java index 24b8915e9..896754a41 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/util/NetworkOrganizerTest.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/util/NetworkOrganizerTest.java @@ -426,23 +426,21 @@ public class NetworkOrganizerTest extends BaseND4JTest { } protected String getRandomIp() { - StringBuilder builder = new StringBuilder(); - builder.append(RandomUtils.nextInt(1, 172)).append("."); - builder.append(RandomUtils.nextInt(0, 255)).append("."); - 
builder.append(RandomUtils.nextInt(0, 255)).append("."); - builder.append(RandomUtils.nextInt(1, 255)); + String builder = RandomUtils.nextInt(1, 172) + "." + + RandomUtils.nextInt(0, 255) + "." + + RandomUtils.nextInt(0, 255) + "." + + RandomUtils.nextInt(1, 255); - return builder.toString(); + return builder; } protected String getRandomAwsIp() { - StringBuilder builder = new StringBuilder("172."); - builder.append(RandomUtils.nextInt(16, 32)).append("."); - builder.append(RandomUtils.nextInt(0, 255)).append("."); - builder.append(RandomUtils.nextInt(1, 255)); + String builder = "172." + RandomUtils.nextInt(16, 32) + "." + + RandomUtils.nextInt(0, 255) + "." + + RandomUtils.nextInt(1, 255); - return builder.toString(); + return builder; } } diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/v2/util/MeshOrganizerTest.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/v2/util/MeshOrganizerTest.java index 92a6a668c..83fd4c324 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/v2/util/MeshOrganizerTest.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/v2/util/MeshOrganizerTest.java @@ -330,7 +330,7 @@ public class MeshOrganizerTest extends BaseND4JTest { mesh1.addNode(java.util.UUID.randomUUID().toString()); - try(val baos = new ByteArrayOutputStream();) { + try(val baos = new ByteArrayOutputStream()) { SerializationUtils.serialize(mesh1, baos); try(val bais = new ByteArrayInputStream(baos.toByteArray())) { diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/v2/util/MessageSplitterTest.java 
b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/v2/util/MessageSplitterTest.java index 17e9dd7aa..08d763cf8 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/v2/util/MessageSplitterTest.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/distributed/v2/util/MessageSplitterTest.java @@ -100,7 +100,7 @@ public class MessageSplitterTest extends BaseND4JTest { assertNotNull(ref.get()); assertEquals(array, ref.get().getPayload()); assertEquals(0, splitter.memoryUse.intValue()); - assertEquals(false, splitter.isTrackedMessage(message.getMessageId())); + assertFalse(splitter.isTrackedMessage(message.getMessageId())); assertEquals(0, splitter.trackers.size()); } } diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/node/ParameterServerNodeTest.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/node/ParameterServerNodeTest.java index 7fee8e9c0..1626fc398 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/node/ParameterServerNodeTest.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/test/java/org/nd4j/parameterserver/node/ParameterServerNodeTest.java @@ -44,9 +44,9 @@ public class ParameterServerNodeTest extends BaseND4JTest { private static MediaDriver mediaDriver; private static Aeron aeron; private static ParameterServerNode parameterServerNode; - private static int parameterLength = 4; - private static int masterStatusPort = 40323 + new java.util.Random().nextInt(15999); - private static int statusPort = masterStatusPort - 1299; + private static final int parameterLength = 4; + private static final 
int masterStatusPort = 40323 + new java.util.Random().nextInt(15999); + private static final int statusPort = masterStatusPort - 1299; @BeforeAll public static void before() throws Exception { @@ -54,7 +54,7 @@ public class ParameterServerNodeTest extends BaseND4JTest { System.setProperty("play.server.dir", "/tmp"); aeron = Aeron.connect(getContext()); parameterServerNode = new ParameterServerNode(mediaDriver, statusPort); - parameterServerNode.runMain(new String[] {"-m", "true", "-s", "1," + String.valueOf(parameterLength), "-p", + parameterServerNode.runMain(new String[] {"-m", "true", "-s", "1," + parameterLength, "-p", String.valueOf(masterStatusPort), "-h", "localhost", "-id", "11", "-md", mediaDriver.aeronDirectoryName(), "-sp", String.valueOf(statusPort), "-sh", "localhost", "-u", String.valueOf(Runtime.getRuntime().availableProcessors())}); diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-status/src/test/java/org/nd4j/parameterserver/status/play/StorageTests.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-status/src/test/java/org/nd4j/parameterserver/status/play/StorageTests.java index 9d49e454f..dc281a329 100644 --- a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-status/src/test/java/org/nd4j/parameterserver/status/play/StorageTests.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-status/src/test/java/org/nd4j/parameterserver/status/play/StorageTests.java @@ -42,7 +42,7 @@ public class StorageTests extends BaseND4JTest { assertEquals(noEmpty, mapDb.getState(1)); Thread.sleep(10000); - assertTrue(mapDb.numStates() == 0); + assertEquals(0, mapDb.numStates()); } @@ -57,7 +57,7 @@ public class StorageTests extends BaseND4JTest { assertEquals(noEmpty, statusStorage.getState(1)); Thread.sleep(10000); - assertTrue(statusStorage.numStates() == 0); + assertEquals(0, statusStorage.numStates()); } diff --git 
a/cavis-nd4j/cavis-nd4j-tensorflow/src/main/java/org/nd4j/tensorflow/conversion/DummyDeAllocator.java b/cavis-nd4j/cavis-nd4j-tensorflow/src/main/java/org/nd4j/tensorflow/conversion/DummyDeAllocator.java index 36fd694b7..94b284035 100644 --- a/cavis-nd4j/cavis-nd4j-tensorflow/src/main/java/org/nd4j/tensorflow/conversion/DummyDeAllocator.java +++ b/cavis-nd4j/cavis-nd4j-tensorflow/src/main/java/org/nd4j/tensorflow/conversion/DummyDeAllocator.java @@ -24,7 +24,7 @@ import org.bytedeco.javacpp.Pointer; import org.bytedeco.tensorflow.Deallocator_Pointer_long_Pointer; public class DummyDeAllocator extends Deallocator_Pointer_long_Pointer { - private static DummyDeAllocator INSTANCE = new DummyDeAllocator(); + private static final DummyDeAllocator INSTANCE = new DummyDeAllocator(); public static DummyDeAllocator getInstance() { return INSTANCE; diff --git a/cavis-nd4j/cavis-nd4j-tensorflow/src/main/java/org/nd4j/tensorflow/conversion/TensorDataType.java b/cavis-nd4j/cavis-nd4j-tensorflow/src/main/java/org/nd4j/tensorflow/conversion/TensorDataType.java index 08cb4610d..2d9ba91cc 100644 --- a/cavis-nd4j/cavis-nd4j-tensorflow/src/main/java/org/nd4j/tensorflow/conversion/TensorDataType.java +++ b/cavis-nd4j/cavis-nd4j-tensorflow/src/main/java/org/nd4j/tensorflow/conversion/TensorDataType.java @@ -113,22 +113,26 @@ public enum TensorDataType { public static TensorDataType fromNd4jType(INDArray array) { DataType dataType = array.dataType(); - switch(dataType) { - case COMPRESSED: - CompressedDataBuffer compressedData = (CompressedDataBuffer) array.data(); - CompressionDescriptor desc = compressedData.getCompressionDescriptor(); - String algo = desc.getCompressionAlgorithm(); - switch (algo) { - case "FLOAT16": return HALF; - case "INT8": return INT8; - case "UINT8": return UINT8; - case "INT16": return INT16; - case "UINT16": return UINT16; - default: throw new IllegalArgumentException("Unsupported compression algorithm: " + algo); - } - - default: return 
fromNd4jType(dataType); + if (dataType == DataType.COMPRESSED) { + CompressedDataBuffer compressedData = (CompressedDataBuffer) array.data(); + CompressionDescriptor desc = compressedData.getCompressionDescriptor(); + String algo = desc.getCompressionAlgorithm(); + switch (algo) { + case "FLOAT16": + return HALF; + case "INT8": + return INT8; + case "UINT8": + return UINT8; + case "INT16": + return INT16; + case "UINT16": + return UINT16; + default: + throw new IllegalArgumentException("Unsupported compression algorithm: " + algo); + } } + return fromNd4jType(dataType); } } diff --git a/cavis-nd4j/cavis-nd4j-tensorflow/src/main/java/org/nd4j/tensorflow/conversion/TensorflowConversion.java b/cavis-nd4j/cavis-nd4j-tensorflow/src/main/java/org/nd4j/tensorflow/conversion/TensorflowConversion.java index 7d9d9cb59..def412f06 100644 --- a/cavis-nd4j/cavis-nd4j-tensorflow/src/main/java/org/nd4j/tensorflow/conversion/TensorflowConversion.java +++ b/cavis-nd4j/cavis-nd4j-tensorflow/src/main/java/org/nd4j/tensorflow/conversion/TensorflowConversion.java @@ -177,7 +177,7 @@ public class TensorflowConversion { BytePointer tf_data = new BytePointer(TF_TensorData(tf_tensor)).capacity(TF_TensorByteSize(tf_tensor)); TF_Status status = TF_NewStatus(); for (int i = 0; i < length; i++) { - tf_data.position(8 * i).putLong(offset); + tf_data.position(8L * i).putLong(offset); offset += TF_StringEncode(strings[i], strings[i].capacity() - 1, tf_data.position(8 * length + offset), tf_data.capacity() - tf_data.position(), status); if (TF_GetCode(status) != TF_OK) { throw new IllegalStateException("ERROR: Unable to convert tensor " + TF_Message(status).getString()); @@ -233,8 +233,8 @@ public class TensorflowConversion { SizeTPointer size = new SizeTPointer(1); TF_Status status = TF_NewStatus(); for (int i = 0; i < length; i++) { - long offset = data.position(8 * i).getLong(); - TF_StringDecode(data.position(8 * length + offset), data.capacity() - data.position(), str, size, status); + long 
offset = data.position(8L * i).getLong(); + TF_StringDecode(data.position(8L * length + offset), data.capacity() - data.position(), str, size, status); if (TF_GetCode(status) != TF_OK) { throw new IllegalStateException("ERROR: Unable to convert tensor " + TF_Message(status).getString()); } diff --git a/cavis-nd4j/cavis-nd4j-tensorflow/src/main/java/org/nd4j/tensorflow/conversion/graphrunner/GraphRunner.java b/cavis-nd4j/cavis-nd4j-tensorflow/src/main/java/org/nd4j/tensorflow/conversion/graphrunner/GraphRunner.java index 11e76c519..fa309b96e 100644 --- a/cavis-nd4j/cavis-nd4j-tensorflow/src/main/java/org/nd4j/tensorflow/conversion/graphrunner/GraphRunner.java +++ b/cavis-nd4j/cavis-nd4j-tensorflow/src/main/java/org/nd4j/tensorflow/conversion/graphrunner/GraphRunner.java @@ -57,7 +57,7 @@ public class GraphRunner implements Closeable { //the in memory representation parsed from protobuf private TF_Graph graph; //the conversion between nd4j and TensorFlow - private TensorflowConversion conversion = TensorflowConversion.getInstance(); + private final TensorflowConversion conversion = TensorflowConversion.getInstance(); //a persistent session to be used when running the graph private TF_Session session; //the options for the model @@ -74,7 +74,7 @@ public class GraphRunner implements Closeable { @Setter @Singular private Map inputDataTypes,outputDataTypes; - private static Map,GraphRunner> recastGraphDefs; + private static final Map,GraphRunner> recastGraphDefs; static { recastGraphDefs = new ConcurrentHashMap<>(); @@ -598,8 +598,8 @@ public class GraphRunner implements Closeable { byte[] graphForDataType = graphForDataType(from,to); GraphRunner graphRunner = GraphRunner.builder() .graphBytes(graphForDataType) - .inputNames(Arrays.asList("input")) - .outputNames(Arrays.asList("cast_output")) + .inputNames(Collections.singletonList("input")) + .outputNames(Collections.singletonList("cast_output")) .build(); recastGraphDefs.put(key,graphRunner); diff --git 
a/cavis-ui/cavis-ui-common/src/main/java/org/deeplearning4j/ui/weights/ConvolutionalIterationListener.java b/cavis-ui/cavis-ui-common/src/main/java/org/deeplearning4j/ui/weights/ConvolutionalIterationListener.java index d3680f43a..4770b2d76 100644 --- a/cavis-ui/cavis-ui-common/src/main/java/org/deeplearning4j/ui/weights/ConvolutionalIterationListener.java +++ b/cavis-ui/cavis-ui-common/src/main/java/org/deeplearning4j/ui/weights/ConvolutionalIterationListener.java @@ -63,11 +63,11 @@ public class ConvolutionalIterationListener extends BaseTrainingListener { private static final Logger log = LoggerFactory.getLogger(ConvolutionalIterationListener.class); private int minibatchNum = 0; private boolean openBrowser = true; - private String path; - private boolean firstIteration = true; + private final String path; + private final boolean firstIteration = true; - private Color borderColor = new Color(140, 140, 140); - private Color bgColor = new Color(255, 255, 255); + private final Color borderColor = new Color(140, 140, 140); + private final Color bgColor = new Color(255, 255, 255); private final StatsStorageRouter ssr; private final String sessionID; @@ -217,7 +217,7 @@ public class ConvolutionalIterationListener extends BaseTrainingListener { try { sourceImage = restoreRGBImage( - inputs.tensorAlongDimension(sampleDim, new int[] {3, 2, 1})); + inputs.tensorAlongDimension(sampleDim, 3, 2, 1)); } catch (Exception e) { throw new RuntimeException(e); } diff --git a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/Chart.java b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/Chart.java index d6fc0f165..458134b75 100644 --- a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/Chart.java +++ b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/Chart.java @@ -70,8 +70,8 @@ public abstract class Chart extends Component { 
@SuppressWarnings("unchecked") public static abstract class Builder> { - private String title; - private StyleChart style; + private final String title; + private final StyleChart style; private Boolean suppressAxisHorizontal; private Boolean suppressAxisVertical; private boolean showLegend; diff --git a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartHistogram.java b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartHistogram.java index dee8b4f9f..bbc3c5fd1 100644 --- a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartHistogram.java +++ b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartHistogram.java @@ -52,9 +52,9 @@ public class ChartHistogram extends Chart { public static class Builder extends Chart.Builder { - private List lowerBounds = new ArrayList<>(); - private List upperBounds = new ArrayList<>(); - private List yValues = new ArrayList<>(); + private final List lowerBounds = new ArrayList<>(); + private final List upperBounds = new ArrayList<>(); + private final List yValues = new ArrayList<>(); public Builder(String title, StyleChart style) { super(title, style); diff --git a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartHorizontalBar.java b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartHorizontalBar.java index bfc15f9e4..afd23f73b 100644 --- a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartHorizontalBar.java +++ b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartHorizontalBar.java @@ -55,8 +55,8 @@ public class ChartHorizontalBar extends Chart { public static class Builder extends Chart.Builder { - private List labels = new ArrayList<>(); - private List values = new ArrayList<>(); + private final List labels = new ArrayList<>(); + private final 
List values = new ArrayList<>(); private Double xMin; private Double xMax; diff --git a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartLine.java b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartLine.java index d40b63682..e99cbb317 100644 --- a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartLine.java +++ b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartLine.java @@ -53,10 +53,10 @@ public class ChartLine extends Chart { public static class Builder extends Chart.Builder { - private List x = new ArrayList<>(); - private List y = new ArrayList<>(); - private List seriesNames = new ArrayList<>(); - private boolean showLegend = true; + private final List x = new ArrayList<>(); + private final List y = new ArrayList<>(); + private final List seriesNames = new ArrayList<>(); + private final boolean showLegend = true; public Builder(String title, StyleChart style) { diff --git a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartScatter.java b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartScatter.java index ae79a8c77..d200b8906 100644 --- a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartScatter.java +++ b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartScatter.java @@ -54,9 +54,9 @@ public class ChartScatter extends Chart { public static class Builder extends Chart.Builder { - private List x = new ArrayList<>(); - private List y = new ArrayList<>(); - private List seriesNames = new ArrayList<>(); + private final List x = new ArrayList<>(); + private final List y = new ArrayList<>(); + private final List seriesNames = new ArrayList<>(); public Builder(String title, StyleChart style) { super(title, style); diff --git 
a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartStackedArea.java b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartStackedArea.java index 199357c26..238d44f76 100644 --- a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartStackedArea.java +++ b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartStackedArea.java @@ -54,8 +54,8 @@ public class ChartStackedArea extends Chart { public static class Builder extends Chart.Builder { private double[] x; - private List y = new ArrayList<>(); - private List seriesNames = new ArrayList<>(); + private final List y = new ArrayList<>(); + private final List seriesNames = new ArrayList<>(); public Builder(String title, StyleChart style) { diff --git a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartTimeline.java b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartTimeline.java index 57b2bf4f6..c5818f9a7 100644 --- a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartTimeline.java +++ b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/chart/ChartTimeline.java @@ -55,8 +55,8 @@ public class ChartTimeline extends Chart { public static class Builder extends Chart.Builder { - private List laneNames = new ArrayList<>(); - private List> laneData = new ArrayList<>(); + private final List laneNames = new ArrayList<>(); + private final List> laneData = new ArrayList<>(); public Builder(String title, StyleChart style) { diff --git a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/decorator/DecoratorAccordion.java b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/decorator/DecoratorAccordion.java index 543282d74..1e11ee29d 100644 --- 
a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/decorator/DecoratorAccordion.java +++ b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/decorator/DecoratorAccordion.java @@ -54,9 +54,9 @@ public class DecoratorAccordion extends Component { public static class Builder { - private StyleAccordion style; + private final StyleAccordion style; private String title; - private List innerComponents = new ArrayList<>(); + private final List innerComponents = new ArrayList<>(); private boolean defaultCollapsed; public Builder(StyleAccordion style) { diff --git a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/table/ComponentTable.java b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/table/ComponentTable.java index aebb3d0e6..80dedd965 100644 --- a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/table/ComponentTable.java +++ b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/table/ComponentTable.java @@ -56,7 +56,7 @@ public class ComponentTable extends Component { public static class Builder { - private StyleTable style; + private final StyleTable style; private String[] header; private String[][] content; diff --git a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/text/ComponentText.java b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/text/ComponentText.java index fc5ee7d7a..19f9c87f3 100644 --- a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/text/ComponentText.java +++ b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/components/text/ComponentText.java @@ -56,8 +56,8 @@ public class ComponentText extends Component { public static class Builder { - private StyleText style; - private String text; + private final StyleText style; + private final String text; public Builder(String text, StyleText style) { 
this.text = text; diff --git a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/standalone/StaticPageUtil.java b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/standalone/StaticPageUtil.java index 0ee0d2527..7bffdaa04 100644 --- a/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/standalone/StaticPageUtil.java +++ b/cavis-ui/cavis-ui-components/src/main/java/org/deeplearning4j/ui/standalone/StaticPageUtil.java @@ -37,6 +37,7 @@ import java.io.File; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; +import java.nio.charset.StandardCharsets; import java.util.*; public class StaticPageUtil { @@ -87,7 +88,7 @@ public class StaticPageUtil { cfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER); ClassPathResource cpr = new ClassPathResource("assets/dl4j-ui.js"); - String scriptContents = IOUtils.toString(cpr.getInputStream(), "UTF-8"); + String scriptContents = IOUtils.toString(cpr.getInputStream(), StandardCharsets.UTF_8); Map pageElements = new HashMap<>(); List list = new ArrayList<>(); diff --git a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/nearestneighbors/word2vec/NearestNeighborsQuery.java b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/nearestneighbors/word2vec/NearestNeighborsQuery.java index f379dba6d..b912acda8 100644 --- a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/nearestneighbors/word2vec/NearestNeighborsQuery.java +++ b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/nearestneighbors/word2vec/NearestNeighborsQuery.java @@ -21,6 +21,7 @@ package org.deeplearning4j.ui.model.nearestneighbors.word2vec; import java.io.Serializable; +import java.util.Objects; /** * @author Adam Gibson @@ -63,7 +64,7 @@ public class NearestNeighborsQuery implements Serializable { if (numWords != that.numWords) return false; - return !(word != null ? 
!word.equals(that.word) : that.word != null); + return !(!Objects.equals(word, that.word)); } diff --git a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/stats/BaseStatsListener.java b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/stats/BaseStatsListener.java index e56ad1f67..3ecc58fd5 100644 --- a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/stats/BaseStatsListener.java +++ b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/stats/BaseStatsListener.java @@ -73,7 +73,7 @@ public abstract class BaseStatsListener implements RoutingIterationListener { private Map> gcStatsAtLastReport; //NOTE: may have multiple models, due to multiple pretrain layers all using the same StatsListener - private List modelInfos = new ArrayList<>(); + private final List modelInfos = new ArrayList<>(); private Map activationHistograms; private Map meanActivations; //TODO replace with Eclipse collections primitive maps... @@ -687,7 +687,7 @@ public abstract class BaseStatsListener implements RoutingIterationListener { router.putStaticInfo(initReport); //TODO error handling } - private Map devPointers = new HashMap<>(); + private final Map devPointers = new HashMap<>(); private synchronized Pointer getDevicePointer(int device) { if (devPointers.containsKey(device)) { diff --git a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/stats/impl/SbeUtil.java b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/stats/impl/SbeUtil.java index 0e38e7799..7f5bda056 100644 --- a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/stats/impl/SbeUtil.java +++ b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/stats/impl/SbeUtil.java @@ -22,11 +22,12 @@ package org.deeplearning4j.ui.model.stats.impl; import java.io.*; import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.Map; public class SbeUtil { - public static final Charset UTF8 
= Charset.forName("UTF-8"); + public static final Charset UTF8 = StandardCharsets.UTF_8; public static final byte[] EMPTY_BYTES = new byte[0]; //Also equivalent to "".getBytes(UTF8); private SbeUtil() {} diff --git a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/storage/InMemoryStatsStorage.java b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/storage/InMemoryStatsStorage.java index 76f00ef2b..f99a88155 100644 --- a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/storage/InMemoryStatsStorage.java +++ b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/storage/InMemoryStatsStorage.java @@ -62,9 +62,7 @@ public class InMemoryStatsStorage extends BaseCollectionStatsStorage { @Override public void putStaticInfo(Persistable staticInfo) { List sses = checkStorageEvents(staticInfo); - if (!sessionIDs.contains(staticInfo.getSessionID())) { - sessionIDs.add(staticInfo.getSessionID()); - } + sessionIDs.add(staticInfo.getSessionID()); SessionTypeWorkerId id = new SessionTypeWorkerId(staticInfo.getSessionID(), staticInfo.getTypeID(), staticInfo.getWorkerID()); diff --git a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/storage/mapdb/MapDBStatsStorage.java b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/storage/mapdb/MapDBStatsStorage.java index b7a2fecf9..b05d43d67 100644 --- a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/storage/mapdb/MapDBStatsStorage.java +++ b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/storage/mapdb/MapDBStatsStorage.java @@ -44,12 +44,12 @@ public class MapDBStatsStorage extends BaseCollectionStatsStorage { private static final String COMPOSITE_KEY_SEPARATOR = "@@@"; private boolean isClosed = false; - private DB db; - private Lock updateMapLock = new ReentrantLock(true); + private final DB db; + private final Lock updateMapLock = new ReentrantLock(true); - private Map classToInteger; //For storage - 
private Map integerToClass; //For storage - private Atomic.Integer classCounter; + private final Map classToInteger; //For storage + private final Map integerToClass; //For storage + private final Atomic.Integer classCounter; public MapDBStatsStorage() { this(new Builder()); @@ -147,9 +147,7 @@ public class MapDBStatsStorage extends BaseCollectionStatsStorage { @Override public void putStaticInfo(Persistable staticInfo) { List sses = checkStorageEvents(staticInfo); - if (!sessionIDs.contains(staticInfo.getSessionID())) { - sessionIDs.add(staticInfo.getSessionID()); - } + sessionIDs.add(staticInfo.getSessionID()); SessionTypeWorkerId id = new SessionTypeWorkerId(staticInfo.getSessionID(), staticInfo.getTypeID(), staticInfo.getWorkerID()); diff --git a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/storage/sqlite/J7FileStatsStorage.java b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/storage/sqlite/J7FileStatsStorage.java index 07dee8b04..1b370b13a 100644 --- a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/storage/sqlite/J7FileStatsStorage.java +++ b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/storage/sqlite/J7FileStatsStorage.java @@ -45,7 +45,7 @@ public class J7FileStatsStorage implements StatsStorage { private final File file; private final Connection connection; - private List listeners = new ArrayList<>(); + private final List listeners = new ArrayList<>(); /** * @param file Storage location for the stats @@ -445,7 +445,7 @@ public class J7FileStatsStorage implements StatsStorage { List out = new ArrayList<>(); while (rs.next()) { byte[] bytes = rs.getBytes(5); - out.add((Persistable) deserialize(bytes)); + out.add(deserialize(bytes)); } return out; } catch (SQLException e) { @@ -561,7 +561,7 @@ public class J7FileStatsStorage implements StatsStorage { List out = new ArrayList<>(); while (rs.next()) { byte[] bytes = rs.getBytes(6); - out.add((Persistable) deserialize(bytes)); + 
out.add(deserialize(bytes)); } return out; } catch (SQLException e) { @@ -621,7 +621,7 @@ public class J7FileStatsStorage implements StatsStorage { List out = new ArrayList<>(); while (rs.next()) { byte[] bytes = rs.getBytes(1); - out.add((Persistable) deserialize(bytes)); + out.add(deserialize(bytes)); } return out; } catch (SQLException e) { diff --git a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/weights/HistogramBin.java b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/weights/HistogramBin.java index a8f4dffc3..37fc4e87f 100644 --- a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/weights/HistogramBin.java +++ b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/weights/HistogramBin.java @@ -29,6 +29,7 @@ import org.slf4j.LoggerFactory; import java.io.Serializable; import java.math.BigDecimal; +import java.math.RoundingMode; import java.util.LinkedHashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; @@ -87,7 +88,7 @@ public class HistogramBin implements Serializable { BigDecimal[] keys = new BigDecimal[numberOfBins]; for (int x = 0; x < numberOfBins; x++) { - BigDecimal pos = new BigDecimal((min + (x * binSize))).setScale(rounds, BigDecimal.ROUND_CEILING); + BigDecimal pos = BigDecimal.valueOf(min + (x * binSize)).setScale(rounds, RoundingMode.CEILING); data.put(pos, new AtomicInteger(0)); keys[x] = pos; } @@ -110,7 +111,7 @@ public class HistogramBin implements Serializable { } public static class Builder { - private INDArray source; + private final INDArray source; private int binCount; private int rounds = 2; diff --git a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/weights/beans/CompactModelAndGradient.java b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/weights/beans/CompactModelAndGradient.java index 5a52f0dce..19b7a552a 100644 --- 
a/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/weights/beans/CompactModelAndGradient.java +++ b/cavis-ui/cavis-ui-model/src/main/java/org/deeplearning4j/ui/model/weights/beans/CompactModelAndGradient.java @@ -22,10 +22,7 @@ package org.deeplearning4j.ui.model.weights.beans; import java.io.Serializable; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; /** * Slightly modified version of ModelAndGradient, with binned params/gradients, suitable for fast network transfers for HistogramIterationListener @@ -136,9 +133,9 @@ public class CompactModelAndGradient implements Serializable { if (Double.compare(that.score, score) != 0) return false; - if (parameters != null ? !parameters.equals(that.parameters) : that.parameters != null) + if (!Objects.equals(parameters, that.parameters)) return false; - return !(gradients != null ? !gradients.equals(that.gradients) : that.gradients != null); + return !(!Objects.equals(gradients, that.gradients)); } @Override diff --git a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/VertxUIServer.java b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/VertxUIServer.java index 21b77bab2..08c87d92c 100644 --- a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/VertxUIServer.java +++ b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/VertxUIServer.java @@ -83,7 +83,7 @@ public class VertxUIServer extends AbstractVerticle implements UIServer { private static VertxUIServer instance; @Getter - private static AtomicBoolean multiSession = new AtomicBoolean(false); + private static final AtomicBoolean multiSession = new AtomicBoolean(false); @Getter @Setter private static Function statsStorageProvider; @@ -217,7 +217,7 @@ public class VertxUIServer extends AbstractVerticle implements UIServer { } - private List uiModules = new CopyOnWriteArrayList<>(); + private final List uiModules = new CopyOnWriteArrayList<>(); 
private RemoteReceiverModule remoteReceiverModule; /** * Loader that attaches {@code StatsStorage} provided by {@code #statsStorageProvider} for the given session ID @@ -226,16 +226,16 @@ public class VertxUIServer extends AbstractVerticle implements UIServer { private Function statsStorageLoader; //typeIDModuleMap: Records which modules are registered for which type IDs - private Map> typeIDModuleMap = new ConcurrentHashMap<>(); + private final Map> typeIDModuleMap = new ConcurrentHashMap<>(); private HttpServer server; - private AtomicBoolean shutdown = new AtomicBoolean(false); - private long uiProcessingDelay = 500; //500ms. TODO make configurable + private final AtomicBoolean shutdown = new AtomicBoolean(false); + private final long uiProcessingDelay = 500; //500ms. TODO make configurable private final BlockingQueue eventQueue = new LinkedBlockingQueue<>(); - private List> listeners = new CopyOnWriteArrayList<>(); - private List statsStorageInstances = new CopyOnWriteArrayList<>(); + private final List> listeners = new CopyOnWriteArrayList<>(); + private final List statsStorageInstances = new CopyOnWriteArrayList<>(); private Thread uiEventRoutingThread; diff --git a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/api/UIServer.java b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/api/UIServer.java index f0cb3a29c..a14f45dea 100644 --- a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/api/UIServer.java +++ b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/api/UIServer.java @@ -175,5 +175,5 @@ public interface UIServer { */ static Thread getShutdownHook() { return VertxUIServer.getShutdownHook(); - }; + } } diff --git a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/i18n/DefaultI18N.java b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/i18n/DefaultI18N.java index 25af6684e..d1cc5a01f 100644 --- a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/i18n/DefaultI18N.java +++ 
b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/i18n/DefaultI18N.java @@ -38,12 +38,12 @@ public class DefaultI18N implements I18N { public static final String FALLBACK_LANGUAGE = "en"; //use this if the specified language doesn't have the requested message private static DefaultI18N instance; - private static Map sessionInstances = Collections.synchronizedMap(new HashMap<>()); + private static final Map sessionInstances = Collections.synchronizedMap(new HashMap<>()); private static Throwable languageLoadingException = null; private String currentLanguage = DEFAULT_LANGUAGE; - private Map> messagesByLanguage = new HashMap<>(); + private final Map> messagesByLanguage = new HashMap<>(); /** * Get global instance (used in single-session mode) diff --git a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/i18n/I18NProvider.java b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/i18n/I18NProvider.java index 15ddbca99..10a744602 100644 --- a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/i18n/I18NProvider.java +++ b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/i18n/I18NProvider.java @@ -28,7 +28,7 @@ public class I18NProvider { /** * Current I18N instance */ - private static I18N i18n = DefaultI18N.getInstance(); + private static final I18N i18n = DefaultI18N.getInstance(); /** * Get the current/global I18N instance (used in single-session mode) diff --git a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/remote/RemoteReceiverModule.java b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/remote/RemoteReceiverModule.java index dd0ea2bf0..096b56113 100644 --- a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/remote/RemoteReceiverModule.java +++ b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/remote/RemoteReceiverModule.java @@ -43,7 +43,7 @@ import java.util.concurrent.atomic.AtomicBoolean; @Slf4j public class RemoteReceiverModule 
implements UIModule { - private AtomicBoolean enabled = new AtomicBoolean(false); + private final AtomicBoolean enabled = new AtomicBoolean(false); private StatsStorageRouter statsStorage; public void setEnabled(boolean enabled) { diff --git a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/train/TrainModule.java b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/train/TrainModule.java index 975d78a3f..858648018 100644 --- a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/train/TrainModule.java +++ b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/train/TrainModule.java @@ -80,19 +80,19 @@ public class TrainModule implements UIModule { public static final double NAN_REPLACEMENT_VALUE = 0.0; //UI front-end chokes on NaN in JSON public static final int DEFAULT_MAX_CHART_POINTS = 512; private static final DecimalFormat df2 = new DecimalFormat("#.00"); - private static DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + private static final DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); private enum ModelType { MLN, CG, Layer } private final int maxChartPoints; //Technically, the way it's set up: won't exceed 2*maxChartPoints - private Map knownSessionIDs = Collections.synchronizedMap(new HashMap<>()); + private final Map knownSessionIDs = Collections.synchronizedMap(new HashMap<>()); private String currentSessionID; private int currentWorkerIdx; - private Map workerIdxCount = new ConcurrentHashMap<>(); //Key: session ID - private Map> workerIdxToName = new ConcurrentHashMap<>(); //Key: session ID - private Map lastUpdateForSession = new ConcurrentHashMap<>(); + private final Map workerIdxCount = new ConcurrentHashMap<>(); //Key: session ID + private final Map> workerIdxToName = new ConcurrentHashMap<>(); //Key: session ID + private final Map lastUpdateForSession = new ConcurrentHashMap<>(); private final Configuration configuration; @@ -795,7 +795,7 @@ 
public class TrainModule implements UIModule { {i18N.getMessage("train.overview.perftable.examplesPerSec"), ""}}; if (last != null) { - perfInfo[2][1] = String.valueOf(dateFormat.format(new Date(last.getTimeStamp()))); + perfInfo[2][1] = dateFormat.format(new Date(last.getTimeStamp())); perfInfo[3][1] = String.valueOf(last.getTotalMinibatches()); perfInfo[4][1] = String.valueOf(df2.format(last.getMinibatchesPerSecond())); perfInfo[5][1] = String.valueOf(df2.format(last.getExamplesPerSecond())); @@ -1334,7 +1334,7 @@ public class TrainModule implements UIModule { return new MeanMagnitudes(iterCounts, ratioValues, outParamMM, outUpdateMM); } - private static Triple EMPTY_TRIPLE = new Triple<>(new int[0], new float[0], new float[0]); + private static final Triple EMPTY_TRIPLE = new Triple<>(new int[0], new float[0], new float[0]); private static Triple getLayerActivations(int index, TrainModuleUtils.GraphInfo gi, List updates, List iterationCounts) { diff --git a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/train/TrainModuleUtils.java b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/train/TrainModuleUtils.java index f90fb1f24..ff6f00901 100644 --- a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/train/TrainModuleUtils.java +++ b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/train/TrainModuleUtils.java @@ -245,7 +245,7 @@ public class TrainModuleUtils { if (layerName == null) layerName = "layer0"; vertexNames.add(layerName); - originalVertexName.add(String.valueOf("0")); + originalVertexName.add("0"); String layerType = config.getLayer().getClass().getSimpleName().replaceAll("Layer$", ""); layerTypes.add(layerType); diff --git a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/tsne/TsneModule.java b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/tsne/TsneModule.java index b7602ecb4..7e0b79194 100644 --- 
a/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/tsne/TsneModule.java +++ b/cavis-ui/cavis-ui-vertx/src/main/java/org/deeplearning4j/ui/module/tsne/TsneModule.java @@ -41,7 +41,7 @@ import java.util.*; public class TsneModule implements UIModule { private static final String UPLOADED_FILE = "UploadedFile"; - private Map> knownSessionIDs = Collections.synchronizedMap(new LinkedHashMap<>()); + private final Map> knownSessionIDs = Collections.synchronizedMap(new LinkedHashMap<>()); private List uploadedFileLines = null; public TsneModule() { diff --git a/cavis-zoo/cavis-zoo-models/src/main/java/org/deeplearning4j/zoo/ModelMetaData.java b/cavis-zoo/cavis-zoo-models/src/main/java/org/deeplearning4j/zoo/ModelMetaData.java index 397899c29..24e5fbc98 100644 --- a/cavis-zoo/cavis-zoo-models/src/main/java/org/deeplearning4j/zoo/ModelMetaData.java +++ b/cavis-zoo/cavis-zoo-models/src/main/java/org/deeplearning4j/zoo/ModelMetaData.java @@ -37,6 +37,6 @@ public class ModelMetaData { * @return */ public boolean useMDS() { - return inputShape.length > 1 ? 
true : false; + return inputShape.length > 1; } } diff --git a/cavis-zoo/cavis-zoo-models/src/main/java/org/deeplearning4j/zoo/ZooModel.java b/cavis-zoo/cavis-zoo-models/src/main/java/org/deeplearning4j/zoo/ZooModel.java index 977de99ae..958edec33 100644 --- a/cavis-zoo/cavis-zoo-models/src/main/java/org/deeplearning4j/zoo/ZooModel.java +++ b/cavis-zoo/cavis-zoo-models/src/main/java/org/deeplearning4j/zoo/ZooModel.java @@ -71,10 +71,10 @@ public abstract class ZooModel implements InstantiableModel { File cachedFile = new File(rootCacheDir, localFilename); if (!cachedFile.exists()) { - log.info("Downloading model to " + cachedFile.toString()); + log.info("Downloading model to " + cachedFile); FileUtils.copyURLToFile(new URL(remoteUrl), cachedFile,Integer.MAX_VALUE,Integer.MAX_VALUE); } else { - log.info("Using cached model at " + cachedFile.toString()); + log.info("Using cached model at " + cachedFile); } long expectedChecksum = pretrainedChecksum(pretrainedType); diff --git a/cavis-zoo/cavis-zoo-models/src/main/java/org/deeplearning4j/zoo/util/darknet/DarknetLabels.java b/cavis-zoo/cavis-zoo-models/src/main/java/org/deeplearning4j/zoo/util/darknet/DarknetLabels.java index 5d9f91a96..c62e18b32 100644 --- a/cavis-zoo/cavis-zoo-models/src/main/java/org/deeplearning4j/zoo/util/darknet/DarknetLabels.java +++ b/cavis-zoo/cavis-zoo-models/src/main/java/org/deeplearning4j/zoo/util/darknet/DarknetLabels.java @@ -31,8 +31,8 @@ import java.util.List; public class DarknetLabels extends BaseLabels { - private boolean shortNames; - private int numClasses; + private final boolean shortNames; + private final int numClasses; /** Calls {@code this(true)}. 
* Defaults to 1000 clasess diff --git a/cavis-zoo/cavis-zoo-models/src/test/java/org/deeplearning4j/zoo/TestImageNet.java b/cavis-zoo/cavis-zoo-models/src/test/java/org/deeplearning4j/zoo/TestImageNet.java index 04d1f8fce..0e6fdfb38 100644 --- a/cavis-zoo/cavis-zoo-models/src/test/java/org/deeplearning4j/zoo/TestImageNet.java +++ b/cavis-zoo/cavis-zoo-models/src/test/java/org/deeplearning4j/zoo/TestImageNet.java @@ -129,7 +129,7 @@ public class TestImageNet extends BaseDL4JTest { labels = new VOCLabels(); for (DetectedObject obj : objs) { ClassPrediction classPrediction = labels.decodePredictions(obj.getClassPredictions(), 1).get(0).get(0); - log.info(obj.toString() + " " + classPrediction); + log.info(obj + " " + classPrediction); assertEquals("dog", classPrediction.getLabel()); } @@ -155,7 +155,7 @@ public class TestImageNet extends BaseDL4JTest { labels = new COCOLabels(); for (DetectedObject obj : objs) { ClassPrediction classPrediction = labels.decodePredictions(obj.getClassPredictions(), 1).get(0).get(0); - log.info(obj.toString() + " " + classPrediction); + log.info(obj + " " + classPrediction); assertEquals("dog", classPrediction.getLabel()); } diff --git a/cavis-zoo/cavis-zoo-models/src/test/java/org/deeplearning4j/zoo/TestInstantiation.java b/cavis-zoo/cavis-zoo-models/src/test/java/org/deeplearning4j/zoo/TestInstantiation.java index 9abe0b848..f9e8b83a1 100644 --- a/cavis-zoo/cavis-zoo-models/src/test/java/org/deeplearning4j/zoo/TestInstantiation.java +++ b/cavis-zoo/cavis-zoo-models/src/test/java/org/deeplearning4j/zoo/TestInstantiation.java @@ -136,7 +136,7 @@ public class TestInstantiation extends BaseDL4JTest { assertTrue(model.pretrainedAvailable(PretrainedType.IMAGENET)); ComputationGraph initializedModel = (ComputationGraph) model.initPretrained(); - INDArray f = Nd4j.rand(new int[]{1, 3, 224, 224}); + INDArray f = Nd4j.rand(1, 3, 224, 224); INDArray[] result = initializedModel.output(f); assertArrayEquals(result[0].shape(), new long[]{1, 1000}); 
diff --git a/vsconfig.gradle b/vsconfig.gradle index 229600cf4..a247ceb10 100644 --- a/vsconfig.gradle +++ b/vsconfig.gradle @@ -51,7 +51,7 @@ def configureVisualStudio() { return } def vswhereOutput = "${vswherePath} -latest -format json".execute().text.trim() - def vswhereJson = new groovy.json.JsonSlurper().parseText(vswhereOutput); + def vswhereJson = new groovy.json.JsonSlurper().parseText(vswhereOutput) if (vswhereJson.isEmpty()) { println "Visual Studio not found!" return
OriginalRegexBoneConeTone
'  4.25 ''  4.25 '^\\s+|\\s+$'4.25'