diff --git a/arbiter/arbiter-deeplearning4j/src/test/java/org/deeplearning4j/arbiter/TestUtils.java b/arbiter/arbiter-deeplearning4j/src/test/java/org/deeplearning4j/arbiter/TestUtils.java index 6fcffd083..4300b3b32 100644 --- a/arbiter/arbiter-deeplearning4j/src/test/java/org/deeplearning4j/arbiter/TestUtils.java +++ b/arbiter/arbiter-deeplearning4j/src/test/java/org/deeplearning4j/arbiter/TestUtils.java @@ -124,7 +124,6 @@ public class TestUtils { public static INDArray randomOneHot(long examples, long nOut, Random rng){ INDArray arr = Nd4j.create(examples, nOut); for( int i=0; i Integer.MAX_VALUE) + throw new ND4JArraySizeException(); float[] f = new float[(int) len]; NdIndexIterator iterator = new NdIndexIterator('c', arr.shape()); for (int i = 0; i < len; i++) { diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java index a1512139f..73ebf1ccd 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java @@ -320,7 +320,6 @@ public class MultiLayerTest extends BaseDL4JTest { public static float[] asFloat(INDArray arr) { long len = arr.length(); - // FIXME: int cast float[] f = new float[(int) len]; for (int i = 0; i < len; i++) f[i] = arr.getFloat(i); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/updater/TestUpdaters.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/updater/TestUpdaters.java index f5049d211..0a17441bc 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/updater/TestUpdaters.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/updater/TestUpdaters.java @@ -331,7 +331,6 @@ public class TestUpdaters extends BaseDL4JTest { double calculatedByHandMScalar = 0.2; double[] expectedM = Nd4j.ones(1, numParams).mul(calculatedByHandMScalar).data().asDouble(); - // FIXME: int cast double[] actualM = Arrays.copyOfRange(nadamUpdater.getM().data().asDouble(), 0, (int) numParams); for (int i = 0; i < actualM.length; i++) { actualM[i] = Math.round(actualM[i] * 1e2) / 1e2; diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/optimize/solver/TestOptimizers.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/optimize/solver/TestOptimizers.java index 7aa86c0a2..a7ce1622f 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/optimize/solver/TestOptimizers.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/optimize/solver/TestOptimizers.java @@ -48,6 +48,7 @@ import org.nd4j.linalg.api.rng.DefaultRandom; import org.nd4j.linalg.api.rng.Random; import org.nd4j.linalg.dataset.DataSet; import org.nd4j.linalg.dataset.api.iterator.DataSetIterator; +import org.nd4j.linalg.exception.ND4JArraySizeException; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.indexing.conditions.Condition; import org.nd4j.linalg.learning.config.AdaGrad; @@ -664,8 +665,10 @@ public class TestOptimizers extends BaseDL4JTest { double xlm1 = parameters.getDouble(nDims - 2); double gl = 200 * (xl - xlm1 * xlm1); - // FIXME: int cast - gradient.put(0, (int) nDims - 1, gl); + if (nDims - 1 > Integer.MAX_VALUE) { + throw new ND4JArraySizeException(); + } + gradient.put(0, 
(int) (nDims - 1), gl); Gradient g = new DefaultGradient(); g.gradientForVariable().put("W", gradient); this.gradient = g; @@ -865,8 +868,7 @@ public class TestOptimizers extends BaseDL4JTest { @Override public long numParams() { - // FIXME: int cast - return (int) parameters.length(); + return parameters.length(); } @Override diff --git a/deeplearning4j/deeplearning4j-data/deeplearning4j-datavec-iterators/src/main/java/org/deeplearning4j/datasets/datavec/RecordReaderMultiDataSetIterator.java b/deeplearning4j/deeplearning4j-data/deeplearning4j-datavec-iterators/src/main/java/org/deeplearning4j/datasets/datavec/RecordReaderMultiDataSetIterator.java index 89f975293..2f47c2c8b 100644 --- a/deeplearning4j/deeplearning4j-data/deeplearning4j-datavec-iterators/src/main/java/org/deeplearning4j/datasets/datavec/RecordReaderMultiDataSetIterator.java +++ b/deeplearning4j/deeplearning4j-data/deeplearning4j-datavec-iterators/src/main/java/org/deeplearning4j/datasets/datavec/RecordReaderMultiDataSetIterator.java @@ -286,8 +286,8 @@ public class RecordReaderMultiDataSetIterator implements MultiDataSetIterator, S for (INDArray w : exampleData) { val n = w.size(0); - // FIXME: int cast - minExamples = (int) Math.min(minExamples, n); + if (Math.min(minExamples, n) < Integer.MAX_VALUE) + minExamples = (int) Math.min(minExamples, n); } } } diff --git a/deeplearning4j/deeplearning4j-data/deeplearning4j-datavec-iterators/src/main/java/org/deeplearning4j/datasets/datavec/SequenceRecordReaderDataSetIterator.java b/deeplearning4j/deeplearning4j-data/deeplearning4j-datavec-iterators/src/main/java/org/deeplearning4j/datasets/datavec/SequenceRecordReaderDataSetIterator.java index 50401898c..d04ca652e 100644 --- a/deeplearning4j/deeplearning4j-data/deeplearning4j-datavec-iterators/src/main/java/org/deeplearning4j/datasets/datavec/SequenceRecordReaderDataSetIterator.java +++ b/deeplearning4j/deeplearning4j-data/deeplearning4j-datavec-iterators/src/main/java/org/deeplearning4j/datasets/datavec/SequenceRecordReaderDataSetIterator.java @@ -366,7 +366,6 @@ public class SequenceRecordReaderDataSetIterator implements DataSetIterator { DataSet ds = mdsToDataSet(mds); if (totalOutcomes == -1) { - // FIXME: int cast inputColumns = (int) ds.getFeatures().size(1); totalOutcomes = ds.getLabels() == null ?
-1 : (int) ds.getLabels().size(1); } @@ -394,7 +393,6 @@ public class SequenceRecordReaderDataSetIterator implements DataSetIterator { stored = next(); useStored = true; - // FIXME: int cast inputColumns = (int) stored.getFeatures().size(1); totalOutcomes = (int) stored.getLabels().size(1); } diff --git a/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/AbstractDataSetIterator.java b/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/AbstractDataSetIterator.java index d94ee8c97..619b31bdf 100644 --- a/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/AbstractDataSetIterator.java +++ b/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/AbstractDataSetIterator.java @@ -172,7 +172,6 @@ public abstract class AbstractDataSetIterator implements DataSetIterator { Pair pair = iterator.next(); if (numFeatures < 1) { if (pair.getFirst() instanceof INDArray) { - // FIXME: int cast numFeatures = (int) ((INDArray) pair.getFirst()).length(); numLabels = (int) ((INDArray) pair.getSecond()).length(); } else if (pair.getFirst() instanceof float[]) { diff --git a/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/IteratorDataSetIterator.java b/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/IteratorDataSetIterator.java index 2d90db817..23429037b 100644 --- a/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/IteratorDataSetIterator.java +++ b/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/IteratorDataSetIterator.java @@ -95,7 +95,6 @@ public class IteratorDataSetIterator implements DataSetIterator { //Set columns etc for later use DataSet temp = list.get(0); - // FIXME: int cast inputColumns = (int) temp.getFeatures().size(1); totalOutcomes = temp.getLabels() == null ? 
0 : (int) temp.getLabels().size(1); //May be null for layerwise pretraining } diff --git a/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/IteratorMultiDataSetIterator.java b/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/IteratorMultiDataSetIterator.java index d27fbbc98..822701d83 100644 --- a/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/IteratorMultiDataSetIterator.java +++ b/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/IteratorMultiDataSetIterator.java @@ -73,8 +73,7 @@ public class IteratorMultiDataSetIterator implements MultiDataSetIterator { next = iterator.next(); } - // FIXME: int cast - int nExamples = (int) next.getFeatures(0).size(0); + long nExamples = next.getFeatures(0).size(0); if (countSoFar + nExamples <= batchSize) { //Add the entire MultiDataSet as-is list.add(next); @@ -140,7 +139,7 @@ public class IteratorMultiDataSetIterator implements MultiDataSetIterator { return out; } - private static INDArray getRange(INDArray arr, int exampleFrom, int exampleToExclusive) { + private static INDArray getRange(INDArray arr, long exampleFrom, long exampleToExclusive) { if (arr == null) return null; diff --git a/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/file/BaseFileIterator.java b/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/file/BaseFileIterator.java index ea16f8a18..01bd0c2a9 100644 --- a/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/file/BaseFileIterator.java +++ b/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/file/BaseFileIterator.java @@ -134,7 +134,7 @@ public abstract class BaseFileIterator implements Iterator { List remainder = new ArrayList<>(); int soFar = 0; for (T t : toMerge) { - int size = sizeOf(t); + long size = sizeOf(t); if (soFar + size <= batchSize) { correctNum.add(t); @@ -190,7 +190,7 @@ public abstract class BaseFileIterator implements Iterator { protected abstract T load(File f); - protected abstract int sizeOf(T of); + protected abstract long sizeOf(T of); protected abstract List split(T toSplit); diff --git a/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/file/FileDataSetIterator.java b/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/file/FileDataSetIterator.java index 8e6da3b0e..714f1a22c 100644 --- a/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/file/FileDataSetIterator.java +++ b/deeplearning4j/deeplearning4j-data/deeplearning4j-utility-iterators/src/main/java/org/deeplearning4j/datasets/iterator/file/FileDataSetIterator.java @@ -151,7 +151,7 @@ public class FileDataSetIterator extends BaseFileIterator list) { + long[] retVal = new long[list.size()]; + for (int i = 0; i < list.size(); ++i) { + retVal[i] = list.get(i); + } + return retVal; + } /** * Constructor from parsed Keras layer configuration dictionary. 
* @@ -67,9 +75,7 @@ public class KerasReshape extends KerasLayer { if (innerConfig.containsKey(targetShape)) { @SuppressWarnings("unchecked") List<Integer> targetShapeList = (List<Integer>) innerConfig.get(targetShape); - - // FIXME: int cast - this.targetShape = ArrayUtil.toLongArray(ArrayUtil.toArray(targetShapeList)); + this.targetShape = listToLongArray(targetShapeList); } } diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasModelEndToEndTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasModelEndToEndTest.java index e5caa3e3a..874931262 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasModelEndToEndTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasModelEndToEndTest.java @@ -690,13 +690,11 @@ public class KerasModelEndToEndTest extends BaseDL4JTest { INDArray testLabels = Nd4j.create(predictionsDl4j.shape()); if (testLabels.rank() == 2) { for (int i = 0; i < testLabels.size(0); i++) { - // FIXME: int cast testLabels.putScalar(i, r.nextInt((int) testLabels.size(1)), 1.0); } } else if (testLabels.rank() == 3) { for (int i = 0; i < testLabels.size(0); i++) { for (int j = 0; j < testLabels.size(1); j++) { - // FIXME: int cast testLabels.putScalar(i, j, r.nextInt((int) testLabels.size(1)), 1.0); } } diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/inmemory/InMemoryLookupTable.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/inmemory/InMemoryLookupTable.java index 579caa0a3..086540090 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/inmemory/InMemoryLookupTable.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/inmemory/InMemoryLookupTable.java @@ -104,7 +104,7 @@ public class InMemoryLookupTable<T extends SequenceElement> implements WeightLoo } protected void initAdaGrad() { - int[] shape = new int[] {vocab.numWords() + 1, vectorLength}; + long[] shape = new long[] {vocab.numWords() + 1, vectorLength}; int length = ArrayUtil.prod(shape); adaGrad = new AdaGrad(shape, lr.get()); adaGrad.setStateViewArray(Nd4j.zeros(shape).reshape(1, length), shape, Nd4j.order(), true); @@ -124,8 +124,7 @@ public class InMemoryLookupTable<T extends SequenceElement> implements WeightLoo if (adaGrad == null) initAdaGrad(); - // FIXME: int cast - return adaGrad.getGradient(gradient, column, ArrayUtil.toInts(syn0.shape())); + return adaGrad.getGradient(gradient, column, syn0.shape()); } @Override @@ -370,7 +369,6 @@ public class InMemoryLookupTable<T extends SequenceElement> implements WeightLoo else { nextRandom.set(nextRandom.get() * 25214903917L + 11); - // FIXME: int cast int idx = (int) Math.abs((int) (nextRandom.get() >> 16) % table.length()); target = table.getInt(idx); diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/CBOW.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/CBOW.java index 80a2b6565..fdfd91926 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/CBOW.java +++
b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/CBOW.java @@ -33,7 +33,6 @@ import org.deeplearning4j.models.word2vec.wordstore.VocabCache; import org.nd4j.linalg.api.buffer.DataType; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.aggregates.Aggregate; -import org.nd4j.linalg.api.ops.aggregates.impl.AggregateCBOW; import org.nd4j.linalg.api.ops.impl.nlp.CbowRound; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.util.DeviceLocalNDArray; diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/GloVe.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/GloVe.java index 71cf2c693..01bf7affd 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/GloVe.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/learning/impl/elements/GloVe.java @@ -104,11 +104,10 @@ public class GloVe<T extends SequenceElement> implements ElementsLearningAlgorit - weightAdaGrad = new AdaGrad(new int[] {this.vocabCache.numWords() + 1, vectorLength}, learningRate); + weightAdaGrad = new AdaGrad(new long[] {this.vocabCache.numWords() + 1, vectorLength}, learningRate); bias = Nd4j.create(syn0.rows()); - // FIXME: int cast - biasAdaGrad = new AdaGrad(ArrayUtil.toInts(bias.shape()), this.learningRate); + biasAdaGrad = new AdaGrad(bias.shape(), this.learningRate); // maxmemory = Runtime.getRuntime().maxMemory() - (vocabCache.numWords() * vectorLength * 2 * 8); @@ -237,15 +236,13 @@ public class GloVe<T extends SequenceElement> implements ElementsLearningAlgorit private void update(T element1, INDArray wordVector, INDArray contextVector, double gradient) { //gradient for word vectors INDArray grad1 = contextVector.mul(gradient); - // FIXME: int cast - INDArray update = weightAdaGrad.getGradient(grad1, element1.getIndex(), ArrayUtil.toInts(syn0.shape())); + INDArray update = weightAdaGrad.getGradient(grad1, element1.getIndex(), syn0.shape()); //update vector wordVector.subi(update); double w1Bias = bias.getDouble(element1.getIndex()); - // FIXME: int cast - double biasGradient = biasAdaGrad.getGradient(gradient, element1.getIndex(), ArrayUtil.toInts(bias.shape())); + double biasGradient = biasAdaGrad.getGradient(gradient, element1.getIndex(), bias.shape()); double update2 = w1Bias - biasGradient; bias.putScalar(element1.getIndex(), update2); } diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/reader/impl/BasicModelUtils.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/reader/impl/BasicModelUtils.java index 4912a3c47..bc404ac14 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/reader/impl/BasicModelUtils.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/reader/impl/BasicModelUtils.java @@ -358,7 +358,6 @@ public class BasicModelUtils<T extends SequenceElement> implements ModelUtils INDArray sort = sorted[0]; List<String> ret = new ArrayList<>(); - // FIXME: int cast if (top > sort.length()) top = (int) sort.length(); //there will be a redundant word diff --git
a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/glove/GloveWeightLookupTable.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/glove/GloveWeightLookupTable.java index cb6c48872..1cda50100 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/glove/GloveWeightLookupTable.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/glove/GloveWeightLookupTable.java @@ -72,7 +72,7 @@ public class GloveWeightLookupTable<T extends SequenceElement> extends InMemoryL putVector(Word2Vec.DEFAULT_UNK, randUnk); } if (weightAdaGrad == null || reset) { - weightAdaGrad = new AdaGrad(new int[] {vocab.numWords() + 1, vectorLength}, lr.get()); + weightAdaGrad = new AdaGrad(new long[]{vocab.numWords() + 1, vectorLength}, lr.get()); } @@ -81,7 +81,7 @@ public class GloveWeightLookupTable<T extends SequenceElement> extends InMemoryL bias = Nd4j.create(syn0.rows()); if (biasAdaGrad == null || reset) { - biasAdaGrad = new AdaGrad(ArrayUtil.toInts(bias.shape()), lr.get()); + biasAdaGrad = new AdaGrad(bias.shape(), lr.get()); } @@ -140,13 +140,13 @@ public class GloveWeightLookupTable<T extends SequenceElement> extends InMemoryL private void update(T w1, INDArray wordVector, INDArray contextVector, double gradient) { //gradient for word vectors INDArray grad1 = contextVector.mul(gradient); - INDArray update = weightAdaGrad.getGradient(grad1, w1.getIndex(), ArrayUtil.toInts(syn0.shape())); + INDArray update = weightAdaGrad.getGradient(grad1, w1.getIndex(), syn0.shape()); //update vector wordVector.subi(update); double w1Bias = bias.getDouble(w1.getIndex()); - double biasGradient = biasAdaGrad.getGradient(gradient, w1.getIndex(), ArrayUtil.toInts(bias.shape())); + double biasGradient = biasAdaGrad.getGradient(gradient, w1.getIndex(), bias.shape()); double update2 = w1Bias - biasGradient; bias.putScalar(w1.getIndex(), update2); } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/gradientcheck/GradientCheckUtil.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/gradientcheck/GradientCheckUtil.java index 70d106a59..d15e961b7 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/gradientcheck/GradientCheckUtil.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/gradientcheck/GradientCheckUtil.java @@ -20,6 +20,7 @@ import lombok.extern.slf4j.Slf4j; import lombok.val; import org.deeplearning4j.nn.api.Model; import org.nd4j.linalg.api.buffer.DataType; +import org.nd4j.linalg.exception.ND4JArraySizeException; import org.nd4j.linalg.function.Consumer; import org.nd4j.linalg.lossfunctions.impl.LossBinaryXENT; import org.nd4j.linalg.primitives.Pair; @@ -293,7 +294,8 @@ public class GradientCheckUtil { ss = n; } - // FIXME: int cast + if (ss > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); stepSizeForParam.put(paramNames.get(i), (int) ss); } } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/graph/ElementWiseVertex.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/graph/ElementWiseVertex.java index 6033f0030..86c0cdf76 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/graph/ElementWiseVertex.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/graph/ElementWiseVertex.java @@ -140,10 +140,9 @@ public class ElementWiseVertex extends GraphVertex {
//CNN inputs... also check that the channels, width and heights match: InputType.InputTypeConvolutional firstConv = (InputType.InputTypeConvolutional) first; - // FIXME: int cast - val fd = (int) firstConv.getChannels(); - val fw = (int) firstConv.getWidth(); - val fh = (int) firstConv.getHeight(); + val fd = firstConv.getChannels(); + val fw = firstConv.getWidth(); + val fh = firstConv.getHeight(); for (int i = 1; i < vertexInputs.length; i++) { if (vertexInputs[i].getType() != InputType.Type.CNN) { @@ -155,10 +154,9 @@ public class ElementWiseVertex extends GraphVertex { InputType.InputTypeConvolutional otherConv = (InputType.InputTypeConvolutional) vertexInputs[i]; - // FIXME: int cast - val od = (int) otherConv.getChannels(); - val ow = (int) otherConv.getWidth(); - val oh = (int) otherConv.getHeight(); + val od = otherConv.getChannels(); + val ow = otherConv.getWidth(); + val oh = otherConv.getHeight(); if (fd != od || fw != ow || fh != oh) { throw new InvalidInputTypeException( diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/graph/MergeVertex.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/graph/MergeVertex.java index c76df66f6..77dd41c3a 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/graph/MergeVertex.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/graph/MergeVertex.java @@ -94,13 +94,12 @@ public class MergeVertex extends GraphVertex { // CNN3D inputs: check that the channels, width and height match: InputType.InputTypeConvolutional3D firstConv = (InputType.InputTypeConvolutional3D) first; - // FIXME: int cast - val fd = (int) firstConv.getDepth(); - val fw = (int) firstConv.getWidth(); - val fh = (int) firstConv.getHeight(); - val fc = (int) firstConv.getChannels(); + val fd = firstConv.getDepth(); + val fw = firstConv.getWidth(); + val fh = firstConv.getHeight(); + val fc = firstConv.getChannels(); - int depthSum = fc; + long depthSum = fc; InputType.InputTypeConvolutional3D otherConv = null; for (int i = 1; i < vertexInputs.length; i++) { if (vertexInputs[i].getType() != InputType.Type.CNN3D) { @@ -109,10 +108,10 @@ public class MergeVertex extends GraphVertex { } otherConv = (InputType.InputTypeConvolutional3D) vertexInputs[i]; - val od = (int) otherConv.getDepth(); - val ow = (int) otherConv.getWidth(); - val oh = (int) otherConv.getHeight(); - val oc = (int) otherConv.getChannels(); + val od = otherConv.getDepth(); + val ow = otherConv.getWidth(); + val oh = otherConv.getHeight(); + val oc = otherConv.getChannels(); if (fd != od || fw != ow || fh != oh) { throw new InvalidInputTypeException("Invalid input: MergeVertex cannot merge CNN3D activations of different width/heights:" + "first [channels,width,height] = [" + fd + "," + fw + "," + fh @@ -177,12 +176,11 @@ public class MergeVertex extends GraphVertex { //CNN inputs... 
also check that the channels, width and heights match: InputType.InputTypeConvolutional firstConv = (InputType.InputTypeConvolutional) first; - // FIXME: int cast - val fd = (int) firstConv.getChannels(); - val fw = (int) firstConv.getWidth(); - val fh = (int) firstConv.getHeight(); + val fd = firstConv.getChannels(); + val fw = firstConv.getWidth(); + val fh = firstConv.getHeight(); - int depthSum = fd; + long depthSum = fd; for (int i = 1; i < vertexInputs.length; i++) { if (vertexInputs[i].getType() != InputType.Type.CNN) { @@ -194,10 +192,9 @@ public class MergeVertex extends GraphVertex { InputType.InputTypeConvolutional otherConv = (InputType.InputTypeConvolutional) vertexInputs[i]; - // FIXME: int cast - val od = (int) otherConv.getChannels(); - val ow = (int) otherConv.getWidth(); - val oh = (int) otherConv.getHeight(); + val od = otherConv.getChannels(); + val ow = otherConv.getWidth(); + val oh = otherConv.getHeight(); if (fw != ow || fh != oh) { throw new InvalidInputTypeException( diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/graph/PoolHelperVertex.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/graph/PoolHelperVertex.java index 6e2213b4e..c5034129c 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/graph/PoolHelperVertex.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/graph/PoolHelperVertex.java @@ -131,12 +131,11 @@ public class PoolHelperVertex extends GraphVertex { //CNN inputs... also check that the channels, width and heights match: InputType.InputTypeConvolutional firstConv = (InputType.InputTypeConvolutional) first; - // FIXME: int cast - val fd = (int) firstConv.getChannels(); - val fw = (int) firstConv.getWidth(); - val fh = (int) firstConv.getHeight(); + val fd = firstConv.getChannels(); + val fw = firstConv.getWidth(); + val fh = firstConv.getHeight(); - int depthSum = fd; + long depthSum = fd; for (int i = 1; i < vertexInputs.length; i++) { if (vertexInputs[i].getType() != InputType.Type.CNN) { @@ -148,10 +147,9 @@ public class PoolHelperVertex extends GraphVertex { InputType.InputTypeConvolutional otherConv = (InputType.InputTypeConvolutional) vertexInputs[i]; - // FIXME: int cast - int od = (int) otherConv.getChannels(); - int ow = (int) otherConv.getWidth(); - int oh = (int) otherConv.getHeight(); + long od = otherConv.getChannels(); + long ow = otherConv.getWidth(); + long oh = otherConv.getHeight(); if (fw != ow || fh != oh) { throw new InvalidInputTypeException( diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/graph/UnstackVertex.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/graph/UnstackVertex.java index a5d6c72f4..910db350c 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/graph/UnstackVertex.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/graph/UnstackVertex.java @@ -150,12 +150,11 @@ public class UnstackVertex extends GraphVertex { //CNN inputs... 
also check that the channels, width and heights match: InputType.InputTypeConvolutional firstConv = (InputType.InputTypeConvolutional) first; - // FIXME: int cast - val fd = (int) firstConv.getChannels(); - val fw = (int) firstConv.getWidth(); - val fh = (int) firstConv.getHeight(); + val fd = firstConv.getChannels(); + val fw = firstConv.getWidth(); + val fh = firstConv.getHeight(); - int depthSum = fd; + long depthSum = fd; for (int i = 1; i < vertexInputs.length; i++) { if (vertexInputs[i].getType() != InputType.Type.CNN) { @@ -167,10 +166,9 @@ public class UnstackVertex extends GraphVertex { InputType.InputTypeConvolutional otherConv = (InputType.InputTypeConvolutional) vertexInputs[i]; - // FIXME: int cast - val od = (int) otherConv.getChannels(); - val ow = (int) otherConv.getWidth(); - val oh = (int) otherConv.getHeight(); + val od = otherConv.getChannels(); + val ow = otherConv.getWidth(); + val oh = otherConv.getHeight(); if (fw != ow || fh != oh) { throw new InvalidInputTypeException( diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/inputs/InputType.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/inputs/InputType.java index 85da86fa2..047618661 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/inputs/InputType.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/inputs/InputType.java @@ -402,18 +402,17 @@ public abstract class InputType implements Serializable { //Note: ConvolutionalFlat and FeedForward look identical... but either should work OK if using something // like FeedForwardToCnnPreProcessor - // FIXME: int cast switch (inputArray.rank()) { case 2: - return InputType.feedForward((int) inputArray.size(1)); + return InputType.feedForward(inputArray.size(1)); case 3: - return InputType.recurrent((int) inputArray.size(1), (int) inputArray.size(2)); + return InputType.recurrent(inputArray.size(1), inputArray.size(2)); case 4: //Order: [minibatch, channels, height, width] -> [h, w, c] - return InputType.convolutional((int) inputArray.size(2), (int) inputArray.size(3), (int) inputArray.size(1)); + return InputType.convolutional(inputArray.size(2), inputArray.size(3), inputArray.size(1)); case 5: //Order: [minibatch, channels, depth, height, width] -> [d, h, w, c] - return InputType.convolutional3D((int) inputArray.size(2), (int) inputArray.size(3), + return InputType.convolutional3D(inputArray.size(2), inputArray.size(3), (int) inputArray.size(4), (int) inputArray.size(1)); default: throw new IllegalArgumentException( diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Cnn3DLossLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Cnn3DLossLayer.java index b73265763..1bde3d912 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Cnn3DLossLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Cnn3DLossLayer.java @@ -152,17 +152,18 @@ public class Cnn3DLossLayer extends FeedForwardLayer { } @Override - public void setNIn(int nIn){ + public void setNIn(long nIn){ throw new UnsupportedOperationException( "Cnn3DLossLayer has no parameters, thus nIn will always equal nOut."); } @Override - public void setNOut(int nOut){ + public void setNOut(long nOut){ throw new UnsupportedOperationException( "Cnn3DLossLayer has no parameters, thus nIn will always equal
nOut."); } + @Override @SuppressWarnings("unchecked") public Cnn3DLossLayer build() { diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/CnnLossLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/CnnLossLayer.java index 7b25ff797..3bcae0357 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/CnnLossLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/CnnLossLayer.java @@ -145,13 +145,13 @@ public class CnnLossLayer extends FeedForwardLayer { } @Override - public void setNIn(int nIn){ + public void setNIn(long nIn){ throw new UnsupportedOperationException( "This layer has no parameters, thus nIn will always equal nOut."); } @Override - public void setNOut(int nOut){ + public void setNOut(long nOut){ throw new UnsupportedOperationException( "This layer has no parameters, thus nIn will always equal nOut."); } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Convolution1DLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Convolution1DLayer.java index b65f94d00..d4ccc4811 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Convolution1DLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Convolution1DLayer.java @@ -88,7 +88,7 @@ public class Convolution1DLayer extends ConvolutionLayer { //Probably: user did InputType.recurrent(x) without specifying sequence length outLength = -1; } else { - outLength = Convolution1DUtils.getOutputSize((int) inputTsLength, kernelSize[0], stride[0], padding[0], + outLength = Convolution1DUtils.getOutputSize(inputTsLength, kernelSize[0], stride[0], padding[0], convolutionMode, dilation[0]); } return InputType.recurrent(nOut, outLength); diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/FeedForwardLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/FeedForwardLayer.java index b1bd5eacf..026f0d350 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/FeedForwardLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/FeedForwardLayer.java @@ -117,14 +117,14 @@ public abstract class FeedForwardLayer extends BaseLayer { * this is the input channels, otherwise is the previous layer size. * */ - protected int nIn = 0; + protected long nIn = 0; /** * Number of inputs for the layer (usually the size of the last layer).
Note that for Convolutional layers, * this is the input channels, otherwise is the previous layer size. * */ - protected int nOut = 0; + protected long nOut = 0; /** * Number of inputs for the layer (usually the size of the last layer).
Note that for Convolutional layers, @@ -144,8 +144,7 @@ public abstract class FeedForwardLayer extends BaseLayer { * @param nIn Number of inputs for the layer */ public T nIn(long nIn) { - // FIXME: int cast - this.setNIn((int) nIn); + this.setNIn(nIn); return (T) this; } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/InputTypeUtil.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/InputTypeUtil.java index c8ce1ffe0..7c97930ae 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/InputTypeUtil.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/InputTypeUtil.java @@ -41,12 +41,9 @@ public class InputTypeUtil { Class layerClass) { InputType.InputTypeConvolutional i = (InputType.InputTypeConvolutional) inputType; - // FIXME: int cast - val hIn = (int) i.getHeight(); - val wIn = (int) i.getWidth(); + val hIn = i.getHeight(); + val wIn = i.getWidth(); - val inHeight = (int) i.getHeight(); - val inWidth = (int) i.getWidth(); int padH = (padding == null ? 0 : padding[0]); //May be null for ConvolutionMode.Same int padW = (padding == null ? 0 : padding[1]); int kH = kernelSize[0]; @@ -69,13 +66,13 @@ public class InputTypeUtil { } if (convolutionMode == ConvolutionMode.Same) { - int hOut = stride[0] * hIn; - int wOut = stride[1] * wIn; + long hOut = stride[0] * hIn; + long wOut = stride[1] * wIn; return InputType.convolutional(hOut, wOut, outputDepth); } - int hOut = sH * (hIn - 1) + kH - 2 * padH; - int wOut = sW * (wIn - 1) + kW - 2 * padW; + long hOut = sH * (hIn - 1) + kH - 2 * padH; + long wOut = sW * (wIn - 1) + kW - 2 * padW; return InputType.convolutional(hOut, wOut, outputDepth); } @@ -91,10 +88,9 @@ public class InputTypeUtil { InputType.InputTypeConvolutional3D i = (InputType.InputTypeConvolutional3D) inputType; - // FIXME: int cast - val inDepth = (int) i.getDepth(); - val inHeight = (int) i.getHeight(); - val inWidth = (int) i.getWidth(); + long inDepth = i.getDepth(); + long inHeight = i.getHeight(); + long inWidth = i.getWidth(); int padD = (padding == null ? 0 : padding[0]); int padH = (padding == null ? 0 : padding[1]); @@ -211,9 +207,9 @@ public class InputTypeUtil { return InputType.convolutional3D(outD, outH, outW, outputChannels); } - int dOut = (inDepth - kD + 2 * padD) / sD + 1; - int hOut = (inHeight - kH + 2 * padH) / sH + 1; - int wOut = (inWidth - kW + 2 * padW) / sW + 1; + long dOut = (inDepth - kD + 2 * padD) / sD + 1; + long hOut = (inHeight - kH + 2 * padH) / sH + 1; + long wOut = (inWidth - kW + 2 * padW) / sW + 1; return InputType.convolutional3D(dOut, hOut, wOut, outputChannels); } @@ -296,9 +292,8 @@ public class InputTypeUtil { InputType.InputTypeConvolutional i = (InputType.InputTypeConvolutional) inputType; - // FIXME: int cast - val inHeight = (int) i.getHeight(); - val inWidth = (int) i.getWidth(); + long inHeight = i.getHeight(); + long inWidth = i.getWidth(); int padH = (padding == null ? 0 : padding[0]); //May be null for ConvolutionMode.Same int padW = (padding == null ? 
0 : padding[1]); int kH = kernelSize[0]; @@ -379,8 +374,8 @@ public class InputTypeUtil { return InputType.convolutional(outH, outW, outputDepth); } - int hOut = (inHeight - kH + 2 * padH) / sH + 1; - int wOut = (inWidth - kW + 2 * padW) / sW + 1; + long hOut = (inHeight - kH + 2 * padH) / sH + 1; + long wOut = (inWidth - kW + 2 * padW) / sW + 1; return InputType.convolutional(hOut, wOut, outputDepth); } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/RnnLossLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/RnnLossLayer.java index 209c61bca..df0b16e6c 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/RnnLossLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/RnnLossLayer.java @@ -142,13 +142,13 @@ public class RnnLossLayer extends FeedForwardLayer { } @Override - public void setNIn(int nIn){ + public void setNIn(long nIn){ throw new UnsupportedOperationException( "This layer has no parameters, thus nIn will always equal nOut."); } @Override - public void setNOut(int nOut){ + public void setNOut(long nOut){ throw new UnsupportedOperationException( "This layer has no parameters, thus nIn will always equal nOut."); } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Subsampling1DLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Subsampling1DLayer.java index de491290f..4da7ff011 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Subsampling1DLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Subsampling1DLayer.java @@ -82,12 +82,12 @@ public class Subsampling1DLayer extends SubsamplingLayer { } InputType.InputTypeRecurrent r = (InputType.InputTypeRecurrent) inputType; long inputTsLength = r.getTimeSeriesLength(); - int outLength; + long outLength; if (inputTsLength < 0) { //Probably: user did InputType.recurrent(x) without specifying sequence length outLength = -1; } else { - outLength = Convolution1DUtils.getOutputSize((int) inputTsLength, kernelSize[0], stride[0], padding[0], + outLength = Convolution1DUtils.getOutputSize(inputTsLength, kernelSize[0], stride[0], padding[0], convolutionMode, dilation[0]); } return InputType.recurrent(r.getSize(), outLength); diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Subsampling3DLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Subsampling3DLayer.java index 0d0ccba9b..2fcc345a1 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Subsampling3DLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Subsampling3DLayer.java @@ -32,6 +32,7 @@ import org.deeplearning4j.util.ValidationUtils; import org.nd4j.base.Preconditions; import org.nd4j.linalg.api.buffer.DataType; import org.nd4j.linalg.api.ndarray.INDArray; +import org.nd4j.linalg.exception.ND4JArraySizeException; import org.nd4j.linalg.learning.regularization.Regularization; import java.util.Collection; @@ -138,9 +139,11 @@ public class Subsampling3DLayer extends NoParamLayer { + "\"): Expected CNN input, got " + inputType); } - // FIXME: int cast + long inChannels = ((InputType.InputTypeConvolutional3D) inputType).getChannels(); + if (inChannels > Integer.MAX_VALUE) + throw new 
ND4JArraySizeException(); return InputTypeUtil.getOutputTypeCnn3DLayers(inputType, kernelSize, stride, padding, new int[] {1, 1, 1}, // no dilation - convolutionMode, (int) ((InputType.InputTypeConvolutional3D) inputType).getChannels(), + convolutionMode, (int) inChannels, layerIndex, getLayerName(), Subsampling3DLayer.class); } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Upsampling3D.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Upsampling3D.java index d142d52a9..24db83bd3 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Upsampling3D.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Upsampling3D.java @@ -83,11 +83,10 @@ public class Upsampling3D extends BaseUpsamplingLayer { } InputType.InputTypeConvolutional3D i = (InputType.InputTypeConvolutional3D) inputType; - // FIXME: int cast - int inHeight = (int) i.getHeight(); - int inWidth = (int) i.getWidth(); - int inDepth = (int) i.getDepth(); - int inChannels = (int) i.getChannels(); + long inHeight = i.getHeight(); + long inWidth = i.getWidth(); + long inDepth = i.getDepth(); + long inChannels = i.getChannels(); return InputType.convolutional3D(size[0] * inDepth, size[1] * inHeight, size[2] * inWidth, inChannels); } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/ocnn/OCNNOutputLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/ocnn/OCNNOutputLayer.java index f76cb0dad..539289eca 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/ocnn/OCNNOutputLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/ocnn/OCNNOutputLayer.java @@ -259,7 +259,7 @@ public class OCNNOutputLayer extends BaseOutputLayer { } @Override - public void setNOut(int nOut){ + public void setNOut(long nOut){ throw new UnsupportedOperationException( "Unable to specify number of outputs with ocnn. Outputs are fixed to 1."); } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/graph/ComputationGraph.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/graph/ComputationGraph.java index 7c292bafa..32d7bfb73 100755 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/graph/ComputationGraph.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/graph/ComputationGraph.java @@ -79,6 +79,7 @@ import org.nd4j.linalg.dataset.api.DataSetUtil; import org.nd4j.linalg.dataset.api.MultiDataSet; import org.nd4j.linalg.dataset.api.iterator.DataSetIterator; import org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator; +import org.nd4j.linalg.exception.ND4JArraySizeException; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.heartbeat.Heartbeat; import org.nd4j.linalg.heartbeat.reports.Environment; @@ -3329,7 +3330,6 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork { //In 99+% of cases, the input and labels dimension 0 size should be identical //The only real exceptions: space to batch, and batch to space layers //In those cases, we should base it on the labels size, as this impacts gradient calculation - // FIXME: int cast return labels == null ?
(int) inputs[0].size(0) : (int)labels[0].size(0); } @@ -3653,7 +3653,8 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork { if (endTimeIdx > timeSeriesLength) endTimeIdx = timeSeriesLength; - // FIXME: int cast + if (startTimeIdx > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); List list = getSubsetsForTbptt((int) startTimeIdx, endTimeIdx, inputs, labels, featureMasks, labelMasks); setInputs(list.get(0)); @@ -3799,9 +3800,10 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork { } } - // FIXME: int cast + if (minibatchSize > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); Pair outPair = - current.feedForwardMaskArrays(inputMasks, maskState, (int) minibatchSize); + current.feedForwardMaskArrays(inputMasks, maskState, (int)minibatchSize); map.put(topologicalOrder[i], outPair); } } @@ -4664,7 +4666,7 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork { * @param layer Index of the layer to get the size of. Must be in range 0 to nLayers-1 inclusive * @return Size of the layer */ - public int layerSize(int layer) { + public long layerSize(int layer) { if (layer < 0 || layer > layers.length) { throw new IllegalArgumentException("Invalid layer index: " + layer + ". Layer index must be between 0 and " + (layers.length - 1) + " inclusive"); @@ -4683,7 +4685,7 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork { * @param layer Index of the layer to get the size of. Must be in range 0 to nLayers-1 inclusive * @return Size of the layer */ - public int layerInputSize(int layer) { + public long layerInputSize(int layer) { if (layer < 0 || layer > layers.length) { throw new IllegalArgumentException("Invalid layer index: " + layer + ". Layer index must be between 0 and " + (layers.length - 1) + " inclusive"); @@ -4701,7 +4703,7 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork { * @param layerName Name of the layer to get the size of * @return Size of the layer */ - public int layerSize(String layerName) { + public long layerSize(String layerName) { Layer l = getLayer(layerName); if(l == null){ throw new IllegalArgumentException("No layer with name \"" + layerName + "\" exists"); @@ -4712,8 +4714,7 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork { } FeedForwardLayer ffl = (FeedForwardLayer) conf; - // FIXME: int cast - return (int) ffl.getNOut(); + return ffl.getNOut(); } /** @@ -4727,7 +4728,7 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork { * @param layerName Name of the layer to get the size of * @return Size of the layer */ - public int layerInputSize(String layerName) { + public long layerInputSize(String layerName) { Layer l = getLayer(layerName); if(l == null){ throw new IllegalArgumentException("No layer with name \"" + layerName + "\" exists"); @@ -4738,8 +4739,7 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork { } FeedForwardLayer ffl = (FeedForwardLayer) conf; - // FIXME: int cast - return (int) ffl.getNIn(); + return ffl.getNIn(); } /** diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/UnstackVertex.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/UnstackVertex.java index 4ca04c418..009505057 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/UnstackVertex.java +++ 
b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/UnstackVertex.java @@ -43,10 +43,10 @@ import java.util.Arrays; * @author Justin Long (crockpotveggies) */ public class UnstackVertex extends BaseGraphVertex { - private int from; + private long from; private int stackSize; private long forwardShape[]; - private int step; + private long step; public UnstackVertex(ComputationGraph graph, String name, int vertexIndex, int from, int stackSize, DataType dataType) { this(graph, name, vertexIndex, null, null, from, stackSize, dataType); @@ -77,10 +77,9 @@ public class UnstackVertex extends BaseGraphVertex { // once we know the inputs, save the shape and interval size for doBackward this.forwardShape = Arrays.copyOf(inputs[0].shape(), inputs[0].rank()); - // FIXME: int cast - this.step = (int) inputs[0].size(0) / stackSize; - int start = from * step; - int end = (from + 1) * step; + this.step = inputs[0].size(0) / stackSize; + long start = from * step; + long end = (from + 1) * step; INDArray ret; switch (inputs[0].rank()) { //TODO remove the dups here if/when possible (gradient checks must pass) @@ -108,8 +107,8 @@ public class UnstackVertex extends BaseGraphVertex { throw new IllegalStateException("Cannot do backward pass: error not set"); INDArray out = workspaceMgr.create(ArrayType.ACTIVATION_GRAD, inputs[0].dataType(), forwardShape); - int start = from * step; - int end = (from + 1) * step; + long start = from * step; + long end = (from + 1) * step; switch (forwardShape.length) { case 2: @@ -154,8 +153,8 @@ public class UnstackVertex extends BaseGraphVertex { } //Mask arrays are either 1d (column vector) or 2d... - int start = from * minibatchSize; - int end = (from + 1) * minibatchSize; + long start = from * minibatchSize; + long end = (from + 1) * minibatchSize; INDArray outMask = maskArrays[0].get(NDArrayIndex.interval(start, end), NDArrayIndex.all()); return new Pair<>(outMask, currentMaskState); } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/rnn/LastTimeStepVertex.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/rnn/LastTimeStepVertex.java index a1dd47469..797676441 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/rnn/LastTimeStepVertex.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/graph/vertex/impl/rnn/LastTimeStepVertex.java @@ -87,9 +87,8 @@ public class LastTimeStepVertex extends BaseGraphVertex { INDArray out; if (mask == null) { - // FIXME: int cast //No mask array -> extract same (last) column for all - int lastTS = (int) inputs[0].size(2) - 1; + long lastTS = inputs[0].size(2) - 1; out = inputs[0].get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(lastTS)); out = workspaceMgr.dup(ArrayType.ACTIVATIONS, out); fwdPassTimeSteps = null; //Null -> last time step for all examples @@ -99,8 +98,7 @@ public class LastTimeStepVertex extends BaseGraphVertex { //Want the index of the last non-zero entry in the mask array. 
//Check a little here by using mulRowVector([0,1,2,3,...]) and argmax - // FIXME: int cast - int maxTsLength = (int) fwdPassShape[2]; + long maxTsLength = fwdPassShape[2]; INDArray row = Nd4j.linspace(0, maxTsLength - 1, maxTsLength, mask.dataType()); INDArray temp = mask.mulRowVector(row); INDArray lastElementIdx = Nd4j.argMax(temp, 1); diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/AbstractLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/AbstractLayer.java index fdf12d2f8..750bca77d 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/AbstractLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/AbstractLayer.java @@ -346,7 +346,6 @@ public abstract class AbstractLayer f mmul here, then reshape to 6d in f order INDArray epsNext2d = w2d.mmul(delta2d); //TODO can we reuse im2col array instead of allocating new result array? - INDArray eps6d = Shape.newShapeNoCopy(epsNext2d, new int[] {kW, kH, inDepth, outW, outH, miniBatch}, true); + INDArray eps6d = Shape.newShapeNoCopy(epsNext2d, new long[] {kW, kH, inDepth, outW, outH, miniBatch}, true); //Calculate epsilonNext by doing im2col reduction. //Current col2im implementation expects input with order: [miniBatch,channels,kH,kW,outH,outW] @@ -282,7 +282,7 @@ public class ConvolutionLayer extends BaseLayer Integer.MAX_VALUE || input.size(3) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); pad = ConvolutionUtils.getSameModeTopLeftPadding(outSize, new int[] {(int) input.size(2), (int) input.size(3)}, kernel, strides, dilation ); } else { @@ -397,10 +397,12 @@ public class ConvolutionLayer extends BaseLayer Integer.MAX_VALUE || kW > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); + Convolution.im2col(im2ColIn, (int)kH, (int)kW, strides[0], strides[1], pad[0], pad[1], dilation[0], dilation[1], convolutionMode == ConvolutionMode.Same, col2); - INDArray im2col2d = Shape.newShapeNoCopy(col, new int[] {miniBatch * outH * outW, inDepth * kH * kW}, false); + INDArray im2col2d = Shape.newShapeNoCopy(col, new long[] {miniBatch * outH * outW, inDepth * kH * kW}, false); //Current order of weights: [depthOut,depthIn,kH,kW], c order //Permute to give [kW,kH,depthIn,depthOut], f order @@ -418,7 +420,7 @@ public class ConvolutionLayer extends BaseLayer Integer.MAX_VALUE || input.size(3) > Integer.MAX_VALUE) { + throw new ND4JArraySizeException(); + } pad = ConvolutionUtils.getSameModeTopLeftPadding( outSize, new int[]{(int) input.size(2), (int) input.size(3)}, kernel, strides, dilation); } else { @@ -205,8 +206,8 @@ public class DepthwiseConvolution2DLayer extends ConvolutionLayer { outSize = ConvolutionUtils.getOutputSize(input, kernel, strides, pad, convolutionMode, dilation); } - int outH = outSize[0]; - int outW = outSize[1]; + long outH = outSize[0]; + long outW = outSize[1]; val miniBatch = input.size(0); INDArray output = workspaceMgr.create( diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/SeparableConvolution2DLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/SeparableConvolution2DLayer.java index 422a253d2..9808b3a24 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/SeparableConvolution2DLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/SeparableConvolution2DLayer.java @@ 
-33,6 +33,7 @@ import org.nd4j.linalg.api.memory.MemoryWorkspace; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.CustomOp; import org.nd4j.linalg.api.ops.DynamicCustomOp; +import org.nd4j.linalg.exception.ND4JArraySizeException; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.primitives.Pair; import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; @@ -90,10 +91,9 @@ public class SeparableConvolution2DLayer extends ConvolutionLayer { INDArray input = this.input.castTo(dataType); - // FIXME: int cast - int miniBatch = (int) input.size(0); - int inH = (int) input.size(2); - int inW = (int) input.size(3); + long miniBatch = input.size(0); + int inH = (int)input.size(2); + int inW = (int)input.size(3); int inDepth = (int) depthWiseWeights.size(1); int kH = (int) depthWiseWeights.size(2); @@ -194,9 +194,8 @@ public class SeparableConvolution2DLayer extends ConvolutionLayer { + " " + layerId()); } - // FIXME: int cast - int inDepth = (int) depthWiseWeights.size(1); - int outDepth = (int) pointWiseWeights.size(0); + long inDepth = depthWiseWeights.size(1); + long outDepth = pointWiseWeights.size(0); if (input.size(1) != inDepth) { String layerName = conf.getLayer().getLayerName(); @@ -220,7 +219,9 @@ public class SeparableConvolution2DLayer extends ConvolutionLayer { if (convolutionMode == ConvolutionMode.Same) { outSize = ConvolutionUtils.getOutputSize(input, kernel, strides, null, convolutionMode, dilation); //Also performs validation - // FIXME: int cast + if (input.size(2) > Integer.MAX_VALUE || input.size(3) > Integer.MAX_VALUE) { + throw new ND4JArraySizeException(); + } pad = ConvolutionUtils.getSameModeTopLeftPadding(outSize, new int[] {(int) input.size(2), (int) input.size(3)}, kernel, strides, dilation ); } else { diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/SpaceToDepth.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/SpaceToDepth.java index b726ea87c..50ea9c9e3 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/SpaceToDepth.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/SpaceToDepth.java @@ -75,11 +75,10 @@ public class SpaceToDepth extends AbstractLayer backpropGradient(INDArray epsilon, LayerWorkspaceMgr workspaceMgr) { assertInputSet(true); - // FIXME: int cast - int miniBatch = (int) input.size(0); - int inDepth = (int) input.size(1); - int inH = (int) input.size(2); - int inW = (int) input.size(3); + long miniBatch = input.size(0); + long inDepth = input.size(1); + long inH = input.size(2); + long inW = input.size(3); INDArray input = this.input.castTo(dataType); //No-op if already correct type @@ -122,17 +121,16 @@ public class SpaceToDepth extends AbstractLayer backpropGradient(INDArray epsilon, LayerWorkspaceMgr workspaceMgr) { assertInputSet(true); - // FIXME: int cast - int miniBatch = (int) input.size(0); - int inDepth = (int) input.size(1); - int inH = (int) input.size(2); - int inW = (int) input.size(3); + long miniBatch = input.size(0); + long inDepth = input.size(1); + long inH = input.size(2); + long inW = input.size(3); INDArray reshapedEpsilon = workspaceMgr.createUninitialized(ArrayType.ACTIVATION_GRAD, epsilon.dataType(), new long[]{miniBatch, inDepth, inH, inW}, 'c'); @@ -106,15 +105,14 @@ public class Upsampling2D extends AbstractLayer Integer.MAX_VALUE) + throw new ND4JArraySizeException(); int[]
indexes = new int[(int) input.length()]; for (int i = 0; i < indexes.length; i++) { indexes[i] = input.getInt(i, 0); diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/mkldnn/MKLDNNBatchNormHelper.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/mkldnn/MKLDNNBatchNormHelper.java index 1dcc556b6..cec3b5297 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/mkldnn/MKLDNNBatchNormHelper.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/mkldnn/MKLDNNBatchNormHelper.java @@ -56,7 +56,7 @@ public class MKLDNNBatchNormHelper implements BatchNormalizationHelper { } @Override - public Pair backpropGradient(INDArray input, INDArray epsilon, int[] shape, INDArray gamma, + public Pair backpropGradient(INDArray input, INDArray epsilon, long[] shape, INDArray gamma, INDArray dGammaView, INDArray dBetaView, double eps, LayerWorkspaceMgr workspaceMgr) { //2019-02-14: Backprop disabled pending fixes. https://github.com/deeplearning4j/deeplearning4j/issues/7166 //Also no MKL-DNN implemented for backprop anyway @@ -82,7 +82,7 @@ public class MKLDNNBatchNormHelper implements BatchNormalizationHelper { } @Override - public INDArray preOutput(INDArray x, boolean training, int[] shape, INDArray gamma, INDArray beta, INDArray mean, INDArray var, + public INDArray preOutput(INDArray x, boolean training, long[] shape, INDArray gamma, INDArray beta, INDArray mean, INDArray var, double decay, double eps, LayerWorkspaceMgr workspaceMgr) { if(x.dataType() != DataType.FLOAT) return null; //MKL-DNN only supports float diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/normalization/BatchNormalization.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/normalization/BatchNormalization.java index d5a4c75af..7f5013b1c 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/normalization/BatchNormalization.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/normalization/BatchNormalization.java @@ -152,10 +152,9 @@ public class BatchNormalization extends BaseLayer ret = null; try { - ret = helper.backpropGradient(in, eps, ArrayUtil.toInts(shape), gamma, dGammaView, dBetaView, + ret = helper.backpropGradient(in, eps, shape, gamma, dGammaView, dBetaView, layerConf.getEps(), workspaceMgr); } catch (ND4JOpProfilerException e){ throw e; //NaN panic etc for debugging @@ -438,7 +437,6 @@ public class BatchNormalization extends BaseLayer backpropGradient(INDArray input, INDArray epsilon, int[] shape, INDArray gamma, + Pair backpropGradient(INDArray input, INDArray epsilon, long[] shape, INDArray gamma, INDArray dGammaView, INDArray dBetaView, double eps, LayerWorkspaceMgr workspaceMgr); - INDArray preOutput(INDArray x, boolean training, int[] shape, INDArray gamma, INDArray beta, INDArray mean, + INDArray preOutput(INDArray x, boolean training, long[] shape, INDArray gamma, INDArray beta, INDArray mean, INDArray var, double decay, double eps, LayerWorkspaceMgr workspaceMgr); INDArray getMeanCache(DataType dataType); diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/objdetect/Yolo2OutputLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/objdetect/Yolo2OutputLayer.java index bb551cd3f..3e46fd044 100644 --- 
a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/objdetect/Yolo2OutputLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/objdetect/Yolo2OutputLayer.java @@ -114,10 +114,9 @@ public class Yolo2OutputLayer extends AbstractLayer C = (input.size(1)/b) - 5 + long mb = input.size(0); + long h = input.size(2); + long w = input.size(3); + long b = boundingBoxPriors.size(0); + long c = input.size(1)/b-5; //input.size(1) == b * (5 + C) -> C = (input.size(1)/b) - 5 INDArray output = layerWorkspaceMgr.create(ArrayType.ACTIVATIONS, input.dataType(), input.shape(), 'c'); INDArray output5 = output.reshape('c', mb, b, 5+c, h, w); @@ -77,7 +76,7 @@ public class YoloUtils { //TODO OPTIMIZE? INDArray inputClassesPreSoftmax = input5.get(all(), all(), interval(5, 5+c), all(), all()); //Shape: [minibatch, C, H, W] INDArray classPredictionsPreSoftmax2d = inputClassesPreSoftmax.permute(0,1,3,4,2) //[minibatch, b, c, h, w] To [mb, b, h, w, c] - .dup('c').reshape('c', new int[]{mb*b*h*w, c}); + .dup('c').reshape('c', new long[]{mb*b*h*w, c}); Transforms.softmax(classPredictionsPreSoftmax2d, false); INDArray postSoftmax5d = classPredictionsPreSoftmax2d.reshape('c', mb, b, h, w, c ).permute(0, 1, 4, 2, 3); @@ -173,13 +172,12 @@ public class YoloUtils { throw new IllegalStateException("Invalid confidence threshold: must be in range [0,1]. Got: " + confThreshold); } - // FIXME: int cast //Activations format: [mb, 5b+c, h, w] - int mb = (int) networkOutput.size(0); - int h = (int) networkOutput.size(2); - int w = (int) networkOutput.size(3); - int b = (int) boundingBoxPriors.size(0); - int c = (int) (networkOutput.size(1)/b)-5; //input.size(1) == b * (5 + C) -> C = (input.size(1)/b) - 5 + long mb = networkOutput.size(0); + long h = networkOutput.size(2); + long w = networkOutput.size(3); + long b = boundingBoxPriors.size(0); + long c = (networkOutput.size(1)/b)-5; //input.size(1) == b * (5 + C) -> C = (input.size(1)/b) - 5 //Reshape from [minibatch, B*(5+C), H, W] to [minibatch, B, 5+C, H, W] to [minibatch, B, 5, H, W] INDArray output5 = networkOutput.dup('c').reshape(mb, b, 5+c, h, w); diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/LSTMHelpers.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/LSTMHelpers.java index 27b980521..86079aead 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/LSTMHelpers.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/LSTMHelpers.java @@ -40,6 +40,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.impl.transforms.pairwise.arithmetic.MulOp; import org.nd4j.linalg.api.ops.impl.transforms.same.TimesOneMinus; import org.nd4j.linalg.api.shape.Shape; +import org.nd4j.linalg.exception.ND4JArraySizeException; import org.nd4j.linalg.exception.ND4JOpProfilerException; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.indexing.NDArrayIndex; @@ -113,7 +114,9 @@ public class LSTMHelpers { input = input.castTo(inputWeights.dataType()); //No-op if already correct dtype - // FIXME + if ((!is2dInput && (input.size(2) > Integer.MAX_VALUE)) || + recurrentWeights.size(0) > Integer.MAX_VALUE || input.size(0) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); int timeSeriesLength = (int) (is2dInput ? 
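The YoloUtils hunks above keep the activation-shape bookkeeping in long arithmetic end to end. A self-contained illustration with invented sizes (32 examples, a 13x13 grid, 5 boxes, and 80 classes are ours, purely for demonstration):

```java
import java.util.Arrays;

public class YoloShapeDemo {
    public static void main(String[] args) {
        long mb = 32, h = 13, w = 13, b = 5, numClasses = 80; // illustrative only
        long depth = b * (5 + numClasses);  // networkOutput.size(1) == b * (5 + C)
        long c = depth / b - 5;             // recovers C = 80, no int narrowing
        long[] reshapeTarget = {mb, b, 5 + c, h, w};
        System.out.println(Arrays.toString(reshapeTarget)); // [32, 5, 85, 13, 13]
    }
}
```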
1 : input.size(2)); int hiddenLayerSize = (int) recurrentWeights.size(0); int miniBatchSize = (int) input.size(0); @@ -550,7 +553,8 @@ public class LSTMHelpers { for (long iTimeIndex = timeSeriesLength - 1; iTimeIndex >= endIdx; iTimeIndex--) { try(MemoryWorkspace ws = workspaceMgr.notifyScopeEntered(ArrayType.RNN_BP_LOOP_WORKING_MEM)) { - // FIXME: int cast + if (iTimeIndex > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); int time = (int) iTimeIndex; int inext = 1; @@ -574,8 +578,6 @@ public class LSTMHelpers { (iTimeIndex == 0 ? fwdPass.prevAct : fwdPass.fwdPassOutputAsArrays[(int) (time - inext)]); INDArray currMemCellState = fwdPass.memCellState[(int) time]; - - // FIXME: int cast //LSTM unit output errors (dL/d(a_out)); not to be confused with \delta=dL/d(z_out) INDArray epsilonSlice = (is2dInput ? epsilon : epsilon.tensorAlongDimension((int) time, 1, 0)); //(w^{L+1}*(delta^{(L+1)t})^T)^T or equiv. diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/RnnLossLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/RnnLossLayer.java index 82a599acb..dd1b03d63 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/RnnLossLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/recurrent/RnnLossLayer.java @@ -89,8 +89,7 @@ public class RnnLossLayer extends BaseLayer Integer.MAX_VALUE) + throw new ND4JArraySizeException(); return (int) input.size(0); } @@ -862,7 +864,8 @@ public class VariationalAutoencoder implements Layer { @Override public int getInputMiniBatchSize() { - // FIXME: int cast + if (input.size(0) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); return (int) input.size(0); } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/multilayer/MultiLayerNetwork.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/multilayer/MultiLayerNetwork.java index 24de38f56..dcf82a20a 100755 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/multilayer/MultiLayerNetwork.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/multilayer/MultiLayerNetwork.java @@ -75,6 +75,7 @@ import org.nd4j.linalg.dataset.DataSet; import org.nd4j.linalg.dataset.api.MultiDataSet; import org.nd4j.linalg.dataset.api.iterator.DataSetIterator; import org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator; +import org.nd4j.linalg.exception.ND4JArraySizeException; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.heartbeat.Heartbeat; import org.nd4j.linalg.heartbeat.reports.Environment; @@ -425,7 +426,8 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, Neura try(MemoryWorkspace ws = workspaceMgr.notifyScopeEntered(ArrayType.FF_WORKING_MEM)) { if (layerWiseConfigurations.getInputPreProcess(layerIdx) != null) { - // FIXME: int cast + if (input.size(0) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); outputOfPrevLayer = layerWiseConfigurations.getInputPreProcess(layerIdx).preProcess(outputOfPrevLayer, (int) input.size(0), LayerWorkspaceMgr.noWorkspaces(helperWorkspaces)); } @@ -439,7 +441,8 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, Neura //In 99+% of cases, the input and labels dimension 0 size should be identical //The only real exceptions: space to batch, and batch to space layers //In those cases, we should base it on the labels size, as this 
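In the LSTM backprop loop above, the loop index stays `long` and is narrowed once per iteration behind a range check. The conversion pattern in isolation (a sketch, not the full LSTMHelpers loop body):

```java
import org.nd4j.linalg.exception.ND4JArraySizeException;

public class TimeLoopSketch {
    static void backwardOverTime(long timeSeriesLength) {
        for (long iTimeIndex = timeSeriesLength - 1; iTimeIndex >= 0; iTimeIndex--) {
            if (iTimeIndex > Integer.MAX_VALUE)
                throw new ND4JArraySizeException();
            int time = (int) iTimeIndex; // safe after the guard; indexes per-step views
            // ... gather the time-step slices for step 'time' here ...
        }
    }
}
```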
impacts gradient calculation - // FIXME: int cast + if (input.size(0) > Integer.MAX_VALUE || (labels != null && labels.size(0) > Integer.MAX_VALUE)) + throw new ND4JArraySizeException(); return labels == null ? (int) input.size(0) : (int)labels.size(0); } @@ -2074,7 +2077,8 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, Neura if (endTimeIdx > timeSeriesLength) endTimeIdx = timeSeriesLength; - // FIXME: int cast + if (startTimeIdx > Integer.MAX_VALUE || endTimeIdx > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); INDArray[] subsets = getSubsetsForTbptt((int) startTimeIdx, (int) endTimeIdx, input, labels, featuresMaskArray, labelsMaskArray); @@ -2211,7 +2215,9 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, Neura public int[] predict(INDArray d) { INDArray output = output(d, Layer.TrainingMode.TEST); - // FIXME: int cast + if (d.size(0) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); + int[] ret = new int[(int) d.size(0)]; if (d.isRowVectorOrScalar()) ret[0] = Nd4j.getBlasWrapper().iamax(output); @@ -2335,7 +2341,8 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, Neura org.deeplearning4j.nn.conf.layers.OutputLayer layerConf = (org.deeplearning4j.nn.conf.layers.OutputLayer) getOutputLayer().conf().getLayer(); - // FIXME: int cast + if (layerConf.getNOut() > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); fit(examples, FeatureUtil.toOutcomeMatrix(labels, (int) layerConf.getNOut())); } @@ -2584,7 +2591,8 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, Neura INDArray inputToOutputLayer = outputOfLayerDetached(training, FwdPassType.STANDARD,layers.length-2, data.getFeatures(), data.getFeaturesMaskArray(), data.getLabelsMaskArray(), null); - // FIXME: int cast + if (data.getFeatures().size(0) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); IOutputLayer ol = (IOutputLayer) getOutputLayer(); if (getLayerWiseConfigurations().getInputPreProcess(layers.length - 1) != null) { inputToOutputLayer = getLayerWiseConfigurations().getInputPreProcess(layers.length - 1) @@ -2647,7 +2655,8 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, Neura IOutputLayer ol = (IOutputLayer) getOutputLayer(); if(layerWiseConfigurations.getInputPreProcess(layers.length-1) != null){ - // FIXME: int cast + if (data.getFeatures().size(0) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); inputLast = layerWiseConfigurations.getInputPreProcess(layers.length-1).preProcess(inputLast, (int) data.getFeatures().size(0), mgr); } @@ -2811,7 +2820,8 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, Neura throw new IllegalArgumentException( "Invalid input: length 0 (shape: " + Arrays.toString(input.shape()) + ")"); - // FIXME: int cast + if (input.size(0) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); setInputMiniBatchSize((int) input.size(0)); } } @@ -3086,7 +3096,8 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, Neura if(!conf().isMiniBatch()) return 1; - // FIXME: int cast + if (input.size(0) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); return (int) input.size(0); } @@ -3256,7 +3267,8 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, Neura public void setLayerMaskArrays(INDArray featuresMaskArray, INDArray labelsMaskArray) { if (featuresMaskArray != null) { - // FIXME: int cast + if (featuresMaskArray.size(0) > Integer.MAX_VALUE) + throw new
ND4JArraySizeException(); //New approach: use feedForwardMaskArray method feedForwardMaskArray(featuresMaskArray, MaskState.Active, (int) featuresMaskArray.size(0)); @@ -3438,7 +3450,8 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, Neura val startTimeIdx = i * fwdLen; val endTimeIdx = Math.min(startTimeIdx + fwdLen, tsLength); - // FIXME: int cast + if (endTimeIdx > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); INDArray[] subsets = getSubsetsForTbptt(startTimeIdx, (int) endTimeIdx, features, labels, fMask, lMask); setLayerMaskArrays(subsets[2], subsets[3]); @@ -3943,7 +3956,8 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, Neura } FeedForwardLayer ffl = (FeedForwardLayer) conf; - // FIXME: int cast + if (ffl.getNOut() > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); return (int) ffl.getNOut(); } @@ -3969,7 +3983,8 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, Neura } FeedForwardLayer ffl = (FeedForwardLayer) conf; - // FIXME: int cast + if (ffl.getNIn() > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); return (int) ffl.getNIn(); } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/params/VariationalAutoencoderParamInitializer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/params/VariationalAutoencoderParamInitializer.java index f30ec84ae..59ecf39f0 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/params/VariationalAutoencoderParamInitializer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/params/VariationalAutoencoderParamInitializer.java @@ -22,6 +22,7 @@ import org.deeplearning4j.nn.conf.layers.Layer; import org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder; import org.deeplearning4j.nn.weights.IWeightInit; import org.nd4j.linalg.api.ndarray.INDArray; +import org.nd4j.linalg.exception.ND4JArraySizeException; import org.nd4j.linalg.indexing.NDArrayIndex; import java.util.ArrayList; @@ -108,7 +109,8 @@ public class VariationalAutoencoderParamInitializer extends DefaultParamInitiali } //Between last decoder layer and parameters for p(x|z): - // FIXME: int cast + if (nIn > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); val nDistributionParams = layer.getOutputDistribution().distributionInputSize((int) nIn); val lastDecLayerSize = decoderLayerSizes[decoderLayerSizes.length - 1]; paramCount += (lastDecLayerSize + 1) * nDistributionParams; @@ -294,7 +296,8 @@ public class VariationalAutoencoderParamInitializer extends DefaultParamInitiali } //Finally, p(x|z): - // FIXME: int cast + if (nIn > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); int nDistributionParams = layer.getOutputDistribution().distributionInputSize((int) nIn); int pxzWeightCount = decoderLayerSizes[decoderLayerSizes.length - 1] * nDistributionParams; INDArray pxzWeightView = @@ -402,7 +405,8 @@ public class VariationalAutoencoderParamInitializer extends DefaultParamInitiali } //Finally, p(x|z): - // FIXME: int cast + if (nIn > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); int nDistributionParams = layer.getOutputDistribution().distributionInputSize((int) nIn); int pxzWeightCount = decoderLayerSizes[decoderLayerSizes.length - 1] * nDistributionParams; INDArray pxzWeightView = diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/updater/BaseMultiLayerUpdater.java 
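A caller-side view of the MultiLayerNetwork changes above: predict(INDArray) still returns an int[] sized by the minibatch, so oversized inputs now fail fast instead of truncating. The wrapper below is ours, purely for illustration:

```java
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.api.ndarray.INDArray;

public class PredictSketch {
    /** One predicted class index per example row. */
    static int[] predictClasses(MultiLayerNetwork network, INDArray features) {
        // Since this change, features.size(0) > Integer.MAX_VALUE raises
        // ND4JArraySizeException rather than allocating a truncated result array.
        return network.predict(features);
    }
}
```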
b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/updater/BaseMultiLayerUpdater.java index 10195b59d..c991f41c9 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/updater/BaseMultiLayerUpdater.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/updater/BaseMultiLayerUpdater.java @@ -30,6 +30,7 @@ import org.nd4j.linalg.api.ops.CustomOp; import org.nd4j.linalg.api.ops.DynamicCustomOp; import org.nd4j.linalg.api.ops.impl.reduce.floating.Norm2; +import org.nd4j.linalg.exception.ND4JArraySizeException; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.indexing.NDArrayIndex; import org.deeplearning4j.nn.workspace.ArrayType; @@ -111,7 +112,8 @@ public abstract class BaseMultiLayerUpdater<T extends Model> implements Updater if (currentBlock == null || !UpdaterUtils.updaterConfigurationsEquals(lastLayer, lastVariable, layers[i], var)) { - // FIXME: int cast + if (paramsViewSoFar + paramSizeThisVariable > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); //Create a new block List<UpdaterBlock.ParamState> list = new ArrayList<>(); list.add(new UpdaterBlock.ParamState(layers[i], var, paramsViewSoFar, @@ -122,9 +124,11 @@ public abstract class BaseMultiLayerUpdater<T extends Model> implements Updater updaterBlocks.add(currentBlock); } else { - // FIXME: int cast + long newOffset = currentBlock.getParamOffsetEnd() + paramSizeThisVariable; + if (newOffset > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); //Add to existing updater block - currentBlock.setParamOffsetEnd((int) (currentBlock.getParamOffsetEnd() + paramSizeThisVariable)); + currentBlock.setParamOffsetEnd((int) newOffset); currentBlock.setUpdaterViewOffsetEnd( currentBlock.getUpdaterViewOffsetEnd() + updaterStateSizeThisVariable); currentBlock.getLayersAndVariablesInBlock() diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/Convolution1DUtils.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/Convolution1DUtils.java index 1e5ca2675..f0c8d76c9 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/Convolution1DUtils.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/Convolution1DUtils.java @@ -29,6 +29,7 @@ import org.nd4j.base.Preconditions; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.impl.broadcast.BroadcastCopyOp; import org.nd4j.linalg.api.shape.Shape; +import org.nd4j.linalg.exception.ND4JArraySizeException; import org.nd4j.linalg.factory.Nd4j; import java.util.Arrays; @@ -62,10 +63,9 @@ public class Convolution1DUtils { * @param dilation Kernel dilation * @return Output size (width) */ - public static int getOutputSize(int inH, int kernel, int strides, int padding, + public static long getOutputSize(long inH, int kernel, int strides, int padding, ConvolutionMode convolutionMode, int dilation) { - // FIXME: int cast - int eKernel = effectiveKernelSize(kernel, dilation); + long eKernel = effectiveKernelSize(kernel, dilation); if (convolutionMode == ConvolutionMode.Same) { return (long) Math.ceil(inH / ((double) strides)); } @@ -85,7 +85,8 @@ public class Convolution1DUtils { */ public static int getOutputSize(INDArray inputData, int kernel, int strides, int padding, ConvolutionMode convolutionMode, int dilation) { - // FIXME: int cast + if (inputData.size(2) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); int inH = (int) inputData.size(2); int eKernel =
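The updater-block hunk above now computes the candidate end offset in long before narrowing, so the overflow check tests the value that is actually stored. The same pattern as a standalone sketch (names simplified, not the BaseMultiLayerUpdater API):

```java
import org.nd4j.linalg.exception.ND4JArraySizeException;

public class OffsetSketch {
    /** Extends an int-typed end offset by a long-sized parameter count. */
    static int extendOffset(int currentEnd, long paramSize) {
        long newOffset = currentEnd + paramSize; // int operand widens to long here
        if (newOffset > Integer.MAX_VALUE)
            throw new ND4JArraySizeException();
        return (int) newOffset;                  // safe after the guard
    }
}
```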
effectiveKernelSize(kernel, dilation); boolean atrous = (eKernel == kernel); diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/Convolution3DUtils.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/Convolution3DUtils.java index 809ffde45..7d844db27 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/Convolution3DUtils.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/Convolution3DUtils.java @@ -61,15 +61,14 @@ public class Convolution3DUtils { ConvolutionMode convolutionMode, int[] dilation, boolean isNCDHW) { // NCDHW vs. NDHWC - int inD = (int) (isNCDHW ? inputData.size(2) : inputData.size(1)); - int inH = (int) (isNCDHW ? inputData.size(3) : inputData.size(2)); - int inW = (int) (isNCDHW ? inputData.size(4) : inputData.size(3)); + long inD = (isNCDHW ? inputData.size(2) : inputData.size(1)); + long inH = (isNCDHW ? inputData.size(3) : inputData.size(2)); + long inW = (isNCDHW ? inputData.size(4) : inputData.size(3)); int[] eKernel = effectiveKernelSize(kernel, dilation); boolean atrous = (eKernel == kernel); - // FIXME: int cast - val inShape = new int[]{inD, inH, inW}; + val inShape = new long[]{inD, inH, inW}; validateShapes(ArrayUtil.toInts(inputData.shape()), eKernel, strides, padding, convolutionMode, dilation, inShape, atrous); if (convolutionMode == ConvolutionMode.Same) { @@ -80,16 +79,16 @@ public class Convolution3DUtils { return new int[]{outD, outH, outW}; } - int outD = (inD - eKernel[0] + 2 * padding[0]) / strides[0] + 1; - int outH = (inH - eKernel[1] + 2 * padding[1]) / strides[1] + 1; - int outW = (inW - eKernel[2] + 2 * padding[2]) / strides[2] + 1; + int outD = ((int)inD - eKernel[0] + 2 * padding[0]) / strides[0] + 1; + int outH = ((int)inH - eKernel[1] + 2 * padding[1]) / strides[1] + 1; + int outW = ((int)inW - eKernel[2] + 2 * padding[2]) / strides[2] + 1; return new int[]{outD, outH, outW}; } private static void validateShapes(int[] inputDataShape, int[] eKernel, int[] strides, int[] padding, - ConvolutionMode convolutionMode, int[] dilation, int[] inShape, + ConvolutionMode convolutionMode, int[] dilation, long[] inShape, boolean atrous) { String[] dims = new String[]{"depth", "height", "width"}; diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/ConvolutionUtils.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/ConvolutionUtils.java index 4c1207d32..d5c8ee1f6 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/ConvolutionUtils.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/ConvolutionUtils.java @@ -36,6 +36,8 @@ import org.nd4j.linalg.api.ops.impl.broadcast.BroadcastCopyOp; import org.nd4j.linalg.api.ops.impl.layers.convolution.MaxPooling2D; import org.nd4j.linalg.api.ops.impl.layers.convolution.config.Pooling2DConfig; import org.nd4j.linalg.api.shape.Shape; +import org.nd4j.linalg.exception.ND4JArraySizeException; +import org.nd4j.linalg.factory.NDArrayFactory; import org.nd4j.linalg.factory.Nd4j; import java.util.Arrays; @@ -73,7 +75,8 @@ public class ConvolutionUtils { public static int[] getDeconvolutionOutputSize(INDArray inputData, int[] kernel, int[] strides, int[] padding, ConvolutionMode convolutionMode, int[] dilation) { - // FIXME: int cast + if (inputData.size(2) > Integer.MAX_VALUE || inputData.size(3) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); int hIn = (int) inputData.size(2); int 
wIn = (int) inputData.size(3); int[] eKernel = effectiveKernelSize(kernel, dilation); @@ -104,7 +107,8 @@ public class ConvolutionUtils { */ public static int[] getOutputSize(INDArray inputData, int[] kernel, int[] strides, int[] padding, ConvolutionMode convolutionMode, int[] dilation) { - // FIXME: int cast + if (inputData.size(2) > Integer.MAX_VALUE || inputData.size(3) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); int inH = (int) inputData.size(2); int inW = (int) inputData.size(3); @@ -499,7 +503,7 @@ public class ConvolutionUtils { } } - public static INDArray reshape2dTo4d(INDArray in2d, int[] toShape, LayerWorkspaceMgr workspaceMgr, ArrayType type){ + public static INDArray reshape2dTo4d(INDArray in2d, long[] toShape, LayerWorkspaceMgr workspaceMgr, ArrayType type){ if(in2d.rank() != 2) throw new IllegalArgumentException("Invalid input: expect NDArray with rank 2"); if (toShape.length != 4) @@ -513,7 +517,7 @@ public class ConvolutionUtils { return workspaceMgr.leverageTo(type, out.permute(0, 3, 1, 2)); } - public static INDArray reshape2dTo5d(Convolution3D.DataFormat format, INDArray in2d, int n, int d, int h, int w, int ch, LayerWorkspaceMgr workspaceMgr, ArrayType type){ + public static INDArray reshape2dTo5d(Convolution3D.DataFormat format, INDArray in2d, long n, long d, long h, long w, long ch, LayerWorkspaceMgr workspaceMgr, ArrayType type){ if(in2d.rank() != 2) throw new IllegalArgumentException("Invalid input: expect NDArray with rank 2"); @@ -580,14 +584,21 @@ public class ConvolutionUtils { int inW; int inDepth; - // FIXME: int cast if (inputType instanceof InputType.InputTypeConvolutional) { InputType.InputTypeConvolutional conv = (InputType.InputTypeConvolutional) inputType; + if (conv.getHeight() > Integer.MAX_VALUE || conv.getWidth() > Integer.MAX_VALUE || + conv.getChannels() > Integer.MAX_VALUE){ + throw new ND4JArraySizeException(); + } inH = (int) conv.getHeight(); inW = (int) conv.getWidth(); inDepth = (int) conv.getChannels(); } else if (inputType instanceof InputType.InputTypeConvolutionalFlat) { InputType.InputTypeConvolutionalFlat conv = (InputType.InputTypeConvolutionalFlat) inputType; + if (conv.getHeight() > Integer.MAX_VALUE || conv.getWidth() > Integer.MAX_VALUE || + conv.getDepth() > Integer.MAX_VALUE) { + throw new ND4JArraySizeException(); + } inH = (int) conv.getHeight(); inW = (int) conv.getWidth(); inDepth = (int) conv.getDepth(); diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/TimeSeriesUtils.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/TimeSeriesUtils.java index f356fab71..80383698b 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/TimeSeriesUtils.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/TimeSeriesUtils.java @@ -20,6 +20,7 @@ import lombok.val; import org.nd4j.base.Preconditions; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.shape.Shape; +import org.nd4j.linalg.exception.ND4JArraySizeException; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.indexing.BooleanIndexing; import org.nd4j.linalg.indexing.INDArrayIndex; @@ -193,7 +194,7 @@ public class TimeSeriesUtils { } - public static INDArray reshape2dTo3d(INDArray in, int miniBatchSize, LayerWorkspaceMgr workspaceMgr, ArrayType arrayType) { + public static INDArray reshape2dTo3d(INDArray in, long miniBatchSize, LayerWorkspaceMgr workspaceMgr, ArrayType arrayType) { if (in.rank() != 2) throw new 
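For reference, the quantity these getOutputSize variants compute in the non-Same branch is out = (in - effectiveKernel + 2*padding) / stride + 1, with effectiveKernel = kernel + (kernel - 1) * (dilation - 1), as visible in the Convolution3DUtils hunk above. A long-arithmetic sketch of that formula (ours, not the ConvolutionUtils signature):

```java
public class ConvSizeSketch {
    /** out = (in - effK + 2*pad) / stride + 1, with effK = k + (k-1)*(d-1). */
    static long outputSize(long in, int kernel, int stride, int padding, int dilation) {
        long effKernel = kernel + (long) (kernel - 1) * (dilation - 1);
        return (in - effKernel + 2L * padding) / stride + 1;
    }

    public static void main(String[] args) {
        System.out.println(outputSize(224, 3, 2, 1, 1)); // 112: stride-2 3x3 conv, pad 1
    }
}
```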
IllegalArgumentException("Invalid input: expect NDArray with rank 2"); //Based on: RnnToFeedForwardPreProcessor @@ -220,7 +221,6 @@ public class TimeSeriesUtils { in = in.dup('f'); } - // FIXME: int cast int[] idxs = new int[(int) in.size(2)]; int j=0; for( int i=idxs.length-1; i>=0; i--){ @@ -248,7 +248,8 @@ public class TimeSeriesUtils { in = workspaceMgr.dup(arrayType, in, 'f'); } - // FIXME: int cast + if (in.size(2) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); int[] idxs = new int[(int) in.size(2)]; int j=0; for( int i=idxs.length-1; i>=0; i--){ @@ -291,7 +292,8 @@ public class TimeSeriesUtils { + " with shape " + Arrays.toString(mask.shape())); } - // FIXME: int cast + if (mask.size(1) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); int[] idxs = new int[(int) mask.size(1)]; int j=0; for( int i=idxs.length-1; i>=0; i--){ @@ -319,7 +321,8 @@ public class TimeSeriesUtils { + " with shape " + Arrays.toString(mask.shape())); } - // FIXME: int cast + if (mask.size(1) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); int[] idxs = new int[(int) mask.size(1)]; int j=0; for( int i=idxs.length-1; i>=0; i--){ @@ -358,9 +361,8 @@ public class TimeSeriesUtils { INDArray out; if (mask == null) { - // FIXME: int cast //No mask array -> extract same (last) column for all - int lastTS = (int) pullFrom.size(2) - 1; + long lastTS = pullFrom.size(2) - 1; out = pullFrom.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(lastTS)); fwdPassTimeSteps = null; //Null -> last time step for all examples } else { @@ -396,9 +398,8 @@ public class TimeSeriesUtils { INDArray out; if (mask == null) { - // FIXME: int cast //No mask array -> extract same (last) column for all - int lastTS = (int) pullFrom.size(2) - 1; + long lastTS = pullFrom.size(2) - 1; out = pullFrom.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(lastTS)); fwdPassTimeSteps = null; //Null -> last time step for all examples } else { diff --git a/deeplearning4j/deeplearning4j-nn/src/test/java/org/deeplearning4j/TestUtils.java b/deeplearning4j/deeplearning4j-nn/src/test/java/org/deeplearning4j/TestUtils.java index 251849f3e..c60822ef7 100644 --- a/deeplearning4j/deeplearning4j-nn/src/test/java/org/deeplearning4j/TestUtils.java +++ b/deeplearning4j/deeplearning4j-nn/src/test/java/org/deeplearning4j/TestUtils.java @@ -116,7 +116,6 @@ public class TestUtils { public static INDArray randomOneHot(long examples, long nOut, Random rng){ INDArray arr = Nd4j.create(examples, nOut); for( int i=0; i> 16) % (int) negativeHolder.getTable().length()); target = negativeHolder.getTable().getInt(idx); diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SecondIterationFunction.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SecondIterationFunction.java index c34156484..e540c23f0 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SecondIterationFunction.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SecondIterationFunction.java @@ -222,7 +222,6 @@ public class SecondIterationFunction implements FlatMapFunction> 16) % negativeHolder.getTable().length()); target = negativeHolder.getTable().getInt(idx); diff --git 
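The TimeSeriesUtils reversal above builds an index array mapping step t to length-1-t; the added guard only ensures the step count fits an int before that array is allocated. The construction in isolation:

```java
import org.nd4j.linalg.exception.ND4JArraySizeException;

public class ReverseIndexSketch {
    static int[] reversedIndices(long timeSteps) {
        if (timeSteps > Integer.MAX_VALUE)
            throw new ND4JArraySizeException();
        int[] idxs = new int[(int) timeSteps];
        int j = 0;
        for (int i = idxs.length - 1; i >= 0; i--)
            idxs[j++] = i;               // idxs = [n-1, n-2, ..., 0]
        return idxs;
    }
}
```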
a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SentenceBatch.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SentenceBatch.java index 0d97f9bc4..78799b22b 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SentenceBatch.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SentenceBatch.java @@ -162,7 +162,6 @@ public class SentenceBatch implements Function label = 1; } else { nextRandom.set(nextRandom.get() * 25214903917L + 11); - // FIXME: int cast target = table.getInt((int) (nextRandom.get() >> 16) % (int) table.length()); if (target == 0) target = (int) nextRandom.get() % (numWords - 1) + 1; diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformer.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformer.java index 45eca7327..10e2a3050 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformer.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformer.java @@ -187,7 +187,6 @@ public class Word2VecPerformer implements VoidFunction, Ato } else { nextRandom.set(nextRandom.get() * 25214903917L + 11); - // FIXME: int cast target = table.getInt((int) (nextRandom.get() >> 16) % (int) table.length()); if (target == 0) target = (int) nextRandom.get() % (numWords - 1) + 1; diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformerVoid.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformerVoid.java index 539755ee6..0e346b622 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformerVoid.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformerVoid.java @@ -337,7 +337,6 @@ public class Word2VecPerformerVoid implements VoidFunction, label = 1; } else { nextRandom.set(nextRandom.get() * 25214903917L + 11); - // FIXME: int cast target = table.getInt((int) (nextRandom.get() >> 16) % (int) table.length()); if (target == 0) target = (int) nextRandom.get() % (numWords - 1) + 1; diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/api/stats/StatsCalculationHelper.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/api/stats/StatsCalculationHelper.java index d9b14651e..3476c5dd2 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/api/stats/StatsCalculationHelper.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/api/stats/StatsCalculationHelper.java @@ -39,7 +39,7 @@ public class StatsCalculationHelper 
{ private long initialModelAfter; private long lastDataSetBefore; private long lastProcessBefore; - private int totalExampleCount; + private long totalExampleCount; private List dataSetGetTimes = new ArrayList<>(); private List processMiniBatchTimes = new ArrayList<>(); @@ -65,7 +65,7 @@ public class StatsCalculationHelper { lastDataSetBefore = timeSource.currentTimeMillis(); } - public void logNextDataSetAfter(int numExamples) { + public void logNextDataSetAfter(long numExamples) { long now = timeSource.currentTimeMillis(); long duration = now - lastDataSetBefore; dataSetGetTimes.add(new BaseEventStats(lastDataSetBefore, duration)); diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/api/worker/ExecuteWorkerMultiDataSetFlatMap.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/api/worker/ExecuteWorkerMultiDataSetFlatMap.java index 6fa148394..15ce0eb32 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/api/worker/ExecuteWorkerMultiDataSetFlatMap.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/api/worker/ExecuteWorkerMultiDataSetFlatMap.java @@ -84,9 +84,8 @@ public class ExecuteWorkerMultiDataSetFlatMap implemen s.logNextDataSetBefore(); MultiDataSet next = batchedIterator.next(); - // FIXME: int cast if (stats) - s.logNextDataSetAfter((int) next.getFeatures(0).size(0)); + s.logNextDataSetAfter(next.getFeatures(0).size(0)); if (stats) { s.logProcessMinibatchBefore(); diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/GraphFeedForwardWithKeyFunction.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/GraphFeedForwardWithKeyFunction.java index cec2f5b17..6d730b60b 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/GraphFeedForwardWithKeyFunction.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/GraphFeedForwardWithKeyFunction.java @@ -71,7 +71,7 @@ public class GraphFeedForwardWithKeyFunction implements PairFlatMapFunction featuresList = new ArrayList<>(batchSize); List keyList = new ArrayList<>(batchSize); - List origSizeList = new ArrayList<>(); + List origSizeList = new ArrayList<>(); long[][] firstShapes = null; boolean sizesDiffer = false; @@ -96,8 +96,7 @@ public class GraphFeedForwardWithKeyFunction implements PairFlatMapFunction implements PairFlatMapFunction implements PairFlatMapFunction, Tuple2> { +public class ScoreFlatMapFunctionCGDataSet implements FlatMapFunction, Tuple2> { private static final Logger log = LoggerFactory.getLogger(ScoreFlatMapFunctionCGDataSet.class); private String json; private Broadcast params; @@ -50,9 +50,9 @@ public class ScoreFlatMapFunctionCGDataSet implements FlatMapFunction> call(Iterator dataSetIterator) throws Exception { + public Iterator> call(Iterator dataSetIterator) throws Exception { if (!dataSetIterator.hasNext()) { - return Collections.singletonList(new Tuple2<>(0, 0.0)).iterator(); + return Collections.singletonList(new Tuple2<>(0L, 0.0)).iterator(); } DataSetIterator iter = new IteratorDataSetIterator(dataSetIterator, minibatchSize); //Does batching where appropriate @@ -65,13 +65,12 @@ public class ScoreFlatMapFunctionCGDataSet 
implements FlatMapFunction> out = new ArrayList<>(); + List> out = new ArrayList<>(); while (iter.hasNext()) { DataSet ds = iter.next(); double score = network.score(ds, false); - // FIXME: int cast - int numExamples = (int) ds.getFeatures().size(0); + long numExamples = ds.getFeatures().size(0); out.add(new Tuple2<>(numExamples, score * numExamples)); } diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/ScoreFlatMapFunctionCGMultiDataSet.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/ScoreFlatMapFunctionCGMultiDataSet.java index bf9e3f596..f72fdbb34 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/ScoreFlatMapFunctionCGMultiDataSet.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/graph/scoring/ScoreFlatMapFunctionCGMultiDataSet.java @@ -35,7 +35,7 @@ import java.util.Iterator; import java.util.List; /** Function used to score a MultiDataSet using a given ComputationGraph */ -public class ScoreFlatMapFunctionCGMultiDataSet implements FlatMapFunction, Tuple2> { +public class ScoreFlatMapFunctionCGMultiDataSet implements FlatMapFunction, Tuple2> { private static final Logger log = LoggerFactory.getLogger(ScoreFlatMapFunctionCGMultiDataSet.class); private String json; @@ -50,9 +50,9 @@ public class ScoreFlatMapFunctionCGMultiDataSet implements FlatMapFunction> call(Iterator dataSetIterator) throws Exception { + public Iterator> call(Iterator dataSetIterator) throws Exception { if (!dataSetIterator.hasNext()) { - return Collections.singletonList(new Tuple2<>(0, 0.0)).iterator(); + return Collections.singletonList(new Tuple2<>(0L, 0.0)).iterator(); } MultiDataSetIterator iter = new IteratorMultiDataSetIterator(dataSetIterator, minibatchSize); //Does batching where appropriate @@ -66,13 +66,12 @@ public class ScoreFlatMapFunctionCGMultiDataSet implements FlatMapFunction> out = new ArrayList<>(); + List> out = new ArrayList<>(); while (iter.hasNext()) { MultiDataSet ds = iter.next(); double score = network.score(ds, false); - // FIXME: int cast - int numExamples = (int) ds.getFeatures(0).size(0); + long numExamples = ds.getFeatures(0).size(0); out.add(new Tuple2<>(numExamples, score * numExamples)); } diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/multilayer/scoring/FeedForwardWithKeyFunction.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/multilayer/scoring/FeedForwardWithKeyFunction.java index 03e4e55cf..0672b158a 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/multilayer/scoring/FeedForwardWithKeyFunction.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/multilayer/scoring/FeedForwardWithKeyFunction.java @@ -105,7 +105,6 @@ public class FeedForwardWithKeyFunction fMaskList.add(t2._2()._2()); keyList.add(t2._1()); - // FIXME: int cast origSizeList.add((int) t2._2()._1().size(0)); tupleCount++; } diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/multilayer/scoring/ScoreFlatMapFunction.java 
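Switching the per-minibatch pairs in these score functions from Tuple2 of Integer and Double to Tuple2 of Long and Double keeps example counts exact when the driver reduces them. A sketch of that reduction over invented partial results:

```java
import java.util.Arrays;
import java.util.List;
import scala.Tuple2;

public class ScoreReduceDemo {
    public static void main(String[] args) {
        List<Tuple2<Long, Double>> partials = Arrays.asList(
                new Tuple2<>(64L, 64 * 0.42),   // (numExamples, score * numExamples)
                new Tuple2<>(32L, 32 * 0.40));
        long count = 0;
        double weighted = 0.0;
        for (Tuple2<Long, Double> t : partials) {
            count += t._1();      // long counts cannot overflow at the int boundary
            weighted += t._2();
        }
        System.out.println(weighted / count); // overall score ~= 0.4133
    }
}
```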
b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/multilayer/scoring/ScoreFlatMapFunction.java index 8063ba8e3..98a2639ef 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/multilayer/scoring/ScoreFlatMapFunction.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/multilayer/scoring/ScoreFlatMapFunction.java @@ -64,7 +64,6 @@ public class ScoreFlatMapFunction implements FlatMapFunction, DataSet ds = iter.next(); double score = network.score(ds, false); - // FIXME: int cast val numExamples = (int) ds.getFeatures().size(0); out.add(new Tuple2<>(numExamples, score * numExamples)); } diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/paramavg/ParameterAveragingTrainingWorker.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/paramavg/ParameterAveragingTrainingWorker.java index 3896ae61d..71fcd1680 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/paramavg/ParameterAveragingTrainingWorker.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/impl/paramavg/ParameterAveragingTrainingWorker.java @@ -247,10 +247,8 @@ public class ParameterAveragingTrainingWorker extends BaseTrainingWorker implements DataSetIterator { protected Collection dataSetStreams; protected DataSetPreProcessor preprocessor; protected Iterator iter; - protected int totalOutcomes = -1; - protected int inputColumns = -1; + protected long totalOutcomes = -1; + protected long inputColumns = -1; protected int batch = -1; protected DataSet preloadedDataSet; protected int cursor = 0; @@ -112,7 +113,9 @@ public abstract class BaseDataSetIterator implements DataSetIterator { private void preloadDataSet() { preloadedDataSet = load(iter.next()); - // FIXME: int cast + if (preloadedDataSet.getLabels().size(1) > Integer.MAX_VALUE || + preloadedDataSet.getFeatures().size(1) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); totalOutcomes = (int) preloadedDataSet.getLabels().size(1); inputColumns = (int) preloadedDataSet.getFeatures().size(1); } diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/iterator/PathSparkDataSetIterator.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/iterator/PathSparkDataSetIterator.java index a8a91ed3d..992b13c38 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/iterator/PathSparkDataSetIterator.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/iterator/PathSparkDataSetIterator.java @@ -67,7 +67,6 @@ public class PathSparkDataSetIterator extends BaseDataSetIterator { ds = load(iter.next()); } - // FIXME: int cast totalOutcomes = ds.getLabels() == null ? 
0 : (int) ds.getLabels().size(1); //May be null for layerwise pretraining inputColumns = (int) ds.getFeatures().size(1); batch = ds.numExamples(); diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/iterator/PortableDataStreamDataSetIterator.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/iterator/PortableDataStreamDataSetIterator.java index 6285778a6..53af6aa21 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/iterator/PortableDataStreamDataSetIterator.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/iterator/PortableDataStreamDataSetIterator.java @@ -18,6 +18,7 @@ package org.deeplearning4j.spark.iterator; import org.apache.spark.input.PortableDataStream; import org.nd4j.linalg.dataset.DataSet; +import org.nd4j.linalg.exception.ND4JArraySizeException; import java.io.IOException; import java.io.InputStream; @@ -53,7 +54,9 @@ public class PortableDataStreamDataSetIterator extends BaseDataSetIterator Integer.MAX_VALUE || + ds.getFeatures().size(1) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); totalOutcomes = (int) ds.getLabels().size(1); inputColumns = (int) ds.getFeatures().size(1); batch = ds.numExamples(); diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/stats/ExampleCountEventStats.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/stats/ExampleCountEventStats.java index bd4bfac5a..a0792b659 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/stats/ExampleCountEventStats.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/stats/ExampleCountEventStats.java @@ -26,9 +26,9 @@ import lombok.Getter; public class ExampleCountEventStats extends BaseEventStats { @Getter - private final int totalExampleCount; + private final long totalExampleCount; - public ExampleCountEventStats(long startTime, long durationMs, int totalExampleCount) { + public ExampleCountEventStats(long startTime, long durationMs, long totalExampleCount) { super(startTime, durationMs); this.totalExampleCount = totalExampleCount; } diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/util/MLLibUtil.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/util/MLLibUtil.java index b501b834e..cfa081710 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/util/MLLibUtil.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/util/MLLibUtil.java @@ -31,6 +31,7 @@ import org.datavec.api.split.InputStreamInputSplit; import org.datavec.api.writable.Writable; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.dataset.DataSet; +import org.nd4j.linalg.exception.ND4JArraySizeException; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.util.FeatureUtil; import scala.Tuple2; @@ -122,7 +123,8 @@ public class MLLibUtil { if (!arr.isVector()) { throw new IllegalArgumentException("passed in array must be a vector"); } - // FIXME: int cast + if (arr.length() > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); double[] ret = new double[(int) arr.length()]; for 
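The Spark iterators above cache totalOutcomes and inputColumns from the first DataSet they load. The extraction step in isolation, with our columnCounts helper standing in for the iterator fields:

```java
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.exception.ND4JArraySizeException;

public class IteratorMetaSketch {
    /** Returns {inputColumns, totalOutcomes} for a freshly loaded DataSet. */
    static int[] columnCounts(DataSet ds) {
        long inputColumns = ds.getFeatures().size(1);
        long totalOutcomes = ds.getLabels() == null ? 0 : ds.getLabels().size(1);
        if (inputColumns > Integer.MAX_VALUE || totalOutcomes > Integer.MAX_VALUE)
            throw new ND4JArraySizeException();
        return new int[]{(int) inputColumns, (int) totalOutcomes};
    }
}
```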
(int i = 0; i < arr.length(); i++) { ret[i] = arr.getDouble(i); diff --git a/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/weights/ConvolutionalIterationListener.java b/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/weights/ConvolutionalIterationListener.java index 3fd23bbd3..934ccc110 100644 --- a/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/weights/ConvolutionalIterationListener.java +++ b/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/weights/ConvolutionalIterationListener.java @@ -33,6 +33,7 @@ import org.deeplearning4j.ui.api.UIServer; import org.deeplearning4j.ui.storage.mapdb.MapDBStatsStorage; import org.deeplearning4j.util.UIDProvider; import org.nd4j.linalg.api.ndarray.INDArray; +import org.nd4j.linalg.exception.ND4JArraySizeException; import org.nd4j.linalg.io.ClassPathResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -205,7 +206,8 @@ public class ConvolutionalIterationListener extends BaseTrainingListener { if(layers[i].type() == Layer.Type.CONVOLUTIONAL){ INDArray output = activations.get(i+1); //Offset by 1 - activations list includes input - // FIXME: int cast + if (output.shape()[0] - 1 > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); int sampleDim = output.shape()[0] == 1 ? 0 : rnd.nextInt((int) output.shape()[0] - 1) + 1; if (cnt == 0) { INDArray inputs = layers[i].input(); @@ -426,7 +428,8 @@ public class ConvolutionalIterationListener extends BaseTrainingListener { val height = (numRows * (tShape[1] + border + padding_col)) + padding_col + zoomPadding + zoomWidth; - // FIXME: int cast + if (height > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); BufferedImage outputImage = new BufferedImage(maxWidth, (int) height, BufferedImage.TYPE_BYTE_GRAY); Graphics2D graphics2D = outputImage.createGraphics(); @@ -571,7 +574,8 @@ public class ConvolutionalIterationListener extends BaseTrainingListener { */ graphics2D.setPaint(borderColor); - // FIXME: int cast + if (tad2D.shape()[0] > Integer.MAX_VALUE || tad2D.shape()[1] > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); graphics2D.drawRect(columnOffset, rowOffset, (int) tad2D.shape()[0], (int) tad2D.shape()[1]); diff --git a/deeplearning4j/deeplearning4j-zoo/src/main/java/org/deeplearning4j/zoo/util/BaseLabels.java b/deeplearning4j/deeplearning4j-zoo/src/main/java/org/deeplearning4j/zoo/util/BaseLabels.java index 771b5f461..d2e86fe94 100644 --- a/deeplearning4j/deeplearning4j-zoo/src/main/java/org/deeplearning4j/zoo/util/BaseLabels.java +++ b/deeplearning4j/deeplearning4j-zoo/src/main/java/org/deeplearning4j/zoo/util/BaseLabels.java @@ -90,9 +90,8 @@ public abstract class BaseLabels implements Labels { Preconditions.checkState(predictions.size(1) == labels.size(), "Invalid input array:" + " expected array with size(1) equal to numLabels (%s), got array with shape %s", labels.size(), predictions.shape()); - // FIXME: int cast - int rows = (int) predictions.size(0); - int cols = (int) predictions.size(1); + long rows = predictions.size(0); + long cols = predictions.size(1); if (predictions.isColumnVectorOrScalar()) { predictions = predictions.ravel(); rows = (int) predictions.size(0); diff --git a/deeplearning4j/dl4j-integration-tests/src/test/java/org/deeplearning4j/integration/TestUtils.java b/deeplearning4j/dl4j-integration-tests/src/test/java/org/deeplearning4j/integration/TestUtils.java index 
683914119..72e51cd92 100644 --- a/deeplearning4j/dl4j-integration-tests/src/test/java/org/deeplearning4j/integration/TestUtils.java +++ b/deeplearning4j/dl4j-integration-tests/src/test/java/org/deeplearning4j/integration/TestUtils.java @@ -116,7 +116,6 @@ public class TestUtils { public static INDArray randomOneHot(long examples, long nOut, Random rng){ INDArray arr = Nd4j.create(examples, nOut); for( int i=0; i(ptr)); } +template +static void _scatterUpdate(Nd4jPointer *extraPointers, int opCode, int numOfSubArrs, + void* hX, Nd4jLong* hXShapeInfo, Nd4jLong* hXOffsets, + void* dX, Nd4jLong* dXShapeInfo, Nd4jLong* dXOffsets, + void* hY, Nd4jLong* hYShapeInfo, Nd4jLong* hYOffsets, + void* dY, Nd4jLong* dYShapeInfo, Nd4jLong* dYOffsets, + void* vIindexes, Nd4jLong* hIndicesShapeInfo, void* dIindexes, Nd4jLong* dIndicesShapeInfo) { + + auto hIindexes = reinterpret_cast(vIindexes); + + int numThreads = omp_get_max_threads(); + + PRAGMA_OMP_PARALLEL_THREADS(numThreads) + { + for (int i = 0; i < numOfSubArrs; ++i) { + + int threadIndex = omp_get_thread_num(); + const auto xIndex = hIindexes[i]; + const bool isOwner = xIndex < numThreads ? threadIndex == xIndex : threadIndex == xIndex % numThreads; + + if (!isOwner) + continue; + + NDArray inSubArr( + reinterpret_cast(hX) + (hXOffsets[hIindexes[i]] * DataTypeUtils::sizeOf(hXShapeInfo)), + hXShapeInfo); + NDArray updSubArr(reinterpret_cast(hY) + (hYOffsets[i] * DataTypeUtils::sizeOf(hXShapeInfo)), + hYShapeInfo); + + if (inSubArr.lengthOf() != updSubArr.lengthOf()) { + continue; + } + + switch (opCode) { + case 0: + inSubArr.applyPairwiseTransform(pairwise::Add, &updSubArr, &inSubArr, nullptr); + break; + case 1: + inSubArr.applyPairwiseTransform(pairwise::Subtract, &updSubArr, &inSubArr, nullptr); + break; + case 2: + inSubArr.applyPairwiseTransform(pairwise::Multiply, &updSubArr, &inSubArr, nullptr); + break; + case 3: + inSubArr.applyPairwiseTransform(pairwise::Divide, &updSubArr, &inSubArr, nullptr); + break; + case 4: + inSubArr.applyPairwiseTransform(pairwise::ReverseSubtract, &updSubArr, &inSubArr, nullptr); + break; + case 5: + inSubArr.applyPairwiseTransform(pairwise::ReverseDivide, &updSubArr, &inSubArr, nullptr); + break; + case 6: + inSubArr.applyPairwiseTransform(pairwise::CopyPws, &updSubArr, &inSubArr, nullptr); + break; + default: + continue; + } + } + } + +} //////////////////////////////////////////////////////////////////////// void scatterUpdate(Nd4jPointer *extraPointers, int opCode, int numOfSubArrs, @@ -2774,60 +2836,11 @@ void scatterUpdate(Nd4jPointer *extraPointers, int opCode, int numOfSubArrs, void* dX, Nd4jLong* dXShapeInfo, Nd4jLong* dXOffsets, void* hY, Nd4jLong* hYShapeInfo, Nd4jLong* hYOffsets, void* dY, Nd4jLong* dYShapeInfo, Nd4jLong* dYOffsets, - int* hIindexes, int* dIindexes) { + void* hIindexes, Nd4jLong* hIndicesShapeInfo, void* dIindexes, Nd4jLong* dIndicesShapeInfo) { + auto iType = ArrayOptions::dataType(hIndicesShapeInfo); try { - - int numThreads = omp_get_max_threads(); - - PRAGMA_OMP_PARALLEL_THREADS(numThreads) - { - for (int i = 0; i < numOfSubArrs; ++i) { - - int threadIndex = omp_get_thread_num(); - const auto xIndex = hIindexes[i]; - const bool isOwner = xIndex < numThreads ? 
threadIndex == xIndex : threadIndex == xIndex % numThreads; - - if (!isOwner) - continue; - - NDArray inSubArr( - reinterpret_cast(hX) + (hXOffsets[hIindexes[i]] * DataTypeUtils::sizeOf(hXShapeInfo)), - hXShapeInfo); - NDArray updSubArr(reinterpret_cast(hY) + (hYOffsets[i] * DataTypeUtils::sizeOf(hXShapeInfo)), - hYShapeInfo); - - if (inSubArr.lengthOf() != updSubArr.lengthOf()) { - continue; - } - - switch (opCode) { - case 0: - inSubArr.applyPairwiseTransform(pairwise::Add, &updSubArr, &inSubArr, nullptr); - break; - case 1: - inSubArr.applyPairwiseTransform(pairwise::Subtract, &updSubArr, &inSubArr, nullptr); - break; - case 2: - inSubArr.applyPairwiseTransform(pairwise::Multiply, &updSubArr, &inSubArr, nullptr); - break; - case 3: - inSubArr.applyPairwiseTransform(pairwise::Divide, &updSubArr, &inSubArr, nullptr); - break; - case 4: - inSubArr.applyPairwiseTransform(pairwise::ReverseSubtract, &updSubArr, &inSubArr, nullptr); - break; - case 5: - inSubArr.applyPairwiseTransform(pairwise::ReverseDivide, &updSubArr, &inSubArr, nullptr); - break; - case 6: - inSubArr.applyPairwiseTransform(pairwise::CopyPws, &updSubArr, &inSubArr, nullptr); - break; - default: - continue; - } - } - } + BUILD_SINGLE_SELECTOR(iType, _scatterUpdate, (extraPointers, opCode, numOfSubArrs, hX, hXShapeInfo, hXOffsets, dX, dXShapeInfo, dXOffsets, hY, hYShapeInfo, hYOffsets, dY, dYShapeInfo, dYOffsets, hIindexes, hIndicesShapeInfo, dIindexes, dIndicesShapeInfo), INDEXING_TYPES); } catch (std::exception &e) { nd4j::LaunchContext::defaultContext()->errorReference()->setErrorCode(1); nd4j::LaunchContext::defaultContext()->errorReference()->setErrorMessage(e.what()); diff --git a/libnd4j/blas/cuda/NativeOps.cu b/libnd4j/blas/cuda/NativeOps.cu index dc9d37b03..5365fc660 100755 --- a/libnd4j/blas/cuda/NativeOps.cu +++ b/libnd4j/blas/cuda/NativeOps.cu @@ -3198,14 +3198,15 @@ void deleteUtf8String(Nd4jPointer *extraPointers, Nd4jPointer ptr) { } /////////////////////////////////////////////////////////////////// -template +template __global__ static void scatterUpdateCuda(const int opCode, const int numOfSubArrs, void* vx, const Nd4jLong *xShapeInfo, const Nd4jLong *xOffsets, void* vy, const Nd4jLong *yShapeInfo, const Nd4jLong *yOffsets, - const int* indexes) { + const void* vindexes) { __shared__ T *x, *y; __shared__ Nd4jLong arrLenX, arrLenY; + auto indexes = reinterpret_cast(vindexes); for (int e = 0; e < numOfSubArrs; e++ ) { @@ -3261,10 +3262,10 @@ __global__ static void scatterUpdateCuda(const int opCode, const int numOfSubArr } } -template -__host__ static void scatterUpdateCudaLauncher(const cudaStream_t* stream, const int opCode, const int numOfSubArrs, void* vx, const Nd4jLong *xShapeInfo, const Nd4jLong *xOffsets, void* vy, const Nd4jLong *yShapeInfo, const Nd4jLong *yOffsets, const int* indexes) { +template +__host__ static void scatterUpdateCudaLauncher(const cudaStream_t* stream, const int opCode, const int numOfSubArrs, void* vx, const Nd4jLong *xShapeInfo, const Nd4jLong *xOffsets, void* vy, const Nd4jLong *yShapeInfo, const Nd4jLong *yOffsets, const void* indexes) { - scatterUpdateCuda<<<512, 256, MAX_NUM_THREADS, *stream>>>(opCode, numOfSubArrs, vx, xShapeInfo, xOffsets, vy, yShapeInfo, yOffsets, indexes); + scatterUpdateCuda<<<512, 256, MAX_NUM_THREADS, *stream>>>(opCode, numOfSubArrs, vx, xShapeInfo, xOffsets, vy, yShapeInfo, yOffsets, indexes); } @@ -3274,15 +3275,17 @@ void scatterUpdate(Nd4jPointer *extraPointers, int opCode, int numOfSubArrs, void* dX, Nd4jLong* dXShapeInfo, Nd4jLong* dXOffsets, 
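The native scatterUpdate being reworked here dispatches on the indices' data type as well as the array type, but its per-sub-array semantics are unchanged: x[idx[i]] op= y[i]. A plain-Java model of opCodes 0 through 3, purely illustrative of those semantics rather than the libnd4j implementation:

```java
public class ScatterUpdateModel {
    /** Applies x[indices[i]] op= y[i] element-wise, mirroring the native length check. */
    static void apply(int opCode, double[][] x, double[][] y, long[] indices) {
        for (int i = 0; i < indices.length; i++) {
            double[] in = x[(int) indices[i]]; // natively, indices may be any INDEXING_TYPE
            double[] upd = y[i];
            if (in.length != upd.length)
                continue;                      // native code skips mismatched sub-arrays
            for (int j = 0; j < in.length; j++) {
                switch (opCode) {
                    case 0: in[j] += upd[j]; break; // pairwise::Add
                    case 1: in[j] -= upd[j]; break; // pairwise::Subtract
                    case 2: in[j] *= upd[j]; break; // pairwise::Multiply
                    case 3: in[j] /= upd[j]; break; // pairwise::Divide
                }
            }
        }
    }
}
```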
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/functions/DifferentialFunctionFactory.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/functions/DifferentialFunctionFactory.java index f8b908449..0bc395803 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/functions/DifferentialFunctionFactory.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/functions/DifferentialFunctionFactory.java @@ -2619,7 +2619,6 @@ public class DifferentialFunctionFactory { validateDifferentialFunctionsameDiff(func); validateDifferentialFunctionsameDiff(input); - // FIXME: int cast! return tile(func, ArrayUtil.toInts(input.getShape())); } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/serde/LegacyOpMapper.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/serde/LegacyOpMapper.java index eeb6b1b78..f76c42c50 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/serde/LegacyOpMapper.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/serde/LegacyOpMapper.java @@ -133,16 +133,9 @@ public class LegacyOpMapper { public static Class<?> aggregateOpClass(int opNum) { switch (opNum) { - case 0: - return HierarchicSoftmax.class; - case 1: - return AggregateDot.class; + case 2: return AggregateAxpy.class; - case 3: - return AggregateSkipGram.class; - case 4: - return AggregateCBOW.class; case 5: return AggregateGEMM.class; default: diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/evaluation/classification/EvaluationCalibration.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/evaluation/classification/EvaluationCalibration.java index 769fcf109..3de796d93 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/evaluation/classification/EvaluationCalibration.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/evaluation/classification/EvaluationCalibration.java @@ -368,7 +368,6 @@ public class EvaluationCalibration extends BaseEvaluation<EvaluationCalibration> return -1; } - // FIXME: int cast return (int) rDiagBinTotalCount.size(1); } @@ -394,7 +393,6 @@ public class EvaluationCalibration extends BaseEvaluation<EvaluationCalibration> double[] mpb = meanPredictionBins; double[] fp = fracPositives; - // FIXME: int cast meanPredictionBins = new double[(int) (totalCountBins.length() - numZeroBins)]; fracPositives = new double[meanPredictionBins.length]; int j = 0;
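The Java hunks in this change retire the // FIXME: int cast markers in one of two ways: either the surrounding API is widened to long (as in BlasBufferUtil below), or an explicit range check throws ND4JArraySizeException before the narrowing cast. The guard pattern in isolation, as a minimal sketch (checkedIntCast and toFloatArray are illustrative names, not part of the patch):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.exception.ND4JArraySizeException;

public class SafeNarrowing {
    // Narrow a long to int, failing loudly instead of silently truncating.
    public static int checkedIntCast(long value) {
        if (value > Integer.MAX_VALUE)
            throw new ND4JArraySizeException();
        return (int) value;
    }

    // Typical call site: a Java array size must be an int, but INDArray
    // lengths are long, so guard before allocating.
    public static float[] toFloatArray(INDArray arr) {
        float[] out = new float[checkedIntCast(arr.length())];
        for (int i = 0; i < out.length; i++)
            out[i] = arr.getFloat(i);
        return out;
    }
}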
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/evaluation/classification/ROCBinary.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/evaluation/classification/ROCBinary.java index 3695a692b..a4d54e3c7 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/evaluation/classification/ROCBinary.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/evaluation/classification/ROCBinary.java @@ -154,7 +154,6 @@ public class ROCBinary extends BaseEvaluation<ROCBinary> { if(labels2d.dataType() != predictions2d.dataType()) labels2d = labels2d.castTo(predictions2d.dataType()); - // FIXME: int cast int n = (int) labels2d.size(1); if (underlying == null) { underlying = new ROC[n]; diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/evaluation/classification/ROCMultiClass.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/evaluation/classification/ROCMultiClass.java index 07266399a..a2a1ed16e 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/evaluation/classification/ROCMultiClass.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/evaluation/classification/ROCMultiClass.java @@ -208,7 +208,6 @@ public class ROCMultiClass extends BaseEvaluation<ROCMultiClass> { if(labels2d.dataType() != predictions2d.dataType()) labels2d = labels2d.castTo(predictions2d.dataType()); - // FIXME: int cast int n = (int) labels2d.size(1); if (underlying == null) { underlying = new ROC[n]; diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/evaluation/regression/RegressionEvaluation.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/evaluation/regression/RegressionEvaluation.java index e1a0d1f82..cc206f0df 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/evaluation/regression/RegressionEvaluation.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/evaluation/regression/RegressionEvaluation.java @@ -216,7 +216,6 @@ public class RegressionEvaluation extends BaseEvaluation<RegressionEvaluation> { } private static List<String> createDefaultColumnNames(long nColumns) { - // FIXME: int cast List<String> list = new ArrayList<>((int) nColumns); for (int i = 0; i < nColumns; i++) list.add("col_" + i); @@ -244,7 +243,6 @@ public class RegressionEvaluation extends BaseEvaluation<RegressionEvaluation> { labels = labels.castTo(predictions.dataType()); if (!initialized) { - // FIXME: int cast initialize((int) labels.size(1)); } //References for the calculations is this section: @@ -394,7 +392,6 @@ public class RegressionEvaluation extends BaseEvaluation<RegressionEvaluation> { if (exampleCountPerColumn == null) { return 0; } - // FIXME: int cast return (int) exampleCountPerColumn.size(1); } return columnNames.size(); diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/blas/BlasBufferUtil.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/blas/BlasBufferUtil.java index 54a5f602f..ff6f852ce 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/blas/BlasBufferUtil.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/blas/BlasBufferUtil.java @@ -34,9 +34,8 @@ public class BlasBufferUtil { * @param arr the array * @return the blas stride */ - public static int getBlasOffset(INDArray arr) { - // FIXME: LONG - return (int) arr.offset(); + public static long getBlasOffset(INDArray arr) 
{ + return arr.offset(); } /** @@ -142,16 +141,15 @@ public class BlasBufferUtil { * @param defaultRows * @return */ - public static int getDimension(INDArray arr, boolean defaultRows) { - // FIXME: int cast + public static long getDimension(INDArray arr, boolean defaultRows) { //ignore ordering for vectors if (arr.isVector()) { - return defaultRows ? (int) arr.rows() : (int) arr.columns(); + return defaultRows ? arr.rows() : arr.columns(); } if (arr.ordering() == NDArrayFactory.C) - return defaultRows ? (int) arr.columns() : (int) arr.rows(); - return defaultRows ? (int) arr.rows() : (int) arr.columns(); + return defaultRows ? arr.columns() : arr.rows(); + return defaultRows ? arr.rows() : arr.columns(); } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLapack.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLapack.java index f7cd3a599..3a1a70e1a 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLapack.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLapack.java @@ -36,7 +36,6 @@ public abstract class BaseLapack implements Lapack { @Override public INDArray getrf(INDArray A) { - // FIXME: int cast if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE) throw new ND4JArraySizeException(); @@ -88,7 +87,6 @@ public abstract class BaseLapack implements Lapack { @Override public void potrf(INDArray A, boolean lower) { - // FIXME: int cast if (A.columns() > Integer.MAX_VALUE) throw new ND4JArraySizeException(); @@ -134,7 +132,6 @@ public abstract class BaseLapack implements Lapack { @Override public void geqrf(INDArray A, INDArray R) { - // FIXME: int cast if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE) throw new ND4JArraySizeException(); @@ -188,7 +185,6 @@ public abstract class BaseLapack implements Lapack { throw new Error("syev: V must be the length of the matrix dimension."); } - // FIXME: int cast if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE) throw new ND4JArraySizeException(); @@ -222,7 +218,6 @@ public abstract class BaseLapack implements Lapack { @Override public void gesvd(INDArray A, INDArray S, INDArray U, INDArray VT) { - // FIXME: int cast if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE) throw new ND4JArraySizeException(); @@ -279,7 +274,6 @@ public abstract class BaseLapack implements Lapack { */ @Override public INDArray getLFactor(INDArray A) { - // FIXME: int cast if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE) throw new ND4JArraySizeException(); @@ -304,7 +298,6 @@ public abstract class BaseLapack implements Lapack { @Override public INDArray getUFactor(INDArray A) { - // FIXME: int cast if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE) throw new ND4JArraySizeException(); diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLevel2.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLevel2.java index 6114c52d5..34b89824e 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLevel2.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLevel2.java @@ -17,6 +17,7 @@ package org.nd4j.linalg.api.blas.impl; import lombok.val; +import 
org.nd4j.base.Preconditions; import org.nd4j.linalg.api.blas.Level2; import org.nd4j.linalg.api.blas.params.GemvParameters; import org.nd4j.linalg.api.buffer.DataBuffer; @@ -25,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.executioner.DefaultOpExecutioner; import org.nd4j.linalg.api.ops.executioner.OpExecutioner; import org.nd4j.linalg.api.ops.executioner.OpExecutionerUtil; +import org.nd4j.linalg.exception.ND4JArraySizeException; import org.nd4j.linalg.exception.ND4JIllegalStateException; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.profiler.OpProfiler; @@ -113,10 +115,10 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, X, Y); - // FIXME: int cast - if (A.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, X, Y); + if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE || A.size(0) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); dgbmv(order, TransA, (int) A.rows(), (int) A.columns(), KL, KU, alpha, A, (int) A.size(0), X, X.stride(-1), beta, Y, Y.stride(-1)); } else { @@ -142,10 +144,10 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, X, Y); - // FIXME: int cast - if (X.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, X, Y); + if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE || A.size(0) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); dger(order, (int) A.rows(), (int) A.columns(), alpha, X, X.stride(-1), Y, Y.stride(-1), A, (int) A.size(0)); } else { DefaultOpExecutioner.validateDataType(DataType.FLOAT, A, X, Y); @@ -173,12 +175,13 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, X, Y); - // FIXME: int cast - + if (X.length() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE || A.size(0) > Integer.MAX_VALUE) { + throw new ND4JArraySizeException(); + } if (X.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, X, Y); dsbmv(order, Uplo, (int) X.length(), (int) A.columns(), alpha, A, (int) A.size(0), X, X.stride(-1), beta, Y, - (int) Y.stride(-1)); + Y.stride(-1)); } else { DefaultOpExecutioner.validateDataType(DataType.FLOAT, A, X, Y); ssbmv(order, Uplo, (int) X.length(), (int) A.columns(), (float) alpha, A, (int) A.size(0), X, X.stride(-1), (float) beta, @@ -202,7 +205,9 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, Ap, X, Y); - // FIXME: int cast + if (X.length() > Integer.MAX_VALUE) { + throw new ND4JArraySizeException(); + } if (Ap.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, X, Y); @@ -231,7 +236,8 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { OpProfiler.getInstance().processBlasCall(false, Ap, X); - // FIXME: int cast + if (X.length() > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); if (X.data().dataType() == DataType.DOUBLE) { 
DefaultOpExecutioner.validateDataType(DataType.DOUBLE, X); @@ -260,7 +266,8 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, X, Y); - // FIXME int cast + if (X.length() > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); if (X.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, X, Y); @@ -291,7 +298,8 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, X, Y); - // FIXME: int cast + if (X.length() > Integer.MAX_VALUE || A.size(0) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); if (X.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, X, Y); @@ -321,7 +329,8 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, X); - // FIXME: int cast + if (X.length() > Integer.MAX_VALUE || A.size(0) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); if (X.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, X); @@ -347,7 +356,8 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, X, Y); - // FIXME: int cast + if (X.length() > Integer.MAX_VALUE || A.size(0) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); if (X.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, X, Y); @@ -376,7 +386,9 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, X); - // FIXME: int cast + if (X.length() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE || A.size(0) > Integer.MAX_VALUE) { + throw new ND4JArraySizeException(); + } if (X.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, X); @@ -402,7 +414,9 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, X); - // FIXME: int cast + if (X.length() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE || A.size(0) > Integer.MAX_VALUE ) { + throw new ND4JArraySizeException(); + } if (X.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, X); @@ -429,7 +443,8 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, Ap, X); - // FIXME: int cast + if (Ap.length() > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); if (X.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, X); @@ -457,7 +472,8 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) 
OpProfiler.getInstance().processBlasCall(false, Ap, X); - // FIXME: int cast + if (X.length() > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); if (X.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, X, Ap); @@ -485,7 +501,8 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, X); - // FIXME: int cast + if (X.length() > Integer.MAX_VALUE || A.size(0) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); if (A.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, X); @@ -513,7 +530,8 @@ public abstract class BaseLevel2 extends BaseLevel implements Level2 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, X); - // FIXME: int cast + if (A.length() > Integer.MAX_VALUE || A.size(0) > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); if (X.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, X); diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLevel3.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLevel3.java index 3c015c5dc..e08c4c0a9 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLevel3.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/blas/impl/BaseLevel3.java @@ -25,6 +25,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.executioner.DefaultOpExecutioner; import org.nd4j.linalg.api.ops.executioner.OpExecutioner; import org.nd4j.linalg.api.ops.executioner.OpExecutionerUtil; +import org.nd4j.linalg.exception.ND4JArraySizeException; import org.nd4j.linalg.factory.NDArrayFactory; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.profiler.OpProfiler; @@ -129,7 +130,10 @@ public abstract class BaseLevel3 extends BaseLevel implements Level3 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, B, C); - // FIXME: int cast + if (C.rows() > Integer.MAX_VALUE || C.columns() > Integer.MAX_VALUE || + A.size(0) > Integer.MAX_VALUE || B.size(0) > Integer.MAX_VALUE || C.size(0) > Integer.MAX_VALUE) { + throw new ND4JArraySizeException(); + } if (A.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, B, C); @@ -163,7 +167,11 @@ public abstract class BaseLevel3 extends BaseLevel implements Level3 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, C); - // FIXME: int cast + if (C.rows() > Integer.MAX_VALUE || + A.size(0) > Integer.MAX_VALUE || + C.size(0) > Integer.MAX_VALUE) { + throw new ND4JArraySizeException(); + } if (A.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, C); @@ -198,7 +206,10 @@ public abstract class BaseLevel3 extends BaseLevel implements Level3 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, B, C); - // FIXME: int cast + if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE || + A.size(0) > Integer.MAX_VALUE || 
B.size(0) > Integer.MAX_VALUE || C.size(0) > Integer.MAX_VALUE) { + throw new ND4JArraySizeException(); + } if (A.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, B, C); @@ -234,7 +245,10 @@ public abstract class BaseLevel3 extends BaseLevel implements Level3 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, B, C); - // FIXME: int cast + if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE || + A.size(0) > Integer.MAX_VALUE || B.size(0) > Integer.MAX_VALUE) { + throw new ND4JArraySizeException(); + } if (A.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, B, C); @@ -269,7 +283,10 @@ public abstract class BaseLevel3 extends BaseLevel implements Level3 { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, B); - // FIXME: int cast + if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE || + A.size(0) > Integer.MAX_VALUE || B.size(0) > Integer.MAX_VALUE) { + throw new ND4JArraySizeException(); + } if (A.data().dataType() == DataType.DOUBLE) { DefaultOpExecutioner.validateDataType(DataType.DOUBLE, A, B); diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ndarray/BaseNDArray.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ndarray/BaseNDArray.java index a4214a694..b7db4c40b 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ndarray/BaseNDArray.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ndarray/BaseNDArray.java @@ -1071,7 +1071,8 @@ public abstract class BaseNDArray implements INDArray, Iterable { long offset = index * tensorLength / NDArrayMath.lengthPerSlice(ret2); if (sliceIdx == 0 && length == NDArrayMath.lengthPerSlice(ret2)) { - // FIXME: LONG + if (offset > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); ret2 = ret2.slice((int) offset); if (dimension.length == 1 && ret2.isRowVectorOrScalar()) return ret2; @@ -1081,7 +1082,8 @@ public abstract class BaseNDArray implements INDArray, Iterable { else if (length == NDArrayMath.lengthPerSlice(ret2)) { offset -= ret2.slices() * (offset / ret2.slices()); - // FIXME: LONG + if (offset > Integer.MAX_VALUE) + throw new ND4JArraySizeException(); ret2 = ret2.slice((int) offset); if (dimension.length == 1 && ret2.isRowVectorOrScalar()) return ret2; @@ -3525,16 +3527,21 @@ public abstract class BaseNDArray implements INDArray, Iterable { } @Override - public INDArray repmat(int[] shape) { + public INDArray repmat(long[] shape) { Nd4j.getCompressor().autoDecompress(this); - - long rows = rows() * shape[0]; long cols = columns() * shape[1]; INDArray ret = reshape(1, length()).repeat(0, shape[0]).reshape(rows, columns()).repeat(0, shape[1]); return ret.reshape(rows, cols); } + @Deprecated + @Override + public INDArray repmat(int[] shape) { + long[] longShape = ArrayUtil.toLongArray(shape); + return repmat(longShape); + } + @Override public INDArray repeat(int dimension, long... repeats) { Nd4j.getCompressor().autoDecompress(this); @@ -3672,9 +3679,9 @@ public abstract class BaseNDArray implements INDArray, Iterable { return Nd4j.create(data, shape, strides, 0, ordering()); } + @Deprecated @Override public INDArray reshape(char order, int... 
newShape) { - // FIXME: int cast return reshape(order, ArrayUtil.toLongArray(newShape)); } @@ -3976,7 +3983,6 @@ public abstract class BaseNDArray implements INDArray, Iterable { @Override public int columns() { - // FIXME: int cast if (isMatrix()) return (int) size(1); else if (Shape.isColumnVectorShape(shape())) { @@ -3991,7 +3997,6 @@ public abstract class BaseNDArray implements INDArray, Iterable { @Override public int rows() { - // FIXME: if (isMatrix()) return (int) size(0); else if (Shape.isRowVectorShape(shape())) { @@ -4573,7 +4578,6 @@ public abstract class BaseNDArray implements INDArray, Iterable { } else { - // FIXME: int cast int[] repeat = new int[shape.length]; for(int i = 0; i < shape.length; i++) { if(i < rank()) { @@ -4603,9 +4607,9 @@ public abstract class BaseNDArray implements INDArray, Iterable { return broadcast(Nd4j.createUninitialized(this.dataType(), shape, this.ordering())); } + @Deprecated @Override public INDArray dimShuffle(Object[] rearrange, int[] newOrder, boolean[] broadCastable) { - // FIXME: int cast return dimShuffle(rearrange, ArrayUtil.toLongArray(newOrder), broadCastable); } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ndarray/INDArray.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ndarray/INDArray.java index 221b4021b..de80e9413 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ndarray/INDArray.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ndarray/INDArray.java @@ -871,6 +871,9 @@ public interface INDArray extends Serializable, AutoCloseable { * @param shape the new shape of this ndarray * @return the shape to fill out to */ + INDArray repmat(long... shape); + + @Deprecated INDArray repmat(int... 
shape); /** diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseBroadcastBoolOp.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseBroadcastBoolOp.java index 4a2e66037..f6148ed7c 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseBroadcastBoolOp.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseBroadcastBoolOp.java @@ -100,16 +100,6 @@ public abstract class BaseBroadcastBoolOp extends BaseOp implements BroadcastOp this(sameDiff, i_v, i_v.getShape(), inPlace, dimension, null); } - public BaseBroadcastBoolOp(SameDiff sameDiff, - SDVariable i_v, - int[] shape, - boolean inPlace, - int[] dimension, - Object[] extraArgs) { - // FIXME: int cast - this(sameDiff, i_v, ArrayUtil.toLongArray(shape), inPlace, dimension, extraArgs); - } - public BaseBroadcastBoolOp(SameDiff sameDiff, SDVariable i_v, long[] shape, diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseBroadcastOp.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseBroadcastOp.java index 6e8b1afa4..9de4d3fbd 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseBroadcastOp.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseBroadcastOp.java @@ -100,16 +100,6 @@ public abstract class BaseBroadcastOp extends BaseOp implements BroadcastOp { this(sameDiff, i_v, i_v.getShape(), inPlace, dimension, null); } - public BaseBroadcastOp(SameDiff sameDiff, - SDVariable i_v, - int[] shape, - boolean inPlace, - int[] dimension, - Object[] extraArgs) { - // FIXME: int cast - this(sameDiff, i_v, ArrayUtil.toLongArray(shape), inPlace, dimension, extraArgs); - } - public BaseBroadcastOp(SameDiff sameDiff, SDVariable i_v, long[] shape, diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformAnyOp.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformAnyOp.java index 046e296e5..8efba0fdf 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformAnyOp.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformAnyOp.java @@ -49,10 +49,6 @@ public abstract class BaseTransformAnyOp extends BaseTransformOp implements Tran super(sameDiff, i_v1, i_v2, extraArgs); } - public BaseTransformAnyOp(SameDiff sameDiff, SDVariable i_v, int[] shape, boolean inPlace, Object[] extraArgs) { - super(sameDiff, i_v, shape, inPlace, extraArgs); - } - public BaseTransformAnyOp(SameDiff sameDiff, SDVariable i_v, boolean inPlace) { super(sameDiff, i_v, inPlace); } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformBoolOp.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformBoolOp.java index 68a06f61c..df0e04d5c 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformBoolOp.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformBoolOp.java @@ -40,10 +40,6 @@ public abstract class BaseTransformBoolOp extends BaseTransformOp implements Tra super(sameDiff, i_v1, i_v2, inPlace); } - public BaseTransformBoolOp(SameDiff sameDiff, 
SDVariable i_v, int[] shape, boolean inPlace, Object[] extraArgs) { - super(sameDiff, i_v, shape, inPlace, extraArgs); - } - public BaseTransformBoolOp(SameDiff sameDiff, SDVariable i_v, boolean inPlace) { super(sameDiff, i_v, inPlace); } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformFloatOp.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformFloatOp.java index fc778af90..ee97a4bba 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformFloatOp.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformFloatOp.java @@ -33,10 +33,6 @@ import java.util.List; public abstract class BaseTransformFloatOp extends BaseTransformOp implements TransformFloatOp { - public BaseTransformFloatOp(SameDiff sameDiff, SDVariable i_v, int[] shape, boolean inPlace, Object[] extraArgs) { - super(sameDiff, i_v, shape, inPlace, extraArgs); - } - public BaseTransformFloatOp(SameDiff sameDiff, SDVariable i_v, boolean inPlace) { super(sameDiff, i_v, inPlace); } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformOp.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformOp.java index 274dfdf72..4e498edeb 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformOp.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformOp.java @@ -96,15 +96,6 @@ public abstract class BaseTransformOp extends BaseOp implements TransformOp { this(sameDiff,i_v,i_v.getShape(),inPlace,null); } - public BaseTransformOp(SameDiff sameDiff, - SDVariable i_v, - int[] shape, - boolean inPlace, - Object[] extraArgs) { - // FIXME: int cast ! 
- this(sameDiff, i_v, ArrayUtil.toLongArray(shape), inPlace, extraArgs); - } - public BaseTransformOp(SameDiff sameDiff, SDVariable i_v, long[] shape, diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformSameOp.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformSameOp.java index 7fc34d0e5..b04c24c8c 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformSameOp.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformSameOp.java @@ -44,10 +44,6 @@ public abstract class BaseTransformSameOp extends BaseTransformOp implements Tra super(sameDiff, i_v1, i_v2, extraArgs); } - public BaseTransformSameOp(SameDiff sameDiff, SDVariable i_v, int[] shape, boolean inPlace, Object[] extraArgs) { - super(sameDiff, i_v, shape, inPlace, extraArgs); - } - public BaseTransformSameOp(SameDiff sameDiff, SDVariable i_v, boolean inPlace) { super(sameDiff, i_v, inPlace); } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformStrictOp.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformStrictOp.java index ff40ebae4..ff89e49ba 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformStrictOp.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseTransformStrictOp.java @@ -41,10 +41,6 @@ public abstract class BaseTransformStrictOp extends BaseTransformOp implements T super(sameDiff, i_v1, i_v2, inPlace); } - public BaseTransformStrictOp(SameDiff sameDiff, SDVariable i_v, int[] shape, boolean inPlace, Object[] extraArgs) { - super(sameDiff, i_v, shape, inPlace, extraArgs); - } - public BaseTransformStrictOp(SameDiff sameDiff, SDVariable i_v, boolean inPlace) { super(sameDiff, i_v, inPlace); } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/AggregateCBOW.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/AggregateCBOW.java deleted file mode 100644 index a9d327a35..000000000 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/AggregateCBOW.java +++ /dev/null @@ -1,172 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.nd4j.linalg.api.ops.aggregates.impl; - -import lombok.NonNull; -import org.nd4j.linalg.api.ndarray.INDArray; -import org.nd4j.linalg.api.ops.aggregates.BaseAggregate; -import org.nd4j.linalg.factory.Nd4j; - -/** - * @author raver119@gmail.com - */ -@Deprecated -public class AggregateCBOW extends BaseAggregate { - private int vectorLength; - - /** - * Optional constructor for ParagraphVectors PV-DM implementation - * - * @param syn0 - * @param syn1 - * @param syn1Neg - * @param expTable - * @param negTable - * @param wordIdx - * @param idxSyn0 - * @param idxSyn1 - * @param codes - * @param negativeRounds - * @param ngStarter - * @param vectorLength - * @param alpha - * @param nextRandom - * @param vocabSize - * @param numLabels - * @param trainWords - */ - public AggregateCBOW(@NonNull INDArray syn0, INDArray syn1, INDArray syn1Neg, @NonNull INDArray expTable, - INDArray negTable, int wordIdx, int[] idxSyn0, int[] idxSyn1, int[] codes, int negativeRounds, - int ngStarter, int vectorLength, double alpha, long nextRandom, int vocabSize, int numLabels, - boolean trainWords, INDArray inferenceVector) { - this(syn0, syn1, syn1Neg, expTable, negTable, wordIdx, idxSyn0, idxSyn1, codes, negativeRounds, ngStarter, - vectorLength, alpha, nextRandom, vocabSize); - - indexingArguments.set(9, numLabels); - indexingArguments.set(10, trainWords ? 1 : 0); - indexingArguments.set(11, inferenceVector == null ? 0 : 1); // set inference to true - - arguments.set(5, inferenceVector); - } - - /** - * Default constructor for CBOW implementation wrapper - * @param syn0 - * @param syn1 - * @param syn1Neg - * @param expTable - * @param negTable - * @param wordIdx - * @param idxSyn0 - * @param idxSyn1 - * @param codes - * @param negativeRounds - * @param ngStarter - * @param vectorLength - * @param alpha - * @param nextRandom - * @param vocabSize - */ - public AggregateCBOW(@NonNull INDArray syn0, INDArray syn1, INDArray syn1Neg, @NonNull INDArray expTable, - INDArray negTable, int wordIdx, int[] idxSyn0, int[] idxSyn1, int[] codes, int negativeRounds, - int ngStarter, int vectorLength, double alpha, long nextRandom, int vocabSize) { - indexingArguments.add(vectorLength); - indexingArguments.add(idxSyn1.length); - indexingArguments.add(negativeRounds); - - // FIXME: int cast - indexingArguments.add((int) expTable.length()); - indexingArguments.add(vocabSize); - indexingArguments.add(ngStarter); - indexingArguments.add(negTable == null ? 0 : (int) negTable.length()); - indexingArguments.add(idxSyn0.length); - indexingArguments.add(wordIdx); - indexingArguments.add(0); // number of labels. 0 by default - indexingArguments.add(1); // trainWords? true by default - indexingArguments.add(0); // is inference? 
false by default - - - arguments.add(syn0); - arguments.add(syn1); - arguments.add(expTable); - arguments.add(syn1Neg); - arguments.add(negTable); - arguments.add(null); - - intArrayArguments.add(idxSyn0); - intArrayArguments.add(idxSyn1); - intArrayArguments.add(codes); - - realArguments.add(alpha); - realArguments.add((double) nextRandom); - - this.vectorLength = vectorLength; - } - - @Override - public String name() { - return "aggregate_cbow"; - } - - @Override - public int opNum() { - return 4; - } - - @Override - public int maxArguments() { - return 6; - } - - @Override - public int maxShapes() { - return 0; - } - - @Override - public int maxIntArrays() { - return 3; - } - - @Override - public int maxIntArraySize() { - return 40; - } - - @Override - public int maxIndexArguments() { - return 12; - } - - @Override - public int maxRealArguments() { - return 2; - } - - @Override - public int getSharedMemorySize() { - return (vectorLength * Nd4j.sizeOfDataType() * 2) + 512; - } - - @Override - public int getThreadsPerInstance() { - if (vectorLength > 768) - return 768; - - return vectorLength; - } -} diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/AggregateDot.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/AggregateDot.java deleted file mode 100644 index a5ef4a4da..000000000 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/AggregateDot.java +++ /dev/null @@ -1,107 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.nd4j.linalg.api.ops.aggregates.impl; - -import lombok.NonNull; -import org.nd4j.linalg.api.ndarray.INDArray; -import org.nd4j.linalg.api.ops.aggregates.BaseAggregate; -import org.nd4j.linalg.factory.Nd4j; - -/** - * This op describes Dot call that'll happen soon(TM) in batch mode - * - * @author raver119@gmail.com - */ -@Deprecated -public class AggregateDot extends BaseAggregate { - private int vectorLength; - - public AggregateDot(@NonNull INDArray x, @NonNull INDArray y) { - this.arguments.add(x); - this.arguments.add(y); - - // FIXME: int cast - - this.indexingArguments.add((int) x.length()); - this.vectorLength = (int) x.length(); - } - - /** - * This method returns amount of shared memory required for this specific Aggregate. - * PLEASE NOTE: this method is especially important for CUDA backend. On CPU backend it might be ignored, depending on Aggregate. - * - * @return - */ - @Override - public int getSharedMemorySize() { - return (getThreadsPerInstance() * Nd4j.sizeOfDataType()) + 512; - } - - /** - * This method returns desired number of threads per Aggregate instance - * PLEASE NOTE: this method is especially important for CUDA backend. 
On CPU backend it might be ignored, depending on Aggregate. - * - * @return - */ - @Override - public int getThreadsPerInstance() { - if (vectorLength > 768) - return 768; - - return vectorLength; - } - - @Override - public String name() { - return "aggregate_dot"; - } - - @Override - public int opNum() { - return 1; - } - - @Override - public int maxArguments() { - return 2; - } - - @Override - public int maxShapes() { - return 0; - } - - @Override - public int maxIntArrays() { - return 0; - } - - @Override - public int maxIntArraySize() { - return 0; - } - - @Override - public int maxIndexArguments() { - return 1; - } - - @Override - public int maxRealArguments() { - return 0; - } -} diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/AggregateSkipGram.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/AggregateSkipGram.java deleted file mode 100644 index 7fa52ece2..000000000 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/AggregateSkipGram.java +++ /dev/null @@ -1,165 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.nd4j.linalg.api.ops.aggregates.impl; - -import lombok.NonNull; -import lombok.extern.slf4j.Slf4j; -import org.nd4j.linalg.api.ndarray.INDArray; -import org.nd4j.linalg.api.ops.aggregates.BaseAggregate; -import org.nd4j.linalg.factory.Nd4j; - -/** - * This aggregate encapsulates AggregateSkipGram training round for a given word and context - * - * @author raver119@gmail.com - */ -@Slf4j -@Deprecated -public class AggregateSkipGram extends BaseAggregate { - private int vectorLength; - - public AggregateSkipGram(INDArray syn0, INDArray syn1, INDArray syn1Neg, INDArray expTable, INDArray negTable, - int idxSyn0, int[] idxSyn1, int[] codes, int negativeRounds, int ngStarter, int vectorLength, - double alpha, long nextRandom, int vocabSize, INDArray inferenceVector) { - this(syn0, syn1, syn1Neg, expTable, negTable, idxSyn0, idxSyn1, codes, negativeRounds, ngStarter, vectorLength, - alpha, nextRandom, vocabSize); - - arguments.set(5, inferenceVector); - - indexingArguments.set(8, inferenceVector == null ? 
0 : 1); // set isInference to true - } - - public AggregateSkipGram(@NonNull INDArray syn0, INDArray syn1, INDArray syn1Neg, @NonNull INDArray expTable, - INDArray negTable, int idxSyn0, int[] idxSyn1, int[] codes, int negativeRounds, int ngStarter, - int vectorLength, double alpha, long nextRandom, int vocabSize) { - indexingArguments.add(idxSyn0); - indexingArguments.add(vectorLength); - indexingArguments.add(idxSyn1.length); - indexingArguments.add(negativeRounds); - - // FIXME: int cast - indexingArguments.add((int) expTable.length()); - indexingArguments.add(vocabSize); - indexingArguments.add(ngStarter); - - indexingArguments.add(negTable == null ? 0 : (int) negTable.length()); - indexingArguments.add(0); - - arguments.add(syn0); - arguments.add(syn1); - arguments.add(expTable); - arguments.add(syn1Neg); - arguments.add(negTable); - arguments.add(null); - - intArrayArguments.add(idxSyn1); - intArrayArguments.add(codes); - - realArguments.add(alpha); - realArguments.add((double) nextRandom); - - this.vectorLength = vectorLength; - } - - /** - * This is special signature suitable for use with VoidParameterServer, never ever use it outside of spark-nlp - * - * @param w1 - * @param w2 - * @param lr - * @param vectorLength - */ - // TODO: probably this signature should be removed? - public AggregateSkipGram(int w1, int w2, int[] codes, int[] points, int negSamples, double lr, int vectorLength) { - indexingArguments.add(w1); - indexingArguments.add(w2); - indexingArguments.add(vectorLength); - - intArrayArguments.add(codes); - intArrayArguments.add(points); - - realArguments.add(lr); - } - - - /** - * This method returns amount of shared memory required for this specific Aggregate. - * PLEASE NOTE: this method is especially important for CUDA backend. On CPU backend it might be ignored, depending on Aggregate. - * - * @return - */ - @Override - public int getSharedMemorySize() { - return (vectorLength * Nd4j.sizeOfDataType()) + 512; - } - - /** - * This method returns desired number of threads per Aggregate instance - * PLEASE NOTE: this method is especially important for CUDA backend. On CPU backend it might be ignored, depending on Aggregate. - * - * @return - */ - @Override - public int getThreadsPerInstance() { - if (vectorLength > 768) - return 768; - - return vectorLength; - } - - @Override - public String name() { - return "aggregate_skipgram"; - } - - @Override - public int opNum() { - return 3; - } - - @Override - public int maxArguments() { - return 6; - } - - @Override - public int maxShapes() { - return 0; - } - - @Override - public int maxIntArrays() { - return 2; - } - - @Override - public int maxIntArraySize() { - // we hardcode 40 here, due to w2v codeLength mechanics - // TODO: make sure this limitation doesn't bother with spark environment - return 40; - } - - @Override - public int maxIndexArguments() { - return 10; - } - - @Override - public int maxRealArguments() { - return 2; - } -} diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/HierarchicSoftmax.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/HierarchicSoftmax.java deleted file mode 100644 index de494dbff..000000000 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/HierarchicSoftmax.java +++ /dev/null @@ -1,114 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. 
- * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.nd4j.linalg.api.ops.aggregates.impl; - -import org.nd4j.linalg.api.ndarray.INDArray; -import org.nd4j.linalg.api.ops.aggregates.BaseAggregate; -import org.nd4j.linalg.factory.Nd4j; - -/** - * This Op describes HS round for AggregateSkipGram/CBOW Hierarchic Softmax - * - * @author raver119@gmail.com - */ -@Deprecated -public class HierarchicSoftmax extends BaseAggregate { - private int vectorLength; - - public HierarchicSoftmax(INDArray syn0, INDArray syn1, INDArray expTable, INDArray neu1e, int code, double lr) { - arguments.add(syn0); - arguments.add(syn1); - arguments.add(expTable); - arguments.add(neu1e); - - // FIXME: int cast - - indexingArguments.add((int) neu1e.length()); - indexingArguments.add((int) expTable.length()); - indexingArguments.add(code); - indexingArguments.add(0); // set isInference to false - - realArguments.add(lr); - - this.vectorLength = (int) neu1e.length(); - } - - /** - * This method returns amount of shared memory required for this specific Aggregate. - * PLEASE NOTE: this method is especially important for CUDA backend. On CPU backend it might be ignored, depending on Aggregate. - * - * @return - */ - @Override - public int getSharedMemorySize() { - return (getThreadsPerInstance() * Nd4j.sizeOfDataType()) + 512; - } - - /** - * This method returns desired number of threads per Aggregate instance - * PLEASE NOTE: this method is especially important for CUDA backend. On CPU backend it might be ignored, depending on Aggregate. 
- * - * @return - */ - @Override - public int getThreadsPerInstance() { - if (vectorLength > 768) - return 768; - - return vectorLength; - } - - @Override - public int opNum() { - return 0; - } - - @Override - public String name() { - return "aggregate_hs"; - } - - @Override - public int maxArguments() { - return 4; - } - - @Override - public int maxShapes() { - return 0; - } - - @Override - public int maxIntArrays() { - return 0; - } - - @Override - public int maxIntArraySize() { - return 0; - } - - @Override - public int maxIndexArguments() { - return 5; - } - - @Override - public int maxRealArguments() { - return 1; - } -} diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastAMax.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastAMax.java index 726d82b0a..c1fceccb6 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastAMax.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastAMax.java @@ -50,7 +50,7 @@ public class BroadcastAMax extends BaseBroadcastOp { super(sameDiff, i_v, dimension, inPlace); } - public BroadcastAMax(SameDiff sameDiff, SDVariable i_v, int[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) { + public BroadcastAMax(SameDiff sameDiff, SDVariable i_v, long[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) { super(sameDiff, i_v, shape, inPlace, dimension, extraArgs); } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastCopyOp.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastCopyOp.java index 639ee24ec..00700b3c6 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastCopyOp.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastCopyOp.java @@ -48,7 +48,7 @@ public class BroadcastCopyOp extends BaseBroadcastOp { super(sameDiff, i_v, dimension, inPlace); } - public BroadcastCopyOp(SameDiff sameDiff, SDVariable i_v, int[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) { + public BroadcastCopyOp(SameDiff sameDiff, SDVariable i_v, long[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) { super(sameDiff, i_v, shape, inPlace, dimension, extraArgs); } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastMin.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastMin.java index fe47e2bc3..8a7234532 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastMin.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastMin.java @@ -58,7 +58,7 @@ public class BroadcastMin extends BaseBroadcastOp { super(sameDiff, i_v, dimension, inPlace); } - public BroadcastMin(SameDiff sameDiff, SDVariable i_v, int[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) { + public BroadcastMin(SameDiff sameDiff, SDVariable i_v, long[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) { super(sameDiff, i_v, shape, inPlace, dimension, extraArgs); } diff --git 
a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastRSubOp.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastRSubOp.java index 0ddf777c2..1a4ec9887 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastRSubOp.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastRSubOp.java @@ -46,7 +46,7 @@ public class BroadcastRSubOp extends BaseBroadcastOp { super(sameDiff, i_v, dimension, inPlace); } - public BroadcastRSubOp(SameDiff sameDiff, SDVariable i_v, int[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) { + public BroadcastRSubOp(SameDiff sameDiff, SDVariable i_v, long[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) { super(sameDiff, i_v, shape, inPlace, dimension, extraArgs); } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastSubOp.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastSubOp.java index 035e6d882..e060db4b6 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastSubOp.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BroadcastSubOp.java @@ -52,7 +52,7 @@ public class BroadcastSubOp extends BaseBroadcastOp { super(sameDiff, i_v, dimension, inPlace); } - public BroadcastSubOp(SameDiff sameDiff, SDVariable i_v, int[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) { + public BroadcastSubOp(SameDiff sameDiff, SDVariable i_v, long[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) { super(sameDiff, i_v, shape, inPlace, dimension, extraArgs); } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastGreaterThan.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastGreaterThan.java index 733744fcd..63f1e2c45 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastGreaterThan.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastGreaterThan.java @@ -53,7 +53,7 @@ public class BroadcastGreaterThan extends BaseBroadcastBoolOp { super(sameDiff, i_v, dimension, inPlace); } - public BroadcastGreaterThan(SameDiff sameDiff, SDVariable i_v, int[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) { + public BroadcastGreaterThan(SameDiff sameDiff, SDVariable i_v, long[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) { super(sameDiff, i_v, shape, inPlace, dimension, extraArgs); } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastLessThan.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastLessThan.java index 0f715a56a..9fab3350f 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastLessThan.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastLessThan.java @@ -54,7 +54,7 @@ public class 
@@ -54,7 +54,7 @@ public class BroadcastLessThan extends BaseBroadcastBoolOp {
         super(sameDiff, i_v, dimension, inPlace);
     }

-    public BroadcastLessThan(SameDiff sameDiff, SDVariable i_v, int[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) {
+    public BroadcastLessThan(SameDiff sameDiff, SDVariable i_v, long[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) {
         super(sameDiff, i_v, shape, inPlace, dimension, extraArgs);
     }

diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastLessThanOrEqual.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastLessThanOrEqual.java
index 903919d4b..e9ee1db2c 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastLessThanOrEqual.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/bool/BroadcastLessThanOrEqual.java
@@ -54,7 +54,7 @@ public class BroadcastLessThanOrEqual extends BaseBroadcastBoolOp {
         super(sameDiff, i_v, dimension, inPlace);
     }

-    public BroadcastLessThanOrEqual(SameDiff sameDiff, SDVariable i_v, int[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) {
+    public BroadcastLessThanOrEqual(SameDiff sameDiff, SDVariable i_v, long[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) {
         super(sameDiff, i_v, shape, inPlace, dimension, extraArgs);
     }

diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/DeConv2D.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/DeConv2D.java
index 017d341d6..2bba1c2e3 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/DeConv2D.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/DeConv2D.java
@@ -186,10 +186,10 @@ public class DeConv2D extends DynamicCustomOp {
     public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
         val aStrides = nodeDef.getAttrOrThrow("strides");
         val tfStrides = aStrides.getList().getIList();
-        int sH = 1;
-        int sW = 1;
-        int kH = 1;
-        int kW = 1;
+        long sH = 1;
+        long sW = 1;
+        long kH = 1;
+        long kW = 1;

         val aPadding = nodeDef.getAttrOrDefault("padding", null);
@@ -214,21 +214,18 @@ public class DeConv2D extends DynamicCustomOp {
             dataFormat = attr.getS().toStringUtf8().toLowerCase();
         }

-        // FIXME: int cast
-
-
         if (dataFormat.equalsIgnoreCase(DeConv2DConfig.NCHW)) {
-            sH = tfStrides.get(2).intValue();
-            sW = tfStrides.get(3).intValue();
+            sH = tfStrides.get(2).longValue();
+            sW = tfStrides.get(3).longValue();

-            kH = (int) arr.size(2);
-            kW = (int) arr.size(3);
+            kH = arr.size(2);
+            kW = arr.size(3);
         } else {
-            sH = tfStrides.get(1).intValue();
-            sW = tfStrides.get(2).intValue();
+            sH = tfStrides.get(1).longValue();
+            sW = tfStrides.get(2).longValue();

-            kH = (int) arr.size(0);
-            kW = (int) arr.size(1);
+            kH = arr.size(0);
+            kW = arr.size(1);
         }
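The DeConv2D TensorFlow-import path now keeps stride and kernel sizes as long from the moment they are read, rather than truncating eagerly and widening later. The essential data flow, shown in isolation (a sketch assuming the tfStrides list and weight array arr from the hunk above):

    long sH = tfStrides.get(2).longValue(); // NCHW strides are laid out [b, c, h, w]
    long sW = tfStrides.get(3).longValue();
    long kH = arr.size(2);                  // INDArray.size(int) already returns long
    long kW = arr.size(3);
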
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/ASinh.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/ASinh.java
index 23d176603..49ef2fb09 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/ASinh.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/transforms/strict/ASinh.java
@@ -37,10 +37,6 @@ public class ASinh extends BaseTransformStrictOp {
         super(sameDiff, i_v, inPlace);
     }

-    public ASinh(SameDiff sameDiff, SDVariable i_v, int[] shape, boolean inPlace, Object[] extraArgs) {
-        super(sameDiff, i_v, shape, inPlace, extraArgs);
-    }
-
     public ASinh(SameDiff sameDiff, SDVariable i_v) {
         super(sameDiff, i_v, false);
     }
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/random/impl/BinomialDistributionEx.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/random/impl/BinomialDistributionEx.java
index ff05d6c9f..3ec26a927 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/random/impl/BinomialDistributionEx.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/random/impl/BinomialDistributionEx.java
@@ -33,7 +33,7 @@ import java.util.Map;
  * @author raver119@gmail.com
  */
 public class BinomialDistributionEx extends BaseRandomOp {
-    private int trials;
+    private long trials;
     private double probability;

     public BinomialDistributionEx() {
@@ -46,7 +46,7 @@ public class BinomialDistributionEx extends BaseRandomOp {
      * @param trials
      * @param probability
      */
-    public BinomialDistributionEx(@NonNull INDArray z, int trials, double probability) {
+    public BinomialDistributionEx(@NonNull INDArray z, long trials, double probability) {
         super(z, z, z);
         this.trials = trials;
         this.probability = probability;
@@ -59,7 +59,7 @@ public class BinomialDistributionEx extends BaseRandomOp {
      * @param trials
      * @param probabilities array with probability value for each trial
      */
-    public BinomialDistributionEx(@NonNull INDArray z, int trials, @NonNull INDArray probabilities) {
+    public BinomialDistributionEx(@NonNull INDArray z, long trials, @NonNull INDArray probabilities) {
         super(z, probabilities, z);
         if (z.length() != probabilities.length())
             throw new IllegalStateException("Length of probabilities array should match length of target array");
@@ -82,8 +82,7 @@ public class BinomialDistributionEx extends BaseRandomOp {
      * @param probabilities
      */
     public BinomialDistributionEx(@NonNull INDArray z, @NonNull INDArray probabilities) {
-        // FIXME: int cast
-        this(z, (int) probabilities.length(), probabilities);
+        this(z, probabilities.length(), probabilities);
     }
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/rng/distribution/BaseDistribution.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/rng/distribution/BaseDistribution.java
index 831ee144a..e32010827 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/rng/distribution/BaseDistribution.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/rng/distribution/BaseDistribution.java
@@ -229,7 +229,6 @@ public abstract class BaseDistribution implements Distribution {
         if (sampleSize <= 0) {
             throw new NotStrictlyPositiveException(LocalizedFormats.NUMBER_OF_SAMPLES, sampleSize);
         }
-        // FIXME: int cast
         double[] out = new double[(int) sampleSize];
         for (int i = 0; i < sampleSize; i++) {
             out[i] = sample();
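BaseDistribution.sample(long) still allocates a plain Java array, whose length is capped at Integer.MAX_VALUE, so the narrowing cast survives even with the FIXME removed. A guarded variant in the style used elsewhere in this commit would be (a sketch only):

    if (sampleSize > Integer.MAX_VALUE)
        throw new ND4JArraySizeException(); // Java arrays hold at most 2^31 - 1 elements
    double[] out = new double[(int) sampleSize];
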
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/OrthogonalDistribution.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/OrthogonalDistribution.java
index 31d24f11e..80f9910db 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/OrthogonalDistribution.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/OrthogonalDistribution.java
@@ -222,7 +222,7 @@ public class OrthogonalDistribution extends BaseDistribution {

     @Override
     public INDArray sample(long[] shape){
-        int numRows = 1;
+        long numRows = 1;
         for (int i = 0; i < shape.length - 1; i++)
             numRows *= shape[i];
         long numCols = shape[shape.length - 1];
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/shape/Shape.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/shape/Shape.java
index ef4331b0a..51711b3d2 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/shape/Shape.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/shape/Shape.java
@@ -923,15 +923,13 @@ public class Shape {
      * @param indices Indices array to get the offset for (must be same length as array rank)
      * @return Buffer offset fo the specified indices
      */
-    public static long getOffset(IntBuffer shapeInformation, int[] indices) {
-        // FIXME: int cast
+    /*public static long getOffset(IntBuffer shapeInformation, int[] indices) {
         return getOffset(shapeInformation, ArrayUtil.toLongArray(indices));
     }

     public static long getOffset(LongBuffer shapeInformation, int[] indices) {
-        // FIXME: int cast
         return getOffset(shapeInformation, ArrayUtil.toLongArray(indices));
-    }
+    }*/

     public static long getOffset(LongBuffer shapeInformation, long... indices) {
         int rank = rank(shapeInformation);
@@ -968,8 +966,8 @@ public class Shape {
      * @param indices Indices array to get the offset for (must be same length as array rank)
      * @return Buffer offset fo the specified indices
      */
+    @Deprecated
     public static long getOffset(DataBuffer shapeInformation, int[] indices) {
-        // FIXME: int cast
         return getOffset(shapeInformation, ArrayUtil.toLongArray(indices));
     }

     public static long getOffset(DataBuffer shapeInformation, long... indices) {
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/convolution/Convolution.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/convolution/Convolution.java
index b31e6e036..9c0645156 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/convolution/Convolution.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/convolution/Convolution.java
@@ -159,9 +159,8 @@ public class Convolution {
     public static INDArray im2col(INDArray img, int kh, int kw, int sy, int sx, int ph, int pw, int dh, int dw, boolean isSameMode) {
         Nd4j.getCompressor().autoDecompress(img);
         //Input: NCHW format
-        // FIXME: int cast
-        int outH = outputSize((int) img.size(2), kh, sy, ph, dh, isSameMode);
-        int outW = outputSize((int) img.size(3), kw, sx, pw, dw, isSameMode);
+        long outH = outputSize(img.size(2), kh, sy, ph, dh, isSameMode);
+        long outW = outputSize(img.size(3), kw, sx, pw, dw, isSameMode);

         //[miniBatch,depth,kH,kW,outH,outW]
         INDArray out = Nd4j.create(new long[]{img.size(0), img.size(1), kh, kw, outH, outW}, 'c');
@@ -277,9 +276,8 @@ public class Convolution {
             output = Nd4j.createUninitialized(img.dataType(), new long[]{img.size(0), img.size(1), kh, kw, oH, oW}, 'c');
         } else {
-            // FIXME: int cast
-            int oH = ((int) img.size(2) - (kh + (kh - 1) * (1 - 1)) + 2 * ph) / sy + 1;
-            int oW = ((int) img.size(3) - (kw + (kw - 1) * (1 - 1)) + 2 * pw) / sx + 1;
+            long oH = (img.size(2) - (kh + (kh - 1) * (1 - 1)) + 2 * ph) / sy + 1;
+            long oW = (img.size(3) - (kw + (kw - 1) * (1 - 1)) + 2 * pw) / sx + 1;

             output = Nd4j.createUninitialized(img.dataType(), new long[]{img.size(0), img.size(1), kh, kw, oH, oW}, 'c');
         }
@@ -314,7 +312,7 @@ public class Convolution {
      * @return
      */
     @Deprecated
-    public static int outSize(int size, int k, int s, int p, int dilation, boolean coverAll) {
+    public static long outSize(long size, long k, long s, long p, int dilation, boolean coverAll) {
         k = effectiveKernelSize(k, dilation);

         if (coverAll)
@@ -323,7 +321,7 @@ public class Convolution {
         return (size + p * 2 - k) / s + 1;
     }

-    public static int outputSize(int size, int k, int s, int p, int dilation, boolean isSameMode) {
+    public static long outputSize(long size, long k, long s, long p, int dilation, boolean isSameMode) {
         k = effectiveKernelSize(k, dilation);

         if (isSameMode) {
@@ -333,7 +331,7 @@ public class Convolution {
         }
     }

-    public static int effectiveKernelSize(int kernel, int dilation) {
+    public static long effectiveKernelSize(long kernel, int dilation) {
         return kernel + (kernel - 1) * (dilation - 1);
     }
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/DataSet.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/DataSet.java
index cb354c2d1..5cfecc6fe 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/DataSet.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/DataSet.java
@@ -584,7 +584,6 @@ public class DataSet implements org.nd4j.linalg.dataset.api.DataSet {
      */
     @Override
     public int numInputs() {
-        // FIXME: int cast
         return (int) getFeatures().size(1);
     }
@@ -1134,13 +1133,11 @@ public class DataSet implements org.nd4j.linalg.dataset.api.DataSet {

     @Override
     public int numOutcomes() {
-        // FIXME: int cast
         return (int) getLabels().size(1);
     }

     @Override
     public int numExamples() {
-        // FIXME: int cast
         if (getFeatures() != null)
             return (int) getFeatures().size(0);
         else if (getLabels() != null)
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/KFoldIterator.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/KFoldIterator.java
index 70c754c6e..4d7d257e1 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/KFoldIterator.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/KFoldIterator.java
@@ -99,13 +99,11 @@ public class KFoldIterator implements DataSetIterator {

     @Override
     public int inputColumns() {
-        // FIXME: int cast
         return (int) allData.getFeatures().size(1);
     }

     @Override
     public int totalOutcomes() {
-        // FIXME: int cast
         return (int) allData.getLabels().size(1);
     }
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/TestDataSetIterator.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/TestDataSetIterator.java
index cc3f6905d..f57025344 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/TestDataSetIterator.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/iterator/TestDataSetIterator.java
@@ -72,13 +72,11 @@ public class TestDataSetIterator implements DataSetIterator {

     @Override
     public int inputColumns() {
-        // FIXME: int cast
         return (int)list.get(0).getFeatures().columns();
     }

     @Override
     public int totalOutcomes() {
-        // FIXME: int cast
         return (int) list.get(0).getLabels().columns();
     }
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/RGBtoGrayscaleDataSetPreProcessor.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/RGBtoGrayscaleDataSetPreProcessor.java
index 11d0bd9a6..d7839e8d8 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/RGBtoGrayscaleDataSetPreProcessor.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/dataset/api/preprocessor/RGBtoGrayscaleDataSetPreProcessor.java
@@ -61,7 +61,6 @@ public class RGBtoGrayscaleDataSetPreProcessor implements DataSetPreProcessor {
         B.muli(BLUE_RATIO);
         R.addi(G).addi(B);

-        // FIXME: int cast
         result.putSlice((int)n, R);
     }
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/factory/BaseNDArrayFactory.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/factory/BaseNDArrayFactory.java
index a664d9ee5..907290ef9 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/factory/BaseNDArrayFactory.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/factory/BaseNDArrayFactory.java
@@ -29,6 +29,7 @@ import org.nd4j.linalg.api.ops.DynamicCustomOp;
 import org.nd4j.linalg.api.ops.random.impl.Range;
 import org.nd4j.linalg.api.rng.distribution.Distribution;
 import org.nd4j.linalg.api.shape.Shape;
+import org.nd4j.linalg.exception.ND4JArraySizeException;
 import org.nd4j.linalg.indexing.INDArrayIndex;
 import org.nd4j.linalg.indexing.NDArrayIndex;
 import org.nd4j.linalg.primitives.AtomicDouble;
@@ -921,8 +922,8 @@ public abstract class BaseNDArrayFactory implements NDArrayFactory {
         int arrOffset = 0;

-        // FIXME: int cast
-
+        if (ret.tensorsAlongDimension(dimension) > Integer.MAX_VALUE)
+            throw new ND4JArraySizeException();
         INDArray[] retAlongDimensionArrays = new INDArray[(int) ret.tensorsAlongDimension(dimension)];
         for (int i = 0; i < retAlongDimensionArrays.length; i++)
             retAlongDimensionArrays[i] = ret.tensorAlongDimension(i, dimension);
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/factory/Nd4j.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/factory/Nd4j.java
index 17a8e8a36..1a6565ce4 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/factory/Nd4j.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/factory/Nd4j.java
@@ -6581,7 +6581,8 @@
      */
     @Deprecated
     public static void scatterUpdate(ScatterUpdate.UpdateOp op, @NonNull INDArray array, @NonNull INDArray indices, @NonNull INDArray updates, int... axis) {
-        Preconditions.checkArgument(indices.dataType() == DataType.INT, "Indices should have INT data type");
+        Preconditions.checkArgument(indices.dataType() == DataType.INT || indices.dataType() == DataType.LONG,
+                "Indices should have INT data type");
         Preconditions.checkArgument(array.dataType() == updates.dataType(), "Array and updates should have the same data type");
         getExecutioner().scatterUpdate(op, array, indices, updates, axis);
     }
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/indexing/BooleanIndexing.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/indexing/BooleanIndexing.java
index 6bc48778f..77f56c613 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/indexing/BooleanIndexing.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/indexing/BooleanIndexing.java
@@ -106,8 +106,6 @@ public class BooleanIndexing {
         MatchCondition op = new MatchCondition(n, condition, dimension);
         INDArray arr = Nd4j.getExecutioner().exec(op);

-        // FIXME: int cast
-
         boolean[] result = new boolean[(int) arr.length()];

         for (int i = 0; i < arr.length(); i++) {
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/indexing/Indices.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/indexing/Indices.java
index 3ca99c50e..c17295409 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/indexing/Indices.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/indexing/Indices.java
@@ -16,6 +16,7 @@

 package org.nd4j.linalg.indexing;

+import org.nd4j.linalg.exception.ND4JArraySizeException;
 import org.nd4j.shade.guava.primitives.Ints;
 import org.nd4j.shade.guava.primitives.Longs;
 import org.nd4j.linalg.api.ndarray.INDArray;
@@ -59,8 +60,8 @@ public class Indices {
             double otherTest = ((double) index) / arr.size(-1);
             int test = (int) Math.floor(otherTest);
-            // FIXME: int cast
-
+            if (arr.vectorsAlongDimension(-1) > Integer.MAX_VALUE)
+                throw new ND4JArraySizeException();
             int vectors = (int) arr.vectorsAlongDimension(-1);
             if (test >= vectors)
                 return vectors - 1;
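The replacement for the removed FIXME markers is the same idiom throughout: compare the long-valued quantity against Integer.MAX_VALUE before narrowing. As a standalone sketch (the helper name is hypothetical; Math.toIntExact gives the same check with an ArithmeticException instead):

    static int checkedCast(long n) {
        if (n > Integer.MAX_VALUE)
            throw new ND4JArraySizeException(); // refuse to truncate silently
        return (int) n;
    }
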
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/learning/legacy/AdaGrad.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/learning/legacy/AdaGrad.java
index 0bf673a49..6793f04f5 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/learning/legacy/AdaGrad.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/learning/legacy/AdaGrad.java
@@ -37,7 +37,7 @@ public class AdaGrad implements Serializable {
     public static final double DEFAULT_ADAGRAD_EPSILON = 1e-6;

     public INDArray historicalGradient;
-    public int[] shape;
+    public long[] shape;
     protected double learningRate = 1e-1; // learning rate
     protected int numIterations = 0;
     private double epsilon = DEFAULT_ADAGRAD_EPSILON;
@@ -73,7 +73,7 @@ public class AdaGrad implements Serializable {
      * @param learningRate
      */
     public AdaGrad(int rows, int cols, double learningRate) {
-        this.shape = new int[] {rows, cols};
+        this.shape = new long[] {rows, cols};
         this.learningRate = learningRate;
     }

@@ -81,7 +81,7 @@ public class AdaGrad implements Serializable {
         this(rows, cols, 0.1);
     }

-    public AdaGrad(int[] shape, double learningRate) {
+    public AdaGrad(long[] shape, double learningRate) {
         this.shape = shape;
         this.learningRate = learningRate;
     }
@@ -124,7 +124,7 @@ public class AdaGrad implements Serializable {
         return ret;
     }

-    public double getGradient(double gradient, int column, int[] shape) {
+    public double getGradient(double gradient, int column, long[] shape) {
         boolean historicalInitialized = false;
         if (this.historicalGradient == null) {
             this.historicalGradient = Nd4j.ones(shape);
@@ -143,7 +143,7 @@ public class AdaGrad implements Serializable {
         return adjustedGradient;
     }

-    public INDArray getGradient(INDArray gradient, int slice, int[] shape) {
+    public INDArray getGradient(INDArray gradient, int slice, long[] shape) {
         boolean historicalInitialized = false;
         INDArray sqrtHistory;
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/util/DataSetUtils.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/util/DataSetUtils.java
index 302b2d102..49c17fa11 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/util/DataSetUtils.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/util/DataSetUtils.java
@@ -18,6 +18,7 @@ package org.nd4j.linalg.util;

 import org.nd4j.linalg.api.ndarray.INDArray;
 import org.nd4j.linalg.dataset.DataSet;
+import org.nd4j.linalg.exception.ND4JArraySizeException;
 import org.nd4j.tools.BTools;
 import org.nd4j.tools.InfoLine;
 import org.nd4j.tools.InfoValues;
@@ -178,7 +179,9 @@
         InfoValues iv;
         //
         double j_Dbl = -1;
-        // FIXME: int cast
+        if (in_INDA.rows() > Integer.MAX_VALUE) {
+            throw new ND4JArraySizeException();
+        }
         int i_CharsCount = BTools.getIndexCharsCount( (int) in_INDA.rows() - 1 );
         //
         oinfo = "";
@@ -219,7 +222,8 @@
         c_I = 0;
         //
         if ( ot_INDA != null ) {
-            // FIXME: int cast
+            if (ot_INDA.columns() - 1 > Integer.MAX_VALUE)
+                throw new ND4JArraySizeException();
             for ( int j = (int) ot_INDA.columns() - 1; j >= 0; j-- ) {
                 //
                 if ( c_I > c_End_I ) break;
@@ -346,7 +350,8 @@
         InfoValues iv;
         //
         double j_Dbl = -1;
-        // FIXME: int cast
+        if (INDA.rows() - 1 > Integer.MAX_VALUE)
+            throw new ND4JArraySizeException();
         int i_CharsCount = BTools.getIndexCharsCount( (int) INDA.rows() - 1 );
         //
         if ( !turned ) { //= standard
@@ -366,7 +371,8 @@
             iv.vsL.add( BTools.getSInt( i, i_CharsCount ) );
             //
             int c_I = 0;
-            // FIXME: int cast
+            if (INDA.columns() - 1 > Integer.MAX_VALUE)
+                throw new ND4JArraySizeException();
             for ( int j = (int) INDA.columns() - 1; j >= 0; j-- ) {
                 //
                 if ( c_I > c_End_I ) break;
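With AdaGrad.shape widened to long[], call sites can feed INDArray.shape() straight into the legacy updater, since shape() already returns long[]. A usage sketch against the constructor above:

    INDArray params = Nd4j.rand(3, 5);
    AdaGrad adaGrad = new AdaGrad(params.shape(), 0.1); // long[] shape, learning rate 0.1
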
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/util/NDArrayUtil.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/util/NDArrayUtil.java
index fd09351fb..23ae9b984 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/util/NDArrayUtil.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/util/NDArrayUtil.java
@@ -84,7 +84,6 @@ public class NDArrayUtil {
         n = n.reshape(-1);

-        // FIXME: int cast
         long[] ret = new long[(int) n.length()];
         for (int i = 0; i < n.length(); i++)
             ret[i] = (long) n.getFloat(i);
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/serde/binary/BinarySerde.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/serde/binary/BinarySerde.java
index bf0fdf9cb..60bb0378b 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/serde/binary/BinarySerde.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/serde/binary/BinarySerde.java
@@ -26,6 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray;
 import org.nd4j.linalg.api.shape.Shape;
 import org.nd4j.linalg.compression.CompressedDataBuffer;
 import org.nd4j.linalg.compression.CompressionDescriptor;
+import org.nd4j.linalg.exception.ND4JArraySizeException;
 import org.nd4j.linalg.factory.Nd4j;
 import org.nd4j.linalg.primitives.Pair;
@@ -91,7 +92,8 @@ public class BinarySerde {
             if (type != DataType.COMPRESSED) {
                 ByteBuffer slice = byteBuffer.slice();
                 //wrap the data buffer for the last bit
-                // FIXME: int cast
+                if (Shape.length(shapeBuff) > Integer.MAX_VALUE)
+                    throw new ND4JArraySizeException();
                 DataBuffer buff = Nd4j.createBuffer(slice, type, (int) Shape.length(shapeBuff));
                 //advance past the data
                 int position = byteBuffer.position() + (buff.getElementSize() * (int) buff.length());
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-native-api/src/main/java/org/nd4j/nativeblas/NativeOps.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-native-api/src/main/java/org/nd4j/nativeblas/NativeOps.java
index a060232db..8f621668b 100644
--- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-native-api/src/main/java/org/nd4j/nativeblas/NativeOps.java
+++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-native-api/src/main/java/org/nd4j/nativeblas/NativeOps.java
@@ -1075,7 +1075,7 @@ public interface NativeOps {
                     Pointer dX, @Cast("Nd4jLong *") LongPointer dXShapeInfo, @Cast("Nd4jLong *") LongPointer dxOffsets,
                     Pointer hY, @Cast("Nd4jLong *") LongPointer hYShapeInfo, @Cast("Nd4jLong *") LongPointer hyOffsets,
                     Pointer dY, @Cast("Nd4jLong *") LongPointer dYShapeInfo, @Cast("Nd4jLong *") LongPointer dyOffsets,
-                    IntPointer hIndices, IntPointer dIndices);
+                    Pointer hIndices, @Cast("Nd4jLong *") LongPointer hIndicesShapeInfo, Pointer dIndices, @Cast("Nd4jLong *") LongPointer dIndicesShapeInfo);

     //void fillUtf8String(PointerPointer extraPointers, String[] string, int numStrings, Pointer buffer);
     Pointer createUtf8String(PointerPointer extraPointers, String string, int length);
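The native scatterUpdate signature stops assuming 32-bit indices: the two IntPointer arguments become opaque data pointers plus their Nd4jLong shape information, so INT and LONG index buffers travel through one entry point. On the Java side the extra arguments are derived roughly like this (a sketch; the real call sites are in the executioner hunks below):

    Pointer hIndices = indices.data().addressPointer();
    LongPointer hIndicesShapeInfo = (LongPointer) indices.shapeInfoDataBuffer().addressPointer();
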
diff --git a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/JCublasNDArrayFactory.java b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/JCublasNDArrayFactory.java
index 9931fcaa9..cf0bf72da 100644
--- a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/JCublasNDArrayFactory.java
+++ b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/JCublasNDArrayFactory.java
@@ -174,8 +174,7 @@ public class JCublasNDArrayFactory extends BaseNativeNDArrayFactory {

     @Override
     public INDArray create(DataBuffer data, long rows, long columns, int[] stride, long offset) {
-        // FIXME: int cast
-        return new JCublasNDArray(data, new long[] {rows, columns}, ArrayUtil.toLongArray(stride), Nd4j.order(), data.dataType());
+        return new JCublasNDArray(data, new long[] {rows, columns}, stride, Nd4j.order(), data.dataType());
     }

     @Override
@@ -455,8 +454,7 @@ public class JCublasNDArrayFactory extends BaseNativeNDArrayFactory {

     @Override
     public INDArray pullRows(INDArray source, int sourceDimension, long[] indexes) {
-        // FIXME: int cast
-        return pullRows(source, sourceDimension, ArrayUtil.toInts(indexes));
+        return pullRows(source, sourceDimension, indexes);
     }

     /**
@@ -468,7 +466,7 @@ public class JCublasNDArrayFactory extends BaseNativeNDArrayFactory {
      * @return
      */
     @Override
-    public INDArray pullRows(INDArray source, int sourceDimension, int[] indexes, char order) {
+    public INDArray pullRows(INDArray source, int sourceDimension, long[] indexes, char order) {
         if (indexes == null || indexes.length < 1)
             throw new IllegalStateException("Indexes can't be null or zero-length");
diff --git a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLapack.java b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLapack.java
index 3eade74e9..13991f63b 100644
--- a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLapack.java
+++ b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/blas/JcublasLapack.java
@@ -850,7 +850,9 @@ public class JcublasLapack extends BaseLapack {
         if (A.ordering() == 'c')
             a = A.dup('f');

-        // FIXME: int cast
+        if (A.rows() > Integer.MAX_VALUE) {
+            throw new RuntimeException("Rows overflow");
+        }
         int M = (int) A.rows();

         if (Nd4j.getExecutioner() instanceof GridExecutioner)
@@ -925,7 +927,10 @@ public class JcublasLapack extends BaseLapack {
         if (A.ordering() == 'c')
             a = A.dup('f');

-        // FIXME: int cast
+        if (A.rows() > Integer.MAX_VALUE) {
+            throw new RuntimeException("Rows overflow");
+        }
+
         int M = (int) A.rows();

         if (Nd4j.getExecutioner() instanceof GridExecutioner)
diff --git a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/ops/executioner/CudaExecutioner.java b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/ops/executioner/CudaExecutioner.java
index 057e0bbca..5783909d8 100644
--- a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/ops/executioner/CudaExecutioner.java
+++ b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/ops/executioner/CudaExecutioner.java
@@ -2458,7 +2458,7 @@ public class CudaExecutioner extends DefaultOpExecutioner {
             nativeOps.scatterUpdate(stuff, op.ordinal(), (int) indices.length(),
                     null, (LongPointer) AtomicAllocator.getInstance().getHostPointer(tadX.getFirst()), null, AtomicAllocator.getInstance().getPointer(array, context), (LongPointer) AtomicAllocator.getInstance().getPointer(tadX.getFirst()), (LongPointer) AtomicAllocator.getInstance().getPointer(tadX.getSecond()),
                     null, (LongPointer) AtomicAllocator.getInstance().getHostPointer(tadY.getFirst()), null, AtomicAllocator.getInstance().getPointer(updates,
                     context), (LongPointer) AtomicAllocator.getInstance().getPointer(tadY.getFirst()), (LongPointer) AtomicAllocator.getInstance().getPointer(tadY.getSecond()),
-                    null, (IntPointer) AtomicAllocator.getInstance().getPointer(indices, context));
+                    AtomicAllocator.getInstance().getHostPointer(indices), (LongPointer) AtomicAllocator.getInstance().getHostPointer(indices.shapeInfoDataBuffer()), AtomicAllocator.getInstance().getPointer(indices, context), (LongPointer) AtomicAllocator.getInstance().getPointer(indices.shapeInfoDataBuffer(), context));

             if (nativeOps.lastErrorCode() != 0)
                 throw new RuntimeException(nativeOps.lastErrorMessage());
diff --git a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/blas/CpuLapack.java b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/blas/CpuLapack.java
index 39731f90f..70866f6f7 100644
--- a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/blas/CpuLapack.java
+++ b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/blas/CpuLapack.java
@@ -20,6 +20,7 @@ import lombok.val;
 import org.nd4j.linalg.api.blas.impl.BaseLapack;
 import org.nd4j.linalg.api.buffer.DataType;
 import org.nd4j.linalg.api.ndarray.INDArray;
+import org.nd4j.linalg.exception.ND4JArraySizeException;
 import org.nd4j.linalg.factory.Nd4j;
 import org.nd4j.linalg.indexing.INDArrayIndex;
 import org.nd4j.linalg.indexing.NDArrayIndex;
@@ -40,7 +41,9 @@
     }

     protected static int getLda(INDArray A) {
-        // FIXME: int cast
+        if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE) {
+            throw new ND4JArraySizeException();
+        }
         return A.ordering() == 'f' ? (int) A.rows() : (int) A.columns();
     }

 //=========================
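getLda returns the BLAS leading dimension, which the LAPACK bindings accept only as a 32-bit int, so this is a place where a guarded narrowing is genuinely required rather than avoidable. A worked example of what the helper computes (values hypothetical):

    INDArray a = Nd4j.create(DataType.FLOAT, 4, 7).dup('f'); // column-major 4 x 7
    // getLda(a) == 4: with 'f' ordering the leading dimension is the row count;
    // with 'c' ordering it would be the column count, 7.
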
diff --git a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/ops/NativeOpExecutioner.java b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/ops/NativeOpExecutioner.java
index 663eb862e..a2964b7a6 100644
--- a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/ops/NativeOpExecutioner.java
+++ b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/ops/NativeOpExecutioner.java
@@ -1974,7 +1974,7 @@ public class NativeOpExecutioner extends DefaultOpExecutioner {
             loop.scatterUpdate(null, op.ordinal(), (int) indices.length(),
                     array.data().addressPointer(), (LongPointer) tadX.getFirst().addressPointer(), (LongPointer) tadX.getSecond().addressPointer(), null, null, null,
                     updates.data().addressPointer(), (LongPointer) tadY.getFirst().addressPointer(), (LongPointer) tadY.getSecond().addressPointer(), null, null, null,
-                    (IntPointer) indices.data().addressPointer(), null);
+                    indices.data().addressPointer(), (LongPointer) indices.shapeInfoDataBuffer().addressPointer(), null, null);

             if (loop.lastErrorCode() != 0)
                 throw new RuntimeException(loop.lastErrorMessage());
diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/evaluation/EvaluationCalibrationTest.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/evaluation/EvaluationCalibrationTest.java
index 219ccc19c..012ba3434 100644
--- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/evaluation/EvaluationCalibrationTest.java
+++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/evaluation/EvaluationCalibrationTest.java
@@ -161,7 +161,6 @@ public class EvaluationCalibrationTest extends BaseNd4jTest {
         ec.eval(labels, arr);

         int[] expLabelCounts = labels.sum(0).data().asInt();
-        // FIXME: int cast
         int[] expPredictionCount = new int[(int) labels.size(1)];
         INDArray argmax = Nd4j.argMax(arr, 1);
         for (int i = 0; i < argmax.length(); i++) {
diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/Nd4jTestsC.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/Nd4jTestsC.java
index 31d51d59a..bac06b981 100644
--- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/Nd4jTestsC.java
+++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/Nd4jTestsC.java
@@ -5273,7 +5273,6 @@ public class Nd4jTestsC extends BaseNd4jTest {
         INDArray exp = Nd4j.linspace(0, 9, 10, DataType.DOUBLE);
         int cnt = 0;
         for (long i = matrix.rows() - 1; i >= 0; i--) {
-            // FIXME: int cast
             matrix.getRow((int) i).assign(cnt);
             cnt++;
         }
diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ShufflesTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ShufflesTests.java
index f88e78408..f1fdf9c57 100644
--- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ShufflesTests.java
+++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ShufflesTests.java
@@ -342,7 +342,6 @@ public class ShufflesTests extends BaseNd4jTest {
         public float[] measureState(INDArray data) {
             // for 3D we save 0 element for each slice.
-            // FIXME: int cast
             float[] result = new float[(int) data.shape()[0]];

             for (int x = 0; x < data.shape()[0]; x++) {
@@ -390,7 +389,6 @@ public class ShufflesTests extends BaseNd4jTest {
         }

         public float[] measureState(INDArray data) {
-            // FIXME: int cast
             float[] result = new float[data.rows()];

             for (int x = 0; x < data.rows(); x++) {
diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/aggregates/AggregatesTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/aggregates/AggregatesTests.java
deleted file mode 100644
index 91d80cd56..000000000
--- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/aggregates/AggregatesTests.java
+++ /dev/null
@@ -1,178 +0,0 @@
-/*******************************************************************************
- * Copyright (c) 2015-2018 Skymind, Inc.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Apache License, Version 2.0 which is available at
- * https://www.apache.org/licenses/LICENSE-2.0.
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations
- * under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- ******************************************************************************/
-
-package org.nd4j.linalg.aggregates;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.nd4j.OpValidationSuite;
-import org.nd4j.linalg.BaseNd4jTest;
-import org.nd4j.linalg.api.buffer.DataType;
-import org.nd4j.linalg.api.ndarray.INDArray;
-import org.nd4j.linalg.api.ops.aggregates.Aggregate;
-import org.nd4j.linalg.api.ops.aggregates.impl.AggregateAxpy;
-import org.nd4j.linalg.api.ops.aggregates.impl.AggregateSkipGram;
-import org.nd4j.linalg.factory.Nd4j;
-import org.nd4j.linalg.factory.Nd4jBackend;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.junit.Assert.assertEquals;
-
-/**
- * @author raver119@gmail.com
- */
-@RunWith(Parameterized.class)
-public class AggregatesTests extends BaseNd4jTest {
-
-    public AggregatesTests(Nd4jBackend backend) {
-        super(backend);
-    }
-
-    @Before
-    public void setUp() {
-        //DataTypeUtil.setDTypeForContext(DataType.DOUBLE);
-    }
-
-    @Test
-    public void testAggregate1() {
-        INDArray arrayX = Nd4j.ones(10);
-        INDArray arrayY = Nd4j.zeros(10);
-
-        INDArray exp1 = Nd4j.ones(10);
-
-        AggregateAxpy axpy = new AggregateAxpy(arrayX, arrayY, 1.0f);
-
-        Nd4j.getExecutioner().exec(axpy);
-
-        assertEquals(exp1, arrayY);
-    }
-
-
-    @Test
-    public void testBatchedAggregate1() {
-        OpValidationSuite.ignoreFailing();      //CRASHING
-        INDArray arrayX1 = Nd4j.ones(DataType.FLOAT, 10);
-        INDArray arrayY1 = Nd4j.zeros(DataType.FLOAT,10);
-
-        INDArray arrayX2 = Nd4j.ones(DataType.FLOAT,10);
-        INDArray arrayY2 = Nd4j.zeros(DataType.FLOAT,10);
-
-        INDArray exp1 = Nd4j.create(DataType.FLOAT,10).assign(1f);
-        INDArray exp2 = Nd4j.create(DataType.FLOAT,10).assign(1f);
-
-        AggregateAxpy axpy1 = new AggregateAxpy(arrayX1, arrayY1, 1.0f);
-        AggregateAxpy axpy2 = new AggregateAxpy(arrayX2, arrayY2, 1.0f);
-
-        List<Aggregate> batch = new ArrayList<>();
-        batch.add(axpy1);
-        batch.add(axpy2);
-
-        Nd4j.getExecutioner().exec(batch);
-
-        assertEquals(exp1, arrayY1);
-        assertEquals(exp2, arrayY2);
-    }
-
-    @Test
-    public void testBatchedAggregate2() {
-        INDArray arrayX1 = Nd4j.ones(10);
-        INDArray arrayY1 = Nd4j.zeros(10).assign(2.0f);
-
-        INDArray arrayX2 = Nd4j.ones(10);
-        INDArray arrayY2 = Nd4j.zeros(10).assign(2.0f);
-
-        INDArray arrayX3 = Nd4j.ones(10);
-        INDArray arrayY3 = Nd4j.ones(10);
-
-        INDArray exp1 = Nd4j.create(10).assign(4f);
-        INDArray exp2 = Nd4j.create(10).assign(3f);
-        INDArray exp3 = Nd4j.create(10).assign(3f);
-
-        AggregateAxpy axpy1 = new AggregateAxpy(arrayX1, arrayY1, 2.0f);
-        AggregateAxpy axpy2 = new AggregateAxpy(arrayX2, arrayY2, 1.0f);
-        AggregateAxpy axpy3 = new AggregateAxpy(arrayX3, arrayY3, 2.0f);
-
-        List<Aggregate> batch = new ArrayList<>();
-        batch.add(axpy1);
-        batch.add(axpy2);
-        batch.add(axpy3);
-
-        Nd4j.getExecutioner().exec(batch);
-
-        assertEquals(exp1, arrayY1);
-        assertEquals(exp2, arrayY2);
-        assertEquals(exp3, arrayY3);
-    }
-
-    @Test
-    public void testBatchedSkipGram1() {
-        OpValidationSuite.ignoreFailing();      //CRASHING
-        INDArray syn0 = Nd4j.create(DataType.FLOAT, 10, 10).assign(0.01f);
-        INDArray syn1 = Nd4j.create(DataType.FLOAT,10, 10).assign(0.02f);
-        INDArray syn1Neg = Nd4j.ones(DataType.FLOAT,10, 10).assign(0.03f);
-        INDArray expTable = Nd4j.create(DataType.FLOAT,10000).assign(0.5f);
-
-        double lr = 0.001;
-
-        int idxSyn0_1 = 0;
-        int idxSyn0_2 = 3;
-
-        INDArray expSyn0 = Nd4j.create(DataType.FLOAT,10).assign(0.01f);
-        INDArray expSyn1_1 = Nd4j.create(DataType.FLOAT,10).assign(0.020005); // gradient is 0.00005
-        INDArray expSyn1_2 = Nd4j.create(DataType.FLOAT,10).assign(0.019995f); // gradient is -0.00005
-
-
-        INDArray syn0row_1 = syn0.getRow(idxSyn0_1);
-        INDArray syn0row_2 = syn0.getRow(idxSyn0_2);
-
-        AggregateSkipGram op1 = new AggregateSkipGram(syn0, syn1, syn1Neg, expTable, null, idxSyn0_1, new int[] {1, 2},
-                        new int[] {0, 1}, 0, 0, 10, lr, 1L, 10);
-        AggregateSkipGram op2 = new AggregateSkipGram(syn0, syn1, syn1Neg, expTable, null, idxSyn0_2, new int[] {4, 5},
-                        new int[] {0, 1}, 0, 0, 10, lr, 1L, 10);
-
-
-        List<Aggregate> batch = new ArrayList<>();
-        batch.add(op1);
-        batch.add(op2);
-
-        Nd4j.getExecutioner().exec(batch);
-
-        /*
-            Since expTable contains all-equal values, and only difference for ANY index is code being 0 or 1, syn0 row will stay intact,
-            because neu1e will be full of 0.0f, and axpy will have no actual effect
-        */
-        assertEquals(expSyn0, syn0row_1);
-        assertEquals(expSyn0, syn0row_2);
-
-        // syn1 row 1 modified only once
-        assertEquals(expSyn1_1, syn1.getRow(1));
-        assertEquals(expSyn1_1, syn1.getRow(4));
-
-        // syn1 row 2 modified only once
-        assertEquals(expSyn1_2, syn1.getRow(2));
-        assertEquals(expSyn1_2, syn1.getRow(5));
-    }
-
-
-    @Override
-    public char ordering() {
-        return 'c';
-    }
-}
diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/aggregates/HierarchicSoftmaxTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/aggregates/HierarchicSoftmaxTests.java
deleted file mode 100644
index 3f07b6a2a..000000000
--- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/aggregates/HierarchicSoftmaxTests.java
+++ /dev/null
@@ -1,472 +0,0 @@
-/*******************************************************************************
- * Copyright (c) 2015-2018 Skymind, Inc.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Apache License, Version 2.0 which is available at
- * https://www.apache.org/licenses/LICENSE-2.0.
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations
- * under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- ******************************************************************************/
-
-package org.nd4j.linalg.aggregates;
-
-import lombok.extern.slf4j.Slf4j;
-import lombok.val;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.nd4j.linalg.BaseNd4jTest;
-import org.nd4j.linalg.api.buffer.DataType;
-import org.nd4j.linalg.api.ndarray.INDArray;
-import org.nd4j.linalg.api.ops.aggregates.impl.AggregateCBOW;
-import org.nd4j.linalg.api.ops.aggregates.impl.AggregateSkipGram;
-import org.nd4j.linalg.api.ops.aggregates.impl.HierarchicSoftmax;
-import org.nd4j.linalg.api.ops.impl.nlp.CbowRound;
-import org.nd4j.linalg.api.ops.impl.nlp.SkipGramRound;
-import org.nd4j.linalg.factory.Nd4j;
-import org.nd4j.linalg.factory.Nd4jBackend;
-
-import java.util.Arrays;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
-
-/**
- * This tests pack covers simple gradient checks for AggregateSkipGram, CBOW and HierarchicSoftmax
- *
- * @author raver119@gmail.com
- */
-@Slf4j
-@RunWith(Parameterized.class)
-public class HierarchicSoftmaxTests extends BaseNd4jTest {
-
-
-    public HierarchicSoftmaxTests(Nd4jBackend backend) {
-        super(backend);
-    }
-
-    @Before
-    public void setUp() {
-        // DataTypeUtil.setDTypeForContext(DataType.DOUBLE);
-    }
-
-    @Test
-    public void testHSGradient1() {
-        INDArray syn0 = Nd4j.ones(10, 10).assign(0.01f);
-        INDArray syn1 = Nd4j.ones(10, 10).assign(0.02f);
-        INDArray expTable = Nd4j.create(10000).assign(0.5f);
-        INDArray neu1e = Nd4j.create(10);
-
-        INDArray expSyn0 = Nd4j.create(10).assign(0.01f);
-        INDArray expSyn1 = Nd4j.create(10).assign(0.020005);
-        INDArray expNeu1e = Nd4j.create(10).assign(0.00001f);
-
-        int idxSyn0 = 1;
-        int idxSyn1 = 1;
-        int code = 0;
-
-        double lr = 0.001;
-
-        HierarchicSoftmax op =
-                        new HierarchicSoftmax(syn0.getRow(idxSyn0), syn1.getRow(idxSyn1), expTable, neu1e, code, lr);
-
-        Nd4j.getExecutioner().exec(op);
-
-        INDArray syn0row = syn0.getRow(idxSyn0);
-        INDArray syn1row = syn1.getRow(idxSyn1);
-
-        // expected gradient is 0.0005
-        // expected neu1 = 0.00001
-        // expected syn1 = 0.020005
-
-        assertEquals(expNeu1e, neu1e);
-
-        assertEquals(expSyn1, syn1row);
-
-        // we hadn't modified syn0 at all yet
-        assertEquals(expSyn0, syn0row);
-    }
-
-    @Test
-    public void testSGGradient1() {
-        INDArray syn0 = Nd4j.create(DataType.DOUBLE, 10, 10).assign(0.01f);
-        INDArray syn1 = Nd4j.create(DataType.DOUBLE,10, 10).assign(0.02f);
-        INDArray syn1Neg = Nd4j.create(DataType.DOUBLE,10, 10).assign(0.03f);
-        INDArray expTable = Nd4j.create(DataType.DOUBLE,10000).assign(0.5f);
-
-        double lr = 0.001;
-
-        int idxSyn0 = 0;
-
-        INDArray expSyn0 = Nd4j.create(DataType.DOUBLE,10).assign(0.01001f);
-        INDArray expSyn1_1 = Nd4j.create(DataType.DOUBLE,10).assign(0.020005);
-
-        INDArray syn0row = syn0.getRow(idxSyn0);
-
-        log.info("syn0row before: {}", Arrays.toString(syn0row.dup().data().asFloat()));
-
-        AggregateSkipGram op = new AggregateSkipGram(syn0, syn1, syn1Neg, expTable, null, idxSyn0, new int[] {1},
-                        new int[] {0}, 0, 0, 10, lr, 1L, 10);
-        //Nd4j.getExecutioner().exec(op);
-        val sg = new SkipGramRound(idxSyn0, syn0, syn1, expTable, new int[] {1}, new byte[]{0}, lr, 1L, Nd4j.empty(syn0.dataType()));
-        Nd4j.getExecutioner().exec(sg);
-
-        log.info("syn0row after: {}", Arrays.toString(syn0row.dup().data().asFloat()));
-
-        assertEquals(expSyn0, syn0row);
-        assertEquals(expSyn1_1, syn1.getRow(1));
-    }
-
-    @Test
-    public void testSGGradient2() {
-        INDArray syn0 = Nd4j.create(10, 10).assign(0.01f);
-        INDArray syn1 = Nd4j.create(10, 10).assign(0.02f);
-        INDArray syn1Neg = Nd4j.ones(10, 10).assign(0.03f);
-        INDArray expTable = Nd4j.create(10000).assign(0.5f);
-
-        double lr = 0.001;
-
-        int idxSyn0 = 0;
-
-        INDArray expSyn0 = Nd4j.create(10).assign(0.01f);
-        INDArray expSyn1_1 = Nd4j.create(10).assign(0.020005); // gradient is 0.00005
-        INDArray expSyn1_2 = Nd4j.create(10).assign(0.019995f); // gradient is -0.00005
-
-
-        INDArray syn0row = syn0.getRow(idxSyn0);
-
-
-        log.info("syn1row2 before: {}", Arrays.toString(syn1.getRow(2).dup().data().asFloat()));
-
-        AggregateSkipGram op = new AggregateSkipGram(syn0, syn1, null, expTable, null, idxSyn0, new int[] {1, 2},
-                        new int[] {0, 1}, 0, 0, 10, lr, 1L, 10);
-        //Nd4j.getExecutioner().exec(op);
-        val sg = new SkipGramRound(idxSyn0, syn0, syn1, expTable, new int[] {1, 2}, new byte[]{0, 1}, lr, 1L, Nd4j.empty(syn0.dataType()));
-        Nd4j.getExecutioner().exec(sg);
-
-        /*
-            Since expTable contains all-equal values, and only difference for ANY index is code being 0 or 1, syn0 row will stay intact,
-            because neu1e will be full of 0.0f, and axpy will have no actual effect
-        */
-        assertEquals(expSyn0, syn0row);
-
-        // syn1 row 1 modified only once
-        assertArrayEquals(expSyn1_1.data().asFloat(), syn1.getRow(1).dup().data().asFloat(), 1e-7f);
-
-        log.info("syn1row2 after: {}", Arrays.toString(syn1.getRow(2).dup().data().asFloat()));
-
-        // syn1 row 2 modified only once
-        assertArrayEquals(expSyn1_2.data().asFloat(), syn1.getRow(2).dup().data().asFloat(), 1e-7f);
-    }
-
-    /**
-     * This particular test does nothing: neither HS or Neh is executed
-     *
-     * @throws Exception
-     */
-    @Test
-    public void testSGGradientNoOp() {
-        INDArray syn0 = Nd4j.create(10, 10).assign(0.01f);
-        INDArray syn1 = Nd4j.create(10, 10).assign(0.02f);
-        INDArray syn1Neg = Nd4j.ones(10, 10).assign(0.03f);
-        INDArray expTable = Nd4j.create(10000).assign(0.5f);
-        INDArray table = null;
-
-        double lr = 0.001;
-
-        int idxSyn0 = 0;
-        INDArray expSyn0 = Nd4j.create(10).assign(0.01f);
-        INDArray expSyn1 = syn1.dup();
-
-        AggregateSkipGram op = new AggregateSkipGram(syn0, syn1, syn1Neg, expTable, table, idxSyn0, new int[] {},
-                        new int[] {}, 0, 0, 10, lr, 1L, 10);
-
-        Nd4j.getExecutioner().exec(op);
-
-        assertEquals(expSyn0, syn0.getRow(idxSyn0));
-        assertEquals(expSyn1, syn1);
-    }
-
-    @Test
-    public void testSGGradientNegative1() {
-        INDArray syn0 = Nd4j.create(10, 10).assign(0.01f);
-        INDArray syn1 = Nd4j.create(10, 10).assign(0.02f);
-        INDArray syn1Neg = Nd4j.ones(10, 10).assign(0.03f);
-        INDArray expTable = Nd4j.create(10000).assign(0.5f);
-        INDArray table = Nd4j.create(100000);
-
-        double lr = 0.001;
-
-        INDArray expSyn0 = Nd4j.create(10).assign(0.01f);
-
-        int idxSyn0 = 1;
-
-        log.info("syn0row1 after: {}", Arrays.toString(syn0.getRow(idxSyn0).dup().data().asFloat()));
-
-
-        AggregateSkipGram op = new AggregateSkipGram(syn0, syn1, syn1Neg, expTable, table, idxSyn0, new int[] {},
-                        new int[] {}, 1, 3, 10, lr, 2L, 10);
-        //Nd4j.getExecutioner().exec(op);
-
-        val sg = new SkipGramRound(idxSyn0, 3, syn0, syn1Neg, expTable, table, 1, lr, 2L, Nd4j.empty(syn0.dataType()));
-        Nd4j.getExecutioner().exec(sg);
-
-        log.info("syn0row1 after: {}", Arrays.toString(syn0.getRow(idxSyn0).dup().data().asFloat()));
-
-        // we expect syn0 to be equal, since 2 rounds with +- gradients give the same output value for neu1e
-        assertEquals(expSyn0, syn0.getRow(idxSyn0));
-    }
-
-
-    @Test
-    public void testCBOWGradient1() {
-        INDArray syn0 = Nd4j.create(10, 10).assign(0.01f);
-        INDArray syn1 = Nd4j.create(10, 10).assign(0.02f);
-        INDArray expTable = Nd4j.create(10000).assign(0.5f);
-
-        double lr = 0.025;
-
-        INDArray syn0row_before_0 = syn0.getRow(0).dup();
-        INDArray syn0row_before_1 = syn0.getRow(1).dup();
-        INDArray syn0row_before_2 = syn0.getRow(2).dup();
-
-        AggregateCBOW op = new AggregateCBOW(syn0, syn1, null, expTable, null, 0, new int[] {0, 1, 2}, new int[] {4, 5},
-                        new int[] {1, 1}, 0, 0, 10, lr, 2L, 10);
-        //Nd4j.getExecutioner().exec(op);
-
-        val sg = new CbowRound(0, new int[] {0, 1, 2}, new int[] {0,0,0}, syn0, syn1, expTable, new int[] {4, 5}, new byte[]{1, 1}, lr, 2L, Nd4j.empty(syn0.dataType()), 1);
-        Nd4j.getExecutioner().exec(sg);
-
-        INDArray syn0row_0 = syn0.getRow(0);
-        INDArray syn0row_1 = syn0.getRow(1);
-        INDArray syn0row_2 = syn0.getRow(2);
-
-        INDArray syn1row_4 = syn1.getRow(4);
-        INDArray syn1row_5 = syn1.getRow(5);
-        INDArray syn1row_6 = syn1.getRow(6);
-
-        INDArray expSyn0row_0 = Nd4j.create(10).assign(0.0095f);
-        INDArray expSyn1row_4 = Nd4j.create(10).assign(0.019875f);
-        INDArray expSyn1row_6 = Nd4j.create(10).assign(0.02f);
-
-        assertNotEquals(syn0row_before_0, syn0row_0);
-        assertNotEquals(syn0row_before_1, syn0row_1);
-        assertNotEquals(syn0row_before_2, syn0row_2);
-
-        // neu1 is expected to be 0.01
-        // dot is expected to be 0.002
-        // g is expected -0.0125 for both rounds: both codes are 1, so (1 - 1 - 0.5) * 0.025 = -0.0125
-        // neu1e is expected to be -0.00025 after first round ( g * syn1 + neu1e) (-0.0125 * 0.02 + 0.000)
-        // neu1e is expected to be -0.00050 after second round (-0.0125 * 0.02 + -0.00025)
-        // syn1 is expected to be 0.019875 after first round (g * neu1 + syn1) (-0.0125 * 0.01 + 0.02 )
-        // syn1 is expected to be 0.019875 after second round (g * neu1 + syn1) (-0.0125 * 0.01 + 0.02 ) NOTE: each of round uses it's own syn1 index
-
-        // syn0 is expected to be 0.0095f after op (syn0 += neu1e) (0.01 += -0.0005)
-
-        log.info("syn1row4[0]: {}", syn1row_4.getFloat(0));
-
-        assertEquals(expSyn0row_0, syn0row_0);
-        assertEquals(expSyn0row_0, syn0row_1);
-        assertEquals(expSyn0row_0, syn0row_2);
-
-        assertEquals(expSyn1row_4, syn1row_4);
-        assertEquals(expSyn1row_4, syn1row_5);
-        assertEquals(expSyn1row_6, syn1row_6);
-
-    }
-
-    @Test
-    public void testCBOWGradientNoOp1() {
-        INDArray syn0 = Nd4j.create(10, 10).assign(0.01f);
-        INDArray syn1 = Nd4j.create(10, 10).assign(0.02f);
-        INDArray syn1Neg = Nd4j.ones(10, 10).assign(0.03f);
-        INDArray expTable = Nd4j.create(10000).assign(0.5f);
-        INDArray table = Nd4j.create(100000);
-
-        double lr = 0.025;
-
-        INDArray expSyn0 = syn0.dup();
-        INDArray expSyn1 = syn1.dup();
-        INDArray expSyn1Neg = syn1Neg.dup();
-
-        AggregateCBOW op = new AggregateCBOW(syn0, syn1, syn1Neg, expTable, table, 0, new int[] {}, new int[] {},
-                        new int[] {}, 0, 0, 10, lr, 2L, 10);
-
-        Nd4j.getExecutioner().exec(op);
-
-        assertEquals(expSyn0, syn0);
-        assertEquals(expSyn1, syn1);
-        assertEquals(expSyn1Neg, syn1Neg);
-    }
-
-    @Test
-    public void testCBOWGradientNegative1() {
-        INDArray syn0 = Nd4j.create(10, 10).assign(0.01f);
-        INDArray syn1 = Nd4j.create(10, 10).assign(0.02f);
-        INDArray syn1Neg = Nd4j.create(10, 10).assign(0.03f);
-        INDArray expTable = Nd4j.create(10000).assign(0.5f);
-        INDArray table = Nd4j.create(100000);
-
-        double lr = 0.025;
-
-        INDArray syn0dup = syn0.dup();
-        INDArray syn1dup = syn1.dup();
-        INDArray syn1NegDup = syn1Neg.dup();
-
-        INDArray expSyn0_row0 = Nd4j.create(10).assign(0.0096265625);
-        INDArray expSyn0_row3 = Nd4j.create(10).assign(0.01f);
-        INDArray expSyn1Neg_row6 = Nd4j.create(10).assign(0.030125f);
-
-        AggregateCBOW op = new AggregateCBOW(syn0, syn1, syn1Neg, expTable, table, 0, new int[] {0, 1, 2}, new int[] {}, new int[] {}, 2, 6, 10, lr, 2L, 10);
-        //Nd4j.getExecutioner().exec(op);
-
-        val sg = new CbowRound(0, new int[]{0, 1, 2}, new int[] {0, 0, 0}, 6, syn0, syn1Neg, expTable, table, 2, lr, 2L, Nd4j.empty(syn0.dataType()), 1);
-        Nd4j.getExecutioner().exec(sg);
-
-
-        assertNotEquals(syn0dup, syn0);
-        assertNotEquals(syn1NegDup, syn1Neg);
-        assertEquals(syn1dup, syn1);
-
-        // neu1 is expected to be 0.01
-        // dot is expected to be 0.003 (dot += 0.01 * 0.03) for round 1 & 2.
-        // dot is expected to be 0.002987 for round 3 (because syn1Neg for idx 8 is modified at round 2)
-        // g is expected to be 0.0125 for the first round (code is 1)
-        // g is expected to be -0.0125 for the second round (code is 0)
-        // g is expected to be -0.0125 for the third round (code is 0)
-        // neu1e is expected to be 0.000375 after first round (0.0125 * 0.03 + 0.00)
-        // neu1e is expected to be 0.00 after second round (-0.0125 * 0.03 + 0.000375)
-        // neu1e is expected to be -0.0003734375 after third round (-0.0125 * 0.029875 + 0.00)
-        // syn1Neg idx6 is expected to be 0.030125 after first round (0.0125 * 0.01 + 0.03)
-        // syn1Neg idx8 is expected to be 0.029875 after second round (-0.0125 * 0.01 + 0.03)
-        // syn1Neg idx8 is expected to be 0.02975 after third round (-0.0125 * 0.01 + 0.029875)
-        // syn0 idx0 is expected to be 0.00 after training (0.01 += -0.0003734375)
-
-        log.info("syn1neg_row6 after: {}", Arrays.toString(syn1Neg.getRow(6).dup().data().asFloat()));
-
-        // checking target first
-        assertEquals(expSyn1Neg_row6, syn1Neg.getRow(6));
-
-        assertEquals(expSyn0_row0, syn0.getRow(0));
-        assertEquals(expSyn0_row0, syn0.getRow(1));
-        assertEquals(expSyn0_row0, syn0.getRow(2));
-
-        // these rows shouldn't change
-        assertEquals(expSyn0_row3, syn0.getRow(3));
-        assertEquals(expSyn0_row3, syn0.getRow(4));
-        assertEquals(expSyn0_row3, syn0.getRow(5));
-        assertEquals(expSyn0_row3, syn0.getRow(6));
-        assertEquals(expSyn0_row3, syn0.getRow(7));
-        assertEquals(expSyn0_row3, syn0.getRow(8));
-        assertEquals(expSyn0_row3, syn0.getRow(9));
-    }
-
-
-    @Test
-    public void testCBOWInference1() {
-        INDArray syn0 = Nd4j.create(10, 10).assign(0.01f);
-        INDArray syn1 = Nd4j.create(10, 10).assign(0.02f);
-        INDArray syn1Neg = Nd4j.create(10, 10).assign(0.03f);
-        INDArray expTable = Nd4j.create(10000).assign(0.5f);
-        INDArray table = Nd4j.create(100000);
-
-        double lr = 0.025;
-
-        INDArray syn0dup = syn0.dup();
-        INDArray syn1dup = syn1.dup();
-        INDArray syn1NegDup = syn1Neg.dup();
-
-        INDArray inference = Nd4j.create(10).assign(0.04f);
-        INDArray dup = inference.dup();
-        INDArray expInference = Nd4j.create(10).assign(0.0395f);
-
-        log.info("Empty vector: {}", Arrays.toString(inference.data().asFloat()));
-
-        /*
-            surrounding words are 0 and 1
-        */
-        AggregateCBOW op = new AggregateCBOW(syn0, syn1, null, expTable, null, 0, new int[] {0, 1}, new int[] {4, 5},
-                        new int[] {1, 1}, 0, 0, 10, lr, 2L, 10, 0, false, inference);
-
-        Nd4j.getExecutioner().exec(op);
-
-        /*
-            syn0, syn1 and syn1Neg should stay intact during inference
-        */
-        assertEquals(syn0dup, syn0);
-        assertEquals(syn1dup, syn1);
-        assertEquals(syn1NegDup, syn1Neg);
-
-        /**
-         * neu1 is expected to be 0.02
-         * syn1 is expected to be 0.02
-         * dot is expected to be 0.04 ( 0.02 * 0.02 * 10)
-         * g is expected to be -0.0125 for BOTH rounds, since we're not changing syn1 values during inference
-         * neu1e is expected to be -0.00025 at first round (-0.0125 * 0.02 + 0.00)
-         * neu1e is expected to be -0.0005 at second round (-0.0125 * 0.02 + -0.00025)
-         * inference is expected to be 0.0395 after training (0.04 + -0.0005)
-         */
-
-        assertNotEquals(dup, inference);
-
-        log.info("Inferred vector: {}", Arrays.toString(inference.data().asFloat()));
-
-        assertEquals(expInference, inference);
-
-    }
-
-    @Test
-    public void testSGInference1() {
-        INDArray syn0 = Nd4j.create(10, 10).assign(0.01f);
-        INDArray syn1 = Nd4j.create(10, 10).assign(0.02f);
-        INDArray syn1Neg = Nd4j.create(10, 10).assign(0.03f);
-        INDArray expTable = Nd4j.create(10000).assign(0.5f);
-        INDArray table = Nd4j.create(100000);
-
-        double lr = 0.025;
-
-        INDArray syn0dup = syn0.dup();
-        INDArray syn1dup = syn1.dup();
-        INDArray syn1NegDup = syn1Neg.dup();
-
-        INDArray inference = Nd4j.create(10).assign(0.04f);
-        INDArray dup = inference.dup();
-        INDArray expInference = Nd4j.create(10).assign(0.0395f);
-
-        AggregateSkipGram op = new AggregateSkipGram(syn0, syn1, syn1Neg, expTable, null, 0, new int[] {1, 2},
-                        new int[] {1, 1}, 0, 0, 10, lr, 1L, 10, inference);
-
-        Nd4j.getExecutioner().exec(op);
-
-        /*
-            syn0, syn1 and syn1Neg should stay intact during inference
-        */
-        assertEquals(syn0dup, syn0);
-        assertEquals(syn1dup, syn1);
-        assertEquals(syn1NegDup, syn1Neg);
-
-        assertNotEquals(dup, inference);
-
-        /**
-         * dot is expected to be 0.008 for both rounds
-         * g is expected to be -0.0125 for both rounds, since we don't update syn0/syn1 before end of SG round
-         * neu1e is expected to be -0.00025 after first round (-0.0125 * 0.02 + 0.00)
-         * neu1e is expected to be -0.0005 after first round (-0.0125 * 0.02 + -0.00025)
-         * inferenceVector is expected to be 0.0395 after training (0.04 + -0.0005)
-         */
-
-        assertEquals(expInference, inference);
-    }
-
-    @Override
-    public char ordering() {
-        return 'c';
-    }
-}
diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/api/TestNDArrayCreationUtil.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/api/TestNDArrayCreationUtil.java
index 907bdd04a..2dc595f54 100644
--- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/api/TestNDArrayCreationUtil.java
+++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/api/TestNDArrayCreationUtil.java
@@ -38,31 +38,26 @@ public class TestNDArrayCreationUtil extends BaseNd4jTest {

     @Test
     public void testShapes() {

-        // FIXME: int cast
         long[] shape2d = {2, 3};
         for (Pair<INDArray, String> p : NDArrayCreationUtil.getAllTestMatricesWithShape(2, 3, 12345, DataType.DOUBLE)) {
             assertArrayEquals(p.getSecond(), shape2d, p.getFirst().shape());
         }

-        // FIXME: int cast
         long[] shape3d = {2, 3, 4};
         for (Pair<INDArray, String> p : NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, shape3d, DataType.DOUBLE)) {
             assertArrayEquals(p.getSecond(), shape3d, p.getFirst().shape());
         }

-        // FIXME: int cast
         long[] shape4d = {2, 3, 4, 5};
         for (Pair<INDArray, String> p : NDArrayCreationUtil.getAll4dTestArraysWithShape(12345, ArrayUtil.toInts(shape4d), DataType.DOUBLE)) {
             assertArrayEquals(p.getSecond(), shape4d, p.getFirst().shape());
         }

-        // FIXME: int cast
         long[] shape5d = {2, 3, 4, 5, 6};
         for (Pair<INDArray, String> p : NDArrayCreationUtil.getAll5dTestArraysWithShape(12345, ArrayUtil.toInts(shape5d), DataType.DOUBLE)) {
             assertArrayEquals(p.getSecond(), shape5d, p.getFirst().shape());
         }

-        // FIXME: int cast
         long[] shape6d = {2, 3, 4, 5, 6, 7};
         for (Pair<INDArray, String> p : NDArrayCreationUtil.getAll6dTestArraysWithShape(12345, ArrayUtil.toInts(shape6d), DataType.DOUBLE)) {
             assertArrayEquals(p.getSecond(), shape6d, p.getFirst().shape());
             assertArrayEquals(p.getSecond(), shape5d, p.getFirst().shape());
         }
 
-        // FIXME: int cast
         long[] shape6d = {2, 3, 4, 5, 6, 7};
         for (Pair<INDArray, String> p : NDArrayCreationUtil.getAll6dTestArraysWithShape(12345, ArrayUtil.toInts(shape6d), DataType.DOUBLE)) {
             assertArrayEquals(p.getSecond(), shape6d, p.getFirst().shape());
diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/convolution/ConvolutionTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/convolution/ConvolutionTests.java
index 09350092c..e04a41714 100644
--- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/convolution/ConvolutionTests.java
+++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/convolution/ConvolutionTests.java
@@ -1304,7 +1304,7 @@ public class ConvolutionTests extends BaseNd4jTest {
 
     @Test
     public void testConvOutWidthAndHeight() {
-        int outSize = Convolution.outSize(2, 1, 1, 2, 1, false);
+        long outSize = Convolution.outSize(2, 1, 1, 2, 1, false);
         assertEquals(6, outSize);
     }
     /*
diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/convolution/ConvolutionTestsC.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/convolution/ConvolutionTestsC.java
index 46d93fff5..cd7ef26b6 100644
--- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/convolution/ConvolutionTestsC.java
+++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/convolution/ConvolutionTestsC.java
@@ -55,7 +55,7 @@ public class ConvolutionTestsC extends BaseNd4jTest {
 
     @Test
     public void testConvOutWidthAndHeight() {
-        int outSize = Convolution.outSize(2, 1, 1, 2, 1, false);
+        long outSize = Convolution.outSize(2, 1, 1, 2, 1, false);
         assertEquals(6, outSize);
     }
 
@@ -415,14 +415,13 @@ public class ConvolutionTestsC extends BaseNd4jTest {
 
         int outH = (int)Math.ceil(input.size(2)/(double)s[0]);
         int outW = (int)Math.ceil(input.size(3)/(double)s[1]);
 
-        // FIXME: int cast
-        int totalPadH = (outH-1)*s[0] + k[0] - (int) input.size(2);
-        int totalPadW = (outW-1)*s[1] + k[1] - (int) input.size(3);
+        long totalPadH = (outH-1)*s[0] + k[0] - input.size(2);
+        long totalPadW = (outW-1)*s[1] + k[1] - input.size(3);
 
-        int topPad = totalPadH/2;
-        int bottomPad = totalPadH - topPad;
-        int leftPad = totalPadW/2;
-        int rightPad = totalPadW - leftPad;
+        long topPad = totalPadH/2;
+        long bottomPad = totalPadH - topPad;
+        long leftPad = totalPadW/2;
+        long rightPad = totalPadW - leftPad;
 
         INDArray outGrad = Nd4j.create(input.shape());
@@ -432,10 +431,10 @@ public class ConvolutionTestsC extends BaseNd4jTest {
                 for( int x=0; x<outW; x++ ){
                         if (v > max){
                             max = v;
-                            maxPos = new int[]{kTLy + kY, kTLx + kX};
+                            maxPos = new long[]{kTLy + kY, kTLx + kX};
                         }
                     }
                 }
diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/crash/CrashTest.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/crash/CrashTest.java
index c3a66200b..c6efda9b0 100644
--- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/crash/CrashTest.java
+++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/crash/CrashTest.java
@@ -88,8 +88,7 @@ public class CrashTest extends BaseNd4jTest {
         INDArray y = Nd4j.create(64, 64, 1024);
 
         for (int i = 0; i < ITERATIONS; i++) {
-            // FIXME: int cast
-            int slice = RandomUtils.nextInt(0, (int) x.shape()[0]);
+            long slice = RandomUtils.nextLong(0, x.shape()[0]);
             op(x.slice(slice), y.slice(slice), i);
         }
     }
diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/dataset/PreProcessor3D4DTest.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/dataset/PreProcessor3D4DTest.java
index 62edf3499..18776292a 100644
--- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/dataset/PreProcessor3D4DTest.java
+++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/dataset/PreProcessor3D4DTest.java
@@ -323,7 +323,6 @@ public class PreProcessor3D4DTest extends BaseNd4jTest {
             this.samples = samples;
             this.origin = origin;
 
-            // FIXME: int cast
             numFeatures = (int) featureScale.size(0);
             maxN = samples * timeSteps;
             INDArray template = Nd4j.linspace(origin, origin + timeSteps - 1, timeSteps).reshape(1, -1);
diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/dimensionalityreduction/TestPCA.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/dimensionalityreduction/TestPCA.java
index 3548dc2e2..a28c026cc 100644
--- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/dimensionalityreduction/TestPCA.java
+++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/dimensionalityreduction/TestPCA.java
@@ -153,9 +153,8 @@ public class TestPCA extends BaseNd4jTest {
         System.out.println("Eigenvalues:\n" + ns.format(myPCA.getEigenvalues()));
 
         double variance = 0.0;
-        // FIXME: int cast
         // sample 1000 of the randomly generated samples with the reduced basis set
-        for (int i = 0; i < 1000; i++)
+        for (long i = 0; i < 1000; i++)
             variance += myPCA.estimateVariance(m.getRow(i), reduced70.columns());
         variance /= 1000.0;
         System.out.println("Fraction of variance using 70% variance with " + reduced70.columns() + " columns: "
diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpExecutionerTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpExecutionerTests.java
index 52ede954a..e04250f69 100644
--- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpExecutionerTests.java
+++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpExecutionerTests.java
@@ -545,7 +545,6 @@ public class OpExecutionerTests extends BaseNd4jTest {
         OpExecutioner opExecutioner = Nd4j.getExecutioner();
         INDArray arr = Nd4j.linspace(1, 6, 6, DataType.DOUBLE).reshape(2, 3);
         INDArray slice = arr.slice(0);
-        // FIXME: int cast
         val expected = new double[(int) slice.length()];
         for (int i = 0; i < slice.length(); i++)
             expected[i] = (float) Math.exp(slice.getDouble(i));
diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpExecutionerTestsC.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpExecutionerTestsC.java
index 0df75ac74..72be040c5 100644
--- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpExecutionerTestsC.java
+++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpExecutionerTestsC.java
@@ -411,7 +411,6 @@ public class OpExecutionerTestsC extends BaseNd4jTest {
         OpExecutioner opExecutioner = Nd4j.getExecutioner();
         INDArray arr = Nd4j.linspace(1, 6, 6, DataType.DOUBLE).reshape(2, 3);
         INDArray slice = arr.slice(0);
-        // FIXME: int cast
         val expected = new double[(int) slice.length()];
         for (int i = 0; i < slice.length(); i++)
             expected[i] = (float) Math.exp(slice.getDouble(i));
@@ -852,7 +851,6 @@ public class OpExecutionerTestsC extends BaseNd4jTest {
             val next = iter.next();
             double d = fourd.getDouble(next);
 
-            // FIXME: int cast
             sums[(int) next[0]] += d;
             sumSquares[(int) next[0]] += d * d;
         }
diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/concat/padding/PaddingTestsC.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/concat/padding/PaddingTestsC.java
index 7e9f1e91c..2483f03e6 100644
--- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/concat/padding/PaddingTestsC.java
+++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/concat/padding/PaddingTestsC.java
@@ -100,9 +100,8 @@ public class PaddingTestsC extends BaseNd4jTest {
         val h = linspaced.size(2);
         val w = linspaced.size(3);
 
-        // FIXME: int cast
-        int outWidth = Convolution.outSize((int) h, kh, sy, ph, 1, true);
-        int outHeight = Convolution.outSize((int) w, kw, sx, pw, 1, true);
+        long outWidth = Convolution.outSize(h, kh, sy, ph, 1, true);
+        long outHeight = Convolution.outSize(w, kw, sx, pw, 1, true);
         INDArray padded = Nd4j.pad(linspaced, new int[][] {{0, 0}, {0, 0}, {ph, ph + sy - 1}, {pw, pw + sx - 1}});
         System.out.println(padded);
     }
diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/serde/binary/BinarySerdeTest.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/serde/binary/BinarySerdeTest.java
index 295ad5d67..d7db01c7b 100644
--- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/serde/binary/BinarySerdeTest.java
+++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/serde/binary/BinarySerdeTest.java
@@ -126,7 +126,6 @@ public class BinarySerdeTest extends BaseNd4jTest {
         Nd4j.getCompressor().compressi(arr, "GZIP");
         for (int i = 0; i < numTrials; i++) {
             StopWatch oldStopWatch = new StopWatch();
-            // FIXME: int cast
             BufferedOutputStream bos = new BufferedOutputStream(new ByteArrayOutputStream((int) arr.length()));
             DataOutputStream dos = new DataOutputStream(bos);
             oldStopWatch.start();
diff --git a/nd4j/nd4j-common/src/main/java/org/nd4j/linalg/util/ArrayUtil.java b/nd4j/nd4j-common/src/main/java/org/nd4j/linalg/util/ArrayUtil.java
index cf54d4357..caeb0d47b 100644
--- a/nd4j/nd4j-common/src/main/java/org/nd4j/linalg/util/ArrayUtil.java
+++ b/nd4j/nd4j-common/src/main/java/org/nd4j/linalg/util/ArrayUtil.java
@@ -163,7 +163,8 @@ public class ArrayUtil {
     }
 
     public static long[] nTimes(long n, long toReplicate) {
-        // FIXME: int cast
+        if (n > Integer.MAX_VALUE)
+            throw new RuntimeException("Index overflow in nTimes");
         val ret = new long[(int) n];
         Arrays.fill(ret, toReplicate);
         return ret;
@@ -1329,8 +1330,6 @@ public class ArrayUtil {
      * @return the shape for tensor matrix multiply
     */
    public static long[] getTensorMmulShape(long[] aShape, long[] bShape, int[][] axes) {
-        // FIXME: int cast
-
        int validationLength = Math.min(axes[0].length, axes[1].length);
        for (int i = 0; i < validationLength; i++) {
@@ -2970,7 +2969,9 @@ public class ArrayUtil {
    }
 
    public static long[] buildInterleavedVector(Random rng, long length) {
-        // FIXME: int cast
+        if (length > Integer.MAX_VALUE) {
+            throw new RuntimeException("Integer overflow");
+        }
        val result = new long[(int) length];
        List indexes = new ArrayList<>();
diff --git a/nd4j/nd4j-serde/nd4j-aeron/src/test/java/org/nd4j/aeron/ipc/AeronNDArraySerdeTest.java b/nd4j/nd4j-serde/nd4j-aeron/src/test/java/org/nd4j/aeron/ipc/AeronNDArraySerdeTest.java
index 677326e86..e05a5b6f9 100644
--- a/nd4j/nd4j-serde/nd4j-aeron/src/test/java/org/nd4j/aeron/ipc/AeronNDArraySerdeTest.java
+++ b/nd4j/nd4j-serde/nd4j-aeron/src/test/java/org/nd4j/aeron/ipc/AeronNDArraySerdeTest.java
@@ -77,7 +77,6 @@ public class AeronNDArraySerdeTest {
         Nd4j.getCompressor().compressi(arr, "GZIP");
         for (int i = 0; i < numTrials; i++) {
             StopWatch oldStopWatch = new StopWatch();
-            // FIXME: int cast
             BufferedOutputStream bos = new BufferedOutputStream(new ByteArrayOutputStream((int) arr.length()));
             DataOutputStream dos = new DataOutputStream(bos);
             oldStopWatch.start();