From a32644bdb7548f7b623a51b6a5dbf3ca035f81cb Mon Sep 17 00:00:00 2001 From: brian Date: Fri, 21 Oct 2022 16:36:56 +0200 Subject: [PATCH] Fix javadoc and cleanup Signed-off-by: brian --- build.gradle | 19 ++ .../java/org/datavec/api/writable/Text.java | 2 +- .../org/nd4j/autodiff/samediff/SameDiff.java | 11 +- .../nd4j/autodiff/samediff/ops/SDBaseOps.java | 60 ++-- .../evaluation/classification/Evaluation.java | 4 +- .../nd4j/linalg/api/blas/BlasException.java | 2 +- .../java/org/nd4j/linalg/api/blas/Lapack.java | 21 +- .../java/org/nd4j/linalg/api/blas/Level1.java | 8 +- .../linalg/api/buffer/BaseDataBuffer.java | 9 +- .../linalg/checkutil/NDArrayCreationUtil.java | 2 +- .../org/nd4j/linalg/dataset/api/DataSet.java | 2 +- .../nd4j/linalg/dataset/api/MultiDataSet.java | 2 +- .../org/nd4j/linalg/util/ND4JTestUtils.java | 256 +++++++++--------- .../iterator/impl/EmnistDataSetIterator.java | 4 +- .../iterator/impl/LFWDataSetIterator.java | 159 ++++++----- cavis-full/build.gradle | 9 +- .../org/nd4j/aeron/ipc/NDArrayMessage.java | 2 +- .../transport/RoutedTransport.java | 4 +- 18 files changed, 322 insertions(+), 254 deletions(-) diff --git a/build.gradle b/build.gradle index ab3337562..3a59a0cf4 100644 --- a/build.gradle +++ b/build.gradle @@ -55,6 +55,7 @@ configurations.all { } + allprojects { Project proj -> apply plugin: 'com.google.osdetector' @@ -161,3 +162,21 @@ allprojects { Project proj -> } } } + + +task aggregatedJavadocs(type: Javadoc, description: 'Generate javadocs from all child projects as if it was a single project', group: 'Documentation') { + subprojects.each { proj -> + proj.tasks.withType(Javadoc).each { javadocTask -> + logger.quiet("Adding javadoc for project " + proj.name) + source += javadocTask.source + classpath += javadocTask.classpath + excludes += javadocTask.excludes + includes += javadocTask.includes + } + } + destinationDir = file("$buildDir/docs/javadoc") + title = "$project.name $version API" + options.author true + options.links 
'http://docs.oracle.com/javase/8/docs/api/' + options.addStringOption('Xdoclint:none', '-quiet') +} \ No newline at end of file diff --git a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/Text.java b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/Text.java index b36452a0d..b80d491d2 100644 --- a/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/Text.java +++ b/cavis-datavec/cavis-datavec-api/src/main/java/org/datavec/api/writable/Text.java @@ -106,7 +106,7 @@ public class Text extends BinaryComparable implements WritableComparable 0 + * @param numEpochs The number of epochs for training. Must be > 0 * @param validationIter The DataSetIterator to use for validation (null to skip validation) * @param validationFrequency The frequency with which to run validation. 1 is every epoch, 2 is every other, etc. * @param listeners Additional listeners to use during this operation @@ -1479,7 +1479,7 @@ public class SameDiff extends SDBaseOps { * A special case of {@link #fit()}. * * @param iter The iterator to train the SameDiff instance with - * @param numEpochs The number of epochs for training. Must be > 0 + * @param numEpochs The number of epochs for training. Must be > 0 * @param listeners Additional listeners to use during this operation * @return a {@link History} object containing the history information for this training operation * (evaluations specified in the {@link TrainingConfig}, loss values, and timing information). @@ -1497,7 +1497,7 @@ public class SameDiff extends SDBaseOps { * A special case of {@link #fit()}. * * @param iter The iterator to train the SameDiff instance with - * @param numEpochs The number of epochs for training. Must be > 0 + * @param numEpochs The number of epochs for training. Must be > 0 * @param validationIter The MultiDataSetIterator to use for validation (null to skip validation) * @param validationFrequency The frequency with which to run validation. 
1 is every epoch, 2 is every other, etc. * @param listeners Additional listeners to use during this operation @@ -1514,7 +1514,7 @@ public class SameDiff extends SDBaseOps { * A special case of {@link #fit()}. * * @param iter The iterator to train the SameDiff instance with - * @param numEpochs The number of epochs for training. Must be > 0 + * @param numEpochs The number of epochs for training. Must be > 0 * @param listeners Additional listeners to use during this operation * @return a {@link History} object containing the history information for this training operation * (evaluations specified in the {@link TrainingConfig}, loss values, and timing information). @@ -3036,7 +3036,6 @@ public class SameDiff extends SDBaseOps { * See also: {@link VariableType} * * @param variables Variables to convert to constants - * @return The (now constant) SDVariables */ public void convertToConstants(List variables) { if (variables.size() == 0) @@ -3201,7 +3200,7 @@ public class SameDiff extends SDBaseOps { * For example, {@code z(float) = x(float)+y(float)}, changing both x and y to double results in {@code z(double) = x(double)+y(double)} * without doing anything to change z's datatype directly (z datatype is inferred from x + y + add op).
* ARRAY type SDVariables cannot be converted directly, as their datatypes are determined by the function + - * input datatypes. + * input datatypes.
* Note that this method should be used with caution: incorrect datatype modifications may leave your network * in an incorrect state. For example, {@code op(x(float),y(float)) -> op(x(double),y(float))} may not be * supported by all ops. diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/ops/SDBaseOps.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/ops/SDBaseOps.java index a80836439..2886fca90 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/ops/SDBaseOps.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/autodiff/samediff/ops/SDBaseOps.java @@ -382,7 +382,7 @@ public class SDBaseOps { } /** - * Cast the array to a new datatype - for example, Integer -> Float
+ * Cast the array to a new datatype - for example, Integer -> Float
* * @param arg Input variable to cast (NDARRAY type) * @param datatype Datatype to cast to @@ -393,7 +393,7 @@ public class SDBaseOps { } /** - * Cast the array to a new datatype - for example, Integer -> Float
+ * Cast the array to a new datatype - for example, Integer -> Float
* * @param name name May be null. Name for the output variable * @param arg Input variable to cast (NDARRAY type) @@ -654,7 +654,7 @@ public class SDBaseOps { * * @param x Input variable (NUMERIC type) * @param partitions 1D input with values 0 to numPartitions-1 (INT type) - * @param numPartitions Number of partitions, >= 1 + * @param numPartitions Number of partitions, >= 1 */ public SDVariable[] dynamicPartition(SDVariable x, SDVariable partitions, int numPartitions) { SDValidation.validateNumerical("dynamicPartition", "x", x); @@ -676,7 +676,7 @@ public class SDBaseOps { * @param names names May be null. Arrays of names for the output variables. * @param x Input variable (NUMERIC type) * @param partitions 1D input with values 0 to numPartitions-1 (INT type) - * @param numPartitions Number of partitions, >= 1 + * @param numPartitions Number of partitions, >= 1 */ public SDVariable[] dynamicPartition(String[] names, SDVariable x, SDVariable partitions, int numPartitions) { @@ -689,7 +689,7 @@ public class SDBaseOps { /** * Dynamically merge the specified input arrays into a single array, using the specified indices
* - * @param indices Indices to use when merging. Must be >= 1, same length as input variables (INT type) + * @param indices Indices to use when merging. Must be >= 1, same length as input variables (INT type) * @param x Input variables. (NUMERIC type) * @return output Merged output variable (NUMERIC type) */ @@ -705,7 +705,7 @@ public class SDBaseOps { * Dynamically merge the specified input arrays into a single array, using the specified indices
* * @param name name May be null. Name for the output variable - * @param indices Indices to use when merging. Must be >= 1, same length as input variables (INT type) + * @param indices Indices to use when merging. Must be >= 1, same length as input variables (INT type) * @param x Input variables. (NUMERIC type) * @return output Merged output variable (NUMERIC type) */ @@ -943,7 +943,7 @@ public class SDBaseOps { } /** - * Greater than operation: elementwise x > y
+ * Greater than operation: elementwise x > y
* * Return boolean array with values true where satisfied, or false otherwise.
* @@ -957,7 +957,7 @@ public class SDBaseOps { } /** - * Greater than operation: elementwise x > y
+ * Greater than operation: elementwise x > y
* * Return boolean array with values true where satisfied, or false otherwise.
* @@ -973,7 +973,7 @@ public class SDBaseOps { } /** - * Greater than operation: elementwise x > y
+ * Greater than operation: elementwise x > y
* If x and y arrays have equal shape, the output shape is the same as these inputs.
* * Note: supports broadcasting if x and y have different shapes and are broadcastable.
@@ -993,7 +993,7 @@ public class SDBaseOps { } /** - * Greater than operation: elementwise x > y
+ * Greater than operation: elementwise x > y
* If x and y arrays have equal shape, the output shape is the same as these inputs.
* * Note: supports broadcasting if x and y have different shapes and are broadcastable.
@@ -1015,7 +1015,7 @@ public class SDBaseOps { } /** - * Greater than or equals operation: elementwise x >= y
+ * Greater than or equals operation: elementwise x >= y
* * Return boolean array with values true where satisfied, or false otherwise.
* @@ -1029,7 +1029,7 @@ public class SDBaseOps { } /** - * Greater than or equals operation: elementwise x >= y
+ * Greater than or equals operation: elementwise x >= y
* * Return boolean array with values true where satisfied, or false otherwise.
* @@ -1045,7 +1045,7 @@ public class SDBaseOps { } /** - * Greater than or equal to operation: elementwise x >= y
+ * Greater than or equal to operation: elementwise x >= y
* If x and y arrays have equal shape, the output shape is the same as these inputs.
* * Note: supports broadcasting if x and y have different shapes and are broadcastable.
@@ -1065,7 +1065,7 @@ public class SDBaseOps { } /** - * Greater than or equal to operation: elementwise x >= y
+ * Greater than or equal to operation: elementwise x >= y
* If x and y arrays have equal shape, the output shape is the same as these inputs.
* * Note: supports broadcasting if x and y have different shapes and are broadcastable.
@@ -1232,7 +1232,7 @@ public class SDBaseOps { } /** - * Less than operation: elementwise x < y
+ * Less than operation: elementwise x < y
* * Return boolean array with values true where satisfied, or false otherwise.
* @@ -1246,7 +1246,7 @@ public class SDBaseOps { } /** - * Less than operation: elementwise x < y
+ * Less than operation: elementwise x < y
* * Return boolean array with values true where satisfied, or false otherwise.
* @@ -1262,7 +1262,7 @@ public class SDBaseOps { } /** - * Less than operation: elementwise x < y
+ * Less than operation: elementwise x < y
* If x and y arrays have equal shape, the output shape is the same as these inputs.
* * Note: supports broadcasting if x and y have different shapes and are broadcastable.
@@ -1282,7 +1282,7 @@ public class SDBaseOps { } /** - * Less than operation: elementwise x < y
+ * Less than operation: elementwise x < y
* If x and y arrays have equal shape, the output shape is the same as these inputs.
* * Note: supports broadcasting if x and y have different shapes and are broadcastable.
@@ -1304,7 +1304,7 @@ public class SDBaseOps { } /** - * Less than or equals operation: elementwise x <= y
+ * Less than or equals operation: elementwise x <= y
* * Return boolean array with values true where satisfied, or false otherwise.
* @@ -1318,7 +1318,7 @@ public class SDBaseOps { } /** - * Less than or equals operation: elementwise x <= y
+ * Less than or equals operation: elementwise x <= y
* * Return boolean array with values true where satisfied, or false otherwise.
* @@ -1334,7 +1334,7 @@ public class SDBaseOps { } /** - * Less than or equal to operation: elementwise x <= y
+ * Less than or equal to operation: elementwise x <= y
* If x and y arrays have equal shape, the output shape is the same as these inputs.
* * Note: supports broadcasting if x and y have different shapes and are broadcastable.
@@ -1354,7 +1354,7 @@ public class SDBaseOps { } /** - * Less than or equal to operation: elementwise x <= y
+ * Less than or equal to operation: elementwise x <= y
* If x and y arrays have equal shape, the output shape is the same as these inputs.
* * Note: supports broadcasting if x and y have different shapes and are broadcastable.
@@ -3590,7 +3590,7 @@ public class SDBaseOps { /** * Generate a sequence mask (with values 0 or 1) based on the specified lengths
- * Specifically, out[i, ..., k, j] = (j < lengths[i, ..., k] ? 1.0 : 0.0)
+ * {@code Specifically, out[i, ..., k, j] = (j < lengths[i, ..., k] ? 1.0 : 0.0)}
* * @param lengths Lengths of the sequences (NUMERIC type) * @param maxLen Maximum sequence length @@ -3604,7 +3604,7 @@ public class SDBaseOps { /** * Generate a sequence mask (with values 0 or 1) based on the specified lengths
- * Specifically, out[i, ..., k, j] = (j < lengths[i, ..., k] ? 1.0 : 0.0)
+ * {@code Specifically, out[i, ..., k, j] = (j < lengths[i, ..., k] ? 1.0 : 0.0)}
* * @param name name May be null. Name for the output variable * @param lengths Lengths of the sequences (NUMERIC type) @@ -3620,7 +3620,7 @@ public class SDBaseOps { /** * Generate a sequence mask (with values 0 or 1) based on the specified lengths
- * Specifically, out[i, ..., k, j] = (j < lengths[i, ..., k] ? 1.0 : 0.0)
+ * {@code Specifically, out[i, ..., k, j] = (j < lengths[i, ..., k] ? 1.0 : 0.0)}
* * @param lengths Lengths of the sequences (NUMERIC type) * @param maxLen Maximum sequence length (INT type) @@ -3635,7 +3635,7 @@ public class SDBaseOps { /** * Generate a sequence mask (with values 0 or 1) based on the specified lengths
- * Specifically, out[i, ..., k, j] = (j < lengths[i, ..., k] ? 1.0 : 0.0)
+ * {@code Specifically, out[i, ..., k, j] = (j < lengths[i, ..., k] ? 1.0 : 0.0)}
* * @param name name May be null. Name for the output variable * @param lengths Lengths of the sequences (NUMERIC type) @@ -3761,7 +3761,7 @@ public class SDBaseOps { * then slice(input, begin=[0,1], size=[2,1] will return:
* [b]
* [e]
- * Note that for each dimension i, begin[i] + size[i] <= input.size(i)
+ * Note that for each dimension i, begin[i] + size[i] <= input.size(i)
* * @param input input Variable to get subset of (NUMERIC type) * @param begin Beginning index. Must be same length as rank of input array (Size: AtLeast(min=1)) @@ -3783,7 +3783,7 @@ public class SDBaseOps { * then slice(input, begin=[0,1], size=[2,1] will return:
* [b]
* [e]
- * Note that for each dimension i, begin[i] + size[i] <= input.size(i)
+ * Note that for each dimension i, begin[i] + size[i] <= input.size(i)
* * @param name name May be null. Name for the output variable * @param input input Variable to get subset of (NUMERIC type) @@ -3807,7 +3807,7 @@ public class SDBaseOps { * then slice(input, begin=[0,1], size=[2,1] will return:
* [b]
* [e]
- * Note that for each dimension i, begin[i] + size[i] <= input.size(i)
+ * Note that for each dimension i, begin[i] + size[i] <= input.size(i)
* * @param input input Variable to get subset of (NUMERIC type) * @param begin Beginning index. Must be same length as rank of input array (INT type) @@ -3829,7 +3829,7 @@ public class SDBaseOps { * then slice(input, begin=[0,1], size=[2,1] will return:
* [b]
* [e]
- * Note that for each dimension i, begin[i] + size[i] <= input.size(i)
+ * Note that for each dimension i, begin[i] + size[i] <= input.size(i)
* * @param name name May be null. Name for the output variable * @param input input Variable to get subset of (NUMERIC type) diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/classification/Evaluation.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/classification/Evaluation.java index df2151210..9766a5b7c 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/classification/Evaluation.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/evaluation/classification/Evaluation.java @@ -176,7 +176,7 @@ public class Evaluation extends BaseEvaluation { * Constructor to use for top N accuracy * * @param labels Labels for the classes (may be null) - * @param topN Value to use for top N accuracy calculation (<=1: standard accuracy). Note that with top N + * @param topN Value to use for top N accuracy calculation (<=1: standard accuracy). Note that with top N * accuracy, an example is considered 'correct' if the probability for the true class is one of the * highest N values */ @@ -1173,7 +1173,7 @@ public class Evaluation extends BaseEvaluation { /** * False Alarm Rate (FAR) reflects rate of misclassified to classified records - * http://ro.ecu.edu.au/cgi/viewcontent.cgi?article=1058&context=isw
+ * {@code http://ro.ecu.edu.au/cgi/viewcontent.cgi?article=1058&context=isw}<br>
* Note: value returned will differ depending on number of classes and settings.
* 1. For binary classification, if the positive class is set (via default value of 1, via constructor, * or via {@link #setBinaryPositiveClass(Integer)}), the returned value will be for the specified positive class diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/BlasException.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/BlasException.java index 9e9e807b5..5b103b278 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/BlasException.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/BlasException.java @@ -34,7 +34,7 @@ public class BlasException extends Error { } /** - * Principal constructor - error message & error code + * Principal constructor - error message & error code * @param message the error message to put into the Exception * @param errorCode the library error number */ diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/Lapack.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/Lapack.java index b112c73fd..3bf6699ca 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/Lapack.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/Lapack.java @@ -28,15 +28,15 @@ public interface Lapack { * LU decomposiiton of a matrix * Factorize a matrix A * - * The matrix A is overridden by the L & U combined. + * The matrix A is overridden by the L & U combined. * The permutation results are returned directly as a vector. To * create the permutation matrix use getPFactor method - * To split out the L & U matrix use getLFactor and getUFactor methods + * To split out the L & U matrix use getLFactor and getUFactor methods * * getrf = triangular factorization (TRF) of a general matrix (GE) * * @param A the input matrix, it will be overwritten with the factors - * @returns Permutation array + * @return Permutation array * @throws Error - with a message to indicate failure (usu. 
bad params) */ INDArray getrf(INDArray A); @@ -53,7 +53,7 @@ public interface Lapack { * matrix Q and an upper triangular R matrix * * @param A the input matrix, it will be overwritten with the factors - * @param The R array if null R is not returned + * @param R The R array if null R is not returned * @throws Error - with a message to indicate failure (usu. bad params) */ void geqrf(INDArray A, INDArray R); @@ -71,8 +71,7 @@ public interface Lapack { * lower L ( or upper U ) triangular matrix * * @param A the input matrix, it will be overwritten with the factors - * @param whether to return the upper (false) or lower factor - * @returns Permutation array + * @param lower whether to return the upper (false) or lower factor * @throws Error - with a message to indicate failure (usu. bad params) */ void potrf(INDArray A, boolean lower); @@ -122,7 +121,7 @@ public interface Lapack { * * @param M - the size of the permutation matrix ( usu. the # rows in factored matrix ) * @param ipiv - the vector returned from a refactoring - * @returned the square permutation matrix - size is the M x M + * @return the square permutation matrix - size is the M x M */ INDArray getPFactor(int M, INDArray ipiv); @@ -131,8 +130,8 @@ public interface Lapack { * extracts the L (lower triangular) matrix from the LU factor result * L will be the same dimensions as A * - * @param A - the combined L & U matrices returned from factorization - * @returned the lower triangular with unit diagonal + * @param A - the combined L & U matrices returned from factorization + * @return the lower triangular with unit diagonal */ INDArray getLFactor(INDArray A); @@ -141,8 +140,8 @@ public interface Lapack { * extracts the U (upper triangular) matrix from the LU factor result * U will be n x n matrix where n = num cols in A * - * @param A - the combined L & U matrices returned from factorization - * @returned the upper triangular matrix + * @param A - the combined L & U matrices returned from factorization + * 
@return the upper triangular matrix */ INDArray getUFactor(INDArray A); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/Level1.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/Level1.java index 3b82f6cbd..3658c0e74 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/Level1.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/blas/Level1.java @@ -26,7 +26,7 @@ import org.nd4j.linalg.api.ndarray.INDArray; public interface Level1 { /** * computes a vector-vector dot product. - * @param n + * @param N * @param alpha * @param X * @param Y @@ -65,7 +65,7 @@ public interface Level1 { /** * finds the element of a * vector that has the largest absolute value. - * @param n the length to iterate for + * @param N the length to iterate for * @param arr the array to get the max * index for * @param stride the stride for the array @@ -105,7 +105,7 @@ public interface Level1 { /** * computes a vector-scalar product and adds the result to a vector. - * @param n + * @param N * @param alpha * @param x * @param y @@ -115,7 +115,7 @@ public interface Level1 { /** * computes a vector-scalar product and adds the result to a vector. 
* y = a*x + y - * @param n number of operations + * @param N number of operations * @param alpha * @param x X * @param offsetX offset of first element of X in buffer diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/buffer/BaseDataBuffer.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/buffer/BaseDataBuffer.java index ecb2110c2..29340385b 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/buffer/BaseDataBuffer.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/api/buffer/BaseDataBuffer.java @@ -1297,7 +1297,7 @@ public abstract class BaseDataBuffer implements DataBuffer { if (offset() == 0) { return wrappedBuffer().asIntBuffer(); } else - return wrappedBuffer().asIntBuffer().position((int) offset()); + return (IntBuffer) wrappedBuffer().asIntBuffer().position((int) offset()); } @Override @@ -1308,7 +1308,7 @@ public abstract class BaseDataBuffer implements DataBuffer { if (offset() == 0) { return wrappedBuffer().asLongBuffer(); } else - return wrappedBuffer().asLongBuffer().position((int) offset()); + return (LongBuffer) wrappedBuffer().asLongBuffer().position((int) offset()); } @Override @@ -1319,7 +1319,7 @@ public abstract class BaseDataBuffer implements DataBuffer { if (offset() == 0) { return wrappedBuffer().asDoubleBuffer(); } else { - return wrappedBuffer().asDoubleBuffer().position((int) (offset())); + return (DoubleBuffer) wrappedBuffer().asDoubleBuffer().position((int) (offset())); } } @@ -1331,7 +1331,8 @@ public abstract class BaseDataBuffer implements DataBuffer { if (offset() == 0) { return wrappedBuffer().asFloatBuffer(); } else { - return wrappedBuffer().asFloatBuffer().position((int) (offset())); + return (FloatBuffer) wrappedBuffer().asFloatBuffer() + .position((int) (offset())); } } diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/checkutil/NDArrayCreationUtil.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/checkutil/NDArrayCreationUtil.java 
index c683e47ee..1665ca165 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/checkutil/NDArrayCreationUtil.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/checkutil/NDArrayCreationUtil.java @@ -34,7 +34,7 @@ import java.util.*; public class NDArrayCreationUtil { private NDArrayCreationUtil() {} - /** Get an array of INDArrays (2d) all with the specified shape. Pair returned to aid + /** Get an array of INDArrays (2d) all with the specified shape. {@code Pair} returned to aid * debugging: String contains information on how to reproduce the matrix (i.e., which function, and arguments) * Each NDArray in the returned array has been obtained by applying an operation such as transpose, tensorAlongDimension, * etc to an original array. diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/DataSet.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/DataSet.java index 1aadba8c8..9613d9141 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/DataSet.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/DataSet.java @@ -302,7 +302,7 @@ public interface DataSet extends Iterable, Seri * Get the example metadata, or null if no metadata has been set * * @return List of metadata instances - * @see {@link #getExampleMetaData(Class)} for convenience method for types + * {@link #getExampleMetaData(Class)} for convenience method for types */ List getExampleMetaData(); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/MultiDataSet.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/MultiDataSet.java index 675f66d0b..377beee0d 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/MultiDataSet.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/dataset/api/MultiDataSet.java @@ -180,7 +180,7 @@ public interface MultiDataSet extends Serializable { * Get the example metadata, 
or null if no metadata has been set * * @return List of metadata instances - * @see {@link #getExampleMetaData(Class)} for convenience method for types + * {@link #getExampleMetaData(Class)} for convenience method for types */ List getExampleMetaData(); diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/util/ND4JTestUtils.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/util/ND4JTestUtils.java index ad386be2f..a1ed3050a 100644 --- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/util/ND4JTestUtils.java +++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/util/ND4JTestUtils.java @@ -35,140 +35,154 @@ import java.util.*; public class ND4JTestUtils { - private ND4JTestUtils(){ } + private ND4JTestUtils() { + } - @AllArgsConstructor - @Data - public static class ComparisonResult { - List> allResults; - List> passed; - List> failed; - List skippedDir1; - List skippedDir2; + @AllArgsConstructor + @Data + public static class ComparisonResult { + + List> allResults; + List> passed; + List> failed; + List skippedDir1; + List skippedDir2; + } + + /** + * A function for use with {@link #validateSerializedArrays(File, File, boolean, BiFunction)} + * using {@code INDArray#equals(Object)} + */ + public static class EqualsFn implements BiFunction { + + @Override + public Boolean apply(INDArray i1, INDArray i2) { + return i1.equals(i2); + } + } + + /** + * A function for use with {@link #validateSerializedArrays(File, File, boolean, BiFunction)} + * using {@link INDArray#equalsWithEps(Object, double)} + */ + @AllArgsConstructor + public static class EqualsWithEpsFn implements BiFunction { + + private final double eps; + + @Override + public Boolean apply(INDArray i1, INDArray i2) { + return i1.equalsWithEps(i2, eps); + } + } + + /** + * Scan the specified directories for matching files (i.e., same path relative to their respective + * root directories) and compare the contents using INDArray.equals (via {@link EqualsFn} Assumes + * the 
saved files represent INDArrays saved with {@link Nd4j#saveBinary(INDArray, File)} + * + * @param dir1 First directory + * @param dir2 Second directory + * @param recursive Whether to search recursively (i.e., include files in subdirectories + * @return Comparison results + */ + public static ComparisonResult validateSerializedArrays(File dir1, File dir2, boolean recursive) + throws Exception { + return validateSerializedArrays(dir1, dir2, recursive, new EqualsFn()); + } + + /** + * Scan the specified directories for matching files (i.e., same path relative to their respective + * root directories) and compare the contents using a provided function.
Assumes the saved + * files represent INDArrays saved with {@link Nd4j#saveBinary(INDArray, File)} + * + * @param dir1 First directory + * @param dir2 Second directory + * @param recursive Whether to search recursively (i.e., include files in subdirectories + * @return Comparison results + */ + public static ComparisonResult validateSerializedArrays(File dir1, File dir2, boolean recursive, + BiFunction evalFn) throws Exception { + File[] f1 = FileUtils.listFiles(dir1, null, recursive).toArray(new File[0]); + File[] f2 = FileUtils.listFiles(dir2, null, recursive).toArray(new File[0]); + + Preconditions.checkState(f1.length > 0, "No files found for directory 1: %s", + dir1.getAbsolutePath()); + Preconditions.checkState(f2.length > 0, "No files found for directory 2: %s", + dir2.getAbsolutePath()); + + Map relativized1 = new HashMap<>(); + Map relativized2 = new HashMap<>(); + + URI u = dir1.toURI(); + for (File f : f1) { + if (!f.isFile()) { + continue; + } + String relative = u.relativize(f.toURI()).getPath(); + relativized1.put(relative, f); } - /** - * A function for use with {@link #validateSerializedArrays(File, File, boolean, BiFunction)} using {@code INDArray#equals(Object)} - */ - public static class EqualsFn implements BiFunction { - @Override - public Boolean apply(INDArray i1, INDArray i2) { - return i1.equals(i2); + u = dir2.toURI(); + for (File f : f2) { + if (!f.isFile()) { + continue; } + String relative = u.relativize(f.toURI()).getPath(); + relativized2.put(relative, f); } - /** - * A function for use with {@link #validateSerializedArrays(File, File, boolean, BiFunction)} using {@link INDArray#equalsWithEps(Object, double)} - */ - @AllArgsConstructor - public static class EqualsWithEpsFn implements BiFunction { - private final double eps; - - @Override - public Boolean apply(INDArray i1, INDArray i2) { - return i1.equalsWithEps(i2, eps); - } + List skipped1 = new ArrayList<>(); + for (String s : relativized1.keySet()) { + if 
(!relativized2.containsKey(s)) { + skipped1.add(relativized1.get(s)); + } } - /** - * Scan the specified directories for matching files (i.e., same path relative to their respective root directories) - * and compare the contents using INDArray.equals (via {@link EqualsFn} - * Assumes the saved files represent INDArrays saved with {@link Nd4j#saveBinary(INDArray, File)} - * @param dir1 First directory - * @param dir2 Second directory - * @param recursive Whether to search recursively (i.e., include files in subdirectories - * @return Comparison results - */ - public static ComparisonResult validateSerializedArrays(File dir1, File dir2, boolean recursive) throws Exception { - return validateSerializedArrays(dir1, dir2, recursive, new EqualsFn()); + List skipped2 = new ArrayList<>(); + for (String s : relativized2.keySet()) { + if (!relativized1.containsKey(s)) { + skipped2.add(relativized1.get(s)); + } } - /** - * Scan the specified directories for matching files (i.e., same path relative to their respective root directories) - * and compare the contents using a provided function.
- * Assumes the saved files represent INDArrays saved with {@link Nd4j#saveBinary(INDArray, File)} - * @param dir1 First directory - * @param dir2 Second directory - * @param recursive Whether to search recursively (i.e., include files in subdirectories - * @return Comparison results - */ - public static ComparisonResult validateSerializedArrays(File dir1, File dir2, boolean recursive, BiFunction evalFn) throws Exception { - File[] f1 = FileUtils.listFiles(dir1, null, recursive).toArray(new File[0]); - File[] f2 = FileUtils.listFiles(dir2, null, recursive).toArray(new File[0]); + List> allResults = new ArrayList<>(); + List> passed = new ArrayList<>(); + List> failed = new ArrayList<>(); + for (Map.Entry e : relativized1.entrySet()) { + File file1 = e.getValue(); + File file2 = relativized2.get(e.getKey()); - Preconditions.checkState(f1.length > 0, "No files found for directory 1: %s", dir1.getAbsolutePath() ); - Preconditions.checkState(f2.length > 0, "No files found for directory 2: %s", dir2.getAbsolutePath() ); - - Map relativized1 = new HashMap<>(); - Map relativized2 = new HashMap<>(); - - URI u = dir1.toURI(); - for(File f : f1){ - if(!f.isFile()) - continue; - String relative = u.relativize(f.toURI()).getPath(); - relativized1.put(relative, f); + if (file2 == null) { + continue; } - u = dir2.toURI(); - for(File f : f2){ - if(!f.isFile()) - continue; - String relative = u.relativize(f.toURI()).getPath(); - relativized2.put(relative, f); - } - - List skipped1 = new ArrayList<>(); - for(String s : relativized1.keySet()){ - if(!relativized2.containsKey(s)){ - skipped1.add(relativized1.get(s)); - } - } - - List skipped2 = new ArrayList<>(); - for(String s : relativized2.keySet()){ - if(!relativized1.containsKey(s)){ - skipped2.add(relativized1.get(s)); - } - } - - List> allResults = new ArrayList<>(); - List> passed = new ArrayList<>(); - List> failed = new ArrayList<>(); - for(Map.Entry e : relativized1.entrySet()){ - File file1 = e.getValue(); - File file2 = 
relativized2.get(e.getKey()); - - if(file2 == null) - continue; - - INDArray i1 = Nd4j.readBinary(file1); - INDArray i2 = Nd4j.readBinary(file2); - boolean b = evalFn.apply(i1, i2); - Triple t = new Triple<>(file1, file2, b); - allResults.add(t); - if(b){ - passed.add(t); - } else { - failed.add(t); - } - } - - Comparator> c = new Comparator>() { - @Override - public int compare(Triple o1, Triple o2) { - return o1.getFirst().compareTo(o2.getFirst()); - } - }; - - Collections.sort(allResults, c); - Collections.sort(passed, c); - Collections.sort(failed, c); - Collections.sort(skipped1); - Collections.sort(skipped2); - - - return new ComparisonResult(allResults, passed, failed, skipped1, skipped2); + INDArray i1 = Nd4j.readBinary(file1); + INDArray i2 = Nd4j.readBinary(file2); + boolean b = evalFn.apply(i1, i2); + Triple t = new Triple<>(file1, file2, b); + allResults.add(t); + if (b) { + passed.add(t); + } else { + failed.add(t); + } } + + Comparator> c = new Comparator>() { + @Override + public int compare(Triple o1, Triple o2) { + return o1.getFirst().compareTo(o2.getFirst()); + } + }; + + Collections.sort(allResults, c); + Collections.sort(passed, c); + Collections.sort(failed, c); + Collections.sort(skipped1); + Collections.sort(skipped2); + + return new ComparisonResult(allResults, passed, failed, skipped1, skipped2); + } } diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/iterator/impl/EmnistDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/iterator/impl/EmnistDataSetIterator.java index 6d000582e..27ce9f464 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/iterator/impl/EmnistDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/iterator/impl/EmnistDataSetIterator.java @@ -211,7 +211,7 @@ public class EmnistDataSetIterator 
extends BaseDatasetIterator { } /** - * Get the labels as a List + * Get the labels as a {@code List} * * @return Labels */ @@ -244,7 +244,7 @@ public class EmnistDataSetIterator extends BaseDatasetIterator { } /** - * Get the label assignments for the given set as a List + * Get the label assignments for the given set as a {@code List} * * @param dataSet DataSet to get the label assignment for * @return Label assignment and given dataset diff --git a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/iterator/impl/LFWDataSetIterator.java b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/iterator/impl/LFWDataSetIterator.java index 37ce72ea4..295798bdd 100644 --- a/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/iterator/impl/LFWDataSetIterator.java +++ b/cavis-dnn/cavis-dnn-data/cavis-dnn-data-datasets/src/main/java/org/deeplearning4j/datasets/iterator/impl/LFWDataSetIterator.java @@ -31,76 +31,109 @@ import java.util.Random; public class LFWDataSetIterator extends RecordReaderDataSetIterator { - /** Loads subset of images with given imgDim returned by the generator. */ - public LFWDataSetIterator(int[] imgDim) { - this(LFWLoader.SUB_NUM_IMAGES, LFWLoader.SUB_NUM_IMAGES, imgDim, LFWLoader.SUB_NUM_LABELS, false, - new ParentPathLabelGenerator(), true, 1, null, new Random(System.currentTimeMillis())); - } + /** + * Loads subset of images with given imgDim returned by the generator. + */ + public LFWDataSetIterator(int[] imgDim) { + this(LFWLoader.SUB_NUM_IMAGES, LFWLoader.SUB_NUM_IMAGES, imgDim, LFWLoader.SUB_NUM_LABELS, + false, + new ParentPathLabelGenerator(), true, 1, null, new Random(System.currentTimeMillis())); + } - /** Loads images with given batchSize, numExamples returned by the generator. 
*/ - public LFWDataSetIterator(int batchSize, int numExamples) { - this(batchSize, numExamples, new int[] {LFWLoader.HEIGHT, LFWLoader.WIDTH, LFWLoader.CHANNELS}, - LFWLoader.NUM_LABELS, false, LFWLoader.LABEL_PATTERN, true, 1, null, - new Random(System.currentTimeMillis())); - } + /** + * Loads images with given batchSize, numExamples returned by the generator. + */ + public LFWDataSetIterator(int batchSize, int numExamples) { + this(batchSize, numExamples, new int[]{LFWLoader.HEIGHT, LFWLoader.WIDTH, LFWLoader.CHANNELS}, + LFWLoader.NUM_LABELS, false, LFWLoader.LABEL_PATTERN, true, 1, null, + new Random(System.currentTimeMillis())); + } - /** Loads images with given batchSize, numExamples, imgDim returned by the generator. */ - public LFWDataSetIterator(int batchSize, int numExamples, int[] imgDim) { - this(batchSize, numExamples, imgDim, LFWLoader.NUM_LABELS, false, LFWLoader.LABEL_PATTERN, true, 1, null, - new Random(System.currentTimeMillis())); - } + /** + * Loads images with given batchSize, numExamples, imgDim returned by the generator. + */ + public LFWDataSetIterator(int batchSize, int numExamples, int[] imgDim) { + this(batchSize, numExamples, imgDim, LFWLoader.NUM_LABELS, false, LFWLoader.LABEL_PATTERN, true, + 1, null, + new Random(System.currentTimeMillis())); + } - /** Loads images with given batchSize, imgDim, useSubset, returned by the generator. */ - public LFWDataSetIterator(int batchSize, int[] imgDim, boolean useSubset) { - this(batchSize, useSubset ? LFWLoader.SUB_NUM_IMAGES : LFWLoader.NUM_IMAGES, imgDim, - useSubset ? LFWLoader.SUB_NUM_LABELS : LFWLoader.NUM_LABELS, useSubset, LFWLoader.LABEL_PATTERN, - true, 1, null, new Random(System.currentTimeMillis())); - } + /** + * Loads images with given batchSize, imgDim, useSubset, returned by the generator. + */ + public LFWDataSetIterator(int batchSize, int[] imgDim, boolean useSubset) { + this(batchSize, useSubset ? LFWLoader.SUB_NUM_IMAGES : LFWLoader.NUM_IMAGES, imgDim, + useSubset ? 
LFWLoader.SUB_NUM_LABELS : LFWLoader.NUM_LABELS, useSubset, + LFWLoader.LABEL_PATTERN, + true, 1, null, new Random(System.currentTimeMillis())); + } - /** Loads images with given batchSize, numExamples, imgDim, train, & splitTrainTest returned by the generator. */ - public LFWDataSetIterator(int batchSize, int numExamples, int[] imgDim, boolean train, double splitTrainTest) { - this(batchSize, numExamples, imgDim, LFWLoader.NUM_LABELS, false, LFWLoader.LABEL_PATTERN, train, - splitTrainTest, null, new Random(System.currentTimeMillis())); - } + /** + * Loads images with given batchSize, numExamples, imgDim, train, & splitTrainTest returned + * by the generator. + */ + public LFWDataSetIterator(int batchSize, int numExamples, int[] imgDim, boolean train, + double splitTrainTest) { + this(batchSize, numExamples, imgDim, LFWLoader.NUM_LABELS, false, LFWLoader.LABEL_PATTERN, + train, + splitTrainTest, null, new Random(System.currentTimeMillis())); + } - /** Loads images with given batchSize, numExamples, numLabels, train, & splitTrainTest returned by the generator. */ - public LFWDataSetIterator(int batchSize, int numExamples, int numLabels, boolean train, double splitTrainTest) { - this(batchSize, numExamples, new int[] {LFWLoader.HEIGHT, LFWLoader.WIDTH, LFWLoader.CHANNELS}, numLabels, - false, null, train, splitTrainTest, null, new Random(System.currentTimeMillis())); - } + /** + * Loads images with given batchSize, numExamples, numLabels, train, & splitTrainTest + * returned by the generator. + */ + public LFWDataSetIterator(int batchSize, int numExamples, int numLabels, boolean train, + double splitTrainTest) { + this(batchSize, numExamples, new int[]{LFWLoader.HEIGHT, LFWLoader.WIDTH, LFWLoader.CHANNELS}, + numLabels, + false, null, train, splitTrainTest, null, new Random(System.currentTimeMillis())); + } - /** Loads images with given batchSize, numExamples, imgDim, numLabels, useSubset, train, splitTrainTest & Random returned by the generator. 
*/ - public LFWDataSetIterator(int batchSize, int numExamples, int[] imgDim, int numLabels, boolean useSubset, - boolean train, double splitTrainTest, Random rng) { - this(batchSize, numExamples, imgDim, numLabels, useSubset, LFWLoader.LABEL_PATTERN, train, splitTrainTest, null, - rng); - } + /** + * Loads images with given batchSize, numExamples, imgDim, numLabels, useSubset, train, + * splitTrainTest & Random returned by the generator. + */ + public LFWDataSetIterator(int batchSize, int numExamples, int[] imgDim, int numLabels, + boolean useSubset, + boolean train, double splitTrainTest, Random rng) { + this(batchSize, numExamples, imgDim, numLabels, useSubset, LFWLoader.LABEL_PATTERN, train, + splitTrainTest, null, + rng); + } - /** Loads images with given batchSize, numExamples, imgDim, numLabels, useSubset, train, splitTrainTest & Random returned by the generator. */ - public LFWDataSetIterator(int batchSize, int numExamples, int[] imgDim, int numLabels, boolean useSubset, - PathLabelGenerator labelGenerator, boolean train, double splitTrainTest, Random rng) { - this(batchSize, numExamples, imgDim, numLabels, useSubset, labelGenerator, train, splitTrainTest, null, rng); - } + /** + * Loads images with given batchSize, numExamples, imgDim, numLabels, useSubset, train, + * splitTrainTest & Random returned by the generator. 
+ */ + public LFWDataSetIterator(int batchSize, int numExamples, int[] imgDim, int numLabels, + boolean useSubset, + PathLabelGenerator labelGenerator, boolean train, double splitTrainTest, Random rng) { + this(batchSize, numExamples, imgDim, numLabels, useSubset, labelGenerator, train, + splitTrainTest, null, rng); + } - /** - * Create LFW data specific iterator - * @param batchSize the batch size of the examples - * @param numExamples the overall number of examples - * @param imgDim an array of height, width and channels - * @param numLabels the overall number of examples - * @param useSubset use a subset of the LFWDataSet - * @param labelGenerator path label generator to use - * @param train true if use train value - * @param splitTrainTest the percentage to split data for train and remainder goes to test - * @param imageTransform how to transform the image - - * @param rng random number to lock in batch shuffling - * */ - public LFWDataSetIterator(int batchSize, int numExamples, int[] imgDim, int numLabels, boolean useSubset, - PathLabelGenerator labelGenerator, boolean train, double splitTrainTest, - ImageTransform imageTransform, Random rng) { - super(new LFWLoader(imgDim, imageTransform, useSubset).getRecordReader(batchSize, numExamples, imgDim, - numLabels, labelGenerator, train, splitTrainTest, rng), batchSize, 1, numLabels); - } + /** + * Create LFW data specific iterator + * + * @param batchSize the batch size of the examples + * @param numExamples the overall number of examples + * @param imgDim an array of height, width and channels + * @param numLabels the overall number of examples + * @param useSubset use a subset of the LFWDataSet + * @param labelGenerator path label generator to use + * @param train true if use train value + * @param splitTrainTest the percentage to split data for train and remainder goes to test + * @param imageTransform how to transform the image + * @param rng random number to lock in batch shuffling + */ + public 
LFWDataSetIterator(int batchSize, int numExamples, int[] imgDim, int numLabels, + boolean useSubset, + PathLabelGenerator labelGenerator, boolean train, double splitTrainTest, + ImageTransform imageTransform, Random rng) { + super(new LFWLoader(imgDim, imageTransform, useSubset).getRecordReader(batchSize, numExamples, + imgDim, + numLabels, labelGenerator, train, splitTrainTest, rng), batchSize, 1, numLabels); + } } diff --git a/cavis-full/build.gradle b/cavis-full/build.gradle index 68e847fdf..e25c3e7b2 100644 --- a/cavis-full/build.gradle +++ b/cavis-full/build.gradle @@ -12,12 +12,15 @@ configurations.archives.artifacts.with { archives -> dependencies { //Todo clean this api platform(project(":cavis-common-platform")) - api "org.bytedeco:javacpp:1.5.7" //for some reason we needed to apply version numbers here, they do not end up in POM otherwise + //api "org.bytedeco:javacpp:1.5.7" //for some reason we needed to apply version numbers here, they do not end up in POM otherwise api "com.fasterxml.jackson.datatype:jackson-datatype-joda:2.10.5" api 'org.slf4j:slf4j-simple:2.0.3' api 'org.slf4j:slf4j-api:2.0.3' - //api group: "org.bytedeco", name: "javacpp", classifier: "linux-x64_86" - + //TODO for the two below.. 
either platform specific uber jars or a single big one with all platforms + api group: "org.bytedeco", name: "javacpp", version: "1.5.7", classifier: "linux-x86_64" + //api group: "org.bytedeco", name: "javacpp", version: "1.5.7" + // api group: 'net.brutex.cavis-native', name: 'cavis-native-lib', version: '1.0.0-SNAPSHOT', classifier: "linux-x86_64-avx2-cpu" + //api group: 'net.brutex.cavis-native', name: 'cavis-native-lib', version: '1.0.0-SNAPSHOT' rootProject.getAllprojects().each { Project sproj -> if(!sproj.name.equals(name) && !sproj.name.equals("cavis-common-platform") && !sproj.name.equals("Cavis") diff --git a/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/NDArrayMessage.java b/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/NDArrayMessage.java index c73f9c3bb..3658a8006 100644 --- a/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/NDArrayMessage.java +++ b/cavis-nd4j/cavis-nd4j-aeron/src/main/java/org/nd4j/aeron/ipc/NDArrayMessage.java @@ -256,7 +256,7 @@ public class NDArrayMessage implements Serializable { String messageId = UUID.randomUUID().toString(); for (int i = 0; i < ret.length; i++) { //data: only grab a chunk of the data - ByteBuffer view = wholeBuffer.byteBuffer().asReadOnlyBuffer().position(i * chunkSize); + ByteBuffer view = (ByteBuffer) wholeBuffer.byteBuffer().asReadOnlyBuffer().position(i * chunkSize); view.limit(Math.min(i * chunkSize + chunkSize, wholeBuffer.capacity())); view.order(ByteOrder.nativeOrder()); view = view.slice(); diff --git a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/transport/RoutedTransport.java b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/transport/RoutedTransport.java index 8f49c7e7a..b17164393 100644 --- 
a/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/transport/RoutedTransport.java +++ b/cavis-nd4j/cavis-nd4j-parameter-server/cavis-nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/transport/RoutedTransport.java @@ -664,8 +664,8 @@ public class RoutedTransport extends BaseTransport { public static class RemoteConnectionBuilder { - private final Object locker = new Object(); - private final AtomicBoolean activated = new AtomicBoolean(); + private Object locker = new Object(); + private AtomicBoolean activated = new AtomicBoolean(); } }