Various fixes (#43)
Branch: master

* #8172 Enable DL4J MKLDNN batch norm backward pass
* #8382 INDArray.toString() rank 1 brackets / ambiguity fix
* #8308 Fix handful of broken links (inc. some in errors)
* Unused dependencies, round 1
* Unused dependencies, round 2
* Unused dependencies, round 3
* Small fix
* Uniform distribution TF import fix

Signed-off-by: AlexDBlack <blacka101@gmail.com>
parent 48df1acdfb
commit 47d19908f4
@@ -9,7 +9,7 @@ Deeplearning4j's [open issues are here](https://github.com/eclipse/deeplearning4
 Note that you will need to [build dl4j from source](https://deeplearning4j.org/docs/latest/deeplearning4j-build-from-source)
 
-For some tips on contributing to open source, this [post is helpful](http://blog.smartbear.com/programming/14-ways-to-contribute-to-open-source-without-being-a-programming-genius-or-a-rock-star/).
+For some tips on contributing to open source, this [post is helpful](https://smartbear.com/blog/test-and-monitor/14-ways-to-contribute-to-open-source-without-being/).
 
 ## Contributions
 
@@ -61,7 +61,7 @@
 <outputDirectory>examples</outputDirectory>
 <!--
 <lineEnding>unix</lineEnding>
-http://stackoverflow.com/questions/2958282/stranges-files-in-my-assembly-since-switching-to-lineendingunix-lineending
+https://stackoverflow.com/questions/2958282/stranges-files-in-my-assembly-since-switching-to-lineendingunix-lineending
 -->
 </fileSet>
 
@ -52,11 +52,6 @@
|
|||
<artifactId>joda-time</artifactId>
|
||||
<version>${jodatime.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.yaml</groupId>
|
||||
<artifactId>snakeyaml</artifactId>
|
||||
<version>${snakeyaml.version}</version>
|
||||
</dependency>
|
||||
<!-- ND4J Shaded Jackson Dependencies -->
|
||||
<dependency>
|
||||
<groupId>org.nd4j</groupId>
|
||||
|
|
|
@ -29,21 +29,11 @@
|
|||
<name>datavec-arrow</name>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.nd4j</groupId>
|
||||
<artifactId>nd4j-arrow</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.datavec</groupId>
|
||||
<artifactId>datavec-api</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.carrotsearch</groupId>
|
||||
<artifactId>hppc</artifactId>
|
||||
<version>${hppc.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.arrow</groupId>
|
||||
<artifactId>arrow-vector</artifactId>
|
||||
|
|
|
@ -44,26 +44,6 @@
|
|||
<artifactId>datavec-api</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-logging</groupId>
|
||||
<artifactId>commons-logging</artifactId>
|
||||
<version>${commons-logging.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-core</artifactId>
|
||||
<version>${spring.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-context</artifactId>
|
||||
<version>${spring.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-beans</artifactId>
|
||||
<version>${spring.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.cleartk</groupId>
|
||||
<artifactId>cleartk-snowball</artifactId>
|
||||
|
|
|
@ -31,36 +31,6 @@
|
|||
<artifactId>datavec-api</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
<version>${geo.jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<version>${geo.jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-annotations</artifactId>
|
||||
<version>${geo.jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.dataformat</groupId>
|
||||
<artifactId>jackson-dataformat-yaml</artifactId>
|
||||
<version>${geo.jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.dataformat</groupId>
|
||||
<artifactId>jackson-dataformat-xml</artifactId>
|
||||
<version>${geo.jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.datatype</groupId>
|
||||
<artifactId>jackson-datatype-joda</artifactId>
|
||||
<version>${geo.jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.maxmind.geoip2</groupId>
|
||||
<artifactId>geoip2</artifactId>
|
||||
|
|
|
@ -35,41 +35,11 @@
|
|||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.sun.xml.bind</groupId>
|
||||
<artifactId>jaxb-core</artifactId>
|
||||
<version>${jaxb.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.xml.bind</groupId>
|
||||
<artifactId>jaxb-impl</artifactId>
|
||||
<version>${jaxb.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty</artifactId>
|
||||
<version>${netty.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-compress</artifactId>
|
||||
<version>${commons-compress.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.zookeeper</groupId>
|
||||
<artifactId>zookeeper</artifactId>
|
||||
<version>${zookeeper.version}</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>log4j</groupId>
|
||||
<artifactId>log4j</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-log4j12</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-common</artifactId>
|
||||
|
|
|
@ -73,42 +73,7 @@
|
|||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
<version>${geo.jackson.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<version>${geo.jackson.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-annotations</artifactId>
|
||||
<version>${geo.jackson.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.dataformat</groupId>
|
||||
<artifactId>jackson-dataformat-yaml</artifactId>
|
||||
<version>${geo.jackson.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.dataformat</groupId>
|
||||
<artifactId>jackson-dataformat-xml</artifactId>
|
||||
<version>${geo.jackson.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.datatype</groupId>
|
||||
<artifactId>jackson-datatype-joda</artifactId>
|
||||
<version>${geo.jackson.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.datavec</groupId>
|
||||
<artifactId>datavec-python</artifactId>
|
||||
|
|
|
@ -41,11 +41,6 @@
|
|||
<artifactId>slf4j-api</artifactId>
|
||||
<version>${slf4j.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.github.oshi</groupId>
|
||||
<artifactId>oshi-core</artifactId>
|
||||
<version>${oshi.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.datavec</groupId>
|
||||
<artifactId>datavec-data-image</artifactId>
|
||||
|
|
|
@ -41,26 +41,6 @@
|
|||
<version>1.0.0-SNAPSHOT</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-codec</groupId>
|
||||
<artifactId>commons-codec</artifactId>
|
||||
<version>${commons-codec.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpclient</artifactId>
|
||||
<version>${httpclient.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpcore</artifactId>
|
||||
<version>${httpcore.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpmime</artifactId>
|
||||
<version>${httpmime.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.mashape.unirest</groupId>
|
||||
<artifactId>unirest-java</artifactId>
|
||||
|
|
|
@ -94,12 +94,6 @@
|
|||
<version>${scala.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.yaml</groupId>
|
||||
<artifactId>snakeyaml</artifactId>
|
||||
<version>${snakeyaml.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.typesafe.play</groupId>
|
||||
<artifactId>play-java_2.11</artifactId>
|
||||
|
|
|
@ -39,11 +39,6 @@
|
|||
<artifactId>scala-library</artifactId>
|
||||
<version>${scala.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.scala-lang</groupId>
|
||||
<artifactId>scala-reflect</artifactId>
|
||||
<version>${scala.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.spark</groupId>
|
||||
|
|
|
@@ -64,7 +64,7 @@ public class DL4JResources {
     /**
      * Set the base download URL for (most) DL4J datasets and models.<br>
      * This usually doesn't need to be set manually unless there is some issue with the default location
-     * @param baseDownloadURL Base download URL to set. For example, http://blob.deeplearning4j.org/
+     * @param baseDownloadURL Base download URL to set. For example, https://dl4jdata.blob.core.windows.net/
      */
     public static void setBaseDownloadURL(@NonNull String baseDownloadURL){
         baseURL = baseDownloadURL;
@@ -79,8 +79,8 @@ public class DL4JResources {
 
     /**
      * Get the URL relative to the base URL.<br>
-     * For example, if baseURL is "http://blob.deeplearning4j.org/", and relativeToBase is "/datasets/iris.dat"
-     * this simply returns "http://blob.deeplearning4j.org/datasets/iris.dat"
+     * For example, if baseURL is "https://dl4jdata.blob.core.windows.net/", and relativeToBase is "/datasets/iris.dat"
+     * this simply returns "https://dl4jdata.blob.core.windows.net/datasets/iris.dat"
      *
      * @param relativeToBase Relative URL
      * @return URL
@@ -92,8 +92,8 @@ public class DL4JResources {
 
     /**
      * Get the URL relative to the base URL as a String.<br>
-     * For example, if baseURL is "http://blob.deeplearning4j.org/", and relativeToBase is "/datasets/iris.dat"
-     * this simply returns "http://blob.deeplearning4j.org/datasets/iris.dat"
+     * For example, if baseURL is "https://dl4jdata.blob.core.windows.net/", and relativeToBase is "/datasets/iris.dat"
+     * this simply returns "https://dl4jdata.blob.core.windows.net/datasets/iris.dat"
      *
      * @param relativeToBase Relative URL
      * @return URL
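For reference, the `setBaseDownloadURL` method touched above is typically called once at startup; a minimal sketch (the import path is from memory and may vary between DL4J versions, and the mirror URL is a placeholder):

```java
import org.deeplearning4j.common.resources.DL4JResources;

public class ResourceUrlExample {
    public static void main(String[] args) {
        // Redirect (most) DL4J dataset/model downloads to a mirror; placeholder URL
        DL4JResources.setBaseDownloadURL("https://my-dl4j-mirror.example.org/");
    }
}
```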
@ -138,12 +138,14 @@ public class ValidateMKLDNN extends BaseDL4JTest {
|
|||
ConvolutionMode cm = ConvolutionMode.Truncate;
|
||||
|
||||
for (int minibatch : new int[]{1, 3}) {
|
||||
for (boolean b : new boolean[]{true, false}) {
|
||||
|
||||
inputSize[0] = minibatch;
|
||||
INDArray f = Nd4j.rand(Nd4j.defaultFloatingPointType(), inputSize);
|
||||
INDArray l = TestUtils.randomOneHot(minibatch, 10);
|
||||
|
||||
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
|
||||
.dataType(DataType.FLOAT)
|
||||
.updater(new Adam(0.01))
|
||||
.convolutionMode(cm)
|
||||
.seed(12345)
|
||||
|
@ -154,7 +156,7 @@ public class ValidateMKLDNN extends BaseDL4JTest {
|
|||
.padding(0, 0)
|
||||
.nOut(3)
|
||||
.build())
|
||||
.layer(new BatchNormalization.Builder().helperAllowFallback(false)/*.eps(0)*/.build())
|
||||
.layer(new BatchNormalization.Builder().useLogStd(b).helperAllowFallback(false)/*.eps(0)*/.build())
|
||||
.layer(new ConvolutionLayer.Builder().activation(Activation.TANH)
|
||||
.kernelSize(kernel)
|
||||
.stride(stride)
|
||||
|
@ -186,6 +188,7 @@ public class ValidateMKLDNN extends BaseDL4JTest {
|
|||
LayerHelperValidationUtil.validateMLN(netWith, tc);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Test @Ignore //https://github.com/deeplearning4j/deeplearning4j/issues/7272
|
||||
public void validateLRN() {
|
||||
|
|
|
@@ -38,7 +38,7 @@ import java.util.concurrent.atomic.AtomicLong;
 
 /**Implementation of the DeepWalk graph vectorization model, based on the paper
  * <i>DeepWalk: Online Learning of Social Representations</i> by Perozzi, Al-Rfou & Skiena (2014),
- * <a href="http://arxiv.org/abs/1403.6652">http://arxiv.org/abs/1403.6652</a><br>
+ * <a href="https://arxiv.org/abs/1403.6652">https://arxiv.org/abs/1403.6652</a><br>
  * Similar to word2vec in nature, DeepWalk is an unsupervised learning algorithm that learns a vector representation
  * of each vertex in a graph. Vector representations are learned using walks (usually random walks) on the vertices in
  * the graph.<br>
@@ -40,6 +40,6 @@ public class InvalidKerasConfigurationException extends Exception {
     }
 
     private static String appendDocumentationURL(String message) {
-        return message + ". For more information, see http://deeplearning4j.org/model-import-keras.";
+        return message + ". For more information, see http://deeplearning4j.org/docs/latest/keras-import-overview";
     }
 }
@@ -22,7 +22,7 @@ package org.deeplearning4j.nn.modelimport.keras.exceptions;
  * is not currently supported.
  *
  * See <a href="https://deeplearning4j.org/docs/latest/keras-import-overview">https://deeplearning4j.org/docs/latest/keras-import-overview</a>
- * for more information and file an issue at <a href="http://github.com/deeplearning4j/deeplearning4j/issues">http://github.com/deeplearning4j/deeplearning4j/issues</a>.
+ * for more information and file an issue at <a href="https://github.com/eclipse/deeplearning4j/issues">https://github.com/eclipse/deeplearning4j/issues</a>.
  *
  * @author dave@skymind.io
  */
@@ -41,6 +41,6 @@ public class UnsupportedKerasConfigurationException extends Exception {
     }
 
     private static String appendDocumentationURL(String message) {
-        return message + ". Please file an issue at http://github.com/deeplearning4j/deeplearning4j/issues.";
+        return message + ". Please file an issue at https://github.com/eclipse/deeplearning4j/issues.";
     }
 }
@@ -104,7 +104,7 @@ public class KerasEmbedding extends KerasLayer {
                     "on Embedding layers. Zero Masking for the Embedding layer only works with unidirectional LSTM for now."
                             + " If you want to have this behaviour for your imported model " +
                             "in DL4J, apply masking as a pre-processing step to your input." +
-                            "See https://deeplearning4j.org/usingrnns#masking for more on this.");
+                            "See http://deeplearning4j.org/docs/latest/deeplearning4j-nn-recurrent#masking for more on this.");
 
         Pair<WeightInit, Distribution> init = getWeightInitFromConfig(layerConfig, conf.getLAYER_FIELD_EMBEDDING_INIT(),
                 enforceTrainingConfig, conf, kerasMajorVersion);
@ -77,71 +77,6 @@
|
|||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.google.protobuf</groupId>
|
||||
<artifactId>protobuf-java</artifactId>
|
||||
<version>${google.protobuf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
<version>${jodatime.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
<version>${commons-lang3.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<artifactId>hibernate-validator</artifactId>
|
||||
<version>${hibernate.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.scala-lang</groupId>
|
||||
<artifactId>scala-library</artifactId>
|
||||
<version>${scala.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.scala-lang</groupId>
|
||||
<artifactId>scala-reflect</artifactId>
|
||||
<version>${scala.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.yaml</groupId>
|
||||
<artifactId>snakeyaml</artifactId>
|
||||
<version>${snakeyaml.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-annotations</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.datatype</groupId>
|
||||
<artifactId>jackson-datatype-jdk8</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.datatype</groupId>
|
||||
<artifactId>jackson-datatype-jsr310</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.typesafe</groupId>
|
||||
<artifactId>config</artifactId>
|
||||
<version>${typesafe.config.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.typesafe.play</groupId>
|
||||
<artifactId>play-java_2.11</artifactId>
|
||||
|
|
|
@ -31,21 +31,6 @@
|
|||
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpclient</artifactId>
|
||||
<version>${httpclient.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpcore</artifactId>
|
||||
<version>${httpcore.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpmime</artifactId>
|
||||
<version>${httpmime.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.mashape.unirest</groupId>
|
||||
<artifactId>unirest-java</artifactId>
|
||||
|
|
|
@@ -29,7 +29,7 @@ import static java.lang.Math.max;
 * QuadTree: <a href="http://en.wikipedia.org/wiki/Quadtree">http://en.wikipedia.org/wiki/Quadtree</a>
 *
 * Reference impl based on the paper by:
- * <a href="http://arxiv.org/pdf/1301.3342v2.pdf">http://arxiv.org/pdf/1301.3342v2.pdf</a>
+ * <a href="https://arxiv.org/pdf/1301.3342v2.pdf">https://arxiv.org/pdf/1301.3342v2.pdf</a>
 *
 * Primarily focused on 2 dimensions, may expand later if there's a reason.
 *
@@ -86,7 +86,7 @@ public class MathUtils {
 
 
     /**
-     * See: http://stackoverflow.com/questions/466204/rounding-off-to-nearest-power-of-2
+     * See: https://stackoverflow.com/questions/466204/rounding-off-to-nearest-power-of-2
     * @param v the number to getFromOrigin the next power of 2 for
    * @return the next power of 2 for the passed in value
     */
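The linked Stack Overflow thread describes the standard bit-twiddling approach; a minimal sketch of that technique (not necessarily MathUtils' exact implementation) for positive 32-bit values:

```java
public class PowerOfTwo {
    // Round a positive int up to the next power of 2 (classic bit-twiddling; 5 -> 8, 8 -> 8, 17 -> 32)
    public static int nextPowerOf2(int v) {
        v--;             // so that exact powers of 2 map to themselves
        v |= v >> 1;     // smear the highest set bit downwards...
        v |= v >> 2;
        v |= v >> 4;
        v |= v >> 8;
        v |= v >> 16;
        return v + 1;    // ...then add 1 to reach the next power of 2
    }
}
```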
@ -52,12 +52,6 @@
|
|||
<artifactId>deeplearning4j-nlp</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.nutz</groupId>
|
||||
<artifactId>nutz</artifactId>
|
||||
<version>1.r.58</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.nlpcn</groupId>
|
||||
<artifactId>nlp-lang</artifactId>
|
||||
|
|
|
@ -33,26 +33,6 @@
|
|||
</properties>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>commons-logging</groupId>
|
||||
<artifactId>commons-logging</artifactId>
|
||||
<version>${commons-logging.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-core</artifactId>
|
||||
<version>${spring.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-context</artifactId>
|
||||
<version>${spring.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-beans</artifactId>
|
||||
<version>${spring.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.cleartk</groupId>
|
||||
<artifactId>cleartk-snowball</artifactId>
|
||||
|
|
|
@ -54,11 +54,6 @@
|
|||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.objenesis</groupId>
|
||||
<artifactId>objenesis</artifactId>
|
||||
<version>${objenesis.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
|
@ -66,16 +61,6 @@
|
|||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- TSNE -->
|
||||
<!-- (Previously: dropwizard deps) -->
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>org.nd4j</groupId>
|
||||
<artifactId>nd4j-jackson</artifactId>
|
||||
<version>${nd4j.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>ch.qos.logback</groupId>
|
||||
<artifactId>logback-classic</artifactId>
|
||||
|
|
|
@@ -42,7 +42,7 @@ import java.util.*;
 * Instead of rand walks, this walker produces walks based on number of edges coming into each node.
 * This allows you to build walks filtering too rare nodes, or too popular nodes, depending on your demands.
 *
- * Original DeepWalk paper: <a href="http://arxiv.org/pdf/1403.6652v2">http://arxiv.org/pdf/1403.6652v2</a>
+ * Original DeepWalk paper: <a href="https://arxiv.org/pdf/1403.6652v2">https://arxiv.org/pdf/1403.6652v2</a>
 * @author raver119@gmail.com
 */
 public class PopularityWalker<T extends SequenceElement> extends RandomWalker<T> implements GraphWalker<T> {
@@ -37,7 +37,7 @@ import java.util.concurrent.atomic.AtomicInteger;
 /**
 * This is Random-based walker for SequenceVectors-based DeepWalk implementation
 *
- * Original DeepWalk paper: <a href="http://arxiv.org/pdf/1403.6652v2">http://arxiv.org/pdf/1403.6652v2</a>
+ * Original DeepWalk paper: <a href="https://arxiv.org/pdf/1403.6652v2">https://arxiv.org/pdf/1403.6652v2</a>
 *
 * @author AlexDBlack
 * @author raver119@gmail.com
@@ -52,7 +52,7 @@ package org.deeplearning4j.nn.conf;
 * </ul>
 * Thus, the l2 norm of the scaled gradients will not exceed the specified threshold, though may be smaller than it<br>
 * See: Pascanu, Mikolov, Bengio (2012), <i>On the difficulty of training Recurrent Neural Networks</i>,
- * <a href="http://arxiv.org/abs/1211.5063">http://arxiv.org/abs/1211.5063</a><br>
+ * <a href="https://arxiv.org/abs/1211.5063">https://arxiv.org/abs/1211.5063</a><br>
 * Threshold for clipping can be set in Layer configuration, using gradientNormalizationThreshold(double threshold)
 * </p>
 *
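As a rough illustration of the clipping rule described in this javadoc, the rescaling amounts to the following (a sketch over an arbitrary `INDArray` of gradients, not DL4J's internal code path):

```java
import org.nd4j.linalg.api.ndarray.INDArray;

public class ClipByL2Norm {
    // Scale the gradient in place so its L2 norm does not exceed the configured threshold
    public static void clip(INDArray gradient, double threshold) {
        double l2 = gradient.norm2Number().doubleValue();
        if (l2 > threshold) {
            gradient.muli(threshold / l2);   // after this, norm2 == threshold
        }
        // if l2 <= threshold the gradient is left untouched, so the norm "may be smaller than it"
    }
}
```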
@@ -23,7 +23,7 @@ import org.nd4j.shade.jackson.annotation.JsonProperty;
 
 /**
 * Orthogonal distribution, with gain parameter.<br>
- * See <a href="http://arxiv.org/abs/1312.6120">http://arxiv.org/abs/1312.6120</a> for details
+ * See <a href="https://arxiv.org/abs/1312.6120">https://arxiv.org/abs/1312.6120</a> for details
 *
 */
 @EqualsAndHashCode(callSuper = false)
@@ -236,7 +236,7 @@ public class BatchNormalization extends FeedForwardLayer {
 
     /**
      * Epsilon value for batch normalization; small floating point value added to variance (algorithm 1 in <a
-     * href="http://arxiv.org/pdf/1502.03167v3.pdf">http://arxiv.org/pdf/1502.03167v3.pdf</a>) to reduce/avoid
+     * href="https://arxiv.org/pdf/1502.03167v3.pdf">https://arxiv.org/pdf/1502.03167v3.pdf</a>) to reduce/avoid
      * underflow issues.<br> Default: 1e-5
      */
     protected double eps = 1e-5;
@@ -365,7 +365,7 @@ public class BatchNormalization extends FeedForwardLayer {
 
         /**
          * Epsilon value for batch normalization; small floating point value added to variance (algorithm 1 in <a
-         * href="http://arxiv.org/pdf/1502.03167v3.pdf">http://arxiv.org/pdf/1502.03167v3.pdf</a>) to reduce/avoid
+         * href="https://arxiv.org/pdf/1502.03167v3.pdf">https://arxiv.org/pdf/1502.03167v3.pdf</a>) to reduce/avoid
          * underflow issues.<br> Default: 1e-5
          *
          * @param eps Epsilon values to use
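To make the role of `eps` concrete, this is the normalization step it stabilizes (an illustrative scalar sketch with assumed per-feature statistics, not the layer's actual vectorized implementation):

```java
public class BatchNormStep {
    // One activation's forward normalization; eps avoids dividing by ~0 when the batch variance is tiny
    public static double normalize(double x, double mean, double variance, double gamma, double beta) {
        double eps = 1e-5;                                    // the default shown above
        double xHat = (x - mean) / Math.sqrt(variance + eps); // algorithm 1, normalization step
        return gamma * xHat + beta;                           // learned scale and shift
    }
}
```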
@@ -53,8 +53,8 @@ import java.util.Map;
 * We deserialize the config using the default deserializer, then handle the new IUpdater (which will be null for
 * 0.8.0 and earlier configs) if necessary
 *
- * Overall design: <a href="http://stackoverflow.com/questions/18313323/how-do-i-call-the-default-deserializer-from-a-custom-deserializer-in-jackson">
- * http://stackoverflow.com/questions/18313323/how-do-i-call-the-default-deserializer-from-a-custom-deserializer-in-jackson</a>
+ * Overall design: <a href="https://stackoverflow.com/questions/18313323/how-do-i-call-the-default-deserializer-from-a-custom-deserializer-in-jackson">
+ * https://stackoverflow.com/questions/18313323/how-do-i-call-the-default-deserializer-from-a-custom-deserializer-in-jackson</a>
 *
 * @author Alex Black
 */
@ -67,17 +67,17 @@ public class MKLDNNBatchNormHelper implements BatchNormalizationHelper {
|
|||
INDArray beta, INDArray dGammaView, INDArray dBetaView, double eps, LayerWorkspaceMgr workspaceMgr) {
|
||||
if(input.dataType() != DataType.FLOAT)
|
||||
return null; //MKL-DNN only supports float
|
||||
/*
|
||||
|
||||
//TODO FIXME - AB 2019/11/01 - https://github.com/eclipse/deeplearning4j/issues/8335
|
||||
List<INDArray> args = new ArrayList<>();
|
||||
args.add(input);
|
||||
args.add(meanCache);
|
||||
args.add(varCache);
|
||||
args.add(epsilon);
|
||||
if(gamma != null)
|
||||
args.add(gamma.reshape(gamma.length()));
|
||||
if(beta != null)
|
||||
args.add(beta.reshape(beta.length()));
|
||||
args.add(epsilon);
|
||||
|
||||
|
||||
DynamicCustomOp op = DynamicCustomOp.builder("batchnorm_bp")
|
||||
|
@ -110,8 +110,6 @@ public class MKLDNNBatchNormHelper implements BatchNormalizationHelper {
|
|||
g.setGradientFor(BatchNormalizationParamInitializer.BETA, dBetaView);
|
||||
|
||||
return new Pair<>(g, epsAtInput);
|
||||
*/
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@@ -49,8 +49,8 @@ import java.util.*;
 /**
 * Batch normalization layer.<br>
 * Rerences:<br>
- * <a href="http://arxiv.org/pdf/1502.03167v3.pdf">http://arxiv.org/pdf/1502.03167v3.pdf</a><br>
- * <a href="http://arxiv.org/pdf/1410.7455v8.pdf">http://arxiv.org/pdf/1410.7455v8.pdf</a><br>
+ * <a href="https://arxiv.org/pdf/1502.03167v3.pdf">https://arxiv.org/pdf/1502.03167v3.pdf</a><br>
+ * <a href="https://arxiv.org/pdf/1410.7455v8.pdf">https://arxiv.org/pdf/1410.7455v8.pdf</a><br>
 * <a href="https://kratzert.github.io/2016/02/12/understanding-the-gradient-flow-through-the-batch-normalization-layer.html">
 * https://kratzert.github.io/2016/02/12/understanding-the-gradient-flow-through-the-batch-normalization-layer.html</a>
 *
@@ -327,7 +327,7 @@ public class BatchNormalization extends BaseLayer<org.deeplearning4j.nn.conf.lay
             batchMean = input.mean(0, 2, 3);
             batchVar = input.var(false, 0, 2, 3);
         } else {
-            // TODO setup BatchNorm for RNN http://arxiv.org/pdf/1510.01378v1.pdf
+            // TODO setup BatchNorm for RNN https://arxiv.org/pdf/1510.01378v1.pdf
             throw new IllegalStateException( "The layer prior to BatchNorm in the configuration is not currently supported. " + layerId());
         }
 
@@ -476,7 +476,7 @@ public class BatchNormalization extends BaseLayer<org.deeplearning4j.nn.conf.lay
 
         // xHat = (x-xmean) / sqrt(var + epsilon)
         //Note that for CNNs, mean and variance are calculated per feature map (i.e., per activation) rather than per activation
-        //Pg5 of http://arxiv.org/pdf/1502.03167v3.pdf
+        //Pg5 of https://arxiv.org/pdf/1502.03167v3.pdf
         // "For convolutional layers, we additionally want the normalization to obey the convolutional property – so that
         // different elements of the same feature map, at different locations, are normalized in the same way. To achieve
         // this, we jointly normalize all the activations in a minibatch, over all locations."
@@ -560,7 +560,7 @@ public class BatchNormalization extends BaseLayer<org.deeplearning4j.nn.conf.lay
                 activations = Nd4j.getExecutioner().exec(new BroadcastAddOp(activations, beta, activations, 1));
             }
         } else {
-            // TODO setup BatchNorm for RNN http://arxiv.org/pdf/1510.01378v1.pdf
+            // TODO setup BatchNorm for RNN https://arxiv.org/pdf/1510.01378v1.pdf
             throw new IllegalStateException(
                     "The layer prior to BatchNorm in the configuration is not currently supported. "
                                     + layerId());
@@ -40,7 +40,7 @@ import java.util.Map;
 * <a href="http://www.cs.toronto.edu/~graves/phd.pdf">http://www.cs.toronto.edu/~graves/phd.pdf</a>
 * See also for full/vectorized equations (and a comparison to other LSTM variants):
 * Greff et al. 2015, "LSTM: A Search Space Odyssey", pg11. This is the "vanilla" variant in said paper
- * <a href="http://arxiv.org/pdf/1503.04069.pdf">http://arxiv.org/pdf/1503.04069.pdf</a>
+ * <a href="https://arxiv.org/pdf/1503.04069.pdf">https://arxiv.org/pdf/1503.04069.pdf</a>
 *
 * A high level description of bidirectional LSTM can be found from
 * "Hybrid Speech Recognition with Deep Bidirectional LSTM"
@@ -34,7 +34,7 @@ import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr;
 * <a href="http://www.cs.toronto.edu/~graves/phd.pdf">http://www.cs.toronto.edu/~graves/phd.pdf</a>
 * See also for full/vectorized equations (and a comparison to other LSTM variants):
 * Greff et al. 2015, "LSTM: A Search Space Odyssey", pg11. This is the "vanilla" variant in said paper
- * <a href="http://arxiv.org/pdf/1503.04069.pdf">http://arxiv.org/pdf/1503.04069.pdf</a>
+ * <a href="https://arxiv.org/pdf/1503.04069.pdf">https://arxiv.org/pdf/1503.04069.pdf</a>
 *
 * @author Alex Black
 * @see LSTM LSTM class, for the version without peephole connections
@@ -38,7 +38,7 @@ import org.nd4j.util.OneTimeLogger;
 *
 * See also for full/vectorized equations (and a comparison to other LSTM variants):
 * Greff et al. 2015, "LSTM: A Search Space Odyssey", pg11. This is the "no peephole" variant in said paper
- * <a href="http://arxiv.org/pdf/1503.04069.pdf">http://arxiv.org/pdf/1503.04069.pdf</a>
+ * <a href="https://arxiv.org/pdf/1503.04069.pdf">https://arxiv.org/pdf/1503.04069.pdf</a>
 *
 * @author Alex Black
 * @see GravesLSTM GravesLSTM class, for the version with peephole connections
@@ -68,7 +68,7 @@ import static org.nd4j.linalg.indexing.NDArrayIndex.*;
 * <p>
 * When 'hasPeepholeConnections' is true, this is the "vanilla" variant in said paper<br>
 * When 'hasPeepholeConnections' is false, this is the "no peephole" variant<br>
- * <a href="http://arxiv.org/pdf/1503.04069.pdf">http://arxiv.org/pdf/1503.04069.pdf</a>
+ * <a href="https://arxiv.org/pdf/1503.04069.pdf">https://arxiv.org/pdf/1503.04069.pdf</a>
 *
 *
 * @author Alex Black (LSTM implementations)
@ -44,184 +44,48 @@
|
|||
</properties>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.scala-lang</groupId>
|
||||
<artifactId>scala-library</artifactId>
|
||||
<version>${scala.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.scala-lang</groupId>
|
||||
<artifactId>scala-reflect</artifactId>
|
||||
<version>${scala.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>commons-logging</groupId>
|
||||
<artifactId>commons-logging</artifactId>
|
||||
<version>${commons-logging.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
<version>${jodatime.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpclient</artifactId>
|
||||
<version>${httpclient.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpcore</artifactId>
|
||||
<version>${httpcore.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.amazonaws</groupId>
|
||||
<artifactId>aws-java-sdk</artifactId>
|
||||
<version>1.11.24</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.deeplearning4j</groupId>
|
||||
<artifactId>deeplearning4j-core</artifactId>
|
||||
<version>${project.parent.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>args4j</groupId>
|
||||
<artifactId>args4j</artifactId>
|
||||
<version>2.32</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.nd4j</groupId>
|
||||
<artifactId>nd4j-api</artifactId>
|
||||
<version>${nd4j.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.deeplearning4j</groupId>
|
||||
<artifactId>deeplearning4j-util</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.jcraft</groupId>
|
||||
<artifactId>jsch</artifactId>
|
||||
<version>${jsch.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.inject</groupId>
|
||||
<artifactId>guice</artifactId>
|
||||
<version>${guice.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.protobuf</groupId>
|
||||
<artifactId>protobuf-java</artifactId>
|
||||
<version>${google.protobuf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-codec</groupId>
|
||||
<artifactId>commons-codec</artifactId>
|
||||
<version>${commons-codec.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-collections</groupId>
|
||||
<artifactId>commons-collections</artifactId>
|
||||
<version>${commons-collections.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-io</groupId>
|
||||
<artifactId>commons-io</artifactId>
|
||||
<version>${commons-io.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-lang</groupId>
|
||||
<artifactId>commons-lang</artifactId>
|
||||
<version>${commons-lang.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-net</groupId>
|
||||
<artifactId>commons-net</artifactId>
|
||||
<version>${commons-net.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.xml.bind</groupId>
|
||||
<artifactId>jaxb-core</artifactId>
|
||||
<version>${jaxb.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.xml.bind</groupId>
|
||||
<artifactId>jaxb-impl</artifactId>
|
||||
<version>${jaxb.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty</artifactId>
|
||||
<version>${netty.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-annotations</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.servlet</groupId>
|
||||
<artifactId>javax.servlet-api</artifactId>
|
||||
<version>${servlet.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-compress</artifactId>
|
||||
<version>${commons-compress.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
<version>${commons-lang3.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-math3</artifactId>
|
||||
<version>${commons-math3.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.curator</groupId>
|
||||
<artifactId>curator-recipes</artifactId>
|
||||
<version>${curator.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.typesafe</groupId>
|
||||
<artifactId>config</artifactId>
|
||||
<version>${typesafe.config.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.spark</groupId>
|
||||
<artifactId>spark-core_2.11</artifactId>
|
||||
<version>${spark.version}</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.google.code.findbugs</groupId>
|
||||
<artifactId>jsr305</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>jul-to-slf4j</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>jcl-over-slf4j</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-log4j12</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>log4j</groupId>
|
||||
<artifactId>log4j</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.threadly</groupId>
|
||||
<artifactId>threadly</artifactId>
|
||||
<version>${threadly.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
<version>${commons-lang3.version}</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<profiles>
|
||||
|
|
|
@@ -27,8 +27,8 @@ import lombok.AllArgsConstructor;
 import lombok.Data;
 import lombok.NoArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang.RandomStringUtils;
-import org.apache.spark.api.java.function.Function;
+import org.apache.commons.lang3.RandomStringUtils;
+import org.nd4j.linalg.function.Function;
 
 import java.io.File;
 import java.util.*;
@@ -157,7 +157,7 @@ public class SparkEMRClient {
     private void submitJob(AmazonElasticMapReduce emr, String mainClass, List<String> args, Map<String, String> sparkConfs, File uberJar) throws Exception {
         AmazonS3URI s3Jar = new AmazonS3URI(sparkS3JarFolder + "/" + uberJar.getName());
         log.info(String.format("Placing uberJar %s to %s", uberJar.getPath(), s3Jar.toString()));
-        PutObjectRequest putRequest = sparkS3PutObjectDecorator.call(
+        PutObjectRequest putRequest = sparkS3PutObjectDecorator.apply(
                 new PutObjectRequest(s3Jar.getBucket(), s3Jar.getKey(), uberJar)
         );
         sparkS3ClientBuilder.build().putObject(putRequest);
@@ -289,7 +289,7 @@ public class SparkEMRClient {
     // This should allow the user to decorate the put call to add metadata to the jar put command, such as security groups,
     protected Function<PutObjectRequest, PutObjectRequest> sparkS3PutObjectDecorator = new Function<PutObjectRequest, PutObjectRequest>() {
         @Override
-        public PutObjectRequest call(PutObjectRequest putObjectRequest) throws Exception {
+        public PutObjectRequest apply(PutObjectRequest putObjectRequest) {
            return putObjectRequest;
        }
    };
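For context on the `Function` signature change above, a hedged sketch of how a caller might supply such a decorator; the `withCannedAcl` option is just an illustrative AWS SDK setting, not something this class requires:

```java
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.PutObjectRequest;
import org.nd4j.linalg.function.Function;

public class PutRequestDecoratorExample {
    // A decorator that tweaks the PutObjectRequest before the uber-jar is uploaded
    static final Function<PutObjectRequest, PutObjectRequest> DECORATOR =
            new Function<PutObjectRequest, PutObjectRequest>() {
                @Override
                public PutObjectRequest apply(PutObjectRequest request) {
                    // Illustrative only: grant the bucket owner full control over the uploaded jar
                    return request.withCannedAcl(CannedAccessControlList.BucketOwnerFullControl);
                }
            };
}
```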
@ -116,7 +116,6 @@
|
|||
</build>
|
||||
|
||||
<dependencies>
|
||||
|
||||
<!-- ND4J Shaded Jackson Dependency -->
|
||||
<dependency>
|
||||
<groupId>org.nd4j</groupId>
|
||||
|
@ -139,82 +138,6 @@
|
|||
<artifactId>scala-reflect</artifactId>
|
||||
<version>${scala.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.google.inject</groupId>
|
||||
<artifactId>guice</artifactId>
|
||||
<version>${guice.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.protobuf</groupId>
|
||||
<artifactId>protobuf-java</artifactId>
|
||||
<version>${google.protobuf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-codec</groupId>
|
||||
<artifactId>commons-codec</artifactId>
|
||||
<version>${commons-codec.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-collections</groupId>
|
||||
<artifactId>commons-collections</artifactId>
|
||||
<version>${commons-collections.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-io</groupId>
|
||||
<artifactId>commons-io</artifactId>
|
||||
<version>${commons-io.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-lang</groupId>
|
||||
<artifactId>commons-lang</artifactId>
|
||||
<version>${commons-lang.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-net</groupId>
|
||||
<artifactId>commons-net</artifactId>
|
||||
<version>${commons-net.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.xml.bind</groupId>
|
||||
<artifactId>jaxb-core</artifactId>
|
||||
<version>${jaxb.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.xml.bind</groupId>
|
||||
<artifactId>jaxb-impl</artifactId>
|
||||
<version>${jaxb.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty</artifactId>
|
||||
<version>${netty.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.servlet</groupId>
|
||||
<artifactId>javax.servlet-api</artifactId>
|
||||
<version>${servlet.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-compress</artifactId>
|
||||
<version>${commons-compress.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
<version>${commons-lang3.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-math3</artifactId>
|
||||
<version>${commons-math3.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.curator</groupId>
|
||||
<artifactId>curator-recipes</artifactId>
|
||||
<version>${curator.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.typesafe</groupId>
|
||||
<artifactId>config</artifactId>
|
||||
|
@ -250,9 +173,7 @@
|
|||
<artifactId>log4j</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
<profiles>
|
||||
|
|
|
@ -129,32 +129,11 @@
|
|||
<artifactId>deeplearning4j-ui-model</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.google.protobuf</groupId>
|
||||
<artifactId>protobuf-java</artifactId>
|
||||
<version>${google.protobuf.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.ws.rs</groupId>
|
||||
<artifactId>javax.ws.rs-api</artifactId>
|
||||
<version>${ws.rs.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
<version>${jodatime.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
<version>${commons-lang3.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<artifactId>hibernate-validator</artifactId>
|
||||
<version>${hibernate.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.scala-lang</groupId>
|
||||
<artifactId>scala-library</artifactId>
|
||||
|
@ -165,11 +144,6 @@
|
|||
<artifactId>scala-reflect</artifactId>
|
||||
<version>${scala.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.yaml</groupId>
|
||||
<artifactId>snakeyaml</artifactId>
|
||||
<version>${snakeyaml.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.typesafe.play</groupId>
|
||||
<artifactId>play-java_2.11</artifactId>
|
||||
|
|
|
@@ -447,7 +447,7 @@ Marketers might seek to establish relationships among products to build a recomm
 
 ### <a name="patent">Google's Word2vec Patent</a>
 
-Word2vec is [a method of computing vector representations of words](http://arxiv.org/pdf/1301.3781.pdf) introduced by a team of researchers at Google led by Tomas Mikolov. Google [hosts an open-source version of Word2vec](https://code.google.com/p/word2vec/) released under an Apache 2.0 license. In 2014, Mikolov left Google for Facebook, and in May 2015, [Google was granted a patent for the method](http://patft.uspto.gov/netacgi/nph-Parser?Sect1=PTO2&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&co1=AND&d=PTXT&s1=9037464&OS=9037464&RS=9037464), which does not abrogate the Apache license under which it has been released.
+Word2vec is [a method of computing vector representations of words](https://arxiv.org/pdf/1301.3781.pdf) introduced by a team of researchers at Google led by Tomas Mikolov. Google [hosts an open-source version of Word2vec](https://code.google.com/p/word2vec/) released under an Apache 2.0 license. In 2014, Mikolov left Google for Facebook, and in May 2015, [Google was granted a patent for the method](http://patft.uspto.gov/netacgi/nph-Parser?Sect1=PTO2&Sect2=HITOFF&p=1&u=%2Fnetahtml%2FPTO%2Fsearch-bool.html&r=1&f=G&l=50&co1=AND&d=PTXT&s1=9037464&OS=9037464&RS=9037464), which does not abrogate the Apache license under which it has been released.
 
 ### <a name="foreign">Foreign Languages</a>
 
@@ -485,7 +485,7 @@ Deeplearning4j has a class called [SequenceVectors](https://github.com/eclipse/d
 * [Quora: What Are Some Interesting Word2Vec Results?](http://www.quora.com/Word2vec/What-are-some-interesting-Word2Vec-results/answer/Omer-Levy)
 * [Word2Vec: an introduction](http://www.folgertkarsdorp.nl/word2vec-an-introduction/); Folgert Karsdorp
 * [Mikolov's Original Word2vec Code @Google](https://code.google.com/p/word2vec/)
-* [word2vec Explained: Deriving Mikolov et al.’s Negative-Sampling Word-Embedding Method](http://arxiv.org/pdf/1402.3722v1.pdf); Yoav Goldberg and Omer Levy
+* [word2vec Explained: Deriving Mikolov et al.’s Negative-Sampling Word-Embedding Method](https://arxiv.org/pdf/1402.3722v1.pdf); Yoav Goldberg and Omer Levy
 * [Advances in Pre-Training Distributed Word Representations - by Mikolov et al](https://arxiv.org/abs/1712.09405)
 
 
@@ -51,10 +51,10 @@ Examples of some architectures that can be built using ComputationGraph include:
 
 - Multi-task learning architectures
 - Recurrent neural networks with skip connections
-- [GoogLeNet](http://arxiv.org/abs/1409.4842), a complex type of convolutional netural network for image classification
-- [Image caption generation](http://arxiv.org/abs/1411.4555)
+- [GoogLeNet](https://arxiv.org/abs/1409.4842), a complex type of convolutional netural network for image classification
+- [Image caption generation](https://arxiv.org/abs/1411.4555)
 - [Convolutional networks for sentence classification](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/convolution/sentenceclassification/CnnSentenceClassificationExample.java)
-- [Residual learning convolutional neural networks](http://arxiv.org/abs/1512.03385)
+- [Residual learning convolutional neural networks](https://arxiv.org/abs/1512.03385)
 
 
 ## <a name="config">Configuring a Computation Graph</a>
@@ -8,7 +8,7 @@ weight: 10
 
 ## Saving and Loading a Neural Network
 
-The `ModelSerializer` is a class which handles loading and saving models. There are two methods for saving models shown in the examples through the link. The first example saves a normal multilayer network, the second one saves a [computation graph](https://deeplearning4j.org/compgraph).
+The `ModelSerializer` is a class which handles loading and saving models. There are two methods for saving models shown in the examples through the link. The first example saves a normal multilayer network, the second one saves a [computation graph](https://deeplearning4j.org/docs/latest/deeplearning4j-nn-computationgraph).
 
 Here is a [basic example](https://github.com/eclipse/deeplearning4j-examples/tree/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/misc/modelsaving) with code to save a computation graph using the `ModelSerializer` class, as well as an example of using ModelSerializer to save a neural net built using MultiLayer configuration.
 
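A minimal save/load sketch with `ModelSerializer` (assuming `net` is an already-configured `MultiLayerNetwork`; `restoreComputationGraph` is the ComputationGraph counterpart):

```java
import java.io.File;
import java.io.IOException;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.util.ModelSerializer;

public class SaveLoadExample {
    public static void saveAndReload(MultiLayerNetwork net) throws IOException {
        File location = new File("trained-model.zip");

        // true = also save the updater state, so training can be resumed later
        ModelSerializer.writeModel(net, location, true);

        // Load it back; use ModelSerializer.restoreComputationGraph(...) for ComputationGraph models
        MultiLayerNetwork restored = ModelSerializer.restoreMultiLayerNetwork(location);
        System.out.println(restored.summary());
    }
}
```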
@@ -29,7 +29,7 @@ DL4J currently supports the following types of recurrent neural network
 * BaseRecurrent
 
 Java documentation for each is available, [GravesLSTM](https://deeplearning4j.org/api/{{page.version}}/org/deeplearning4j/nn/conf/layers/GravesLSTM.html),
-[BidirectionalGravesLSTM](https://deeplearning4j.org/api/{{page.version}}/org/deeplearning4j/nn/conf/layers/GravesBidirectionalLSTM.html), [BaseRecurrent](https://deeplearning4j.org/doc/org/deeplearning4j/nn/conf/layers/BaseRecurrentLayer.html)
+[BidirectionalGravesLSTM](https://deeplearning4j.org/api/{{page.version}}/org/deeplearning4j/nn/conf/layers/GravesBidirectionalLSTM.html), [BaseRecurrent](https://deeplearning4j.org/api/latest/org/deeplearning4j/nn/conf/layers/BaseRecurrentLayer.html)
 
 #### Data for RNNs
 Consider for the moment a standard feed-forward network (a multi-layer perceptron or 'DenseLayer' in DL4J). These networks expect input and output data that is two-dimensional: that is, data with "shape" [numExamples,inputSize]. This means that the data into a feed-forward network has ‘numExamples’ rows/examples, where each row consists of ‘inputSize’ columns. A single example would have shape [1,inputSize], though in practice we generally use multiple examples for computational and optimization efficiency. Similarly, output data for a standard feed-forward network is also two dimensional, with shape [numExamples,outputSize].
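As a concrete illustration of the shapes discussed here (a sketch using Nd4j factory methods with arbitrary sizes; DL4J's recurrent layers use a 3D [numExamples, inputSize, timeSeriesLength] layout for time-series data):

```java
import java.util.Arrays;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class ShapeExample {
    public static void main(String[] args) {
        // Feed-forward input: [numExamples, inputSize]
        INDArray ffInput = Nd4j.zeros(32, 100);

        // Recurrent-layer input: [numExamples, inputSize, timeSeriesLength]
        INDArray rnnInput = Nd4j.zeros(32, 100, 20);

        System.out.println(Arrays.toString(ffInput.shape()));   // [32, 100]
        System.out.println(Arrays.toString(rnnInput.shape()));  // [32, 100, 20]
    }
}
```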
@@ -8,7 +8,7 @@ weight: 10
 
 ## t-SNE's Data Visualization
 
-[t-Distributed Stochastic Neighbor Embedding](http://homepage.tudelft.nl/19j49/t-SNE.html) (t-SNE) is a data-visualization tool created by Laurens van der Maaten at Delft University of Technology.
+[t-Distributed Stochastic Neighbor Embedding](https://en.wikipedia.org/wiki/T-distributed_stochastic_neighbor_embedding) (t-SNE) is a data-visualization tool created by Laurens van der Maaten at Delft University of Technology.
 
 While it can be used for any data, t-SNE (pronounced Tee-Snee) is only really meaningful with labeled data, which clarify how the input is clustering. Below, you can see the kind of graphic you can generate in DL4J with t-SNE working on MNIST data.
 
@@ -627,7 +627,7 @@ To use the system clock time source, add the following to Spark submit:
 
 ## <a name="ubuntu16">Failed training on Ubuntu 16.04 (Ubuntu bug that may affect DL4J users)</a>
 
-When running a Spark on YARN cluster on Ubuntu 16.04 machines, chances are that after finishing a job, all processes owned by the user running Hadoop/YARN are killed. This is related to a bug in Ubuntu, which is documented at https://bugs.launchpad.net/ubuntu/+source/procps/+bug/1610499. There's also a Stackoverflow discussion about it at http://stackoverflow.com/questions/38419078/logouts-while-running-hadoop-under-ubuntu-16-04.
+When running a Spark on YARN cluster on Ubuntu 16.04 machines, chances are that after finishing a job, all processes owned by the user running Hadoop/YARN are killed. This is related to a bug in Ubuntu, which is documented at https://bugs.launchpad.net/ubuntu/+source/procps/+bug/1610499. There's also a Stackoverflow discussion about it at https://stackoverflow.com/questions/38419078/logouts-while-running-hadoop-under-ubuntu-16-04.
 
 Some workarounds are suggested.
 
@@ -695,7 +695,7 @@ To use the system clock time source, add the following to Spark submit:
 
 ## <a href="ubuntu16">Failed training on Ubuntu 16.04 (Ubuntu bug that may affect DL4J users)</a>
 
-When running a Spark on YARN cluster on Ubuntu 16.04 machines, chances are that after finishing a job, all processes owned by the user running Hadoop/YARN are killed. This is related to a bug in Ubuntu, which is documented at https://bugs.launchpad.net/ubuntu/+source/procps/+bug/1610499. There's also a Stackoverflow discussion about it at http://stackoverflow.com/questions/38419078/logouts-while-running-hadoop-under-ubuntu-16-04.
+When running a Spark on YARN cluster on Ubuntu 16.04 machines, chances are that after finishing a job, all processes owned by the user running Hadoop/YARN are killed. This is related to a bug in Ubuntu, which is documented at https://bugs.launchpad.net/ubuntu/+source/procps/+bug/1610499. There's also a Stackoverflow discussion about it at https://stackoverflow.com/questions/38419078/logouts-while-running-hadoop-under-ubuntu-16-04.
 
 Some workarounds are suggested.
 
@@ -99,4 +99,4 @@ You can also download a [free version of the Skymind Intelligence Layer](https:/
 
 Most of what we know about deep learning is contained in academic papers. You can find some of the major research groups [here](https://skymind.ai/wiki/machine-learning-research-groups-labs).
 
-While individual courses have limits on what they can teach, the Internet does not. Most math and programming questions can be answered by Googling and searching sites like [Stackoverflow](http://stackoverflow.com) and [Math Stackexchange](https://math.stackexchange.com/).
+While individual courses have limits on what they can teach, the Internet does not. Most math and programming questions can be answered by Googling and searching sites like [Stackoverflow](https://stackoverflow.com) and [Math Stackexchange](https://math.stackexchange.com/).
@ -220,7 +220,7 @@ List of supported activation functions:
|
|||
* **LEAKYRELU** - ([Source](https://github.com/eclipse/deeplearning4j/blob/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationLReLU.java)) - leaky rectified linear unit. ```f(x) = max(0, x) + alpha * min(0, x)``` with ```alpha=0.01``` by default.
|
||||
* **RATIONALTANH** - ([Source](https://github.com/eclipse/deeplearning4j/blob/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationRationalTanh.java)) - ```tanh(y) ~ sgn(y) * { 1 - 1/(1+|y|+y^2+1.41645*y^4)}``` which approximates ```f(x) = 1.7159 * tanh(2x/3)```, but should be faster to execute. ([Reference](https://arxiv.org/abs/1508.01292))
|
||||
* **RELU** - ([Source](https://github.com/eclipse/deeplearning4j/blob/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationReLU.java)) - standard rectified linear unit: ```f(x) = x``` if ```x>0``` or ```f(x) = 0``` otherwise
|
||||
* **RRELU** - ([Source](https://github.com/eclipse/deeplearning4j/blob/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationRReLU.java)) - randomized rectified linear unit. Deterministic during test time. ([Reference](http://arxiv.org/abs/1505.00853))
|
||||
* **RRELU** - ([Source](https://github.com/eclipse/deeplearning4j/blob/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationRReLU.java)) - randomized rectified linear unit. Deterministic during test time. ([Reference](https://arxiv.org/abs/1505.00853))
|
||||
* **SIGMOID** - ([Source](https://github.com/eclipse/deeplearning4j/blob/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationSigmoid.java)) - standard sigmoid activation function, ```f(x) = 1 / (1 + exp(-x))```
|
||||
* **SOFTMAX** - ([Source](https://github.com/eclipse/deeplearning4j/blob/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationSoftmax.java)) - standard softmax activation function
|
||||
* **SOFTPLUS** - ([Source](https://github.com/eclipse/deeplearning4j/blob/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationSoftPlus.java)) - ```f(x) = log(1+e^x)``` - shape is similar to a smooth version of the RELU activation function
|
||||
|
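As a point of reference, here is a minimal sketch of how one of the activations listed above is typically selected via the `Activation` enum on a layer builder; the layer sizes are hypothetical and the rest of the configuration is omitted:

```java
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class ActivationSketch {
    public static void main(String[] args) {
        // RELU on the hidden layer, SOFTMAX on the output layer (sizes are hypothetical)
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .list()
                .layer(new DenseLayer.Builder().nIn(784).nOut(128)
                        .activation(Activation.RELU).build())
                .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .nIn(128).nOut(10)
                        .activation(Activation.SOFTMAX).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
    }
}
```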
@ -269,7 +269,7 @@ The [CS231n course notes](http://cs231n.github.io/neural-networks-3/#ada) have a
|
|||
Supported updaters in Deeplearning4j:
|
||||
* **AdaDelta** - ([Source](https://github.com/eclipse/deeplearning4j/tree/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/learning/config/AdaDelta.java)) - [Reference](https://arxiv.org/abs/1212.5701)
|
||||
* **AdaGrad** - ([Source](https://github.com/eclipse/deeplearning4j/tree/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/learning/config/AdaGrad.java)) - [Reference](http://jmlr.org/papers/v12/duchi11a.html)
|
||||
* **AdaMax** - ([Source](https://github.com/eclipse/deeplearning4j/tree/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/learning/config/AdaMax.java)) - A variant of the Adam updater - [Reference](http://arxiv.org/abs/1412.6980)
|
||||
* **AdaMax** - ([Source](https://github.com/eclipse/deeplearning4j/tree/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/learning/config/AdaMax.java)) - A variant of the Adam updater - [Reference](https://arxiv.org/abs/1412.6980)
|
||||
* **Adam** - ([Source](https://github.com/eclipse/deeplearning4j/tree/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/learning/config/Adam.java))
|
||||
* **Nadam** - ([Source](https://github.com/eclipse/deeplearning4j/tree/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/learning/config/Nadam.java)) - A variant of the Adam updater, using the Nesterov momentum update rule - [Reference](https://arxiv.org/abs/1609.04747)
|
||||
* **Nesterovs** - ([Source](https://github.com/eclipse/deeplearning4j/tree/master/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/learning/config/Nesterovs.java)) - Nesterov momentum updater
|
||||
|
|
|
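As a rough illustration, an updater from the list above is normally attached at the configuration level; the learning rate and momentum values below are hypothetical:

```java
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.nd4j.linalg.learning.config.Adam;
import org.nd4j.linalg.learning.config.Nesterovs;

public class UpdaterSketch {
    public static void main(String[] args) {
        // Adam with a hypothetical learning rate of 1e-3
        NeuralNetConfiguration.Builder withAdam = new NeuralNetConfiguration.Builder()
                .updater(new Adam(1e-3));

        // Nesterov momentum with a hypothetical learning rate of 0.01 and momentum of 0.9
        NeuralNetConfiguration.Builder withNesterovs = new NeuralNetConfiguration.Builder()
                .updater(new Nesterovs(0.01, 0.9));
    }
}
```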
@ -84,7 +84,7 @@ Not all DL4J layer types are supported in cuDNN. DL4J layers with cuDNN support
|
|||
To check if cuDNN is being used, the simplest approach is to look at the log output when running inference or training:
|
||||
If cuDNN is NOT available when you are using a layer that supports it, you will see a message such as:
|
||||
```
|
||||
o.d.n.l.c.ConvolutionLayer - cuDNN not found: use cuDNN for better GPU performance by including the deeplearning4j-cuda module. For more information, please refer to: https://deeplearning4j.org/cudnn
|
||||
o.d.n.l.c.ConvolutionLayer - cuDNN not found: use cuDNN for better GPU performance by including the deeplearning4j-cuda module. For more information, please refer to: https://deeplearning4j.org/docs/latest/deeplearning4j-config-cudnn
|
||||
java.lang.ClassNotFoundException: org.deeplearning4j.nn.layers.convolution.CudnnConvolutionHelper
|
||||
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
|
||||
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
|
||||
|
|
|
@ -18,31 +18,31 @@ Most of the examples make use of DataVec, a toolkit for preprocessing and clearn
|
|||
|
||||
This example takes the canonical Iris dataset of the flower species of the same name, whose relevant measurements are sepal length, sepal width, petal length and petal width. It builds a Spark RDD from the relatively small dataset and runs an analysis against it.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/datavec-examples/src/main/java/org/datavec/transform/analysis/IrisAnalysis.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/datavec-examples/src/main/java/org/datavec/transform/analysis/IrisAnalysis.java)
|
||||
|
||||
### BasicDataVecExample.java
|
||||
|
||||
This example loads data into a Spark RDD. All DataVec transform operations use Spark RDDs. Here, we use DataVec to filter data, apply time transformations and remove columns.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/datavec-examples/src/main/java/org/datavec/transform/basic/BasicDataVecExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/datavec-examples/src/main/java/org/datavec/transform/basic/BasicDataVecExample.java)
|
||||
|
||||
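A minimal sketch of the kind of transform pipeline that example builds, assuming a hypothetical three-column schema (the linked example defines its own columns, filters rows and derives time fields):

```java
import org.datavec.api.transform.TransformProcess;
import org.datavec.api.transform.schema.Schema;

public class TransformSketch {
    public static void main(String[] args) {
        // Hypothetical input schema; the real example's columns differ
        Schema schema = new Schema.Builder()
                .addColumnString("DateTimeString")
                .addColumnString("CustomerID")
                .addColumnDouble("Amount")
                .build();

        // Drop one column; filters and time transformations would be chained the same way
        TransformProcess tp = new TransformProcess.Builder(schema)
                .removeColumns("CustomerID")
                .build();

        System.out.println(tp.getFinalSchema());
    }
}
```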
### PrintSchemasAtEachStep.java
|
||||
|
||||
This example shows the schema-printing tools, which are useful for visualizing each step of a transform and ensuring the transform code behaves as expected.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/datavec-examples/src/main/java/org/datavec/transform/debugging/PrintSchemasAtEachStep.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/datavec-examples/src/main/java/org/datavec/transform/debugging/PrintSchemasAtEachStep.java)
|
||||
|
||||
### JoinExample.java
|
||||
|
||||
You may need to join datasets before passing to a neural network. You can do that in DataVec, and this example shows you how.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/datavec-examples/src/main/java/org/datavec/transform/join/JoinExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/datavec-examples/src/main/java/org/datavec/transform/join/JoinExample.java)
|
||||
|
||||
### LogDataExample.java
|
||||
|
||||
This is an example of parsing log data using DataVec. The obvious use cases are cybersecurity and customer relationship management.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/datavec-examples/src/main/java/org/datavec/transform/logdata/LogDataExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/datavec-examples/src/main/java/org/datavec/transform/logdata/LogDataExample.java)
|
||||
|
||||
### MnistImagePipelineExample.java
|
||||
|
||||
|
@ -50,7 +50,7 @@ This example is from the video below, which demonstrates the ParentPathLabelGene
|
|||
|
||||
<iframe width="560" height="315" src="http://www.youtube.com/embed/GLC8CIoHDnI" frameborder="0" allowfullscreen></iframe>
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/dataExamples/MnistImagePipelineExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/dataExamples/MnistImagePipelineExample.java)
|
||||
|
||||
### PreprocessNormalizerExample.java
|
||||
|
||||
|
@ -78,13 +78,13 @@ MNIST is the "Hello World" of deep learning. Simple, straightforward, and focuss
|
|||
|
||||
This is a Single Layer Perceptron for recognizing digits. Note that this pulls the images from a binary package containing the dataset, a rather special case for data ingestion.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/feedforward/mnist/MLPMnistSingleLayerExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/feedforward/mnist/MLPMnistSingleLayerExample.java)
|
||||
|
||||
### MLPMnistTwoLayerExample.java
|
||||
|
||||
A two-layer perceptron for MNIST, showing there is more than one useful network for a given dataset.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/feedforward/mnist/MLPMnistTwoLayerExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/feedforward/mnist/MLPMnistTwoLayerExample.java)
|
||||
|
||||
### Feedforward Examples
|
||||
|
||||
|
@ -92,7 +92,7 @@ Data flows through feed-forward neural networks in a single pass from input via
|
|||
|
||||
These networks can be used for a wide range of tasks depending on how they are configured. Along with image classification over MNIST data, this directory has examples demonstrating regression, classification, and anomaly detection.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/tree/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/feedforward)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/tree/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/feedforward)
|
||||
|
||||
### Convolutional Neural Networks
|
||||
|
||||
|
@ -102,7 +102,7 @@ Convolutional Neural Networks are mainly used for image recognition, although th
|
|||
|
||||
This example can be run using either LeNet or AlexNet.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/convolution/AnimalsClassification.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/convolution/AnimalsClassification.java)
|
||||
|
||||
---
|
||||
|
||||
|
@ -115,7 +115,7 @@ load the model for later training or inference.
|
|||
|
||||
This demonstrates saving and loading a network built using the ComputationGraph class.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/misc/modelsaving/SaveLoadComputationGraph.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/misc/modelsaving/SaveLoadComputationGraph.java)
|
||||
|
||||
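A minimal save/restore sketch using `ModelSerializer`, with a hypothetical file name; the linked example covers the full round trip:

```java
import java.io.File;

import org.deeplearning4j.nn.graph.ComputationGraph;
import org.deeplearning4j.util.ModelSerializer;

public class SaveLoadSketch {
    public static void saveAndRestore(ComputationGraph net) throws Exception {
        File f = new File("my-graph.zip");           // hypothetical file name
        ModelSerializer.writeModel(net, f, true);    // true = also save updater state
        ComputationGraph restored = ModelSerializer.restoreComputationGraph(f);
        System.out.println(restored.summary());
    }
}
```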
### SaveLoadMultiLayerNetwork.java
|
||||
|
||||
|
@ -135,11 +135,11 @@ Do you need to add a Loss Function that is not available or prebuilt yet? Check
|
|||
|
||||
### CustomLossExample.java
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/misc/lossfunctions/CustomLossExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/misc/lossfunctions/CustomLossExample.java)
|
||||
|
||||
### CustomLossL1L2.java
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/misc/lossfunctions/CustomLossL1L2.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/misc/lossfunctions/CustomLossL1L2.java)
|
||||
|
||||
### Custom Layer
|
||||
|
||||
|
@ -147,7 +147,7 @@ Do you need to add a layer with features that aren't available in DeepLearning4J
|
|||
|
||||
### CustomLayerExample.java
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/misc/customlayers/CustomLayerExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/misc/customlayers/CustomLayerExample.java)
|
||||
|
||||
---
|
||||
|
||||
|
@ -159,25 +159,25 @@ Neural Networks for NLP? We have those, too.
|
|||
|
||||
Global Vectors for Word Representation are useful for detecting relationships between words.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/nlp/glove/GloVeExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/nlp/glove/GloVeExample.java)
|
||||
|
||||
### Paragraph Vectors
|
||||
|
||||
A vectorized representation of variable-length pieces of text, such as sentences and documents. Described [here](https://cs.stanford.edu/~quocle/paragraph_vector.pdf)
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/nlp/paragraphvectors/ParagraphVectorsClassifierExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/nlp/paragraphvectors/ParagraphVectorsClassifierExample.java)
|
||||
|
||||
### Sequence Vectors
|
||||
|
||||
One way to represent sentences is as a sequence of words.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/nlp/sequencevectors/SequenceVectorsTextExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/nlp/sequencevectors/SequenceVectorsTextExample.java)
|
||||
|
||||
### Word2Vec
|
||||
|
||||
Described [here](https://deeplearning4j.org/word2vec.html)
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/nlp/word2vec/Word2VecRawTextExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/nlp/word2vec/Word2VecRawTextExample.java)
|
||||
|
||||
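Since several of the NLP examples above revolve around word embeddings, here is a rough Word2Vec training sketch; the corpus file name and hyperparameters are hypothetical:

```java
import org.deeplearning4j.models.word2vec.Word2Vec;
import org.deeplearning4j.text.sentenceiterator.BasicLineIterator;
import org.deeplearning4j.text.sentenceiterator.SentenceIterator;
import org.deeplearning4j.text.tokenization.tokenizerfactory.DefaultTokenizerFactory;
import org.deeplearning4j.text.tokenization.tokenizerfactory.TokenizerFactory;

public class Word2VecSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical corpus: one sentence per line
        SentenceIterator iter = new BasicLineIterator("raw_sentences.txt");
        TokenizerFactory tokenizer = new DefaultTokenizerFactory();

        Word2Vec vec = new Word2Vec.Builder()
                .minWordFrequency(5)
                .layerSize(100)
                .windowSize(5)
                .iterate(iter)
                .tokenizerFactory(tokenizer)
                .build();
        vec.fit();

        System.out.println(vec.wordsNearest("day", 10));
    }
}
```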
---
|
||||
|
||||
|
@ -185,7 +185,7 @@ Described [here](https://deeplearning4j.org/word2vec.html)
|
|||
|
||||
t-Distributed Stochastic Neighbor Embedding (t-SNE) is useful for data visualization. We include an example in the NLP section since word similarity visualization is a common use.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/nlp/tsne/TSNEStandardExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/nlp/tsne/TSNEStandardExample.java)
|
||||
|
||||
---
|
||||
|
||||
|
@ -199,19 +199,19 @@ The examples folder for Recurrent Neural Networks has the following:
|
|||
|
||||
An RNN learns a string of characters.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/recurrent/basic/BasicRNNExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/recurrent/basic/BasicRNNExample.java)
|
||||
|
||||
### GravesLSTMCharModellingExample.java
|
||||
|
||||
Takes the complete works of Shakespeare as a sequence of characters and trains a neural network to generate "Shakespeare" one character at a time.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/recurrent/character/GravesLSTMCharModellingExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/recurrent/character/GravesLSTMCharModellingExample.java)
|
||||
|
||||
### SingleTimestepRegressionExample.java
|
||||
|
||||
Regression with an LSTM (Long Short Term Memory) Recurrent Neural Network.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/recurrent/regression/SingleTimestepRegressionExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-examples/src/main/java/org/deeplearning4j/examples/recurrent/regression/SingleTimestepRegressionExample.java)
|
||||
|
||||
### AdditionRNN.java
|
||||
|
||||
|
@ -254,13 +254,13 @@ DeepLearning4j supports using a Spark Cluster for network training. Here are the
|
|||
### MnistMLPExample.java
|
||||
|
||||
This is an example of a Multi-Layer Perceptron training on the MNIST dataset of handwritten digits.
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-spark-examples/dl4j-spark/src/main/java/org/deeplearning4j/mlp/MnistMLPExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-spark-examples/dl4j-spark/src/main/java/org/deeplearning4j/mlp/MnistMLPExample.java)
|
||||
|
||||
### SparkLSTMCharacterExample.java
|
||||
|
||||
An LSTM recurrent Network in Spark.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-spark-examples/dl4j-spark/src/main/java/org/deeplearning4j/rnn/SparkLSTMCharacterExample.java)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/blob/master/dl4j-spark-examples/dl4j-spark/src/main/java/org/deeplearning4j/rnn/SparkLSTMCharacterExample.java)
|
||||
|
||||
---
|
||||
|
||||
|
@ -274,7 +274,7 @@ The learning algorithms and loss functions are executed as ND4J operations.
|
|||
|
||||
This is a directory with examples for creating and manipulating NDArrays.
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/tree/master/nd4j-examples/src/main/java/org/nd4j/examples)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/tree/master/nd4j-examples/src/main/java/org/nd4j/examples)
|
||||
|
||||
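A few typical NDArray operations, as a sketch of what those examples demonstrate:

```java
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class NdArraySketch {
    public static void main(String[] args) {
        INDArray a = Nd4j.create(new float[]{1, 2, 3, 4}, new int[]{2, 2}); // 2x2 matrix
        INDArray b = Nd4j.ones(2, 2);

        INDArray sum = a.add(b);     // element-wise addition (returns a new array)
        INDArray prod = a.mmul(b);   // matrix multiplication

        System.out.println(sum);
        System.out.println(prod);
    }
}
```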
---
|
||||
|
||||
|
@ -282,4 +282,4 @@ This is a directory with examples for creating and manipulating NDArrays.
|
|||
|
||||
Deep learning algorithms have learned to play Space Invaders and Doom using reinforcement learning. DeepLearning4J/RL4J examples of Reinforcement Learning are available here:
|
||||
|
||||
[Show me the code](http://github.com/eclipse/deeplearning4j-examples/tree/master/rl4j-examples)
|
||||
[Show me the code](https://github.com/eclipse/deeplearning4j-examples/tree/master/rl4j-examples)
|
|
@ -179,7 +179,7 @@ Congratulations! You just trained your first neural network with Deeplearning4j.
|
|||
**Q:** **SPARK ISSUES** I am running the examples and having issues with the Spark based examples such as distributed training or datavec transform options.
|
||||
|
||||
|
||||
**A:** You may be missing some dependencies that Spark requires. See this [Stack Overflow discussion](http://stackoverflow.com/a/38735202/3892515) for a discussion of potential dependency issues. Windows users may need the winutils.exe from Hadoop.
|
||||
**A:** You may be missing some dependencies that Spark requires. See this [Stack Overflow discussion](https://stackoverflow.com/a/38735202/3892515) for a discussion of potential dependency issues. Windows users may need the winutils.exe from Hadoop.
|
||||
|
||||
Download winutils.exe from https://github.com/steveloughran/winutils and put it at null/bin/winutils.exe (or create a hadoop folder containing bin/winutils.exe and set HADOOP_HOME to that folder)
|
||||
|
||||
|
|
|
@ -16,7 +16,7 @@
|
|||
|
||||
//
|
||||
// Methods to lookup files in $PATH
|
||||
// adopted from http://stackoverflow.com/questions/2718915/check-if-file-exists-including-on-path
|
||||
// adopted from https://stackoverflow.com/questions/2718915/check-if-file-exists-including-on-path
|
||||
//
|
||||
|
||||
#ifndef LIBND4J_FILES_H
|
||||
|
|
|
@ -137,7 +137,7 @@ namespace nd4j {
|
|||
#endif
|
||||
|
||||
/**
|
||||
* This operation performs batch normalization of layer, it is based on following article http://arxiv.org/abs/1502.03167.
|
||||
* This operation performs batch normalization of layer, it is based on following article https://arxiv.org/abs/1502.03167.
|
||||
* Expected arguments:
|
||||
* x: input 4D array of shape [bS,iH,iW,iD] (data format = NHWC) or [bS,iD,iH,iW] (data format = NCHW), where
|
||||
* bS - batch size
|
||||
|
|
|
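For context, the normalization this op's comment refers to (per the cited paper) is, up to notation:

```latex
\mu_B = \frac{1}{m}\sum_{i=1}^{m} x_i, \qquad
\sigma_B^2 = \frac{1}{m}\sum_{i=1}^{m} (x_i - \mu_B)^2, \qquad
y_i = \gamma \, \frac{x_i - \mu_B}{\sqrt{\sigma_B^2 + \epsilon}} + \beta
```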
@ -19,7 +19,7 @@
|
|||
//
|
||||
|
||||
// implementation of gated Recurrent Unit cell
|
||||
// (cf. http://arxiv.org/abs/1406.1078).
|
||||
// (cf. https://arxiv.org/abs/1406.1078).
|
||||
// Kyunghyun Cho, Bart van Merrienboer, Caglar Gulcehre, Dzmitry Bahdanau, Fethi Bougares, Holger Schwenk, Yoshua Bengio
|
||||
// "Learning Phrase Representations using RNN Encoder-Decoder for Statistical Machine Translation"
|
||||
|
||||
|
|
|
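For reference, the GRU cell described in that paper computes, up to notational differences:

```latex
r_t = \sigma(W_r x_t + U_r h_{t-1}), \qquad
z_t = \sigma(W_z x_t + U_z h_{t-1})
\tilde{h}_t = \tanh\!\big(W x_t + U (r_t \odot h_{t-1})\big)
h_t = z_t \odot h_{t-1} + (1 - z_t) \odot \tilde{h}_t
```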
@ -19,7 +19,7 @@
|
|||
//
|
||||
|
||||
// implementation of gated Recurrent Unit cell
|
||||
// (cf. http://arxiv.org/abs/1406.1078).
|
||||
// (cf. https://arxiv.org/abs/1406.1078).
|
||||
// Kyunghyun Cho, Bart van Merrienboer, Caglar Gulcehre, Dzmitry Bahdanau, Fethi Bougares, Holger Schwenk, Yoshua Bengio
|
||||
// "Learning Phrase Representations using RNN Encoder-Decoder for Statistical Machine Translation"
|
||||
|
||||
|
|
|
@ -24,7 +24,7 @@
|
|||
#include <string>
|
||||
#include <op_boilerplate.h>
|
||||
|
||||
//http://stackoverflow.com/questions/228005/alternative-to-itoa-for-converting-integer-to-string-c
|
||||
//https://stackoverflow.com/questions/228005/alternative-to-itoa-for-converting-integer-to-string-c
|
||||
FORCEINLINE std::string int_array_to_string(Nd4jLong int_array[], Nd4jLong size_of_array) {
|
||||
std::string returnstring = "[";
|
||||
for (int temp = 0; temp < size_of_array; temp++) {
|
||||
|
|
|
@ -41,12 +41,12 @@ To install ND4J, there are a couple of approaches, and more information can be f
|
|||
|
||||
#### Install from Maven Central
|
||||
|
||||
1. Search for nd4j in the [Maven Central Repository](http://mvnrepository.com/search?q=nd4j) to find the available nd4j jars.
|
||||
1. Search for nd4j in the [Maven Central Repository](https://search.maven.org/search?q=nd4j) to find the available nd4j jars.
|
||||
2. Include the appropriate dependency in your pom.xml.
|
||||
|
||||
#### Clone from the GitHub Repo
|
||||
|
||||
https://deeplearning4j.org/buildinglocally
|
||||
https://deeplearning4j.org/docs/latest/deeplearning4j-build-from-source
|
||||
## Contribute
|
||||
|
||||
1. Check for open issues, or open a new issue to start a discussion around a feature idea or a bug.
|
||||
|
|
|
@ -192,12 +192,6 @@
|
|||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>org.objenesis</groupId>
|
||||
<artifactId>objenesis</artifactId>
|
||||
<version>${objenesis.version}</version>
|
||||
</dependency>
|
||||
|
||||
|
||||
<!-- oshi: Used for collecting system information for system info reporting -->
|
||||
<dependency>
|
||||
|
@ -206,22 +200,6 @@
|
|||
<version>${oshi.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ch.qos.logback</groupId>
|
||||
<artifactId>logback-classic</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ch.qos.logback</groupId>
|
||||
<artifactId>logback-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
|
|
|
@ -69,7 +69,7 @@ public class SDNN extends SDOps {
|
|||
|
||||
/**
|
||||
* Neural network batch normalization operation.<br>
|
||||
* For details, see <a href="http://arxiv.org/abs/1502.03167">http://arxiv.org/abs/1502.03167</a>
|
||||
* For details, see <a href="https://arxiv.org/abs/1502.03167">https://arxiv.org/abs/1502.03167</a>
|
||||
*
|
||||
* @param name Name of the output variable
|
||||
* @param input Input variable.
|
||||
|
@ -139,7 +139,7 @@ public class SDNN extends SDOps {
|
|||
* out = a * (exp(x) - 1) if x <= 0<br>
|
||||
* with constant a = 1.0
|
||||
* <p>
|
||||
* See: <a href="http://arxiv.org/abs/1511.07289">http://arxiv.org/abs/1511.07289</a>
|
||||
* See: <a href="https://arxiv.org/abs/1511.07289">https://arxiv.org/abs/1511.07289</a>
|
||||
*
|
||||
* @param x Input variable
|
||||
* @return Output variable
|
||||
|
@ -154,7 +154,7 @@ public class SDNN extends SDOps {
|
|||
* out = a * (exp(x) - 1) if x <= 0<br>
|
||||
* with constant a = 1.0
|
||||
* <p>
|
||||
* See: <a href="http://arxiv.org/abs/1511.07289">http://arxiv.org/abs/1511.07289</a>
|
||||
* See: <a href="https://arxiv.org/abs/1511.07289">https://arxiv.org/abs/1511.07289</a>
|
||||
*
|
||||
* @param name Output variable name
|
||||
* @param x Input variable
|
||||
|
|
|
@ -34,7 +34,7 @@ import org.nd4j.shade.jackson.annotation.JsonIgnoreProperties;
|
|||
* alpha is drawn from uniform(l,u) during training and is set to (l+u)/2 during test
|
||||
* l and u default to 1/8 and 1/3 respectively
|
||||
*
|
||||
* <a href="http://arxiv.org/abs/1505.00853">
|
||||
* <a href="https://arxiv.org/abs/1505.00853">
|
||||
* Empirical Evaluation of Rectified Activations in Convolutional Network</a>
|
||||
*/
|
||||
@EqualsAndHashCode(callSuper = false)
|
||||
|
|
|
@ -34,7 +34,7 @@ import org.tensorflow.framework.NodeDef;
|
|||
* Out(x) = x if x >= 0<br>
|
||||
* Leaky ReLU may avoid zero gradient "dying ReLU" problem by having non-zero
|
||||
* gradient below 0.<br>
|
||||
* See for example http://arxiv.org/abs/1505.00853 for a comparison of
|
||||
* See for example https://arxiv.org/abs/1505.00853 for a comparison of
|
||||
* ReLU variants.
|
||||
*
|
||||
* @author Alex Black
|
||||
|
|
|
@ -33,7 +33,7 @@ import java.util.List;
|
|||
|
||||
/**
|
||||
* Max out activation:
|
||||
* http://arxiv.org/pdf/1302.4389.pdf
|
||||
* https://arxiv.org/pdf/1302.4389.pdf
|
||||
*
|
||||
* @author Adam Gibson
|
||||
*/
|
||||
|
|
|
@ -32,7 +32,7 @@ import java.util.List;
|
|||
* Introduced in paper:<br>
|
||||
* Fast and Accurate Deep Network Learning by Exponential Linear Units (ELUs)<br>
|
||||
* Djork-Arné Clevert, Thomas Unterthiner, Sepp Hochreiter (2015)<br>
|
||||
* <a href="http://arxiv.org/abs/1511.07289">http://arxiv.org/abs/1511.07289</a>
|
||||
* <a href="https://arxiv.org/abs/1511.07289">https://arxiv.org/abs/1511.07289</a>
|
||||
*
|
||||
* @author Alex Black
|
||||
*/
|
||||
|
|
|
@ -74,6 +74,7 @@ public class DistributionUniform extends DynamicCustomOp {
|
|||
AttrValue v = attributesForNode.get("dtype");
|
||||
dataType = TFGraphMapper.convertType(v.getType());
|
||||
addIArgument(dataType.toInt());
|
||||
addTArgument(0.0, 1.0); //TF version is hardcoded 0 to 1
|
||||
}
|
||||
|
||||
protected void addArgs() {
|
||||
|
|
|
@ -32,7 +32,7 @@ import java.util.Map;
|
|||
|
||||
/**
|
||||
* The AdaMax updater, a variant of Adam.
|
||||
* http://arxiv.org/abs/1412.6980
|
||||
* https://arxiv.org/abs/1412.6980
|
||||
*
|
||||
* @author Justin Long
|
||||
*/
|
||||
|
|
|
@ -30,7 +30,7 @@ import java.util.Map;
|
|||
|
||||
/**
|
||||
* The Adam updater.
|
||||
* http://arxiv.org/abs/1412.6980
|
||||
* https://arxiv.org/abs/1412.6980
|
||||
*
|
||||
* @author Adam Gibson
|
||||
*/
|
||||
|
|
|
@ -28,7 +28,7 @@ import java.util.Map;
|
|||
|
||||
/**
|
||||
* The AdaMax updater, a variant of Adam.
|
||||
* http://arxiv.org/abs/1412.6980
|
||||
* https://arxiv.org/abs/1412.6980
|
||||
*
|
||||
* @author Justin Long
|
||||
*/
|
||||
|
|
|
@ -29,7 +29,7 @@ import java.util.Map;
|
|||
|
||||
/**
|
||||
* The Adam updater.
|
||||
* http://arxiv.org/abs/1412.6980
|
||||
* https://arxiv.org/abs/1412.6980
|
||||
*
|
||||
* @author Adam Gibson
|
||||
*/
|
||||
|
|
|
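The Adam and AdaMax Javadoc blocks above all point at the same paper; the Adam update it describes is, up to notation (AdaMax replaces the second-moment term with an infinity-norm estimate):

```latex
m_t = \beta_1 m_{t-1} + (1 - \beta_1) g_t, \qquad
v_t = \beta_2 v_{t-1} + (1 - \beta_2) g_t^2
\hat{m}_t = \frac{m_t}{1 - \beta_1^t}, \qquad
\hat{v}_t = \frac{v_t}{1 - \beta_2^t}, \qquad
\theta_t = \theta_{t-1} - \alpha \, \frac{\hat{m}_t}{\sqrt{\hat{v}_t} + \epsilon}
```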
@ -46,6 +46,9 @@ public class NDArrayStrings {
|
|||
|
||||
public static final String EMPTY_ARRAY_STR = "[]";
|
||||
|
||||
private static final String[] OPEN_BRACKETS = new String[]{"", "[", "[[", "[[[", "[[[[", "[[[[[", "[[[[[[", "[[[[[[[", "[[[[[[[["};
|
||||
private static final String[] CLOSE_BRACKETS = new String[]{"", "]", "]]", "]]]", "]]]]", "]]]]]", "]]]]]]", "]]]]]]]", "]]]]]]]]"};
|
||||
|
||||
/**
|
||||
* The default number of elements for printing INDArrays (via NDArrayStrings or INDArray.toString)
|
||||
*/
|
||||
|
@ -190,29 +193,29 @@ public class NDArrayStrings {
|
|||
|
||||
private String format(INDArray arr, int offset, boolean summarize) {
|
||||
int rank = arr.rank();
|
||||
if (arr.isScalar()) {
|
||||
if (arr.isScalar() || arr.length() == 1) {
|
||||
int fRank = Math.min(rank, OPEN_BRACKETS.length-1);
|
||||
if (arr.isR()) {
|
||||
//true scalar i.e shape = [] not legacy which is [1,1]
|
||||
double arrElement = arr.getDouble(0);
|
||||
if (!dontOverrideFormat && ((Math.abs(arrElement) < this.minToPrintWithoutSwitching && arrElement != 0) || (Math.abs(arrElement) >= this.maxToPrintWithoutSwitching))) {
|
||||
//switch to scientific notation
|
||||
String asString = localeIndifferentDecimalFormat(scientificFormat).format(arrElement);
|
||||
//from E to small e
|
||||
asString = asString.replace('E', 'e');
|
||||
return asString;
|
||||
return OPEN_BRACKETS[fRank] + asString + CLOSE_BRACKETS[fRank];
|
||||
} else {
|
||||
if (arr.getDouble(0) == 0) return "0";
|
||||
return decimalFormat.format(arr.getDouble(0));
|
||||
if (arr.getDouble(0) == 0) return OPEN_BRACKETS[fRank] + "0" + CLOSE_BRACKETS[fRank];
|
||||
return OPEN_BRACKETS[fRank] + decimalFormat.format(arr.getDouble(0)) + CLOSE_BRACKETS[fRank];
|
||||
}
|
||||
} else if (arr.isZ()) {
|
||||
long arrElement = arr.getLong(0);
|
||||
return String.valueOf(arrElement);
|
||||
return OPEN_BRACKETS[fRank] + arrElement + CLOSE_BRACKETS[fRank];
|
||||
} else if (arr.isB()) {
|
||||
long arrElement = arr.getLong(0);
|
||||
return arrElement == 0 ? "false" : "true";
|
||||
return OPEN_BRACKETS[fRank] + (arrElement == 0 ? "false" : "true") + CLOSE_BRACKETS[fRank];
|
||||
} else if (arr.dataType() == DataType.UTF8){
|
||||
String s = arr.getString(0);
|
||||
return "\"" + s.replaceAll("\n","\\n") + "\"";
|
||||
return OPEN_BRACKETS[fRank] + "\"" + s.replaceAll("\n","\\n") + "\"" + CLOSE_BRACKETS[fRank];
|
||||
} else
|
||||
throw new ND4JIllegalStateException();
|
||||
} else if (rank == 1) {
|
||||
|
@ -246,9 +249,10 @@ public class NDArrayStrings {
|
|||
//hack fix for slice issue with 'f' order
|
||||
if (arr.ordering() == 'f' && arr.rank() > 2 && arr.size(arr.rank() - 1) == 1) {
|
||||
sb.append(format(arr.dup('c').slice(i), offset, summarize));
|
||||
} else if(arr.rank() <= 1 || arr.length() == 1) {
|
||||
sb.append(format(Nd4j.scalar(arr.getDouble(0)),offset,summarize));
|
||||
}
|
||||
// else if(arr.rank() <= 1 || arr.length() == 1) {
|
||||
// sb.append(format(Nd4j.scalar(arr.getDouble(0)),offset,summarize));
|
||||
// }
|
||||
else {
|
||||
sb.append(format(arr.slice(i), offset, summarize));
|
||||
}
|
||||
|
|
|
@ -34,13 +34,6 @@
|
|||
<classifier>${dependency.classifier}</classifier>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-core</artifactId>
|
||||
<version>5.0.2.RELEASE</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.bytedeco</groupId>
|
||||
<artifactId>javacpp</artifactId>
|
||||
|
@ -87,73 +80,10 @@
|
|||
<artifactId>nd4j-api</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.nd4j</groupId>
|
||||
<artifactId>nd4j-jackson</artifactId>
|
||||
<version>${project.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ch.qos.logback</groupId>
|
||||
<artifactId>logback-classic</artifactId>
|
||||
<version>${logback.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.nd4j</groupId>
|
||||
<artifactId>nd4j-tensorflow</artifactId>
|
||||
<version>${project.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- Reflections: required in one of the tests -->
|
||||
<dependency>
|
||||
<groupId>org.reflections</groupId>
|
||||
<artifactId>reflections</artifactId>
|
||||
<version>${reflections.version}</version>
|
||||
<scope>test</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.google.code.findbugs</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<configuration>
|
||||
<environmentVariables>
|
||||
<LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${user.dir}:${libnd4jhome}/blasbuild/cpu/blas/</LD_LIBRARY_PATH>
|
||||
</environmentVariables>
|
||||
<includes>
|
||||
<include>*.java</include>
|
||||
<include>**/*.java</include>
|
||||
<include>**/Test*.java</include>
|
||||
<include>**/*Test.java</include>
|
||||
<include>**/*TestCase.java</include>
|
||||
</includes>
|
||||
<junitArtifactName>junit:junit</junitArtifactName>
|
||||
<systemPropertyVariables>
|
||||
<org.nd4j.linalg.defaultbackend>org.nd4j.linalg.cpu.nativecpu.CpuBackend</org.nd4j.linalg.defaultbackend>
|
||||
<org.nd4j.linalg.tests.backendstorun>org.nd4j.linalg.cpu.nativecpu.CpuBackend</org.nd4j.linalg.tests.backendstorun>
|
||||
</systemPropertyVariables>
|
||||
<!--
|
||||
Maximum heap size was set to 8g, as a minimum required value for tests run.
|
||||
Depending on a build machine, default value is not always enough.
|
||||
-->
|
||||
<argLine>-Ddtype=float -Xmx8g</argLine>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<executions>
|
||||
|
|
|
@ -21680,7 +21680,7 @@ public static final int TAD_THRESHOLD = TAD_THRESHOLD();
|
|||
// #endif
|
||||
|
||||
/**
|
||||
* This operation performs batch normalization of layer, it is based on following article http://arxiv.org/abs/1502.03167.
|
||||
* This operation performs batch normalization of layer, it is based on following article https://arxiv.org/abs/1502.03167.
|
||||
* Expected arguments:
|
||||
* x: input 4D array of shape [bS,iH,iW,iD] (data format = NHWC) or [bS,iD,iH,iW] (data format = NCHW), where
|
||||
* bS - batch size
|
||||
|
|
|
@ -57,12 +57,7 @@
|
|||
</plugins>
|
||||
</build>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-core</artifactId>
|
||||
<version>5.0.2.RELEASE</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
|
@ -105,10 +100,12 @@
|
|||
<artifactId>logback-core</artifactId>
|
||||
<version>${logback.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.nd4j</groupId>
|
||||
<artifactId>nd4j-kafka_${scala.binary.version}</artifactId>
|
||||
<version>${project.version}</version>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-core</artifactId>
|
||||
<version>5.0.2.RELEASE</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
|
|
|
@ -20,16 +20,13 @@ package org.nd4j.linalg;
|
|||
import lombok.val;
|
||||
import org.bytedeco.javacpp.Pointer;
|
||||
import org.junit.After;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.rules.TestName;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
import org.nd4j.config.ND4JEnvironmentVars;
|
||||
import org.nd4j.config.ND4JSystemProperties;
|
||||
import org.nd4j.linalg.api.buffer.DataType;
|
||||
import org.nd4j.linalg.api.environment.Nd4jEnvironment;
|
||||
import org.nd4j.linalg.api.memory.MemoryWorkspace;
|
||||
import org.nd4j.linalg.factory.Nd4j;
|
||||
import org.nd4j.linalg.factory.Nd4jBackend;
|
||||
|
@ -38,7 +35,6 @@ import org.nd4j.linalg.util.ArrayUtil;
|
|||
import org.nd4j.nativeblas.NativeOpsHolder;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import scala.collection.mutable.StringBuilder;
|
||||
|
||||
import java.lang.management.ManagementFactory;
|
||||
import java.util.*;
|
||||
|
|
|
@ -31,6 +31,7 @@ import org.nd4j.linalg.api.buffer.DataType;
|
|||
import org.nd4j.linalg.api.ndarray.INDArray;
|
||||
import org.nd4j.linalg.factory.Nd4j;
|
||||
import org.nd4j.linalg.factory.Nd4jBackend;
|
||||
import org.nd4j.linalg.util.ArrayUtil;
|
||||
|
||||
@RunWith(Parameterized.class)
|
||||
@Slf4j
|
||||
|
@ -57,6 +58,30 @@ public class ToStringTest extends BaseNd4jTest {
|
|||
Nd4j.createFromArray(1, 2, 3, 4, 5, 6, 7, 8).toString(6, true, 1));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testToStringScalars(){
|
||||
DataType[] dataTypes = new DataType[]{DataType.FLOAT, DataType.DOUBLE, DataType.BOOL, DataType.INT, DataType.UINT32};
|
||||
String[] strs = new String[]{"1.0000", "1.0000", "true", "1", "1"};
|
||||
|
||||
for(int dt=0; dt<5; dt++ ) {
|
||||
for (int i = 0; i < 5; i++) {
|
||||
long[] shape = ArrayUtil.nTimes(i, 1L);
|
||||
INDArray scalar = Nd4j.scalar(1.0f).castTo(dataTypes[dt]).reshape(shape);
|
||||
String str = scalar.toString();
|
||||
StringBuilder sb = new StringBuilder();
|
||||
for (int j = 0; j < i; j++) {
|
||||
sb.append("[");
|
||||
}
|
||||
sb.append(strs[dt]);
|
||||
for (int j = 0; j < i; j++) {
|
||||
sb.append("]");
|
||||
}
|
||||
String exp = sb.toString();
|
||||
assertEquals("Rank: " + i + ", DT: " + dataTypes[dt], exp, str);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public char ordering() {
|
||||
return 'c';
|
||||
|
|
|
@ -1495,7 +1495,7 @@ public class ArrayUtil {
|
|||
|
||||
}
|
||||
|
||||
//Credit: http://stackoverflow.com/questions/15533854/converting-byte-array-to-double-array
|
||||
//Credit: https://stackoverflow.com/questions/15533854/converting-byte-array-to-double-array
|
||||
|
||||
/**
|
||||
*
|
||||
|
|
|
@ -107,7 +107,7 @@ public class MathUtils {
|
|||
}
|
||||
|
||||
/**
|
||||
* See: <a href="http://stackoverflow.com/questions/466204/rounding-off-to-nearest-power-of-2">http://stackoverflow.com/questions/466204/rounding-off-to-nearest-power-of-2</a>
|
||||
* See: <a href="https://stackoverflow.com/questions/466204/rounding-off-to-nearest-power-of-2">https://stackoverflow.com/questions/466204/rounding-off-to-nearest-power-of-2</a>
|
||||
*
|
||||
* @param v the number to getFromOrigin the next power of 2 for
|
||||
* @return the next power of 2 for the passed in value
|
||||
|
|
|
@ -29,29 +29,6 @@
|
|||
<name>nd4j-parameter-server-client</name>
|
||||
|
||||
<dependencies>
|
||||
|
||||
<dependency>
|
||||
<groupId>commons-codec</groupId>
|
||||
<artifactId>commons-codec</artifactId>
|
||||
<version>${commons-codec.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpclient</artifactId>
|
||||
<version>${httpclient.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpcore</artifactId>
|
||||
<version>${httpcore.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpmime</artifactId>
|
||||
<version>${httpmime.version}</version>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>com.mashape.unirest</groupId>
|
||||
<artifactId>unirest-java</artifactId>
|
||||
|
@ -72,11 +49,6 @@
|
|||
<artifactId>nd4j-aeron</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-io</groupId>
|
||||
<artifactId>commons-io</artifactId>
|
||||
<version>${commons-io.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.zeroturnaround</groupId>
|
||||
<artifactId>zt-exec</artifactId>
|
||||
|
@ -89,12 +61,6 @@
|
|||
<version>${project.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.nd4j</groupId>
|
||||
<artifactId>nd4j-parameter-server-status_2.11</artifactId>
|
||||
<version>${project.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>ch.qos.logback</groupId>
|
||||
|
|
|
@ -29,7 +29,6 @@ import org.nd4j.linalg.api.buffer.DataType;
|
|||
import org.nd4j.linalg.api.memory.MemoryWorkspace;
|
||||
import org.nd4j.linalg.factory.Nd4j;
|
||||
import org.nd4j.linalg.profiler.ProfilerConfig;
|
||||
import scala.collection.mutable.StringBuilder;
|
||||
|
||||
import java.lang.management.ManagementFactory;
|
||||
import java.util.List;
|
||||
|
|
|
@ -31,7 +31,7 @@ import java.util.concurrent.TimeoutException;
|
|||
/**
|
||||
* Start background daemons for tests
|
||||
* Credit to:
|
||||
* http://stackoverflow.com/questions/636367/executing-a-java-application-in-a-separate-process
|
||||
* https://stackoverflow.com/questions/636367/executing-a-java-application-in-a-separate-process
|
||||
* @author Adam Gibson
|
||||
*/
|
||||
@Slf4j
|
||||
|
|
|
@ -47,66 +47,19 @@
|
|||
<artifactId>nd4j-parameter-server</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
<version>${jodatime.version}</version>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
<version>${commons-lang3.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<artifactId>hibernate-validator</artifactId>
|
||||
<version>${hibernate.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.scala-lang</groupId>
|
||||
<artifactId>scala-library</artifactId>
|
||||
<version>${scala.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.scala-lang</groupId>
|
||||
<artifactId>scala-reflect</artifactId>
|
||||
<version>${scala.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.yaml</groupId>
|
||||
<artifactId>snakeyaml</artifactId>
|
||||
<version>${snakeyaml.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-annotations</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.datatype</groupId>
|
||||
<artifactId>jackson-datatype-jdk8</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.datatype</groupId>
|
||||
<artifactId>jackson-datatype-jsr310</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.typesafe</groupId>
|
||||
<artifactId>config</artifactId>
|
||||
<version>${typesafe.config.version}</version>
|
||||
<groupId>com.typesafe.play</groupId>
|
||||
<artifactId>play-netty-server_2.11</artifactId>
|
||||
<version>${playframework.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.typesafe.play</groupId>
|
||||
<artifactId>play-java_2.11</artifactId>
|
||||
|
@ -142,24 +95,6 @@
|
|||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>net.jodah</groupId>
|
||||
<artifactId>typetools</artifactId>
|
||||
<version>${jodah.typetools.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.typesafe.play</groupId>
|
||||
<artifactId>play-netty-server_2.11</artifactId>
|
||||
<version>${playframework.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<profiles>
|
||||
|
|
|
@ -34,39 +34,6 @@
|
|||
<artifactId>nd4j-parameter-server-model</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-codec</groupId>
|
||||
<artifactId>commons-codec</artifactId>
|
||||
<version>${commons-codec.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpclient</artifactId>
|
||||
<version>${httpclient.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpcore</artifactId>
|
||||
<version>${httpcore.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents</groupId>
|
||||
<artifactId>httpmime</artifactId>
|
||||
<version>${httpmime.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.mashape.unirest</groupId>
|
||||
<artifactId>unirest-java</artifactId>
|
||||
<version>${unirest.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.nd4j</groupId>
|
||||
<artifactId>nd4j-jackson</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-log4j12</artifactId>
|
||||
|
@ -76,16 +43,20 @@
|
|||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.nd4j</groupId>
|
||||
<artifactId>nd4j-aeron</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.beust</groupId>
|
||||
<artifactId>jcommander</artifactId>
|
||||
<version>${jcommander.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.nd4j</groupId>
|
||||
<artifactId>nd4j-aeron</artifactId>
|
||||
<version>${project.version}</version>
|
||||
<groupId>com.mashape.unirest</groupId>
|
||||
<artifactId>unirest-java</artifactId>
|
||||
<version>${unirest.version}</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@ import java.io.IOException;
|
|||
import java.net.*;
|
||||
|
||||
/**
|
||||
* Credit: http://stackoverflow.com/questions/5226905/test-if-remote-port-is-in-use
|
||||
* Credit: https://stackoverflow.com/questions/5226905/test-if-remote-port-is-in-use
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
|
|
@ -74,12 +74,14 @@
|
|||
<groupId>ch.qos.logback</groupId>
|
||||
<artifactId>logback-classic</artifactId>
|
||||
<version>${logback.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>ch.qos.logback</groupId>
|
||||
<artifactId>logback-core</artifactId>
|
||||
<version>${logback.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
|
|
|
@ -39,16 +39,6 @@
|
|||
<artifactId>nd4j-api</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.carrotsearch</groupId>
|
||||
<artifactId>hppc</artifactId>
|
||||
<version>${hppc.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
<version>${jodatime.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.arrow</groupId>
|
||||
<artifactId>arrow-vector</artifactId>
|
||||
|
|
|
@ -94,26 +94,7 @@
|
|||
<version>${jkserializers.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>commons-codec</groupId>
|
||||
<artifactId>commons-codec</artifactId>
|
||||
<version>${commons-codec.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-io</groupId>
|
||||
<artifactId>commons-io</artifactId>
|
||||
<version>${commons-io.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
<version>${commons-lang3.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.ow2.asm</groupId>
|
||||
<artifactId>asm</artifactId>
|
||||
<version>${asm.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.spark</groupId>
|
||||
<artifactId>spark-core_2.11</artifactId>
|
||||
|
|
|
@ -44,7 +44,7 @@ import java.util.ArrayList;
|
|||
*
|
||||
* DQN or Deep Q-Learning in the Discrete domain
|
||||
*
|
||||
* http://arxiv.org/abs/1312.5602
|
||||
* https://arxiv.org/abs/1312.5602
|
||||
*
|
||||
*/
|
||||
public abstract class QLearningDiscrete<O extends Encodable> extends QLearning<O, Integer, DiscreteSpace> {
|
||||
|
|
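For context, the Q-learning target that the cited DQN paper regresses toward is, up to notation, with theta_{i-1} denoting the parameters from the previous iteration:

```latex
y_i = r + \gamma \max_{a'} Q(s', a'; \theta_{i-1}), \qquad
L_i(\theta_i) = \mathbb{E}\big[(y_i - Q(s, a; \theta_i))^2\big]
```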