Update shaded Jackson version to 2.10.1 (#82)

* Update shaded Jackson version to 2.10.1

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* Remove no longer needed scala compiler plugin from UI

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* Fix op name for BitwiseAnd op

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* TimeDistributedLayer mask array fix + test

Signed-off-by: AlexDBlack <blacka101@gmail.com>
Branch: master
Alex Black 2019-11-26 19:24:38 +11:00, committed by GitHub
Parent: 4b50b920c7
Commit: 8843c7377a
6 changed files with 101 additions and 36 deletions

RnnGradientChecks.java

@@ -21,12 +21,14 @@ import org.deeplearning4j.TestUtils;
 import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
 import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
 import org.deeplearning4j.nn.conf.inputs.InputType;
+import org.deeplearning4j.nn.conf.layers.DenseLayer;
 import org.deeplearning4j.nn.conf.layers.LSTM;
 import org.deeplearning4j.nn.conf.layers.OutputLayer;
 import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
 import org.deeplearning4j.nn.conf.layers.recurrent.Bidirectional;
 import org.deeplearning4j.nn.conf.layers.recurrent.LastTimeStep;
 import org.deeplearning4j.nn.conf.layers.recurrent.SimpleRnn;
+import org.deeplearning4j.nn.conf.layers.recurrent.TimeDistributed;
 import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
 import org.deeplearning4j.nn.weights.WeightInit;
 import org.junit.Ignore;
@@ -289,4 +291,66 @@ public class RnnGradientChecks extends BaseDL4JTest {
             }
         }
     }
+
+    @Test
+    public void testTimeDistributedDense() {
+        int nIn = 3;
+        int nOut = 5;
+        int tsLength = 4;
+        int layerSize = 8;
+
+        Random r = new Random(12345);
+        for (int mb : new int[]{1, 3}) {
+            for (boolean inputMask : new boolean[]{false, true}) {
+                INDArray in = Nd4j.rand(new int[]{mb, nIn, tsLength});
+                INDArray labels = TestUtils.randomOneHotTimeSeries(mb, nOut, tsLength);
+                String maskType = (inputMask ? "inputMask" : "none");
+
+                INDArray inMask = null;
+                if (inputMask) {
+                    inMask = Nd4j.ones(mb, tsLength);
+                    for (int i = 0; i < mb; i++) {
+                        int firstMaskedStep = tsLength - 1 - i;
+                        if (firstMaskedStep == 0) {
+                            firstMaskedStep = tsLength;
+                        }
+                        for (int j = firstMaskedStep; j < tsLength; j++) {
+                            inMask.putScalar(i, j, 0.0);
+                        }
+                    }
+                }
+
+                String name = "testTimeDistributedDense() - mb=" + mb + ", tsLength = " + tsLength + ", maskType=" + maskType;
+                if (PRINT_RESULTS) {
+                    System.out.println("Starting test: " + name);
+                }
+
+                MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
+                        .dataType(DataType.DOUBLE)
+                        .activation(Activation.TANH)
+                        .updater(new NoOp())
+                        .weightInit(WeightInit.XAVIER)
+                        .list()
+                        .layer(new LSTM.Builder().nOut(layerSize).build())
+                        .layer(new TimeDistributed(new DenseLayer.Builder().nOut(layerSize).activation(Activation.SOFTMAX).build(), 2))
+                        .layer(new RnnOutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
+                                .lossFunction(LossFunctions.LossFunction.MCXENT).build())
+                        .setInputType(InputType.recurrent(nIn))
+                        .build();
+
+                MultiLayerNetwork net = new MultiLayerNetwork(conf);
+                net.init();
+
+                boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
+                        DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, in, labels, inMask, null, true, 16);
+                assertTrue(name, gradOK);
+                TestUtils.testModelSerialization(net);
+            }
+        }
+    }
 }

DTypeTests.java

@@ -16,6 +16,7 @@
 package org.deeplearning4j.nn.dtypes;

+import org.deeplearning4j.nn.conf.layers.recurrent.TimeDistributed;
 import org.nd4j.shade.guava.collect.ImmutableSet;
 import org.nd4j.shade.guava.reflect.ClassPath;
 import lombok.extern.slf4j.Slf4j;
@@ -811,7 +812,8 @@ public class DTypeTests extends BaseDL4JTest {
                 .layer(new DenseLayer.Builder().nOut(5).build())
                 .layer(new GravesBidirectionalLSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build())
                 .layer(new Bidirectional(new LSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build()))
-                .layer(new SimpleRnn.Builder().nIn(10).nOut(5).build())
+                .layer(new TimeDistributed(new DenseLayer.Builder().nIn(10).nOut(5).activation(Activation.TANH).build(), 2))
+                .layer(new SimpleRnn.Builder().nIn(5).nOut(5).build())
                 .layer(new MaskZeroLayer.Builder().underlying(new SimpleRnn.Builder().nIn(5).nOut(5).build()).maskValue(0.0).build())
                 .layer(secondLast)
                 .layer(ol)

TimeDistributedLayer.java

@@ -1,10 +1,12 @@
 package org.deeplearning4j.nn.layers.recurrent;

 import org.deeplearning4j.nn.api.Layer;
+import org.deeplearning4j.nn.api.MaskState;
 import org.deeplearning4j.nn.gradient.Gradient;
 import org.deeplearning4j.nn.layers.wrapper.BaseWrapperLayer;
 import org.deeplearning4j.nn.workspace.ArrayType;
 import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr;
+import org.deeplearning4j.util.TimeSeriesUtils;
 import org.nd4j.linalg.api.ndarray.INDArray;
 import org.nd4j.linalg.primitives.Pair;
 import org.nd4j.linalg.util.ArrayUtil;
@@ -107,4 +109,30 @@ public class TimeDistributedLayer extends BaseWrapperLayer {
         INDArray permuted = reshaped.permute(permute);
         return permuted;
     }
+
+    @Override
+    public void setMaskArray(INDArray maskArray) {
+        if (maskArray == null) {
+            underlying.setMaskArray(null);
+        } else {
+            INDArray reshaped = TimeSeriesUtils.reshapeTimeSeriesMaskToVector(maskArray, LayerWorkspaceMgr.noWorkspaces(), ArrayType.ACTIVATIONS);
+            underlying.setMaskArray(reshaped);
+        }
+    }
+
+    @Override
+    public Pair<INDArray, MaskState> feedForwardMaskArray(INDArray maskArray, MaskState currentMaskState, int minibatchSize) {
+        if (maskArray == null) {
+            return underlying.feedForwardMaskArray(null, currentMaskState, minibatchSize);
+        } else {
+            INDArray reshaped = TimeSeriesUtils.reshapeTimeSeriesMaskToVector(maskArray, LayerWorkspaceMgr.noWorkspaces(), ArrayType.ACTIVATIONS);
+            Pair<INDArray, MaskState> p = underlying.feedForwardMaskArray(reshaped, currentMaskState, minibatchSize);
+            if (p == null || p.getFirst() == null) {
+                return p;
+            }
+            INDArray reshaped2 = TimeSeriesUtils.reshapeVectorToTimeSeriesMask(p.getFirst(), (int) maskArray.size(0));
+            p.setFirst(reshaped2);
+            return p;
+        }
+    }
 }
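
Aside (not part of the commit): the fix works because the TimeSeriesUtils helpers flatten a [minibatch, timeSeriesLength] mask into a [minibatch * timeSeriesLength, 1] column vector, matching the row-per-time-step reshape TimeDistributedLayer applies to its activations, and then restore the 2d shape on the way out. A minimal sketch of that round trip, using only the utilities imported in the diff above (the class name here is ours):

import org.deeplearning4j.nn.workspace.ArrayType;
import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr;
import org.deeplearning4j.util.TimeSeriesUtils;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class MaskReshapeSketch {
    public static void main(String[] args) {
        // A [2, 3] mask: example 0 has 2 valid time steps, example 1 has 1
        INDArray mask = Nd4j.create(new double[][]{{1, 1, 0}, {1, 0, 0}});

        // Flatten to [6, 1] - one mask value per reshaped row, which is what
        // the wrapped feed-forward layer sees in setMaskArray above
        INDArray flat = TimeSeriesUtils.reshapeTimeSeriesMaskToVector(
                mask, LayerWorkspaceMgr.noWorkspaces(), ArrayType.ACTIVATIONS);

        // Restore [2, 3] for downstream layers, as in feedForwardMaskArray
        INDArray back = TimeSeriesUtils.reshapeVectorToTimeSeriesMask(flat, 2);
        System.out.println(java.util.Arrays.toString(flat.shape()) + " -> "
                + java.util.Arrays.toString(back.shape()));
    }
}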

pom.xml (UI module)

@@ -34,36 +34,6 @@
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-compiler-plugin</artifactId>
             </plugin>
-            <plugin>
-                <groupId>net.alchim31.maven</groupId>
-                <artifactId>scala-maven-plugin</artifactId>
-                <version>${maven-scala-plugin.version}</version>
-                <configuration>
-                    <args>
-                        <arg>-deprecation</arg>
-                        <arg>-explaintypes</arg>
-                        <arg>-nobootcp</arg>
-                    </args>
-                </configuration>
-                <executions>
-                    <execution>
-                        <id>scala-compile-first</id>
-                        <phase>process-resources</phase>
-                        <goals>
-                            <goal>add-source</goal>
-                            <goal>compile</goal>
-                        </goals>
-                    </execution>
-                    <execution>
-                        <id>scala-test-compile</id>
-                        <phase>process-test-resources</phase>
-                        <goals>
-                            <goal>add-source</goal>
-                            <goal>testCompile</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
         </plugins>
     </build>
     <modules>

BitwiseAnd.java

@@ -1,5 +1,6 @@
 /* ******************************************************************************
  * Copyright (c) 2015-2018 Skymind, Inc.
+ * Copyright (c) 2019 Konduit K.K.
  *
  * This program and the accompanying materials are made available under the
  * terms of the Apache License, Version 2.0 which is available at
@@ -50,7 +51,7 @@ public class BitwiseAnd extends BaseDynamicTransformOp {

     @Override
     public String opName() {
-        return "BitwiseAnd";
+        return "bitwise_and";
     }
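
Aside (not part of the commit): opName() is the string used to resolve this Java op against the native libnd4j op registry, which registers the op under the snake_case name "bitwise_and", so the old camel-case name did not match. A small usage sketch, assuming the INDArray constructor and package shown here (both are assumptions, not taken from the diff):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.impl.transforms.custom.BitwiseAnd; // assumed package
import org.nd4j.linalg.factory.Nd4j;

public class BitwiseAndSketch {
    public static void main(String[] args) {
        INDArray x = Nd4j.createFromArray(12, 10); // 0b1100, 0b1010
        INDArray y = Nd4j.createFromArray(10, 6);  // 0b1010, 0b0110

        // Nd4j.exec resolves the op by its opName() string against the
        // native registry, hence the rename to "bitwise_and"
        INDArray out = Nd4j.exec(new BitwiseAnd(x, y))[0]; // assumed constructor
        System.out.println(out); // expected: [8, 2]
    }
}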

pom.xml (root)

@@ -328,9 +328,9 @@
         <slf4j.version>1.7.21</slf4j.version>
         <junit.version>4.12</junit.version>
         <logback.version>1.2.3</logback.version>
-        <jackson.version>2.9.9</jackson.version>
-        <jackson.databind.version>2.9.9.3</jackson.databind.version>
-        <shaded.snakeyaml.version>1.23</shaded.snakeyaml.version>
+        <jackson.version>2.10.1</jackson.version>
+        <jackson.databind.version>2.10.1</jackson.databind.version>
+        <shaded.snakeyaml.version>1.24</shaded.snakeyaml.version>
         <geo.jackson.version>2.8.7</geo.jackson.version>
         <lombok.version>1.18.2</lombok.version>
         <cleartk.version>2.0.0</cleartk.version>
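
Aside (not part of the commit): these properties version the Jackson and SnakeYAML copies that ND4J relocates under the org.nd4j.shade namespace, so the bump upgrades only the shaded artifacts, not any application-level Jackson. A minimal sketch of code written against the shaded namespace (the class name is ours):

import org.nd4j.shade.jackson.databind.ObjectMapper;

public class ShadedJacksonSketch {
    public static void main(String[] args) throws Exception {
        // The relocated package means DL4J's JSON (de)serialization cannot
        // clash with a different Jackson version on the application classpath
        ObjectMapper mapper = new ObjectMapper();
        System.out.println(mapper.writeValueAsString(new int[]{1, 2, 3})); // [1,2,3]
    }
}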