Various DL4J/ND4J fixes (#81)

* #7954 Force refresh of UI when switching tabs on overview page

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* #8017 Concurrent modification exception (synchronize) fix

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* #8033 Don't initialize updater in middle of writing memory crash dump

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* #8208 Fix shape checks for ND4J int[] creator methods

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* #6385 #7992 Keras import naming fixes + cleanup

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* #8016 Upsampling3D - add NDHWC format support

Signed-off-by: AlexDBlack <blacka101@gmail.com>
master
Alex Black 2019-07-26 00:05:24 +10:00 committed by AlexDBlack
parent 7c5c84bea8
commit fad8da878f
13 changed files with 204 additions and 117 deletions

View File

@@ -386,63 +386,64 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
for (Activation afn : activations) {
for (int miniBatchSize : minibatchSizes) {
for (ConvolutionMode mode : modes) {
for(Convolution3D.DataFormat df : Convolution3D.DataFormat.values()) {
int outDepth = depth * upsamplingSize[0];
int outHeight = height * upsamplingSize[1];
int outWidth = width * upsamplingSize[2];
int outDepth = depth * upsamplingSize[0];
int outHeight = height * upsamplingSize[1];
int outWidth = width * upsamplingSize[2];
INDArray input = Nd4j.rand(new int[]{miniBatchSize, convNIn, depth, height, width});
INDArray labels = Nd4j.zeros(miniBatchSize, finalNOut);
for (int i = 0; i < miniBatchSize; i++) {
labels.putScalar(new int[]{i, i % finalNOut}, 1.0);
}
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
.dataType(DataType.DOUBLE)
.updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL)
.dist(new NormalDistribution(0, 1))
.seed(12345)
.list()
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1)
.nIn(convNIn).nOut(convNOut).hasBias(false)
.convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
.build())
.layer(1, new Upsampling3D.Builder(upsamplingSize[0]).build())
.layer(2, new DenseLayer.Builder().nOut(denseNOut).build())
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
.inputPreProcessor(2,
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
convNOut, true))
.setInputType(InputType.convolutional3D(depth, height, width, convNIn)).build();
String json = conf.toJson();
MultiLayerConfiguration c2 = MultiLayerConfiguration.fromJson(json);
assertEquals(conf, c2);
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
String msg = "Minibatch size = " + miniBatchSize + ", activationFn=" + afn
+ ", kernel = " + Arrays.toString(upsamplingSize) + ", mode = " + mode.toString()
+ ", input depth " + depth + ", input height " + height
+ ", input width " + width;
if (PRINT_RESULTS) {
log.info(msg);
for (int j = 0; j < net.getnLayers(); j++) {
log.info("Layer " + j + " # params: " + net.getLayer(j).numParams());
INDArray input = df == Convolution3D.DataFormat.NCDHW ? Nd4j.rand(miniBatchSize, convNIn, depth, height, width) : Nd4j.rand(miniBatchSize, depth, height, width, convNIn);
INDArray labels = Nd4j.zeros(miniBatchSize, finalNOut);
for (int i = 0; i < miniBatchSize; i++) {
labels.putScalar(new int[]{i, i % finalNOut}, 1.0);
}
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
.dataType(DataType.DOUBLE)
.updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL)
.dist(new NormalDistribution(0, 1))
.seed(12345)
.list()
.layer(0, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1)
.nIn(convNIn).nOut(convNOut).hasBias(false)
.convolutionMode(mode).dataFormat(df)
.build())
.layer(1, new Upsampling3D.Builder(upsamplingSize[0]).dataFormat(df).build())
.layer(2, new DenseLayer.Builder().nOut(denseNOut).build())
.layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
.activation(Activation.SOFTMAX).nOut(finalNOut).build())
.inputPreProcessor(2,
new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
convNOut, true))
.setInputType(InputType.convolutional3D(df, depth, height, width, convNIn)).build();
String json = conf.toJson();
MultiLayerConfiguration c2 = MultiLayerConfiguration.fromJson(json);
assertEquals(conf, c2);
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
String msg = "Minibatch size = " + miniBatchSize + ", activationFn=" + afn
+ ", kernel = " + Arrays.toString(upsamplingSize) + ", mode = " + mode.toString()
+ ", input depth " + depth + ", input height " + height
+ ", input width " + width;
if (PRINT_RESULTS) {
log.info(msg);
for (int j = 0; j < net.getnLayers(); j++) {
log.info("Layer " + j + " # params: " + net.getLayer(j).numParams());
}
}
boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS,
DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS,
RETURN_ON_FIRST_FAILURE, input, labels);
assertTrue(msg, gradOK);
TestUtils.testModelSerialization(net);
}
boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS,
DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS,
RETURN_ON_FIRST_FAILURE, input, labels);
assertTrue(msg, gradOK);
TestUtils.testModelSerialization(net);
}
}
}

View File

@@ -32,6 +32,17 @@ import java.util.Map;
*/
@Slf4j
public class KerasOptimizerUtils {
protected static final String LR = "lr";
protected static final String LR2 = "learning_rate";
protected static final String EPSILON = "epsilon";
protected static final String MOMENTUM = "momentum";
protected static final String BETA_1 = "beta_1";
protected static final String BETA_2 = "beta_2";
protected static final String DECAY = "decay";
protected static final String RHO = "rho";
protected static final String SCHEDULE_DECAY = "schedule_decay";
/**
* Map Keras optimizer to DL4J IUpdater.
*
@@ -55,11 +66,11 @@ public class KerasOptimizerUtils {
switch (optimizerName) {
case "Adam": {
double lr = (double) optimizerParameters.get("lr");
double beta1 = (double) optimizerParameters.get("beta_1");
double beta2 = (double) optimizerParameters.get("beta_2");
double epsilon = (double) optimizerParameters.get("epsilon");
double decay = (double) optimizerParameters.get("decay");
double lr = (double) (optimizerParameters.containsKey(LR) ? optimizerParameters.get(LR) : optimizerParameters.get(LR2));
double beta1 = (double) optimizerParameters.get(BETA_1);
double beta2 = (double) optimizerParameters.get(BETA_2);
double epsilon = (double) optimizerParameters.get(EPSILON);
double decay = (double) optimizerParameters.get(DECAY);
dl4jOptimizer = new Adam.Builder()
.beta1(beta1).beta2(beta2)
@@ -69,9 +80,9 @@ public class KerasOptimizerUtils {
break;
}
case "Adadelta": {
double rho = (double) optimizerParameters.get("rho");
double epsilon = (double) optimizerParameters.get("epsilon");
// double decay = (double) optimizerParameters.get("decay"); No decay in DL4J Adadelta
double rho = (double) optimizerParameters.get(RHO);
double epsilon = (double) optimizerParameters.get(EPSILON);
// double decay = (double) optimizerParameters.get(DECAY); No decay in DL4J Adadelta
dl4jOptimizer = new AdaDelta.Builder()
.epsilon(epsilon).rho(rho)
@@ -79,9 +90,9 @@ public class KerasOptimizerUtils {
break;
}
case "Adgrad": {
double lr = (double) optimizerParameters.get("lr");
double epsilon = (double) optimizerParameters.get("epsilon");
double decay = (double) optimizerParameters.get("decay");
double lr = (double) (optimizerParameters.containsKey(LR) ? optimizerParameters.get(LR) : optimizerParameters.get(LR2));
double epsilon = (double) optimizerParameters.get(EPSILON);
double decay = (double) optimizerParameters.get(DECAY);
dl4jOptimizer = new AdaGrad.Builder()
.epsilon(epsilon).learningRate(lr)
@@ -90,20 +101,20 @@ public class KerasOptimizerUtils {
break;
}
case "Adamax": {
double lr = (double) optimizerParameters.get("lr");
double beta1 = (double) optimizerParameters.get("beta_1");
double beta2 = (double) optimizerParameters.get("beta_2");
double epsilon = (double) optimizerParameters.get("epsilon");
double lr = (double) (optimizerParameters.containsKey(LR) ? optimizerParameters.get(LR) : optimizerParameters.get(LR2));
double beta1 = (double) optimizerParameters.get(BETA_1);
double beta2 = (double) optimizerParameters.get(BETA_2);
double epsilon = (double) optimizerParameters.get(EPSILON);
dl4jOptimizer = new AdaMax(lr, beta1, beta2, epsilon);
break;
}
case "Nadam": {
double lr = (double) optimizerParameters.get("lr");
double beta1 = (double) optimizerParameters.get("beta_1");
double beta2 = (double) optimizerParameters.get("beta_2");
double epsilon = (double) optimizerParameters.get("epsilon");
double scheduleDecay = (double) optimizerParameters.get("schedule_decay");
double lr = (double) (optimizerParameters.containsKey(LR) ? optimizerParameters.get(LR) : optimizerParameters.get(LR2));
double beta1 = (double) optimizerParameters.get(BETA_1);
double beta2 = (double) optimizerParameters.get(BETA_2);
double epsilon = (double) optimizerParameters.get(EPSILON);
double scheduleDecay = (double) optimizerParameters.get(SCHEDULE_DECAY);
dl4jOptimizer = new Nadam.Builder()
.beta1(beta1).beta2(beta2)
@@ -114,15 +125,10 @@ public class KerasOptimizerUtils {
break;
}
case "SGD": {
double lr = (double) optimizerParameters.get("lr");
double momentum = 0.0;
try {
momentum = (double) optimizerParameters.get("epsilon");
} catch (Exception e) {
log.warn("couldn't read momentum parameter");
}
double lr = (double) (optimizerParameters.containsKey(LR) ? optimizerParameters.get(LR) : optimizerParameters.get(LR2));
double momentum = (double) (optimizerParameters.containsKey(EPSILON) ? optimizerParameters.get(EPSILON) : optimizerParameters.get(MOMENTUM));
double decay = (double) optimizerParameters.get("decay");
double decay = (double) optimizerParameters.get(DECAY);
dl4jOptimizer = new Nesterovs.Builder()
.momentum(momentum).learningRate(lr)
@@ -131,10 +137,10 @@ public class KerasOptimizerUtils {
break;
}
case "RMSprop": {
double lr = (double) optimizerParameters.get("lr");
double rho = (double) optimizerParameters.get("rho");
double epsilon = (double) optimizerParameters.get("epsilon");
double decay = (double) optimizerParameters.get("decay");
double lr = (double) (optimizerParameters.containsKey(LR) ? optimizerParameters.get(LR) : optimizerParameters.get(LR2));
double rho = (double) optimizerParameters.get(RHO);
double epsilon = (double) optimizerParameters.get(EPSILON);
double decay = (double) optimizerParameters.get(DECAY);
dl4jOptimizer = new RmsProp.Builder()
.epsilon(epsilon).rmsDecay(rho).learningRate(lr)

View File

@@ -45,10 +45,14 @@ import java.util.Map;
public class Upsampling3D extends BaseUpsamplingLayer {
protected int[] size;
protected Convolution3D.DataFormat dataFormat = Convolution3D.DataFormat.NCDHW; //Default to NCDHW for 1.0.0-beta4 and earlier, when no config existed (NCDHW only)
protected Upsampling3D(UpsamplingBuilder builder) {
protected Upsampling3D(Builder builder) {
super(builder);
this.size = builder.size;
this.dataFormat = builder.dataFormat;
}
@Override
@@ -124,10 +128,32 @@ public class Upsampling3D extends BaseUpsamplingLayer {
@NoArgsConstructor
public static class Builder extends UpsamplingBuilder<Builder> {
protected Convolution3D.DataFormat dataFormat = Convolution3D.DataFormat.NCDHW;
/**
* @param size Upsampling layer size (most common value: 2)
*/
public Builder(int size) {
super(new int[] {size, size, size});
}
/**
* @param dataFormat Data format - see {@link Convolution3D.DataFormat} for more details
* @param size Upsampling layer size (most common value: 2)
*/
public Builder(@NonNull Convolution3D.DataFormat dataFormat, int size){
super(new int[]{size, size, size});
this.dataFormat = dataFormat;
}
/**
* Sets the DataFormat. See {@link Convolution3D.DataFormat} for more details
*/
public Builder dataFormat(@NonNull Convolution3D.DataFormat dataFormat){
this.dataFormat = dataFormat;
return this;
}
/**
* Upsampling size as int, so same upsampling size is used for depth, width and height
*

View File

@@ -2896,7 +2896,7 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork {
solver.getOptimizer().setUpdaterComputationGraph(new ComputationGraphUpdater(this));
}
if(solver != null) {
return solver.getOptimizer().getComputationGraphUpdater();
return solver.getOptimizer().getComputationGraphUpdater(initializeIfAbsent);
}
return null;
}

View File

@@ -67,18 +67,36 @@ public class Upsampling3D extends AbstractLayer<org.deeplearning4j.nn.conf.layer
public Pair<Gradient, INDArray> backpropGradient(INDArray epsilon, LayerWorkspaceMgr workspaceMgr) {
assertInputSet(true);
boolean ncdhw = layerConf().getDataFormat() == org.deeplearning4j.nn.conf.layers.Convolution3D.DataFormat.NCDHW;
// FIXME: int cast
// Assumes NCDHW order
int miniBatch = (int) input.size(0);
int inChannels = (int) input.size(1);
int inD = (int) input.size(2);
int inH = (int) input.size(3);
int inW = (int) input.size(4);
int inChannels, inD, inH, inW;
int[] intArgs;
if(ncdhw){
inChannels = (int) input.size(1);
inD = (int) input.size(2);
inH = (int) input.size(3);
inW = (int) input.size(4);
intArgs = new int[] {1}; // 1 is channels first
} else {
inD = (int) input.size(1);
inH = (int) input.size(2);
inW = (int) input.size(3);
inChannels = (int) input.size(4);
intArgs = new int[] {0}; // 0 is channels last
}
int[] intArgs = new int[] {1}; // 1 is channels first
INDArray reshapedEpsilon = workspaceMgr.createUninitialized(
ArrayType.ACTIVATION_GRAD, epsilon.dataType(), new long[]{miniBatch, inChannels, inD, inH, inW}, 'c');
INDArray epsOut;
if(ncdhw){
epsOut = workspaceMgr.createUninitialized(
ArrayType.ACTIVATION_GRAD, epsilon.dataType(), new long[]{miniBatch, inChannels, inD, inH, inW}, 'c');
} else {
epsOut = workspaceMgr.createUninitialized(
ArrayType.ACTIVATION_GRAD, epsilon.dataType(), new long[]{miniBatch, inD, inH, inW, inChannels}, 'c');
}
Gradient gradient = new DefaultGradient();
@@ -86,13 +104,13 @@ public class Upsampling3D extends AbstractLayer<org.deeplearning4j.nn.conf.layer
CustomOp op = DynamicCustomOp.builder("upsampling3d_bp")
.addIntegerArguments(intArgs)
.addInputs(input, epsilon)
.addOutputs(reshapedEpsilon)
.addOutputs(epsOut)
.callInplace(false)
.build();
Nd4j.getExecutioner().exec(op);
reshapedEpsilon = backpropDropOutIfPresent(reshapedEpsilon);
return new Pair<>(gradient, reshapedEpsilon);
epsOut = backpropDropOutIfPresent(epsOut);
return new Pair<>(gradient, epsOut);
}
protected int[] getSize() {
@@ -115,32 +133,51 @@ public class Upsampling3D extends AbstractLayer<org.deeplearning4j.nn.conf.layer
return preOutput;
}
long miniBatch = (int) input.size(0);
long inChannels = (int) input.size(1);
long inD = (int) input.size(2);
long inH = (int) input.size(3);
long inW = (int) input.size(4);
boolean ncdhw = layerConf().getDataFormat() == org.deeplearning4j.nn.conf.layers.Convolution3D.DataFormat.NCDHW;
// FIXME: int cast
int miniBatch = (int) input.size(0);
int inChannels, inD, inH, inW;
int[] intArgs;
int[] size = getSize();
if(ncdhw){
inChannels = (int) input.size(1);
inD = (int) input.size(2);
inH = (int) input.size(3);
inW = (int) input.size(4);
intArgs = new int[] {size[0], size[1], size[2], 1}; // 1 is channels first
} else {
inD = (int) input.size(1);
inH = (int) input.size(2);
inW = (int) input.size(3);
inChannels = (int) input.size(4);
intArgs = new int[] {size[0], size[1], size[2], 0}; // 0 is channels last
}
long outD = inD * size[0];
long outH = inH * size[1];
long outW = inW * size[2];
int[] intArgs = new int[] {size[0], size[1], size[2], 1}; // 1 is channels first
INDArray output;
if(ncdhw){
output = workspaceMgr.createUninitialized(ArrayType.ACTIVATIONS,
input.dataType(), new long[]{miniBatch, inChannels, outD, outH, outW}, 'c');
} else {
output = workspaceMgr.createUninitialized(ArrayType.ACTIVATIONS,
input.dataType(), new long[]{miniBatch, outD, outH, outW, inChannels}, 'c');
}
INDArray reshapedOutput = workspaceMgr.createUninitialized(ArrayType.ACTIVATIONS,
input.dataType(), new long[]{miniBatch, inChannels, outD, outH, outW}, 'c');
CustomOp upsampling = DynamicCustomOp.builder("upsampling3d")
.addIntegerArguments(intArgs)
.addInputs(input)
.addOutputs(reshapedOutput)
.addOutputs(output)
.callInplace(false)
.build();
Nd4j.getExecutioner().exec(upsampling);
return reshapedOutput;
return output;
}
@Override

View File

@@ -3172,7 +3172,7 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, Neura
}
}
if(solver != null) {
return solver.getOptimizer().getUpdater();
return solver.getOptimizer().getUpdater(initializeIfReq);
}
return null;
}

View File

@@ -42,8 +42,12 @@ public interface ConvexOptimizer extends Serializable {
Updater getUpdater();
Updater getUpdater(boolean initializeIfReq);
ComputationGraphUpdater getComputationGraphUpdater();
ComputationGraphUpdater getComputationGraphUpdater(boolean initializeIfReq);
void setUpdater(Updater updater);
void setUpdaterComputationGraph(ComputationGraphUpdater updater);

View File

@@ -115,7 +115,12 @@ public abstract class BaseOptimizer implements ConvexOptimizer {
@Override
public Updater getUpdater() {
if (updater == null) {
return getUpdater(true);
}
@Override
public Updater getUpdater(boolean initializeIfReq) {
if (updater == null && initializeIfReq) {
updater = UpdaterCreator.getUpdater(model);
}
return updater;
@@ -130,7 +135,12 @@ public abstract class BaseOptimizer implements ConvexOptimizer {
@Override
public ComputationGraphUpdater getComputationGraphUpdater() {
if (computationGraphUpdater == null && model instanceof ComputationGraph) {
return getComputationGraphUpdater(true);
}
@Override
public ComputationGraphUpdater getComputationGraphUpdater(boolean initializIfReq) {
if (computationGraphUpdater == null && model instanceof ComputationGraph && initializIfReq) {
computationGraphUpdater = new ComputationGraphUpdater((ComputationGraph) model);
}
return computationGraphUpdater;

View File

@@ -205,7 +205,7 @@ public class CrashReportingUtil {
StringBuilder sb = genericMemoryStatus();
int bytesPerElement;
switch (Nd4j.dataType()){
switch (isMLN ? mln.params().dataType() : cg.params().dataType()){
case DOUBLE:
bytesPerElement = 8;
break;

View File

@@ -200,7 +200,7 @@ public class TrainModule implements UIModule {
* List training sessions
* @return HTML list of training sessions
*/
private Result listSessions() {
private synchronized Result listSessions() {
StringBuilder sb = new StringBuilder("<!DOCTYPE html>\n" +
"<html lang=\"en\">\n" +
"<head>\n" +
@@ -464,7 +464,7 @@
* @param sessionId session ID
* @return info for session as JSON
*/
private Result sessionInfoForSession(String sessionId) {
private synchronized Result sessionInfoForSession(String sessionId) {
Map<String, Object> dataEachSession = new HashMap<>();
StatsStorage ss = knownSessionIDs.get(sessionId);
@@ -475,7 +475,7 @@
return Results.ok(asJson(dataEachSession)).as("application/json");
}
private Result setSession(String newSessionID) {
private synchronized Result setSession(String newSessionID) {
if (knownSessionIDs.containsKey(newSessionID)) {
currentSessionID = newSessionID;
currentWorkerIdx = 0;
@@ -567,7 +567,7 @@
return getOverviewDataForSession(currentSessionID);
}
private Result getOverviewDataForSession(String sessionId) {
private synchronized Result getOverviewDataForSession(String sessionId) {
Long lastUpdateTime = getLastUpdateTime(sessionId);
I18N i18N = getI18N(sessionId);

View File

@@ -20,6 +20,8 @@ function selectStdevChart(fieldName) {
$("#stdevGradients").removeAttr("class");
$("#stdevUpdates").attr("class", "active");
}
renderOverviewPage(false);
}
/* ---------- Render page ---------- */

View File

@@ -5207,7 +5207,7 @@ public class Nd4j {
*/
public static void checkShapeValues(int... shape) {
for (int e: shape) {
if (e < 1)
if (e < 0)
throw new ND4JIllegalStateException("Invalid shape: Requested INDArray shape " + Arrays.toString(shape)
+ " contains dimension size values < 0 (all dimensions must be 0 or more)");
}

View File

@@ -256,6 +256,7 @@ public class EmptyTests extends BaseNd4jTest {
assertArrayEquals(new long[]{0}, Nd4j.zeros(0).shape());
assertArrayEquals(new long[]{0,0}, Nd4j.zeros(0,0).shape());
assertArrayEquals(new long[]{0,0,0}, Nd4j.zeros(0,0,0).shape());
assertArrayEquals(new long[]{0,0,0}, Nd4j.zeros(new int[]{0,0,0}, 'f').shape());
assertArrayEquals(new long[]{0}, Nd4j.zeros(0L).shape());
assertArrayEquals(new long[]{0}, Nd4j.zeros(dt, 0L).shape());