commit 30b51f8085 (parent 36db761917, branch master)
Alex Black, 2019-08-19 22:35:49 +10:00, committed by GitHub
Signed-off-by: AlexDBlack <blacka101@gmail.com>
4 changed files with 4 additions and 5 deletions


@@ -296,7 +296,7 @@ public class GlobalPoolingLayer extends AbstractLayer<org.deeplearning4j.nn.conf
         switch (poolingType) {
             case MAX:
-                INDArray isMax = Nd4j.exec(new IsMax(inputArray.dup(), poolDim))[0];
+                INDArray isMax = Nd4j.exec(new IsMax(inputArray, inputArray.ulike(), poolDim))[0];
                 return Nd4j.getExecutioner().exec(new BroadcastMulOp(isMax, epsilon, isMax, broadcastDims));
             case AVG:
                 //if out = avg(in,dims) then dL/dIn = 1/N * dL/dOut
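The substance of this commit is the IsMax call pattern: rather than duplicating the input (inputArray.dup()) so the in-place op can overwrite it, the op now receives an explicit output array created with ulike(), which avoids the copy and leaves the activations untouched. The same pattern appears in the MaskedReductionUtil hunks below. A minimal sketch of the new form, assuming ulike() allocates an uninitialized array with the same shape and data type as its source; the array contents and the pooled dimension here are illustrative:

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
// plus the IsMax op import used in the classes above (package path omitted here)

public class IsMaxPatternSketch {
    public static void main(String[] args) {
        INDArray x = Nd4j.create(new double[][]{{1, 5, 3}, {4, 2, 6}});

        // Old form: duplicate the input so the in-place op does not clobber it
        // INDArray mask = Nd4j.exec(new IsMax(x.dup(), 1))[0];

        // New form: write into a separate, freshly allocated output array; x is untouched
        INDArray mask = Nd4j.exec(new IsMax(x, x.ulike(), 1))[0];
        System.out.println(mask);   // 1.0 at the maximum along dimension 1, 0.0 elsewhere
    }
}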


@@ -639,7 +639,7 @@ public class ConvolutionUtils {
         INDArray output = Nd4j.createUninitialized(new int[]{(int)in.size(0), 1, outH, 1}, 'c');
-        DynamicCustomOp op = new MaxPooling2D(in, output, Pooling2DConfig.builder()
+        DynamicCustomOp op = new MaxPooling2D(reshaped4d, output, Pooling2DConfig.builder()
                 .kH(k[0]).kW(k[1])
                 .sH(s[0]).sW(s[1])
                 .pH(pad == null ? 0 : pad[0]).pW(pad == null ? 0 : pad[1])
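This ConvolutionUtils hunk is a plain bug fix: the output buffer is sized for the reshaped 4d input, so the pooling op must read reshaped4d (defined earlier in the method, outside this hunk) rather than the original in. A hedged illustration of the intent; the shape, variable names, and reshape call here are assumptions for clarity, not the method's actual surrounding code:

// Illustrative: a rank-3 [minibatch, channels, length] array viewed as rank-4 so
// MaxPooling2D can pool over a [length, 1] spatial plane.
INDArray in = Nd4j.rand(new int[]{8, 3, 20});
INDArray reshaped4d = in.reshape(8, 3, 20, 1);
// The MaxPooling2D op should then consume reshaped4d, matching the 4d output buffer above.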


@@ -136,7 +136,7 @@ public class MaskedReductionUtil {
                 Nd4j.getExecutioner().exec(new BroadcastAddOp(input, negInfMask, withInf, 0, 2));
                 //At this point: all the masked out steps have value -inf, hence can't be the output of the MAX op
-                INDArray isMax = Nd4j.exec(new IsMax(withInf, 2))[0];
+                INDArray isMax = Nd4j.exec(new IsMax(withInf, withInf.ulike(), 2))[0];
                 return Nd4j.getExecutioner().exec(new BroadcastMulOp(isMax, epsilon2d, isMax, 0, 1));
             case AVG:
@@ -296,7 +296,7 @@ public class MaskedReductionUtil {
                 Nd4j.getExecutioner().exec(new BroadcastAddOp(input, negInfMask, withInf, dimensions));
                 //At this point: all the masked out steps have value -inf, hence can't be the output of the MAX op
-                INDArray isMax = Nd4j.exec(new IsMax(withInf, 2, 3))[0];
+                INDArray isMax = Nd4j.exec(new IsMax(withInf, withInf.ulike(), 2, 3))[0];
                 return Nd4j.getExecutioner().exec(new BroadcastMulOp(isMax, epsilon2d, isMax, 0, 1));
             case AVG:
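Both MaskedReductionUtil hunks rely on the same trick for masked max-pooling backprop: a 0 / -infinity mask is broadcast-added before IsMax, so a padded time step can never be selected as the maximum and therefore receives no gradient. A tiny self-contained illustration of that trick; the one-row example and variable names are made up for clarity:

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
// plus the IsMax op import used in MaskedReductionUtil (package path omitted here)

public class MaskedMaxSketch {
    public static void main(String[] args) {
        // One sequence of length 4; the last step is padding even though it holds the largest raw value
        INDArray seq = Nd4j.create(new double[][]{{2.0, 7.0, 3.0, 9.0}});
        INDArray negInfMask = Nd4j.create(new double[][]{{0.0, 0.0, 0.0, Double.NEGATIVE_INFINITY}});

        // The masked step becomes -inf, so it cannot win the max
        INDArray withInf = seq.add(negInfMask);
        INDArray isMax = Nd4j.exec(new IsMax(withInf, withInf.ulike(), 1))[0];
        System.out.println(isMax);   // 1.0 only at index 1 (the valid maximum 7.0); gradient is routed there
    }
}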


@@ -22,7 +22,6 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.rules.TestName;
-import org.nd4j.linalg.api.buffer.DataBuffer;
 import org.nd4j.linalg.api.buffer.DataType;
 import org.nd4j.linalg.api.memory.MemoryWorkspace;
 import org.nd4j.linalg.api.ops.executioner.OpExecutioner;