diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SubsamplingLayerTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SubsamplingLayerTest.java
index 69f8c22db..e0e556f39 100644
--- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SubsamplingLayerTest.java
+++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/convolution/SubsamplingLayerTest.java
@@ -155,7 +155,7 @@ public class SubsamplingLayerTest extends BaseDL4JTest {
}
- @Test(expected = IllegalStateException.class)
+ @Test(expected = UnsupportedOperationException.class)
public void testSubSampleLayerSumBackprop() throws Exception {
Layer layer = getSubsamplingLayer(SubsamplingLayer.PoolingType.SUM);
INDArray input = getData();
diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/SpaceToDepthLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/SpaceToDepthLayer.java
index aeca265f8..44f8bb666 100644
--- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/SpaceToDepthLayer.java
+++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/SpaceToDepthLayer.java
@@ -92,7 +92,6 @@ public class SpaceToDepthLayer extends NoParamLayer {
@Override
public LayerMemoryReport getMemoryReport(InputType inputType) {
- InputType.InputTypeConvolutional c = (InputType.InputTypeConvolutional) inputType;
InputType.InputTypeConvolutional outputType = (InputType.InputTypeConvolutional) getOutputType(-1, inputType);
return new LayerMemoryReport.Builder(layerName, SpaceToDepthLayer.class, inputType, outputType)
diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/SubsamplingLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/SubsamplingLayer.java
index 9eff7a91a..b2e4df6b8 100644
--- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/SubsamplingLayer.java
+++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/SubsamplingLayer.java
@@ -57,6 +57,12 @@ public class SubsamplingLayer extends NoParamLayer {
protected int pnorm;
protected double eps;
protected boolean cudnnAllowFallback = true;
+ /*
+ Default here for JSON deserialization of 1.0.0-beta4 and earlier models. New models default to false via builder.
+ This impacts average pooling only - whether the divisor should include or exclude padding along image edges.
+ DL4J originally included padding in the count; versions after 1.0.0-beta4 exclude it by default.
+ */
+ protected boolean avgPoolIncludePadInDivisor = true;
public enum PoolingType {
MAX, AVG, SUM, PNORM;
@@ -95,6 +101,7 @@ public class SubsamplingLayer extends NoParamLayer {
this.pnorm = builder.pnorm;
this.eps = builder.eps;
this.cudnnAllowFallback = builder.cudnnAllowFallback;
+ this.avgPoolIncludePadInDivisor = builder.avgPoolIncludePadInDivisor;
}
@Override
@@ -376,6 +383,7 @@ public class SubsamplingLayer extends NoParamLayer {
* Whether fallback to non-CuDNN implementation should be used
*/
protected boolean cudnnAllowFallback = true;
+ protected boolean avgPoolIncludePadInDivisor = false;
protected BaseSubsamplingBuilder(PoolingType poolingType, int[] kernelSize, int[] stride) {
this.setPoolingType(poolingType.toPoolingType());
@@ -482,6 +490,29 @@ public class SubsamplingLayer extends NoParamLayer {
this.cudnnAllowFallback = allowFallback;
return (T) this;
}
+
+ /**
+ * When doing average pooling, should the padding values be included in the divisor or not?
+ * Not applicable for max and p-norm pooling.
+ * Users should not usually set this - instead, leave it as the default (false). It is included mainly for backward
+ * compatibility with older models.
+ * Consider the following 2x2 segment along the right side of the image:
+ *
+ * [A, P]
+ * [B, P]
+ *
+ * Where A and B are actual values, and P is padding (0).
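+ *
+ * As a worked example of the divisor behaviour described above: with avgPoolIncludePadInDivisor = true the pooled
+ * value for this window is (A + B + 0 + 0) / 4, whereas with avgPoolIncludePadInDivisor = false the padding cells
+ * are excluded from the count and the result is (A + B) / 2.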