/*
* ******************************************************************************
* *
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * See the NOTICE file distributed with this work for additional
* * information regarding copyright ownership.
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package org.deeplearning4j.regressiontest;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import org.deeplearning4j.BaseDL4JTest;
import org.deeplearning4j.TestUtils;
import org.deeplearning4j.nn.conf.BackpropType;
import org.deeplearning4j.nn.conf.ConvolutionMode;
import org.deeplearning4j.nn.conf.graph.LayerVertex;
import org.deeplearning4j.nn.conf.layers.CnnLossLayer;
import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.DepthwiseConvolution2D;
import org.deeplearning4j.nn.conf.layers.GlobalPoolingLayer;
import org.deeplearning4j.nn.conf.layers.LSTM;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.conf.layers.PoolingType;
import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
import org.deeplearning4j.nn.conf.layers.SeparableConvolution2D;
import org.deeplearning4j.nn.conf.layers.SubsamplingLayer;
import org.deeplearning4j.nn.conf.layers.Upsampling2D;
import org.deeplearning4j.nn.conf.layers.ZeroPaddingLayer;
import org.deeplearning4j.nn.conf.layers.convolutional.Cropping2D;
import org.deeplearning4j.nn.conf.layers.recurrent.Bidirectional;
import org.deeplearning4j.nn.conf.layers.recurrent.SimpleRnn;
import org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.deeplearning4j.nn.graph.vertex.impl.MergeVertex;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInitXavier;
import org.deeplearning4j.regressiontest.customlayer100a.CustomLayer;
import org.junit.jupiter.api.Test;
import org.nd4j.linalg.activations.impl.ActivationIdentity;
import org.nd4j.linalg.activations.impl.ActivationLReLU;
import org.nd4j.linalg.activations.impl.ActivationReLU;
import org.nd4j.linalg.activations.impl.ActivationSigmoid;
import org.nd4j.linalg.activations.impl.ActivationSoftmax;
import org.nd4j.linalg.activations.impl.ActivationTanH;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.learning.config.Adam;
import org.nd4j.linalg.learning.config.RmsProp;
import org.nd4j.linalg.learning.regularization.L2Regularization;
import org.nd4j.linalg.lossfunctions.impl.LossMAE;
import org.nd4j.linalg.lossfunctions.impl.LossMCXENT;
import org.nd4j.common.resources.Resources;
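
/**
 * Regression tests for models saved with DL4J 1.0.0-beta4 (the "100b4" in the resource names).
 * Each test loads a serialized model from the test resources, verifies that the deserialized
 * layer configurations match the values the model was originally built with, and checks that
 * the network's output on a saved input matches the saved expected output.
 */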
public class RegressionTest100b4 extends BaseDL4JTest {
@Override
public DataType getDataType() {
return DataType.FLOAT;
}
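
/**
 * Loads a saved MultiLayerNetwork containing a custom layer, once per data type
 * (DOUBLE, FLOAT, HALF), and checks the layer configuration, the data types of the
 * parameters, gradients, and updater state, and output equality within a small epsilon.
 */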
@Test
public void testCustomLayer() throws Exception {
for (DataType dtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
String dtypeName = dtype.toString().toLowerCase();
File f = Resources.asFile("regression_testing/100b4/CustomLayerExample_100b4_" + dtypeName + ".bin");
MultiLayerNetwork net = MultiLayerNetwork.load(f, true);
// net = net.clone();
DenseLayer l0 = (DenseLayer) net.getLayer(0).getLayerConfiguration();
assertEquals(new ActivationTanH(), l0.getActivationFn());
assertEquals(new L2Regularization(0.03), TestUtils.getL2Reg(l0));
assertEquals(new RmsProp(0.95), l0.getUpdater());
CustomLayer l1 = (CustomLayer) net.getLayer(1).getLayerConfiguration();
assertEquals(new ActivationTanH(), l1.getActivationFn());
assertEquals(new ActivationSigmoid(), l1.getSecondActivationFunction());
assertEquals(new RmsProp(0.95), l1.getUpdater());
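// The expected output and input arrays were serialized alongside the model;
// Nd4j.read restores them from the raw streams below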
INDArray outExp;
File f2 = Resources
.asFile("regression_testing/100b4/CustomLayerExample_Output_100b4_" + dtypeName + ".bin");
try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
outExp = Nd4j.read(dis);
}
INDArray in;
File f3 = Resources.asFile("regression_testing/100b4/CustomLayerExample_Input_100b4_" + dtypeName + ".bin");
try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
in = Nd4j.read(dis);
}
assertEquals(dtype, in.dataType());
assertEquals(dtype, outExp.dataType());
assertEquals(dtype, net.getModelParams().dataType());
assertEquals(dtype, net.getFlattenedGradients().dataType());
assertEquals(dtype, net.getUpdater().getStateViewArray().dataType());
//System.out.println(Arrays.toString(net.params().data().asFloat()));
INDArray outAct = net.output(in);
assertEquals(dtype, outAct.dataType());
assertEquals(dtype, net.getNetConfiguration().getDataType());
assertEquals(dtype, net.getModelParams().dataType());
boolean eq = outExp.equalsWithEps(outAct, 0.01);
assertTrue(eq, "Test for dtype: " + dtypeName + "\n" + outExp + " vs " + outAct);
}
}
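
/**
 * Loads the saved character-modeling LSTM network (two LSTM layers plus an RnnOutputLayer,
 * trained with truncated BPTT) and verifies its configuration and exact output.
 */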
@Test
public void testLSTM() throws Exception {
File f = Resources.asFile("regression_testing/100b4/GravesLSTMCharModelingExample_100b4.bin");
MultiLayerNetwork net = MultiLayerNetwork.load(f, true);
LSTM l0 = (LSTM) net.getLayer(0).getLayerConfiguration();
assertEquals(new ActivationTanH(), l0.getActivationFn());
assertEquals(200, l0.getNOut());
assertEquals(new WeightInitXavier(), l0.getWeightInit());
assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l0));
assertEquals(new Adam(0.005), l0.getUpdater());
LSTM l1 = (LSTM) net.getLayer(1).getLayerConfiguration();
assertEquals(new ActivationTanH(), l1.getActivationFn());
assertEquals(200, l1.getNOut());
assertEquals(new WeightInitXavier(), l1.getWeightInit());
assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l1));
assertEquals(new Adam(0.005), l1.getUpdater());
RnnOutputLayer l2 = (RnnOutputLayer) net.getLayer(2).getLayerConfiguration();
assertEquals(new ActivationSoftmax(), l2.getActivationFn());
assertEquals(77, l2.getNOut());
assertEquals(new WeightInitXavier(), l2.getWeightInit());
assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l2));
assertEquals(new Adam(0.005), l2.getUpdater());
assertEquals(BackpropType.TruncatedBPTT, net.getNetConfiguration().getBackpropType());
assertEquals(50, net.getNetConfiguration().getTbpttBackLength());
assertEquals(50, net.getNetConfiguration().getTbpttFwdLength());
INDArray outExp;
File f2 = Resources.asFile("regression_testing/100b4/GravesLSTMCharModelingExample_Output_100b4.bin");
try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
outExp = Nd4j.read(dis);
}
INDArray in;
File f3 = Resources.asFile("regression_testing/100b4/GravesLSTMCharModelingExample_Input_100b4.bin");
try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
in = Nd4j.read(dis);
}
INDArray outAct = net.output(in);
assertEquals(outExp, outAct);
}
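
/**
 * Loads the saved MNIST anomaly-detection variational autoencoder and verifies the encoder
 * and decoder layer sizes, updater, regularization, and exact output.
 */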
@Test
public void testVae() throws Exception {
File f = Resources.asFile("regression_testing/100b4/VaeMNISTAnomaly_100b4.bin");
MultiLayerNetwork net = MultiLayerNetwork.load(f, true);
VariationalAutoencoder l0 = (VariationalAutoencoder) net.getLayer(0).getLayerConfiguration();
assertEquals(new ActivationLReLU(), l0.getActivationFn());
assertEquals(32, l0.getNOut());
assertArrayEquals(new int[]{256, 256}, l0.getEncoderLayerSizes());
assertArrayEquals(new int[]{256, 256}, l0.getDecoderLayerSizes());
assertEquals(new WeightInitXavier(), l0.getWeightInit());
assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l0));
assertEquals(new Adam(1e-3), l0.getUpdater());
INDArray outExp;
File f2 = Resources.asFile("regression_testing/100b4/VaeMNISTAnomaly_Output_100b4.bin");
try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
outExp = Nd4j.read(dis);
}
INDArray in;
File f3 = Resources.asFile("regression_testing/100b4/VaeMNISTAnomaly_Input_100b4.bin");
try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
in = Nd4j.read(dis);
}
INDArray outAct = net.output(in);
assertEquals(outExp, outAct);
}
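
/**
 * Loads the saved YOLO house-number detection ComputationGraph and verifies the final
 * convolution layer's configuration and the network output to within 1e-3.
 */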
@Test
////@Ignore("Failing due to new data format changes. Sept 10,2020")
public void testYoloHouseNumber() throws Exception {
File f = Resources.asFile("regression_testing/100b4/HouseNumberDetection_100b4.bin");
ComputationGraph net = ComputationGraph.load(f, true);
int nBoxes = 5;
int nClasses = 10;
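// The final 1x1 convolution outputs nBoxes * (5 + nClasses) channels: per box,
// 4 coordinates plus 1 confidence score, followed by the class scores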
ConvolutionLayer cl = (ConvolutionLayer) ((LayerVertex) net.getComputationGraphConfiguration().getVertices()
.get("convolution2d_9")).getNetConfiguration().getFirstLayer();
assertEquals(nBoxes * (5 + nClasses), cl.getNOut());
assertEquals(new ActivationIdentity(), cl.getActivationFn());
assertEquals(ConvolutionMode.Same, cl.getConvolutionMode());
assertEquals(new WeightInitXavier(), cl.getWeightInit());
assertArrayEquals(new int[]{1, 1}, cl.getKernelSize());
INDArray outExp;
File f2 = Resources.asFile("regression_testing/100b4/HouseNumberDetection_Output_100b4.bin");
try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
outExp = Nd4j.read(dis);
}
INDArray in;
File f3 = Resources.asFile("regression_testing/100b4/HouseNumberDetection_Input_100b4.bin");
try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
in = Nd4j.read(dis);
}
INDArray outAct = net.outputSingle(in);
boolean eq = outExp.equalsWithEps(outAct.castTo(outExp.dataType()), 1e-3);
assertTrue(eq);
}
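
/**
 * Loads a saved synthetic CNN covering most 2D convolution layer types (standard, separable,
 * depthwise, subsampling, zero padding, upsampling, cropping, and a CNN loss layer) and
 * verifies each layer's configuration plus the network output.
 */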
@Test
////@Ignore("failing due to new input data format changes.")
public void testSyntheticCNN() throws Exception {
File f = Resources.asFile("regression_testing/100b4/SyntheticCNN_100b4.bin");
MultiLayerNetwork net = MultiLayerNetwork.load(f, true);
ConvolutionLayer l0 = (ConvolutionLayer) net.getLayer(0).getLayerConfiguration();
assertEquals(new ActivationReLU(), l0.getActivationFn());
assertEquals(4, l0.getNOut());
assertEquals(new WeightInitXavier(), l0.getWeightInit());
assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l0));
assertEquals(new Adam(0.005), l0.getUpdater());
assertArrayEquals(new int[]{3, 3}, l0.getKernelSize());
assertArrayEquals(new int[]{2, 1}, l0.getStride());
assertArrayEquals(new int[]{1, 1}, l0.getDilation());
assertArrayEquals(new int[]{0, 0}, l0.getPadding());
SeparableConvolution2D l1 = (SeparableConvolution2D) net.getLayer(1).getLayerConfiguration();
assertEquals(new ActivationReLU(), l1.getActivationFn());
assertEquals(8, l1.getNOut());
assertEquals(new WeightInitXavier(), l1.getWeightInit());
assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l1));
assertEquals(new Adam(0.005), l1.getUpdater());
assertArrayEquals(new int[]{3, 3}, l1.getKernelSize());
assertArrayEquals(new int[]{1, 1}, l1.getStride());
assertArrayEquals(new int[]{1, 1}, l1.getDilation());
assertArrayEquals(new int[]{0, 0}, l1.getPadding());
assertEquals(ConvolutionMode.Same, l1.getConvolutionMode());
assertEquals(1, l1.getDepthMultiplier());
SubsamplingLayer l2 = (SubsamplingLayer) net.getLayer(2).getLayerConfiguration();
assertArrayEquals(new int[]{3, 3}, l2.getKernelSize());
assertArrayEquals(new int[]{2, 2}, l2.getStride());
assertArrayEquals(new int[]{1, 1}, l2.getDilation());
assertArrayEquals(new int[]{0, 0}, l2.getPadding());
assertEquals(PoolingType.MAX, l2.getPoolingType());
ZeroPaddingLayer l3 = (ZeroPaddingLayer) net.getLayer(3).getLayerConfiguration();
assertArrayEquals(new int[]{4, 4, 4, 4}, l3.getPadding());
Upsampling2D l4 = (Upsampling2D) net.getLayer(4).getLayerConfiguration();
assertArrayEquals(new int[]{3, 3}, l4.getSize());
DepthwiseConvolution2D l5 = (DepthwiseConvolution2D) net.getLayer(5).getLayerConfiguration();
assertEquals(new ActivationReLU(), l5.getActivationFn());
assertEquals(16, l5.getNOut());
assertEquals(new WeightInitXavier(), l5.getWeightInit());
assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l5));
assertEquals(new Adam(0.005), l5.getUpdater());
assertArrayEquals(new int[]{3, 3}, l5.getKernelSize());
assertArrayEquals(new int[]{1, 1}, l5.getStride());
assertArrayEquals(new int[]{1, 1}, l5.getDilation());
assertArrayEquals(new int[]{0, 0}, l5.getPadding());
assertEquals(2, l5.getDepthMultiplier());
SubsamplingLayer l6 = (SubsamplingLayer) net.getLayer(6).getLayerConfiguration();
assertArrayEquals(new int[]{2, 2}, l6.getKernelSize());
assertArrayEquals(new int[]{2, 2}, l6.getStride());
assertArrayEquals(new int[]{1, 1}, l6.getDilation());
assertArrayEquals(new int[]{0, 0}, l6.getPadding());
assertEquals(PoolingType.MAX, l6.getPoolingType());
Cropping2D l7 = (Cropping2D) net.getLayer(7).getLayerConfiguration();
assertArrayEquals(new int[]{3, 3, 2, 2}, l7.getCropping());
ConvolutionLayer l8 = (ConvolutionLayer) net.getLayer(8).getLayerConfiguration();
assertEquals(4, l8.getNOut());
assertEquals(new WeightInitXavier(), l8.getWeightInit());
assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l8));
assertEquals(new Adam(0.005), l8.getUpdater());
assertArrayEquals(new int[]{4, 4}, l8.getKernelSize());
assertArrayEquals(new int[]{1, 1}, l8.getStride());
assertArrayEquals(new int[]{1, 1}, l8.getDilation());
assertArrayEquals(new int[]{0, 0}, l8.getPadding());
CnnLossLayer l9 = (CnnLossLayer) net.getLayer(9).getLayerConfiguration();
assertEquals(new WeightInitXavier(), l9.getWeightInit());
assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l9));
assertEquals(new Adam(0.005), l9.getUpdater());
assertEquals(new LossMAE(), l9.getLossFunction());
INDArray outExp;
File f2 = Resources.asFile("regression_testing/100b4/SyntheticCNN_Output_100b4.bin");
try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
outExp = Nd4j.read(dis);
}
INDArray in;
File f3 = Resources.asFile("regression_testing/100b4/SyntheticCNN_Input_100b4.bin");
try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
in = Nd4j.read(dis);
}
INDArray outAct = net.output(in);
//19 layers - CPU vs. GPU difference accumulates notably, but appears to be correct
if(Nd4j.getBackend().getClass().getName().toLowerCase().contains("native")){
assertEquals(outExp, outAct);
} else {
boolean eq = outExp.equalsWithEps(outAct, 0.1);
assertTrue(eq);
}
}
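
/**
 * Loads a saved bidirectional RNN ComputationGraph (Bidirectional-wrapped LSTM and SimpleRnn
 * layers merged into a global pooling layer and an output layer) and verifies its
 * configuration and exact output.
 */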
@Test
public void testSyntheticBidirectionalRNNGraph() throws Exception {
File f = Resources.asFile("regression_testing/100b4/SyntheticBidirectionalRNNGraph_100b4.bin");
ComputationGraph net = ComputationGraph.load(f, true);
Bidirectional l0 = (Bidirectional) net.getLayer("rnn1").getLayerConfiguration();
LSTM l1 = (LSTM) l0.getFwd();
assertEquals(16, l1.getNOut());
assertEquals(new ActivationReLU(), l1.getActivationFn());
assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l1));
LSTM l2 = (LSTM) l0.getBwd();
assertEquals(16, l2.getNOut());
assertEquals(new ActivationReLU(), l2.getActivationFn());
assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l2));
Bidirectional l3 = (Bidirectional) net.getLayer("rnn2").getLayerConfiguration();
SimpleRnn l4 = (SimpleRnn) l3.getFwd();
assertEquals(16, l4.getNOut());
assertEquals(new ActivationReLU(), l4.getActivationFn());
assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l4));
SimpleRnn l5 = (SimpleRnn) l3.getBwd();
assertEquals(16, l5.getNOut());
assertEquals(new ActivationReLU(), l5.getActivationFn());
assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l5));
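// The cast alone verifies the "concat" vertex deserialized as a MergeVertex;
// no further assertions are made on it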
MergeVertex mv = (MergeVertex) net.getVertex("concat");
GlobalPoolingLayer gpl = (GlobalPoolingLayer) net.getLayer("pooling").getLayerConfiguration();
assertEquals(PoolingType.MAX, gpl.getPoolingType());
assertArrayEquals(new int[]{2}, gpl.getPoolingDimensions());
assertTrue(gpl.isCollapseDimensions());
OutputLayer outl = (OutputLayer) net.getLayer("out").getLayerConfiguration();
assertEquals(3, outl.getNOut());
assertEquals(new LossMCXENT(), outl.getLossFunction());
INDArray outExp;
File f2 = Resources.asFile("regression_testing/100b4/SyntheticBidirectionalRNNGraph_Output_100b4.bin");
try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
outExp = Nd4j.read(dis);
}
INDArray in;
File f3 = Resources.asFile("regression_testing/100b4/SyntheticBidirectionalRNNGraph_Input_100b4.bin");
try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
in = Nd4j.read(dis);
}
INDArray outAct = net.output(in)[0];
assertEquals(outExp, outAct);
}
}