/*
 *  ******************************************************************************
 *  *
 *  *
 *  * This program and the accompanying materials are made available under the
 *  * terms of the Apache License, Version 2.0 which is available at
 *  * https://www.apache.org/licenses/LICENSE-2.0.
 *  *
 *  * See the NOTICE file distributed with this work for additional
 *  * information regarding copyright ownership.
 *  *
 *  * Unless required by applicable law or agreed to in writing, software
 *  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 *  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 *  * License for the specific language governing permissions and limitations
 *  * under the License.
 *  *
 *  * SPDX-License-Identifier: Apache-2.0
 *  *****************************************************************************
 */
package org.deeplearning4j.parallelism;

import lombok.val;
import org.deeplearning4j.BaseDL4JTest;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.deeplearning4j.parallelism.inference.InferenceMode;
import org.deeplearning4j.parallelism.inference.LoadBalanceMode;
import org.junit.jupiter.api.Test;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import static org.junit.jupiter.api.Assertions.*;
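
/**
 * Tests for {@code ParallelInference} running in {@link InferenceMode#INPLACE} mode,
 * which is backed by {@link InplaceParallelInference}. Covers model hot-swapping via
 * {@code updateModel(...)} and output consistency under the {@code ROUND_ROBIN} and
 * {@code FIFO} load-balancing strategies.
 */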
public class InplaceParallelInferenceTest extends BaseDL4JTest {
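
    /**
     * Calling updateModel() on an INPLACE ParallelInference instance should
     * propagate the new parameters to every worker's replica of the model.
     */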
    @Test
    public void testUpdateModel() {
        int nIn = 5;

        val conf = NeuralNetConfiguration.builder()
                .graphBuilder()
                .addInputs("in")
                .layer("out0", OutputLayer.builder().nIn(nIn).nOut(4).activation(Activation.SOFTMAX).build(), "in")
                .layer("out1", OutputLayer.builder().nIn(nIn).nOut(6).activation(Activation.SOFTMAX).build(), "in")
                .setOutputs("out0", "out1")
                .build();

        val net = new ComputationGraph(conf);
        net.init();

        val pi = new ParallelInference.Builder(net)
                .inferenceMode(InferenceMode.INPLACE)
                .workers(2)
                .build();

        try {
            assertTrue(pi instanceof InplaceParallelInference);

            val models = pi.getCurrentModelsFromWorkers();
            assertTrue(models.length > 0);

            for (val m : models) {
                assertNotNull(m);
                assertEquals(net.getModelParams(), m.getModelParams());
            }

            val conf2 = NeuralNetConfiguration.builder()
                    .graphBuilder()
                    .addInputs("in")
                    .layer("out0", OutputLayer.builder().nIn(nIn).nOut(4).activation(Activation.SOFTMAX).build(), "in")
                    .layer("out1", OutputLayer.builder().nIn(nIn).nOut(6).activation(Activation.SOFTMAX).build(), "in")
                    .layer("out2", OutputLayer.builder().nIn(nIn).nOut(8).activation(Activation.SOFTMAX).build(), "in")
                    .setOutputs("out0", "out1", "out2")
                    .build();

            val net2 = new ComputationGraph(conf2);
            net2.init();

            assertNotEquals(net.getModelParams(), net2.getModelParams());

            pi.updateModel(net2);

            val models2 = pi.getCurrentModelsFromWorkers();
            assertTrue(models2.length > 0);

            for (val m : models2) {
                assertNotNull(m);
                assertEquals(net2.getModelParams(), m.getModelParams());
            }
        } finally {
            pi.shutdown();
        }
    }
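
    /**
     * Two identical requests dispatched under ROUND_ROBIN load balancing are
     * expected to be served by different workers; since every worker holds the
     * same parameters, the outputs must still be identical.
     */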
    @Test
    public void testOutput_RoundRobin_1() throws Exception {
        int nIn = 5;

        val conf = NeuralNetConfiguration.builder()
                .graphBuilder()
                .addInputs("in")
                .layer("out0", OutputLayer.builder().nIn(nIn).nOut(4).activation(Activation.SOFTMAX).build(), "in")
                .layer("out1", OutputLayer.builder().nIn(nIn).nOut(6).activation(Activation.SOFTMAX).build(), "in")
                .setOutputs("out0", "out1")
                .build();

        val net = new ComputationGraph(conf);
        net.init();

        val pi = new ParallelInference.Builder(net)
                .inferenceMode(InferenceMode.INPLACE)
                .loadBalanceMode(LoadBalanceMode.ROUND_ROBIN)
                .workers(2)
                .build();

        try {
            val result0 = pi.output(new INDArray[]{Nd4j.create(new double[]{1.0, 2.0, 3.0, 4.0, 5.0}, 1, 5)}, null)[0];
            val result1 = pi.output(new INDArray[]{Nd4j.create(new double[]{1.0, 2.0, 3.0, 4.0, 5.0}, 1, 5)}, null)[0];

            assertNotNull(result0);
            assertEquals(result0, result1);
        } finally {
            pi.shutdown();
        }
    }
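
    /**
     * Same consistency check as above, but with FIFO load balancing: whichever
     * worker picks up each request, identical inputs must produce identical outputs.
     */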
    @Test
    public void testOutput_FIFO_1() throws Exception {
        int nIn = 5;

        val conf = NeuralNetConfiguration.builder()
                .graphBuilder()
                .addInputs("in")
                .layer("out0", OutputLayer.builder().nIn(nIn).nOut(4).activation(Activation.SOFTMAX).build(), "in")
                .layer("out1", OutputLayer.builder().nIn(nIn).nOut(6).activation(Activation.SOFTMAX).build(), "in")
                .setOutputs("out0", "out1")
                .build();

        val net = new ComputationGraph(conf);
        net.init();

        val pi = new ParallelInference.Builder(net)
                .inferenceMode(InferenceMode.INPLACE)
                .loadBalanceMode(LoadBalanceMode.FIFO)
                .workers(2)
                .build();

        try {
            val result0 = pi.output(new INDArray[]{Nd4j.create(new double[]{1.0, 2.0, 3.0, 4.0, 5.0}, 1, 5)}, null)[0];
            val result1 = pi.output(new INDArray[]{Nd4j.create(new double[]{1.0, 2.0, 3.0, 4.0, 5.0}, 1, 5)}, null)[0];

            assertNotNull(result0);
            assertEquals(result0, result1);
        } finally {
            pi.shutdown();
        }
    }
}