Tag some JUnit tests with @Tag("long-running") to skip them during the normal build

Signed-off-by: brian <brian@brutex.de>
master
Brian Rosenberger 2023-08-09 12:13:31 +02:00
parent f7be1e324f
commit 0e4be5c4d2
3 changed files with 6 additions and 3 deletions

App.java

@@ -16,6 +16,7 @@ import org.deeplearning4j.nn.conf.layers.misc.FrozenLayerWithBackprop;
 import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
 import org.deeplearning4j.nn.weights.WeightInit;
 import org.deeplearning4j.optimize.listeners.PerformanceListener;
+import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
 import org.nd4j.linalg.activations.Activation;
 import org.nd4j.linalg.activations.impl.ActivationLReLU;
@@ -122,7 +123,7 @@ public class App {
         return conf;
     }
 
-    @Test
+    @Test @Tag("long-running")
     public void runTest() throws Exception {
         App.main(null);
     }

App2.java

@@ -44,6 +44,7 @@ import org.deeplearning4j.nn.conf.layers.*;
 import org.deeplearning4j.nn.conf.layers.misc.FrozenLayerWithBackprop;
 import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
 import org.deeplearning4j.optimize.listeners.PerformanceListener;
+import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
 import org.nd4j.linalg.api.ndarray.INDArray;
 import org.nd4j.linalg.dataset.DataSet;
@@ -70,7 +71,7 @@ public class App2 {
     final static INDArray label_real = Nd4j.ones(BATCHSIZE, 1);
     final static INDArray label_fake = Nd4j.zeros(BATCHSIZE, 1);
 
-    @Test
+    @Test @Tag("long-running")
     void runTest() throws IOException {
         Nd4j.getMemoryManager().setAutoGcWindow(15 * 1000);

MnistSimpleGAN.java

@@ -31,6 +31,7 @@ import org.deeplearning4j.nn.conf.layers.DropoutLayer;
 import org.deeplearning4j.nn.conf.layers.OutputLayer;
 import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
 import org.deeplearning4j.nn.weights.WeightInit;
+import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
 import org.nd4j.linalg.activations.Activation;
 import org.nd4j.linalg.activations.impl.ActivationLReLU;
@@ -100,7 +101,7 @@ public class MnistSimpleGAN {
         return new MultiLayerNetwork(discConf);
     }
 
-    @Test
+    @Test @Tag("long-running")
     public void runTest() throws Exception {
         main(null);
     }