Ensure all stack traces are printed, fix 1 test failure

master
agibsonccc 2021-03-27 18:20:07 +09:00
parent 83cb1a2326
commit 1a8f360798
10 changed files with 44 additions and 5 deletions

View File

@ -59,6 +59,8 @@ import java.util.List;
import static org.junit.jupiter.api.Assertions.*;
@NativeTag
@Tag(TagNames.FILE_IO)
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public class TestObjectDetectionRecordReader {
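
The @Tag(TagNames.LONG_TEST) and @Tag(TagNames.LARGE_RESOURCES) annotations added here (and throughout the rest of this commit) are JUnit 5 tags, which let resource-heavy tests be grouped and filtered at build time. A minimal sketch of the pattern, using illustrative tag strings rather than the project's TagNames constants:

import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertTrue;

@Tag("long-running-test")   // illustrative values, not the real TagNames constants
@Tag("large-resources")
class TaggedTestExample {

    @Test
    void heavyScenario() {
        // Classes tagged this way can be included or excluded as a group at build time,
        // e.g. via Surefire's <groups>/<excludedGroups>; whether this build does that
        // is not shown in the diff.
        assertTrue(true);
    }
}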

View File

@ -47,6 +47,7 @@
<configuration>
<forkCount>${cpu.core.count}</forkCount>
<reuseForks>false</reuseForks>
<trimStackTrace>false</trimStackTrace>
<forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
<argLine>-Ddtype=float -Dfile.encoding=UTF-8
-Dtest.solr.allowed.securerandom=NativePRNG -Xmx${test.heap.size} -Dorg.bytedeco.javacpp.maxphysicalbytes=${test.offheap.size} -Dorg.bytedeco.javacpp.maxbytes=${test.offheap.size}

View File

@ -41,6 +41,7 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<trimStackTrace>false</trimStackTrace>
<forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
<argLine>-Ddtype=float -Dfile.encoding=UTF-8 -Xmx${test.heap.size}
-Dtest.solr.allowed.securerandom=NativePRNG

View File

@ -111,6 +111,7 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
@ParameterizedTest
@MethodSource("org.nd4j.linalg.BaseNd4jTestWithBackends#configs")
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public void testParagraphVectorsVocabBuilding1() throws Exception {
File file = Resources.asFile("/big/raw_sentences.txt");
SentenceIterator iter = new BasicLineIterator(file); //UimaSentenceIterator.createWithPath(file.getAbsolutePath());
@ -160,6 +161,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
@Tag(TagNames.LONG_TEST)
@ParameterizedTest
@MethodSource("org.nd4j.linalg.BaseNd4jTestWithBackends#configs")
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public void testParagraphVectorsModelling1(Nd4jBackend backend) throws Exception {
File file = Resources.asFile("/big/raw_sentences.txt");
SentenceIterator iter = new BasicLineIterator(file);
@ -288,7 +291,7 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
SerializationUtils.saveObject(vec, tempFile);
ParagraphVectors vec2 = (ParagraphVectors) SerializationUtils.readObject(tempFile);
ParagraphVectors vec2 = SerializationUtils.readObject(tempFile);
INDArray day2 = vec2.getWordVectorMatrix("day").dup();
List<String> labelsBinary = vec2.labelsSource.getLabels();
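
Dropping the call-site cast above is consistent with SerializationUtils.readObject being a generic method, so the target type is inferred from the assignment. A rough sketch of such a helper (the signature is an assumption, not copied from the code base):

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

final class SerializationSketch {

    // Hypothetical stand-in for the project's SerializationUtils.readObject.
    @SuppressWarnings("unchecked")
    static <T> T readObject(File file) throws IOException, ClassNotFoundException {
        try (ObjectInputStream in = new ObjectInputStream(new FileInputStream(file))) {
            // The caller's assignment target drives type inference, so no call-site cast is needed.
            return (T) in.readObject();
        }
    }

    static void saveObject(Serializable obj, File file) throws IOException {
        try (ObjectOutputStream out = new ObjectOutputStream(new FileOutputStream(file))) {
            out.writeObject(obj);
        }
    }
}

With a generic return type like this, a call such as ParagraphVectors vec2 = readObject(tempFile); compiles without an explicit cast.
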
@ -352,6 +355,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
@Test
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public void testParagraphVectorsDM() throws Exception {
File file = Resources.asFile("/big/raw_sentences.txt");
SentenceIterator iter = new BasicLineIterator(file);
@ -416,6 +421,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
@Timeout(300000)
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public void testParagraphVectorsDBOW() throws Exception {
skipUnlessIntegrationTests();
@ -494,6 +501,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
@Test()
@Timeout(300000)
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public void testParagraphVectorsWithWordVectorsModelling1() throws Exception {
String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
if(!isIntegrationTests() && "CUDA".equalsIgnoreCase(backend)) {
@ -585,6 +594,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
* @throws Exception
*/
@Test
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public void testParagraphVectorsReducedLabels1(@TempDir Path testDir) throws Exception {
val tempDir = testDir.toFile();
ClassPathResource resource = new ClassPathResource("/labeled");
@ -636,6 +647,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
@Test()
@Timeout(300000)
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public void testParallelIterator() throws IOException {
TokenizerFactory factory = new DefaultTokenizerFactory();
SentenceIterator iterator = new BasicLineIterator(Resources.asFile("big/raw_sentences.txt"));
@ -659,6 +672,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
}
@Test
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public void testIterator(@TempDir Path testDir) throws IOException {
val folder_labeled = new File(testDir.toFile(),"labeled");
val folder_unlabeled = new File(testDir.toFile(),"unlabeled");
@ -708,6 +723,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
there's no need in this test within travis, use it manually only for problems detection
*/
@Test
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public void testParagraphVectorsOverExistingWordVectorsModel(@TempDir Path testDir) throws Exception {
String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
if(!isIntegrationTests() && "CUDA".equalsIgnoreCase(backend)) {
@ -854,6 +871,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
* Special test to check d2v inference against pre-trained gensim model and
*/
@Test
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public void testGensimEquality() throws Exception {
INDArray expA = Nd4j.create(new double[] {-0.02461922, -0.00801059, -0.01821643, 0.0167951, 0.02240154,
@ -1003,6 +1022,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
}
@Test
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public void testDirectInference(@TempDir Path testDir) throws Exception {
boolean isIntegration = isIntegrationTests();
File resource = Resources.asFile("/big/raw_sentences.txt");
@ -1036,6 +1057,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
}
@Test
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public void testGoogleModelForInference() throws Exception {
WordVectors googleVectors = WordVectorSerializer.readWord2VecModel(new File("/ext/GoogleNews-vectors-negative300.bin.gz"));
@ -1055,6 +1078,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
@Test()
@Timeout(300000)
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public void testHash() {
VocabWord w1 = new VocabWord(1.0, "D1");
VocabWord w2 = new VocabWord(1.0, "Bo");
@ -1076,7 +1101,9 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
@Tag(TagNames.LONG_TEST)
@ParameterizedTest
@MethodSource("org.nd4j.linalg.BaseNd4jTestWithBackends#configs")
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public void testsParallelFit1() throws Exception {
public void testsParallelFit1(Nd4jBackend backend) throws Exception {
final File file = Resources.asFile("big/raw_sentences.txt");
for (int i = 0; i < 1000; i++) {
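
The signature change in this hunk looks like the single test fix mentioned in the commit title: a @MethodSource that supplies one argument per invocation (here the Nd4jBackend configs) requires the test method to declare a matching parameter, otherwise JUnit 5 cannot bind the supplied argument and the invocation fails. A self-contained analog with hypothetical names, not taken from the DL4J code base:

import java.util.stream.Stream;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

import static org.junit.jupiter.api.Assertions.assertNotNull;

class BackendParameterExample {

    // Stands in for BaseNd4jTestWithBackends#configs, which supplies Nd4jBackend instances.
    static Stream<String> configs() {
        return Stream.of("cpu-backend", "cuda-backend");
    }

    @ParameterizedTest
    @MethodSource("configs")
    void runsOncePerConfig(String backend) {   // the parameter must be declared, as in testsParallelFit1 above
        assertNotNull(backend);
    }
}
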
@ -1119,6 +1146,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
@Test()
@Timeout(300000)
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public void testJSONSerialization() {
ParagraphVectors paragraphVectors = new ParagraphVectors.Builder().build();
AbstractCache<VocabWord> cache = new AbstractCache.Builder<VocabWord>().build();
@ -1160,6 +1189,7 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
@Test()
@Timeout(300000)
@Tag(TagNames.LONG_TEST)
@Tag(TagNames.LARGE_RESOURCES)
public void testDoubleFit() throws Exception {
boolean isIntegration = isIntegrationTests();
File resource = Resources.asFile("/big/raw_sentences.txt");

View File

@ -140,6 +140,7 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<trimStackTrace>false</trimStackTrace>
<forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
<forkCount>${cpu.core.count}</forkCount>
<reuseForks>false</reuseForks>

View File

@ -127,6 +127,7 @@
<forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
<forkCount>${cpu.core.count}</forkCount>
<reuseForks>false</reuseForks>
<trimStackTrace>false</trimStackTrace>
<environmentVariables>
<OMP_NUM_THREADS>1</OMP_NUM_THREADS>
<LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${user.dir}</LD_LIBRARY_PATH>

View File

@ -232,6 +232,7 @@
<forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
<forkCount>${cpu.core.count}</forkCount>
<reuseForks>false</reuseForks>
<trimStackTrace>false</trimStackTrace>
<environmentVariables>
<OMP_NUM_THREADS>1</OMP_NUM_THREADS>

View File

@ -109,6 +109,7 @@
<forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
<forkCount>${cpu.core.count}</forkCount>
<reuseForks>false</reuseForks>
<trimStackTrace>false</trimStackTrace>
<environmentVariables>
<OMP_NUM_THREADS>1</OMP_NUM_THREADS>
</environmentVariables>
@ -146,7 +147,7 @@
<forkCount>${cpu.core.count}</forkCount>
<reuseForks>false</reuseForks>
<argLine>-Xmx${test.heap.size} -Dorg.bytedeco.javacpp.maxphysicalbytes=${test.offheap.size} -Dorg.bytedeco.javacpp.maxbytes=${test.offheap.size}</argLine>
<trimStackTrace>false</trimStackTrace>
</configuration>
</plugin>
</plugins>

View File

@ -107,7 +107,7 @@
<include>**/*.java</include>
</includes>
<argLine>-Xmx${test.heap.size} -Dorg.bytedeco.javacpp.maxphysicalbytes=${test.offheap.size} -Dorg.bytedeco.javacpp.maxbytes=${test.offheap.size}</argLine>
<trimStackTrace>false</trimStackTrace>
</configuration>
</plugin>
</plugins>

View File

@ -474,6 +474,7 @@
</dependency>
</dependencies>
<configuration>
<trimStackTrace>false</trimStackTrace>
<forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
<forkCount>${cpu.core.count}</forkCount>
<reuseForks>false</reuseForks>
@ -1197,7 +1198,7 @@
<forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
<forkCount>${cpu.core.count}</forkCount>
<reuseForks>false</reuseForks>
<trimStackTrace>false</trimStackTrace>
<environmentVariables>
<OMP_NUM_THREADS>1</OMP_NUM_THREADS>
<DL4J_INTEGRATION_TESTS>true</DL4J_INTEGRATION_TESTS>