Ensure all stack traces are printed, fix 1 test failure
parent 83cb1a2326
commit 1a8f360798

@@ -59,6 +59,8 @@ import java.util.List;
 import static org.junit.jupiter.api.Assertions.*;
 @NativeTag
 @Tag(TagNames.FILE_IO)
+@Tag(TagNames.LONG_TEST)
+@Tag(TagNames.LARGE_RESOURCES)
 public class TestObjectDetectionRecordReader {

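The @Tag(TagNames.LONG_TEST) and @Tag(TagNames.LARGE_RESOURCES) annotations added here (and on most of the test methods below) are JUnit 5 tags; Maven Surefire can select or skip tagged tests through its groups/excludedGroups settings, so heavyweight tests can be kept out of quick CI runs. A minimal sketch of the pattern, assuming made-up tag strings rather than the actual TagNames constants:

    import org.junit.jupiter.api.Tag;
    import org.junit.jupiter.api.Test;

    import static org.junit.jupiter.api.Assertions.assertEquals;

    // Illustrative only: the tag values below are placeholders, not the
    // TagNames constants used in this repository.
    @Tag("long-test")
    @Tag("large-resources")
    class SlowSuiteSketch {

        @Test
        void tagsAffectSelectionOnly() {
            // Tags change which runs pick this test up, not what it does.
            assertEquals(4, 2 + 2);
        }
    }
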
@@ -47,6 +47,7 @@
 <configuration>
     <forkCount>${cpu.core.count}</forkCount>
     <reuseForks>false</reuseForks>
+    <trimStackTrace>false</trimStackTrace>
     <forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
     <argLine>-Ddtype=float -Dfile.encoding=UTF-8
         -Dtest.solr.allowed.securerandom=NativePRNG -Xmx${test.heap.size} -Dorg.bytedeco.javacpp.maxphysicalbytes=${test.offheap.size} -Dorg.bytedeco.javacpp.maxbytes=${test.offheap.size}

@@ -41,6 +41,7 @@
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-surefire-plugin</artifactId>
 <configuration>
+    <trimStackTrace>false</trimStackTrace>
     <forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
     <argLine>-Ddtype=float -Dfile.encoding=UTF-8 -Xmx${test.heap.size}
         -Dtest.solr.allowed.securerandom=NativePRNG

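Both hunks above (and the remaining pom.xml hunks below) add <trimStackTrace>false</trimStackTrace> to the Surefire configuration. With trimming enabled (the default in many Surefire versions), the stack traces printed for failed tests are shortened, which can hide the frames where a failure actually originates; disabling it keeps the full trace, including nested causes. A small invented illustration of the kind of failure this helps diagnose (class and method names are not from this diff):

    import org.junit.jupiter.api.Test;

    // Invented example: the interesting frames (loadConfig/parsePort and the
    // NumberFormatException cause) sit below the test method, which is exactly
    // what a trimmed report tends to drop.
    class DeepFailureSketch {

        @Test
        void failsSeveralFramesDown() {
            loadConfig();
        }

        private void loadConfig() {
            parsePort("not-a-number");
        }

        private void parsePort(String raw) {
            Integer.parseInt(raw); // throws NumberFormatException
        }
    }
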
@@ -111,6 +111,7 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
     @ParameterizedTest
     @MethodSource("org.nd4j.linalg.BaseNd4jTestWithBackends#configs")
     @Tag(TagNames.LONG_TEST)
+    @Tag(TagNames.LARGE_RESOURCES)
     public void testParagraphVectorsVocabBuilding1() throws Exception {
         File file = Resources.asFile("/big/raw_sentences.txt");
         SentenceIterator iter = new BasicLineIterator(file); //UimaSentenceIterator.createWithPath(file.getAbsolutePath());

@@ -160,6 +161,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
     @Tag(TagNames.LONG_TEST)
     @ParameterizedTest
     @MethodSource("org.nd4j.linalg.BaseNd4jTestWithBackends#configs")
+    @Tag(TagNames.LONG_TEST)
+    @Tag(TagNames.LARGE_RESOURCES)
     public void testParagraphVectorsModelling1(Nd4jBackend backend) throws Exception {
         File file = Resources.asFile("/big/raw_sentences.txt");
         SentenceIterator iter = new BasicLineIterator(file);

@@ -288,7 +291,7 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
         SerializationUtils.saveObject(vec, tempFile);


-        ParagraphVectors vec2 = (ParagraphVectors) SerializationUtils.readObject(tempFile);
+        ParagraphVectors vec2 = SerializationUtils.readObject(tempFile);
         INDArray day2 = vec2.getWordVectorMatrix("day").dup();

         List<String> labelsBinary = vec2.labelsSource.getLabels();

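Dropping the explicit (ParagraphVectors) cast above relies on SerializationUtils.readObject having a generic return type, so the assignment target drives type inference. That signature is an assumption here (it is not shown in this diff); a hypothetical helper with the same shape would look roughly like this:

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.ObjectInputStream;

    // Hypothetical stand-in, not the actual DL4J SerializationUtils.
    final class SerializationSketch {

        @SuppressWarnings("unchecked")
        static <T> T readObject(File file) throws IOException, ClassNotFoundException {
            try (ObjectInputStream in = new ObjectInputStream(new FileInputStream(file))) {
                // The unchecked cast lives inside the helper, so callers can write
                // ParagraphVectors vec2 = readObject(tempFile); without a cast.
                return (T) in.readObject();
            }
        }

        private SerializationSketch() { }
    }
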
@@ -352,6 +355,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {

     @Test
+    @Tag(TagNames.LONG_TEST)
+    @Tag(TagNames.LARGE_RESOURCES)
     public void testParagraphVectorsDM() throws Exception {
         File file = Resources.asFile("/big/raw_sentences.txt");
         SentenceIterator iter = new BasicLineIterator(file);

@@ -416,6 +421,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {

     @Timeout(300000)
+    @Tag(TagNames.LONG_TEST)
+    @Tag(TagNames.LARGE_RESOURCES)
     public void testParagraphVectorsDBOW() throws Exception {
         skipUnlessIntegrationTests();

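A side note on the @Timeout(300000) annotations that appear in several of these hunks: JUnit 5's @Timeout uses seconds as its default unit, so a bare 300000 (presumably carried over from a JUnit 4 style millisecond timeout) amounts to roughly three and a half days rather than five minutes. If five minutes is the intent, the unit can be stated explicitly; an illustrative comparison:

    import java.util.concurrent.TimeUnit;

    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.Timeout;

    // Illustrative only: compares the implicit and explicit unit spellings.
    class TimeoutUnitSketch {

        @Test
        @Timeout(300000) // default unit is SECONDS: roughly 3.5 days
        void implicitUnit() {
        }

        @Test
        @Timeout(value = 300, unit = TimeUnit.SECONDS) // 5 minutes
        void explicitUnit() {
        }
    }
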
@@ -494,6 +501,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {

     @Test()
     @Timeout(300000)
+    @Tag(TagNames.LONG_TEST)
+    @Tag(TagNames.LARGE_RESOURCES)
     public void testParagraphVectorsWithWordVectorsModelling1() throws Exception {
         String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
         if(!isIntegrationTests() && "CUDA".equalsIgnoreCase(backend)) {

@@ -585,6 +594,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
      * @throws Exception
      */
     @Test
+    @Tag(TagNames.LONG_TEST)
+    @Tag(TagNames.LARGE_RESOURCES)
     public void testParagraphVectorsReducedLabels1(@TempDir Path testDir) throws Exception {
         val tempDir = testDir.toFile();
         ClassPathResource resource = new ClassPathResource("/labeled");

@@ -636,6 +647,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {

     @Test()
     @Timeout(300000)
+    @Tag(TagNames.LONG_TEST)
+    @Tag(TagNames.LARGE_RESOURCES)
     public void testParallelIterator() throws IOException {
         TokenizerFactory factory = new DefaultTokenizerFactory();
         SentenceIterator iterator = new BasicLineIterator(Resources.asFile("big/raw_sentences.txt"));

@@ -659,6 +672,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
     }

     @Test
+    @Tag(TagNames.LONG_TEST)
+    @Tag(TagNames.LARGE_RESOURCES)
     public void testIterator(@TempDir Path testDir) throws IOException {
         val folder_labeled = new File(testDir.toFile(),"labeled");
         val folder_unlabeled = new File(testDir.toFile(),"unlabeled");

@@ -708,6 +723,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
      there's no need in this test within travis, use it manually only for problems detection
      */
     @Test
+    @Tag(TagNames.LONG_TEST)
+    @Tag(TagNames.LARGE_RESOURCES)
     public void testParagraphVectorsOverExistingWordVectorsModel(@TempDir Path testDir) throws Exception {
         String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
         if(!isIntegrationTests() && "CUDA".equalsIgnoreCase(backend)) {

@@ -854,6 +871,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
      * Special test to check d2v inference against pre-trained gensim model and
      */
     @Test
+    @Tag(TagNames.LONG_TEST)
+    @Tag(TagNames.LARGE_RESOURCES)
     public void testGensimEquality() throws Exception {

         INDArray expA = Nd4j.create(new double[] {-0.02461922, -0.00801059, -0.01821643, 0.0167951, 0.02240154,

@@ -1003,6 +1022,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
     }

     @Test
+    @Tag(TagNames.LONG_TEST)
+    @Tag(TagNames.LARGE_RESOURCES)
     public void testDirectInference(@TempDir Path testDir) throws Exception {
         boolean isIntegration = isIntegrationTests();
         File resource = Resources.asFile("/big/raw_sentences.txt");

@@ -1036,6 +1057,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
     }

     @Test
+    @Tag(TagNames.LONG_TEST)
+    @Tag(TagNames.LARGE_RESOURCES)
     public void testGoogleModelForInference() throws Exception {
         WordVectors googleVectors = WordVectorSerializer.readWord2VecModel(new File("/ext/GoogleNews-vectors-negative300.bin.gz"));

@@ -1055,6 +1078,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {

     @Test()
     @Timeout(300000)
+    @Tag(TagNames.LONG_TEST)
+    @Tag(TagNames.LARGE_RESOURCES)
     public void testHash() {
         VocabWord w1 = new VocabWord(1.0, "D1");
         VocabWord w2 = new VocabWord(1.0, "Bo");

@@ -1076,7 +1101,9 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
     @Tag(TagNames.LONG_TEST)
     @ParameterizedTest
     @MethodSource("org.nd4j.linalg.BaseNd4jTestWithBackends#configs")
-    public void testsParallelFit1() throws Exception {
+    @Tag(TagNames.LONG_TEST)
+    @Tag(TagNames.LARGE_RESOURCES)
+    public void testsParallelFit1(Nd4jBackend backend) throws Exception {
         final File file = Resources.asFile("big/raw_sentences.txt");

         for (int i = 0; i < 1000; i++) {

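The change above swaps the no-argument signature for one that accepts the Nd4jBackend supplied by @MethodSource("org.nd4j.linalg.BaseNd4jTestWithBackends#configs"); this looks like the single test failure mentioned in the commit message, since a @ParameterizedTest is meant to consume the arguments its source provides. A minimal sketch of that wiring, using an invented stand-in type instead of the nd4j classes:

    import java.util.stream.Stream;

    import org.junit.jupiter.params.ParameterizedTest;
    import org.junit.jupiter.params.provider.MethodSource;

    import static org.junit.jupiter.api.Assertions.assertNotNull;

    class BackendParameterizedSketch {

        // Invented stand-in for something like Nd4jBackend.
        record FakeBackend(String name) { }

        // Each element of the stream becomes one invocation of the test below.
        static Stream<FakeBackend> configs() {
            return Stream.of(new FakeBackend("cpu"), new FakeBackend("gpu"));
        }

        @ParameterizedTest
        @MethodSource("configs")
        void runsOncePerBackend(FakeBackend backend) {
            // The declared parameter receives the argument for this invocation.
            assertNotNull(backend);
        }
    }
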
@@ -1119,6 +1146,8 @@ public class ParagraphVectorsTest extends BaseDL4JTest {

     @Test()
     @Timeout(300000)
+    @Tag(TagNames.LONG_TEST)
+    @Tag(TagNames.LARGE_RESOURCES)
     public void testJSONSerialization() {
         ParagraphVectors paragraphVectors = new ParagraphVectors.Builder().build();
         AbstractCache<VocabWord> cache = new AbstractCache.Builder<VocabWord>().build();

@@ -1160,6 +1189,7 @@ public class ParagraphVectorsTest extends BaseDL4JTest {
     @Test()
     @Timeout(300000)
     @Tag(TagNames.LONG_TEST)
+    @Tag(TagNames.LARGE_RESOURCES)
     public void testDoubleFit() throws Exception {
         boolean isIntegration = isIntegrationTests();
         File resource = Resources.asFile("/big/raw_sentences.txt");

@@ -140,6 +140,7 @@
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-surefire-plugin</artifactId>
 <configuration>
+    <trimStackTrace>false</trimStackTrace>
     <forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
     <forkCount>${cpu.core.count}</forkCount>
     <reuseForks>false</reuseForks>

@@ -127,6 +127,7 @@
 <forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
 <forkCount>${cpu.core.count}</forkCount>
 <reuseForks>false</reuseForks>
+<trimStackTrace>false</trimStackTrace>
 <environmentVariables>
     <OMP_NUM_THREADS>1</OMP_NUM_THREADS>
     <LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${user.dir}</LD_LIBRARY_PATH>

@@ -232,6 +232,7 @@
 <forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
 <forkCount>${cpu.core.count}</forkCount>
 <reuseForks>false</reuseForks>
+<trimStackTrace>false</trimStackTrace>
 <environmentVariables>
     <OMP_NUM_THREADS>1</OMP_NUM_THREADS>

@@ -109,6 +109,7 @@
 <forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
 <forkCount>${cpu.core.count}</forkCount>
 <reuseForks>false</reuseForks>
+<trimStackTrace>false</trimStackTrace>
 <environmentVariables>
     <OMP_NUM_THREADS>1</OMP_NUM_THREADS>
 </environmentVariables>

@@ -146,7 +147,7 @@
 <forkCount>${cpu.core.count}</forkCount>
 <reuseForks>false</reuseForks>
 <argLine>-Xmx${test.heap.size} -Dorg.bytedeco.javacpp.maxphysicalbytes=${test.offheap.size} -Dorg.bytedeco.javacpp.maxbytes=${test.offheap.size}</argLine>
-
+<trimStackTrace>false</trimStackTrace>
 </configuration>
 </plugin>
 </plugins>

@@ -107,7 +107,7 @@
 <include>**/*.java</include>
 </includes>
 <argLine>-Xmx${test.heap.size} -Dorg.bytedeco.javacpp.maxphysicalbytes=${test.offheap.size} -Dorg.bytedeco.javacpp.maxbytes=${test.offheap.size}</argLine>
-
+<trimStackTrace>false</trimStackTrace>
 </configuration>
 </plugin>
 </plugins>

pom.xml

@@ -474,6 +474,7 @@
 </dependency>
 </dependencies>
 <configuration>
+    <trimStackTrace>false</trimStackTrace>
     <forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
     <forkCount>${cpu.core.count}</forkCount>
     <reuseForks>false</reuseForks>

@@ -1197,7 +1198,7 @@
 <forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
 <forkCount>${cpu.core.count}</forkCount>
 <reuseForks>false</reuseForks>
-
+<trimStackTrace>false</trimStackTrace>
 <environmentVariables>
     <OMP_NUM_THREADS>1</OMP_NUM_THREADS>
     <DL4J_INTEGRATION_TESTS>true</DL4J_INTEGRATION_TESTS>

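The DL4J_INTEGRATION_TESTS environment variable set in this last profile is presumably what guards such as isIntegrationTests() and skipUnlessIntegrationTests() in the test hunks above key off; the actual helper is not part of this diff. A plausible sketch of that pattern with JUnit 5 assumptions, offered purely as an assumption about how such a guard can be written:

    import static org.junit.jupiter.api.Assumptions.assumeTrue;

    // Assumed pattern, not the real DL4J helper: abort (rather than fail) a test
    // unless the integration-test switch is present in the environment.
    final class IntegrationGuardSketch {

        static boolean isIntegrationTests() {
            return Boolean.parseBoolean(System.getenv("DL4J_INTEGRATION_TESTS"));
        }

        static void skipUnlessIntegrationTests() {
            assumeTrue(isIntegrationTests(),
                    "Skipping: DL4J_INTEGRATION_TESTS is not set to true");
        }

        private IntegrationGuardSketch() { }
    }
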