Added support for the archunit (#9062)

* Added support for the archunit
  Signed-off-by: Dariusz Zbyrad <dariusz.zbyrad@gmail.com>
* Updated pom files
  Signed-off-by: Dariusz Zbyrad <dariusz.zbyrad@gmail.com>

master
parent 6b736eae33
commit 7f4f3b61f5
AggregableMultiOpArchTest.java (new file)
@@ -0,0 +1,31 @@
+package org.datavec.api.transform.ops;
+
+import com.tngtech.archunit.core.importer.ImportOption;
+import com.tngtech.archunit.junit.AnalyzeClasses;
+import com.tngtech.archunit.junit.ArchTest;
+import com.tngtech.archunit.junit.ArchUnitRunner;
+import com.tngtech.archunit.lang.ArchRule;
+import org.junit.runner.RunWith;
+import org.nd4j.common.tests.BaseND4JTest;
+
+import java.io.Serializable;
+
+import static com.tngtech.archunit.lang.syntax.ArchRuleDefinition.classes;
+
+/**
+ * Created by dariuszzbyrad on 7/31/2020.
+ */
+@RunWith(ArchUnitRunner.class)
+@AnalyzeClasses(packages = "org.datavec.api.transform.ops", importOptions = {ImportOption.DoNotIncludeTests.class})
+public class AggregableMultiOpArchTest extends BaseND4JTest {
+
+    @ArchTest
+    public static final ArchRule ALL_AGGREGATE_OPS_MUST_BE_SERIALIZABLE = classes()
+            .that().resideInAPackage("org.datavec.api.transform.ops")
+            .and().doNotHaveSimpleName("AggregatorImpls")
+            .and().doNotHaveSimpleName("IAggregableReduceOp")
+            .and().doNotHaveSimpleName("StringAggregatorImpls")
+            .and().doNotHaveFullyQualifiedName("org.datavec.api.transform.ops.StringAggregatorImpls$1")
+            .should().implement(Serializable.class)
+            .because("All aggregate ops must be serializable.");
+}
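Aside: the rule above is discovered and evaluated by the ArchUnit JUnit 4 runner declared via @RunWith(ArchUnitRunner.class), using the packages and import options from @AnalyzeClasses. The same kind of rule can also be evaluated programmatically with ArchUnit's ClassFileImporter, for example as a quick check outside the test suite. A minimal sketch, assuming the ArchUnit core library is on the classpath; the class name ArchRuleSketch and its main method are illustrative and not part of this commit, and only the AggregatorImpls exclusion is repeated here for brevity:

package org.datavec.api.transform.ops;

import com.tngtech.archunit.core.domain.JavaClasses;
import com.tngtech.archunit.core.importer.ClassFileImporter;
import com.tngtech.archunit.core.importer.ImportOption;
import com.tngtech.archunit.lang.ArchRule;

import java.io.Serializable;

import static com.tngtech.archunit.lang.syntax.ArchRuleDefinition.classes;

// Illustrative only: evaluates a Serializable rule without the JUnit 4 runner.
public class ArchRuleSketch {
    public static void main(String[] args) {
        // Import the package under test, skipping test classes (mirrors @AnalyzeClasses above).
        JavaClasses imported = new ClassFileImporter()
                .withImportOption(ImportOption.Predefined.DO_NOT_INCLUDE_TESTS)
                .importPackages("org.datavec.api.transform.ops");

        ArchRule rule = classes()
                .that().resideInAPackage("org.datavec.api.transform.ops")
                .and().doNotHaveSimpleName("AggregatorImpls")
                .should().implement(Serializable.class)
                .because("All aggregate ops must be serializable.");

        // Throws AssertionError with a violation report if any matching class breaks the rule.
        rule.check(imported);
    }
}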
AggregableMultiOpTest.java
@@ -68,91 +68,4 @@ public class AggregableMultiOpTest extends BaseND4JTest {
         assertTrue(combinedRes.get(1).toDouble() == 90D);
         assertTrue(combinedRes.get(0).toInt() == 1);
     }
-
-    @Test
-    public void testAllAggregateOpsAreSerializable() throws Exception {
-        Set<String> allTypes = new HashSet<>();
-        allTypes.add("org.datavec.api.transform.ops.LongWritableOp");
-        allTypes.add("org.datavec.api.transform.ops.IntWritableOp");
-        allTypes.add("org.datavec.api.transform.ops.AggregatorImpls$AggregableMean");
-        allTypes.add("org.datavec.api.transform.ops.StringAggregatorImpls$AggregableStringReduce");
-        allTypes.add("org.datavec.api.transform.ops.AggregatorImpls$AggregableRange");
-        allTypes.add("org.datavec.api.transform.ops.AggregatorImplsTest");
-        allTypes.add("org.datavec.api.transform.ops.DispatchWithConditionOp");
-        allTypes.add("org.datavec.api.transform.ops.AggregatorImpls$AggregableVariance");
-        allTypes.add("org.datavec.api.transform.ops.AggregatorImpls");
-        allTypes.add("org.datavec.api.transform.ops.FloatWritableOp");
-        allTypes.add("org.datavec.api.transform.ops.AggregatorImpls$AggregableProd");
-        allTypes.add("org.datavec.api.transform.ops.AggregatorImpls$AggregableLast");
-        allTypes.add("org.datavec.api.transform.ops.StringAggregatorImpls$AggregableStringPrepend");
-        allTypes.add("org.datavec.api.transform.ops.ByteWritableOp");
-        allTypes.add("org.datavec.api.transform.ops.AggregableMultiOpTest");
-        allTypes.add("org.datavec.api.transform.ops.AggregatorImpls$AggregableStdDev");
-        allTypes.add("org.datavec.api.transform.ops.StringAggregatorImpls$1");
-        allTypes.add("org.datavec.api.transform.ops.DispatchOp");
-        allTypes.add("org.datavec.api.transform.ops.AggregatorImpls$AggregableMin");
-        allTypes.add("org.datavec.api.transform.ops.StringAggregatorImpls$AggregableStringAppend");
-        allTypes.add("org.datavec.api.transform.ops.AggregatorImpls$AggregableCount");
-        allTypes.add("org.datavec.api.transform.ops.AggregatorImpls$AggregableSum");
-        allTypes.add("org.datavec.api.transform.ops.AggregatorImpls$AggregablePopulationVariance");
-        allTypes.add("org.datavec.api.transform.ops.AggregableCheckingOp");
-        allTypes.add("org.datavec.api.transform.ops.AggregatorImpls$AggregableMax");
-        allTypes.add("org.datavec.api.transform.ops.AggregableMultiOp");
-        allTypes.add("org.datavec.api.transform.ops.IAggregableReduceOp");
-        allTypes.add("org.datavec.api.transform.ops.DispatchOpTest");
-        allTypes.add("org.datavec.api.transform.ops.AggregatorImpls$AggregableCountUnique");
-        allTypes.add("org.datavec.api.transform.ops.AggregatorImpls$AggregableUncorrectedStdDev");
-        allTypes.add("org.datavec.api.transform.ops.StringWritableOp");
-        allTypes.add("org.datavec.api.transform.ops.StringAggregatorImpls");
-        allTypes.add("org.datavec.api.transform.ops.DoubleWritableOp");
-        allTypes.add("org.datavec.api.transform.ops.AggregatorImpls$AggregableFirst");
-
-        Set<String> ops = new HashSet<>();
-
-        for (String type : allTypes) {
-            if (type.startsWith("org.datavec.api.transform.ops")) {
-                if (type.endsWith("Op")) {
-                    ops.add(type);
-                }
-
-                if (type.contains("Aggregable") && !type.endsWith("Test")) {
-                    ops.add(type);
-                }
-            }
-        }
-
-        for (String op : ops) {
-            Class<?> cls = Class.forName(op);
-            assertTrue(op + " should implement Serializable", implementsSerializable(cls));
-        }
-    }
-
-    private boolean implementsSerializable(Class<?> cls) {
-        if (cls == null) {
-            return false;
-        }
-        if (cls == Serializable.class) {
-            return true;
-        }
-
-        Class<?>[] interfaces = cls.getInterfaces();
-        Set<Class<?>> parents = new HashSet<>();
-        parents.add(cls.getSuperclass());
-
-        for (Class<?> anInterface : interfaces) {
-            Collections.addAll(parents, anInterface.getInterfaces());
-
-            if (anInterface.equals(Serializable.class)) {
-                return true;
-            }
-        }
-
-        for (Class<?> parent : parents) {
-            if (implementsSerializable(parent)) {
-                return true;
-            }
-        }
-
-        return false;
-    }
 }
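Aside: the removed testAllAggregateOpsAreSerializable maintained a hard-coded list of class names and walked superclasses and interfaces by hand. For reference, java.lang.Class.isAssignableFrom already performs that whole-hierarchy check, so the same per-class test could be written in a single line. A minimal sketch; the class name SerializableCheckSketch and its main method are illustrative and not part of the codebase:

import java.io.Serializable;

// Illustrative only: isAssignableFrom covers superclasses and superinterfaces transitively,
// so no recursive hierarchy walk is required.
public final class SerializableCheckSketch {
    static boolean implementsSerializable(Class<?> cls) {
        return cls != null && Serializable.class.isAssignableFrom(cls);
    }

    public static void main(String[] args) throws Exception {
        // AggregableMultiOp is one of the classes listed in the removed test.
        Class<?> cls = Class.forName("org.datavec.api.transform.ops.AggregableMultiOp");
        System.out.println(cls.getName() + " serializable: " + implementsSerializable(cls));
    }
}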
pom.xml
@@ -75,6 +75,12 @@
             <version>${junit.version}</version>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>com.tngtech.archunit</groupId>
+            <artifactId>archunit-junit4</artifactId>
+            <version>${archunit.version}</version>
+            <scope>test</scope>
+        </dependency>
         <dependency>
             <groupId>org.projectlombok</groupId>
             <artifactId>lombok</artifactId>
pom.xml
@@ -333,6 +333,7 @@
         <args4j.version>2.0.29</args4j.version>
         <slf4j.version>1.7.21</slf4j.version>
         <junit.version>4.12</junit.version>
+        <archunit.version>0.14.1</archunit.version>
         <logback.version>1.2.3</logback.version>
         <jackson.version>2.10.1</jackson.version>
         <jackson.databind.version>2.10.3</jackson.databind.version>