More test fixes

master
Brian Rosenberger 2022-10-11 07:59:22 +02:00
parent a32644bdb7
commit 2b4d44ea67
4 changed files with 7 additions and 45 deletions

View File

@@ -55,7 +55,6 @@ configurations.all {
 }
 allprojects { Project proj ->
     apply plugin: 'com.google.osdetector'
@@ -162,21 +161,3 @@ allprojects { Project proj ->
         }
     }
 }
-task aggregatedJavadocs(type: Javadoc, description: 'Generate javadocs from all child projects as if it was a single project', group: 'Documentation') {
-    subprojects.each { proj ->
-        proj.tasks.withType(Javadoc).each { javadocTask ->
-            logger.quiet("Adding javadoc for project " + proj.name)
-            source += javadocTask.source
-            classpath += javadocTask.classpath
-            excludes += javadocTask.excludes
-            includes += javadocTask.includes
-        }
-    }
-    destinationDir = file("$buildDir/docs/javadoc")
-    title = "$project.name $version API"
-    options.author true
-    options.links 'http://docs.oracle.com/javase/8/docs/api/'
-    options.addStringOption('Xdoclint:none', '-quiet')
-}

View File

@@ -129,20 +129,4 @@ echo "nameserver 8.8.8.8" | sudo tee -a /etc/resolv.conf
 # Buildparameter: #
 -P\<xxx>\
 CAVIS_AVX_EXTENSION = {avx2 | avx512}, default is avx2
-# Zeppelin Spark dependencies #
-3
-To add the dependency to the language models, use the following format in the Dependencies section of the Spark Interpreter configuration (Interpreters -> Spark -> Edit -> Dependencies):
-groupId:artifactId:packaging:classifier:version
-In your case it should work with
-edu.stanford.nlp:stanford-corenlp:jar:models:3.8.0
-Native CPU code under Linux needs libc6-dev
-/lib/x86_64-linux-gnu/libm.so.6: version `GLIBC_2.29' not found
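
CAVIS_AVX_EXTENSION above is an ordinary Gradle project property, so it is passed on the command line via -P, e.g. ./gradlew build -PCAVIS_AVX_EXTENSION=avx512. Below is a minimal sketch of how a build script could read it, using the documented default of avx2; how the actual cavis build consumes the property is not shown in this diff and is an assumption here:

// Sketch only: read the -P build parameter and fall back to the documented default (avx2).
def avxExtension = project.findProperty('CAVIS_AVX_EXTENSION') ?: 'avx2'
// How this value feeds into the native compilation is not part of this commit.
logger.lifecycle("Building native code with AVX extension: {}", avxExtension)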

View File

@@ -64,7 +64,7 @@ public class HelperUtils {
         if("CUDA".equalsIgnoreCase(backend) && cudnnHelperClassName != null && !cudnnHelperClassName.isEmpty()) {
             if(DL4JClassLoading.loadClassByName(cudnnHelperClassName) != null) {
                 log.debug("Attempting to initialize cudnn helper {}",cudnnHelperClassName);
-                helperRet = DL4JClassLoading.<LayerHelper>createNewInstance(
+                helperRet = (LayerHelper) DL4JClassLoading.<LayerHelper>createNewInstance(
                         cudnnHelperClassName,
                         (Class<? super LayerHelper>) layerHelperSuperClass,
                         new Object[]{arguments});
@@ -76,7 +76,7 @@ public class HelperUtils {
                 ClassLoader classLoader = DL4JClassLoading.getDl4jClassloader();
                 DL4JClassLoading.setDl4jClassloaderFromClass(layerHelperSuperClass);
                 try {
-                    helperRet = DL4JClassLoading.<LayerHelper>createNewInstance(
+                    helperRet = (LayerHelper) DL4JClassLoading.<LayerHelper>createNewInstance(
                             cudnnHelperClassName,
                             (Class<? super LayerHelper>) layerHelperSuperClass,
                             arguments);
@@ -99,7 +99,7 @@ public class HelperUtils {
             }
         } else if("CPU".equalsIgnoreCase(backend) && oneDnnClassName != null && !oneDnnClassName.isEmpty()) {
-            helperRet = DL4JClassLoading.createNewInstance(
+            helperRet = DL4JClassLoading.<LayerHelper>createNewInstance(
                     oneDnnClassName,
                     arguments);
             log.trace("Created oneDNN helper: {}, layer {}", oneDnnClassName,layerName);

View File

@@ -12,15 +12,12 @@ configurations.archives.artifacts.with { archives ->
 dependencies {
     //Todo clean this
     api platform(project(":cavis-common-platform"))
-    //api "org.bytedeco:javacpp:1.5.7" //for some reason we needed to apply version numbers here, they do not end up in POM otherwise
+    api "org.bytedeco:javacpp:1.5.7" //for some reason we needed to apply version numbers here, they do not end up in POM otherwise
     api "com.fasterxml.jackson.datatype:jackson-datatype-joda:2.10.5"
     api 'org.slf4j:slf4j-simple:2.0.3'
     api 'org.slf4j:slf4j-api:2.0.3'
-    //TODO for the two below.. either platform specific uber jars or a single big one with all platforms
-    api group: "org.bytedeco", name: "javacpp", version: "1.5.7", classifier: "linux-x86_64"
-    //api group: "org.bytedeco", name: "javacpp", version: "1.5.7"
-    // api group: 'net.brutex.cavis-native', name: 'cavis-native-lib', version: '1.0.0-SNAPSHOT', classifier: "linux-x86_64-avx2-cpu"
-    //api group: 'net.brutex.cavis-native', name: 'cavis-native-lib', version: '1.0.0-SNAPSHOT'
+    //api group: "org.bytedeco", name: "javacpp", classifier: "linux-x64_86"
     rootProject.getAllprojects().each { Project sproj ->
         if(!sproj.name.equals(name) && !sproj.name.equals("cavis-common-platform")
                 && !sproj.name.equals("Cavis")