diff --git a/brutex-extended-tests/build.gradle b/brutex-extended-tests/build.gradle
index c15f6d325..8115c10bd 100644
--- a/brutex-extended-tests/build.gradle
+++ b/brutex-extended-tests/build.gradle
@@ -34,6 +34,8 @@ ext {
 }
 
 dependencies {
+    implementation platform(projects.cavisCommonPlatform)
+
     implementation "com.fasterxml.jackson.core:jackson-databind"
     implementation "com.google.guava:guava"
     implementation projects.cavisDnn.cavisDnnCore
@@ -52,6 +54,16 @@ dependencies {
     testImplementation "org.apache.spark:spark-sql_${scalaVersion}"
     testCompileOnly "org.scala-lang:scala-library"
 
+    // REST client
+    // define any required OkHttp artifacts without version
+    implementation("com.squareup.okhttp3:okhttp")
+    implementation("com.squareup.okhttp3:logging-interceptor")
+
+    implementation "org.bytedeco:javacv"
+    implementation "org.bytedeco:opencv"
+    implementation group: "org.bytedeco", name: "opencv", classifier: buildTarget
+
     implementation "it.unimi.dsi:fastutil-core:8.5.8"
 
     implementation projects.cavisDnn.cavisDnnSpark.cavisDnnSparkCore
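The version-less OkHttp coordinates above are pinned by the cavisCommonPlatform BOM and back the new ScoreToChartListener added later in this patch. A minimal sketch of the client pattern involved (the endpoint URL is illustrative, not part of this patch):

```java
import java.io.IOException;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;

public class OkHttpSmoke {
    public static void main(String[] args) throws IOException {
        OkHttpClient client = new OkHttpClient();
        Request request = new Request.Builder()
                .url("http://localhost:8080/hello") // illustrative endpoint
                .build();
        // try-with-resources closes the response body, which OkHttp requires
        try (Response response = client.newCall(request).execute()) {
            System.out.println("HTTP " + response.code());
        }
    }
}
```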
diff --git a/brutex-extended-tests/src/test/java/net/brutex/gan/App.java b/brutex-extended-tests/src/test/java/net/brutex/gan/App.java
index 5d4704f2c..bf4783145 100644
--- a/brutex-extended-tests/src/test/java/net/brutex/gan/App.java
+++ b/brutex-extended-tests/src/test/java/net/brutex/gan/App.java
@@ -21,49 +21,90 @@
 package net.brutex.gan;
 
+import java.util.Random;
+import javax.ws.rs.client.ClientBuilder;
+import lombok.extern.slf4j.Slf4j;
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.Response;
 import org.apache.commons.lang3.ArrayUtils;
+import org.datavec.api.Writable;
+import org.datavec.api.records.reader.RecordReader;
+import org.datavec.api.split.FileSplit;
+import org.datavec.image.loader.NativeImageLoader;
+import org.datavec.image.recordreader.ImageRecordReader;
+import org.datavec.image.transform.ColorConversionTransform;
+import org.datavec.image.transform.ImageTransform;
+import org.datavec.image.transform.PipelineImageTransform;
+import org.datavec.image.transform.ResizeImageTransform;
+import org.datavec.image.transform.ScaleImageTransform;
+import org.datavec.image.transform.ShowImageTransform;
+import org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator;
 import org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator;
 import org.deeplearning4j.nn.conf.GradientNormalization;
 import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
 import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
+import org.deeplearning4j.nn.conf.inputs.InputType;
 import org.deeplearning4j.nn.conf.layers.*;
 import org.deeplearning4j.nn.conf.layers.misc.FrozenLayerWithBackprop;
 import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
 import org.deeplearning4j.nn.weights.WeightInit;
 import org.deeplearning4j.optimize.listeners.PerformanceListener;
+import org.deeplearning4j.optimize.listeners.ScoreToChartListener;
+import org.glassfish.jersey.client.JerseyClient;
+import org.glassfish.jersey.client.JerseyClientBuilder;
 import org.junit.jupiter.api.Test;
 import org.nd4j.linalg.activations.Activation;
 import org.nd4j.linalg.activations.impl.ActivationLReLU;
 import org.nd4j.linalg.api.ndarray.INDArray;
 import org.nd4j.linalg.dataset.DataSet;
+import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
+import org.nd4j.linalg.dataset.api.preprocessor.ImagePreProcessingScaler;
+import org.nd4j.linalg.dataset.api.preprocessor.NormalizerMinMaxScaler;
 import org.nd4j.linalg.factory.Nd4j;
 import org.nd4j.linalg.learning.config.Adam;
 import org.nd4j.linalg.learning.config.IUpdater;
 import org.nd4j.linalg.lossfunctions.LossFunctions;
+
 import javax.swing.*;
 import java.awt.*;
 import java.awt.image.BufferedImage;
 import java.io.File;
 import java.util.Arrays;
+import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction;
 
+@Slf4j
 public class App {
-    private static final double LEARNING_RATE = 0.0002;
+    private static final double LEARNING_RATE = 0.000002;
     private static final double GRADIENT_THRESHOLD = 100.0;
+
+    private static final int X_DIM = 28;
+    private static final int Y_DIM = 28;
+    private static final int CHANNELS = 1;
+    private static final int batchSize = 9;
+    private static final int INPUT = 128;
+
+    private static final int OUTPUT_PER_PANEL = 4;
+
+    private static final int ARRAY_SIZE_PER_SAMPLE = X_DIM*Y_DIM*CHANNELS;
+
     private static final IUpdater UPDATER = Adam.builder().learningRate(LEARNING_RATE).beta1(0.5).build();
 
-    private static JFrame frame;
+    private static JFrame frame;
+    private static JFrame frame2;
     private static JPanel panel;
+    private static JPanel panel2;
 
     private static Layer[] genLayers() {
         return new Layer[] {
-                new DenseLayer.Builder().nIn(100).nOut(256).weightInit(WeightInit.NORMAL).build(),
+                new DenseLayer.Builder().nIn(INPUT).nOut(X_DIM*Y_DIM*CHANNELS).weightInit(WeightInit.NORMAL).build(),
                 new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
-                new DenseLayer.Builder().nIn(256).nOut(512).build(),
+                new DenseLayer.Builder().nIn(X_DIM*Y_DIM*CHANNELS).nOut(X_DIM*Y_DIM).build(),
                 new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
-                new DenseLayer.Builder().nIn(512).nOut(1024).build(),
+                new DenseLayer.Builder().nIn(X_DIM*Y_DIM).nOut(X_DIM*Y_DIM).build(),
                 new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
-                new DenseLayer.Builder().nIn(1024).nOut(784).activation(Activation.TANH).build()
+                new DenseLayer.Builder().nIn(X_DIM*Y_DIM).nOut(X_DIM*Y_DIM*CHANNELS).activation(Activation.TANH)
+                        .build()
         };
     }
 
@@ -81,6 +122,7 @@ public class App {
                 .weightInit(WeightInit.XAVIER)
                 .activation(Activation.IDENTITY)
                 .list(genLayers())
+                .setInputType(InputType.convolutional(X_DIM, Y_DIM, CHANNELS))
                 .build();
 
         return conf;
@@ -88,16 +130,19 @@ public class App {
 
     private static Layer[] disLayers() {
         return new Layer[]{
-                new DenseLayer.Builder().nIn(784).nOut(1024).build(),
+                new DenseLayer.Builder().nOut(X_DIM*Y_DIM*CHANNELS*2).build(), // input is set by setInputType on the network
                 new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
                 new DropoutLayer.Builder(1 - 0.5).build(),
-                new DenseLayer.Builder().nIn(1024).nOut(512).build(),
+                new DenseLayer.Builder().nIn(X_DIM * Y_DIM*CHANNELS*2).nOut(X_DIM*Y_DIM*CHANNELS*4).build(), // HxBxC
                 new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
                 new DropoutLayer.Builder(1 - 0.5).build(),
-                new DenseLayer.Builder().nIn(512).nOut(256).build(),
+                new DenseLayer.Builder().nIn(X_DIM*Y_DIM*CHANNELS*4).nOut(X_DIM*Y_DIM*CHANNELS).build(),
                 new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
                 new DropoutLayer.Builder(1 - 0.5).build(),
-                new OutputLayer.Builder(LossFunctions.LossFunction.XENT).nIn(256).nOut(1).activation(Activation.SIGMOID).build()
+                new DenseLayer.Builder().nIn(X_DIM*Y_DIM*CHANNELS).nOut(X_DIM*Y_DIM).build(),
+                new ActivationLayer.Builder(new ActivationLReLU(0.2)).build(),
+                new DropoutLayer.Builder(1 - 0.5).build(),
+                new OutputLayer.Builder(LossFunction.XENT).nIn(X_DIM*Y_DIM).nOut(1).activation(Activation.SIGMOID).build()
         };
     }
 
@@ -110,6 +155,7 @@ public class App {
                 .weightInit(WeightInit.XAVIER)
                 .activation(Activation.IDENTITY)
                 .list(disLayers())
+                .setInputType(InputType.convolutional(X_DIM, Y_DIM, CHANNELS))
                 .build();
 
         return conf;
@@ -135,6 +181,7 @@ public class App {
                 .weightInit(WeightInit.XAVIER)
                 .activation(Activation.IDENTITY)
                 .list(layers)
+                .setInputType(InputType.convolutional(X_DIM, Y_DIM, CHANNELS))
                 .build();
 
         return conf;
@@ -149,7 +196,25 @@ public class App {
     public static void main(String... args) throws Exception {
         Nd4j.getMemoryManager().setAutoGcWindow(15 * 1000);
 
-        MnistDataSetIterator trainData = new MnistDataSetIterator(128, true, 42);
+        // MnistDataSetIterator trainData = new MnistDataSetIterator(128, true, 45);
+        // FileSplit fileSplit = new FileSplit(new File("c:/users/brian/downloads/flowers"), NativeImageLoader.getALLOWED_FORMATS());
+        FileSplit fileSplit = new FileSplit(new File("c:/users/brian/downloads/humans"), NativeImageLoader.getALLOWED_FORMATS());
+
+        ImageTransform transform = new ColorConversionTransform(new Random(42), 7); // conversion code 7: convert to grayscale
+        ImageTransform transform2 = new ShowImageTransform("Tester", 30);
+        ImageTransform transform3 = new ResizeImageTransform(X_DIM, Y_DIM);
+
+        ImageTransform tr = new PipelineImageTransform.Builder()
+                .addImageTransform(transform)
+                .addImageTransform(transform3)
+                //.addImageTransform(transform2)
+                .build();
+
+        ImageRecordReader imageRecordReader = new ImageRecordReader(X_DIM, Y_DIM, CHANNELS);
+        imageRecordReader.initialize(fileSplit, tr);
+        DataSetIterator trainData = new RecordReaderDataSetIterator(imageRecordReader, batchSize);
 
         MultiLayerNetwork gen = new MultiLayerNetwork(generator());
         MultiLayerNetwork dis = new MultiLayerNetwork(discriminator());
@@ -160,27 +225,50 @@ public class App {
 
         copyParams(gen, dis, gan);
 
-        gen.setListeners(new PerformanceListener(10, true));
-        dis.setListeners(new PerformanceListener(10, true));
-        gan.setListeners(new PerformanceListener(10, true));
+        //gen.setListeners(new PerformanceListener(10, true));
+        //dis.setListeners(new PerformanceListener(10, true));
+        //gan.setListeners(new PerformanceListener(10, true));
+        gan.setListeners(new ScoreToChartListener("gan"));
+        //dis.setListeners(new ScoreToChartListener("dis"));
 
-        trainData.reset();
+        gan.fit(Nd4j.rand(batchSize, CHANNELS, X_DIM, Y_DIM), Nd4j.zeros(batchSize, 1));
+
+        //gan.fit(new DataSet(trainData.next().getFeatures(), Nd4j.zeros(batchSize, 1)));
+        //trainData.reset();
 
         int j = 0;
-        for (int i = 0; i < 20; i++) {
+        for (int i = 0; i < 201; i++) { // epoch
             while (trainData.hasNext()) {
                 j++;
+                DataSet next = trainData.next();
 
                 // generate data
-                INDArray real = trainData.next().getFeatures().muli(2).subi(1);
-                int batchSize = (int) real.shape()[0];
+                INDArray real = next.getFeatures(); //.div(255f);
 
+                // start the next epoch if there are not enough images left for a full batch
+                if (real.length() < ARRAY_SIZE_PER_SAMPLE*batchSize) {
+                    log.warn("Your total number of input images is not a multiple of {}, "
+                            + "thus skipping {} images to make it fit", batchSize, real.length()/ARRAY_SIZE_PER_SAMPLE);
+                    break;
+                }
+
+                if (i%20 == 0) {
+                    // frame2 = visualize(new INDArray[] {real}, batchSize,
+                    //     frame2 == null ? new JFrame() : frame2, true); // real holds batchSize images
+                }
+                real.divi(255f);
+
+                // int batchSize = (int) real.shape()[0];
 
-                INDArray fakeIn = Nd4j.rand(batchSize, 100);
+                INDArray fakeIn = Nd4j.rand(batchSize, CHANNELS, X_DIM, Y_DIM);
                 INDArray fake = gan.activateSelectedLayers(0, gen.getLayers().length - 1, fakeIn);
+                fake = fake.reshape(batchSize, CHANNELS, X_DIM, Y_DIM);
+                //log.info("real has {} items.", real.length());
 
                 DataSet realSet = new DataSet(real, Nd4j.zeros(batchSize, 1));
                 DataSet fakeSet = new DataSet(fake, Nd4j.ones(batchSize, 1));
+
                 DataSet data = DataSet.merge(Arrays.asList(realSet, fakeSet));
 
                 dis.fit(data);
@@ -189,21 +277,29 @@ public class App {
 
                 // Update the discriminator in the GAN network
                 updateGan(gen, dis, gan);
 
-                gan.fit(new DataSet(Nd4j.rand(batchSize, 100), Nd4j.zeros(batchSize, 1)));
+                //gan.fit(new DataSet(Nd4j.rand(batchSize, INPUT), Nd4j.zeros(batchSize, 1)));
+                gan.fit(new DataSet(Nd4j.rand(batchSize, CHANNELS, X_DIM, Y_DIM), Nd4j.zeros(batchSize, 1)));
 
                 if (j % 10 == 1) {
                     System.out.println("Iteration " + j + " Visualizing...");
-                    INDArray[] samples = new INDArray[9];
-                    DataSet fakeSet2 = new DataSet(fakeIn, Nd4j.ones(batchSize, 1));
+                    INDArray[] samples = batchSize > OUTPUT_PER_PANEL ? new INDArray[OUTPUT_PER_PANEL] : new INDArray[batchSize];
 
-                    for (int k = 0; k < 9; k++) {
+                    for (int k = 0; k < samples.length; k++) {
+                        //INDArray input = fakeSet2.get(k).getFeatures();
+                        DataSet fakeSet2 = new DataSet(fakeIn, Nd4j.ones(batchSize, 1));
                         INDArray input = fakeSet2.get(k).getFeatures();
+                        input = input.reshape(1, CHANNELS, X_DIM, Y_DIM); // batch size will be 1 here
+                        //samples[k] = gen.output(input, false);
                         samples[k] = gan.activateSelectedLayers(0, gen.getLayers().length - 1, input);
+                        samples[k] = samples[k].reshape(1, CHANNELS, X_DIM, Y_DIM);
+                        samples[k].addi(1f).divi(2f).muli(255f);
                     }
-                    visualize(samples);
+                    frame = visualize(samples, 1, frame == null ? new JFrame() : frame, false); // each sample holds one image, thus batchElements = 1
                 }
             }
             trainData.reset();
@@ -239,41 +335,57 @@ public class App {
         }
     }
 
-    private static void visualize(INDArray[] samples) {
-        if (frame == null) {
-            frame = new JFrame();
-            frame.setTitle("Viz");
-            frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
-            frame.setLayout(new BorderLayout());
-
-            panel = new JPanel();
-
-            panel.setLayout(new GridLayout(samples.length / 3, 1, 8, 8));
-            frame.add(panel, BorderLayout.CENTER);
-            frame.setVisible(true);
+    private static JFrame visualize(INDArray[] samples, int batchElements, JFrame frame, boolean isOrig) {
+        if (isOrig) {
+            frame.setTitle("Viz Original");
+        } else {
+            frame.setTitle("Generated");
         }
-        panel.removeAll();
+        frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
+        frame.setLayout(new BorderLayout());
+        JPanel panelx = new JPanel();
+
+        panelx.setLayout(new GridLayout(4, 4, 8, 8));
         for (INDArray sample : samples) {
-            panel.add(getImage(sample));
+            for (int i = 0; i < batchElements; i++) {
+                panelx.add(getImage(sample, i, isOrig));
+            }
         }
+        frame.add(panelx, BorderLayout.CENTER);
+        frame.setVisible(true);
+        frame.pack();
+        return frame;
     }
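App.java now imports ImagePreProcessingScaler but the training loop still rescales pixels by hand with real.divi(255f). A sketch of letting the iterator do the scaling instead, using the same reader as above (method and class names here are illustrative):

```java
import org.datavec.image.recordreader.ImageRecordReader;
import org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.dataset.api.preprocessor.ImagePreProcessingScaler;

public class PipelineSketch {
    static DataSetIterator scaledIterator(ImageRecordReader reader, int batchSize) {
        DataSetIterator it = new RecordReaderDataSetIterator(reader, batchSize);
        // Rescales pixel values from [0, 255] to [0, 1] on every next() call,
        // which would replace the manual real.divi(255f) in the loop above.
        it.setPreProcessor(new ImagePreProcessingScaler(0, 1));
        return it;
    }
}
```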
diff --git a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/Nd4j.java b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/Nd4j.java
--- a/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/Nd4j.java
+++ b/cavis-dnn/cavis-dnn-api/src/main/java/org/nd4j/linalg/factory/Nd4j.java
             Class<? extends DistributionFactory> distributionFactoryClazz = ND4JClassLoading.loadClassByName(clazzName);
 
-            memoryManager = memoryManagerClazz.newInstance();
-            constantHandler = constantProviderClazz.newInstance();
-            shapeInfoProvider = shapeInfoProviderClazz.newInstance();
-            workspaceManager = workspaceManagerClazz.newInstance();
+            memoryManager = memoryManagerClazz.getDeclaredConstructor().newInstance();
+            constantHandler = constantProviderClazz.getDeclaredConstructor().newInstance();
+            shapeInfoProvider = shapeInfoProviderClazz.getDeclaredConstructor().newInstance();
+            workspaceManager = workspaceManagerClazz.getDeclaredConstructor().newInstance();
 
             Class<? extends OpExecutioner> opExecutionerClazz = ND4JClassLoading
                     .loadClassByName(pp.toString(OP_EXECUTIONER, DefaultOpExecutioner.class.getName()));
 
-            OP_EXECUTIONER_INSTANCE = opExecutionerClazz.newInstance();
+            OP_EXECUTIONER_INSTANCE = opExecutionerClazz.getDeclaredConstructor().newInstance();
             Constructor c2 = ndArrayFactoryClazz.getConstructor(DataType.class, char.class);
             INSTANCE = (NDArrayFactory) c2.newInstance(dtype, ORDER);
 
-            CONVOLUTION_INSTANCE = convolutionInstanceClazz.newInstance();
-            BLAS_WRAPPER_INSTANCE = blasWrapperClazz.newInstance();
-            DATA_BUFFER_FACTORY_INSTANCE = dataBufferFactoryClazz.newInstance();
+            CONVOLUTION_INSTANCE = convolutionInstanceClazz.getDeclaredConstructor().newInstance();
+            BLAS_WRAPPER_INSTANCE = blasWrapperClazz.getDeclaredConstructor().newInstance();
+            DATA_BUFFER_FACTORY_INSTANCE = dataBufferFactoryClazz.getDeclaredConstructor().newInstance();
 
-            DISTRIBUTION_FACTORY = distributionFactoryClazz.newInstance();
+            DISTRIBUTION_FACTORY = distributionFactoryClazz.getDeclaredConstructor().newInstance();
 
             if (isFallback()) {
                 fallbackMode.set(true);
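Class.newInstance() has been deprecated since Java 9 because it propagates any exception thrown by the constructor unchecked; the hunk above migrates every call site to the recommended replacement. A minimal sketch of the pattern:

```java
public class Instantiate {
    // getDeclaredConstructor().newInstance() wraps constructor failures in
    // InvocationTargetException instead of rethrowing them unchecked, which
    // is why it replaces the deprecated Class.newInstance().
    static <T> T create(Class<T> clazz) throws ReflectiveOperationException {
        return clazz.getDeclaredConstructor().newInstance();
    }
}
```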
diff --git a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/config/ND4JClassLoading.java b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/config/ND4JClassLoading.java
index a16c7bac4..1a520c7cd 100644
--- a/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/config/ND4JClassLoading.java
+++ b/cavis-dnn/cavis-dnn-common/src/main/java/org/nd4j/common/config/ND4JClassLoading.java
@@ -58,11 +58,13 @@ public final class ND4JClassLoading {
     @SuppressWarnings("unchecked")
     public static <T> Class<T> loadClassByName(String className, boolean initialize, ClassLoader classLoader) {
+
         try {
-            log.info(String.format("Trying to load class [%s]", className));
-            return (Class<T>) Class.forName(className, initialize, classLoader);
+            Class<T> clazz = (Class<T>) Class.forName(className, initialize, classLoader);
+            log.info(String.format("Trying to load class [%s] - Success", className));
+            return clazz;
         } catch (ClassNotFoundException classNotFoundException) {
-            log.error(String.format("Cannot find class [%s] of provided class-loader.", className));
+            log.error(String.format("Trying to load class [%s] - Failure: cannot find the class with the provided class-loader.", className));
             return null;
         }
     }
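With this change the method logs the outcome itself and signals failure by returning null rather than throwing, so callers still have to null-check. A minimal sketch of that calling convention (the probe class is illustrative):

```java
import org.nd4j.common.config.ND4JClassLoading;

public class BackendProbe {
    // loadClassByName logs success/failure itself and returns null
    // (instead of throwing) when the class cannot be found.
    static boolean isPresent(String className) {
        return ND4JClassLoading.loadClassByName(className) != null;
    }
}
```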
diff --git a/cavis-dnn/cavis-dnn-nn/build.gradle b/cavis-dnn/cavis-dnn-nn/build.gradle
index 3ffdbee6a..e38b43f1d 100644
--- a/cavis-dnn/cavis-dnn-nn/build.gradle
+++ b/cavis-dnn/cavis-dnn-nn/build.gradle
@@ -21,6 +21,8 @@ apply from: "${project.rootProject.projectDir}/createTestBackends.gradle"
 
 dependencies {
+    implementation platform(projects.cavisCommonPlatform)
+
     implementation projects.cavisDnn.cavisDnnData.cavisDnnDataUtilityIterators
     implementation 'org.lucee:oswego-concurrent:1.3.4'
     implementation projects.cavisDnn.cavisDnnCommon
@@ -50,4 +52,9 @@ dependencies {
     implementation "com.fasterxml.jackson.core:jackson-databind"
     implementation "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml"
     implementation "com.jakewharton.byteunits:byteunits:0.9.1"
+
+    // REST client
+    // define any required OkHttp artifacts without version
+    implementation "com.squareup.okhttp3:okhttp"
+    implementation "com.squareup.okhttp3:logging-interceptor"
 }
\ No newline at end of file
diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Layer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Layer.java
index cccc3cb1b..a96ec6db7 100644
--- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Layer.java
+++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Layer.java
@@ -215,7 +215,7 @@ public abstract class Layer implements TrainingConfig, Serializable, Cloneable {
 
     /**
      * Get the updater for the given parameter. Typically the same updater will be used for all
-     * updaters, but this is not necessarily the case
+     * parameters, but this is not necessarily the case
      *
      * @param paramName Parameter name
      * @return IUpdater for the parameter
diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/feedforward/dense/DenseLayer.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/feedforward/dense/DenseLayer.java
index 77b030b4c..d2aa10406 100644
--- a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/feedforward/dense/DenseLayer.java
+++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/nn/layers/feedforward/dense/DenseLayer.java
@@ -30,27 +30,28 @@ import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr;
  * @author Adam Gibson
  */
 public class DenseLayer extends BaseLayer<org.deeplearning4j.nn.conf.layers.DenseLayer> {
-    public DenseLayer(NeuralNetConfiguration conf, DataType dataType) {
-        super(conf, dataType);
-    }
 
-    @Override
-    public void fit(INDArray input, LayerWorkspaceMgr workspaceMgr) {
-        throw new UnsupportedOperationException("Not supported");
-    }
+  public DenseLayer(NeuralNetConfiguration conf, DataType dataType) {
+    super(conf, dataType);
+  }
 
-    @Override
-    public boolean isPretrainLayer() {
-        return false;
-    }
+  @Override
+  public void fit(INDArray input, LayerWorkspaceMgr workspaceMgr) {
+    throw new UnsupportedOperationException("Not supported");
+  }
 
-    @Override
-    public boolean hasBias(){
-        return layerConf().hasBias();
-    }
+  @Override
+  public boolean isPretrainLayer() {
+    return false;
+  }
 
-    @Override
-    public boolean hasLayerNorm(){
-        return layerConf().hasLayerNorm();
-    }
+  @Override
+  public boolean hasBias() {
+    return layerConf().hasBias();
+  }
+
+  @Override
+  public boolean hasLayerNorm() {
+    return layerConf().hasLayerNorm();
+  }
 }
diff --git a/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/ScoreToChartListener.java b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/ScoreToChartListener.java
new file mode 100644
index 000000000..2fc2999d6
--- /dev/null
+++ b/cavis-dnn/cavis-dnn-nn/src/main/java/org/deeplearning4j/optimize/listeners/ScoreToChartListener.java
@@ -0,0 +1,62 @@
+/*
+ *
+ * ******************************************************************************
+ * *
+ * * This program and the accompanying materials are made available under the
+ * * terms of the Apache License, Version 2.0 which is available at
+ * * https://www.apache.org/licenses/LICENSE-2.0.
+ * *
+ * * See the NOTICE file distributed with this work for additional
+ * * information regarding copyright ownership.
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * * License for the specific language governing permissions and limitations
+ * * under the License.
+ * *
+ * * SPDX-License-Identifier: Apache-2.0
+ * *****************************************************************************
+ *
+ */
+
+package org.deeplearning4j.optimize.listeners;
+
+import java.io.IOException;
+import lombok.extern.slf4j.Slf4j;
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.Response;
+import org.deeplearning4j.nn.api.Model;
+import org.deeplearning4j.optimize.api.BaseTrainingListener;
+
+@Slf4j
+public class ScoreToChartListener extends BaseTrainingListener {
+
+    final String url = "http://bru5:8080/cavis-rest-1.0-SNAPSHOT.war/hello/hello-world?";
+    final String seriesName;
+
+    public ScoreToChartListener(String seriesName) {
+        this.seriesName = seriesName;
+    }
+
+    @Override
+    public void iterationDone(Model model, int iteration, int epoch) {
+        double score = model.score();
+        String nurl = url + "s=" + score + "&n=" + seriesName;
+        OkHttpClient client = new OkHttpClient();
+
+        Request request = new Request.Builder()
+                .url(nurl)
+                .build();
+
+        try {
+            Response response = client.newCall(request).execute();
+            log.debug(String.format("Did send score to chart at '%s'.", nurl));
+            response.body().close();
+        } catch (IOException e) {
+            log.warn(String.format("Could not send score to chart at '%s' because %s", nurl, e.getMessage()));
+        }
+        //response.body().string();
+    }
+
+}
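For context, App.java above wires this listener into the GAN network; a minimal wiring sketch (series name as used in this patch):

```java
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.optimize.listeners.ScoreToChartListener;

public class ListenerWiring {
    static void attach(MultiLayerNetwork net) {
        // After this call, every training iteration issues an HTTP GET
        // reporting the current score under the series name "gan".
        net.setListeners(new ScoreToChartListener("gan"));
    }
}
```

Note that the listener constructs a new OkHttpClient on every iteration; OkHttp recommends reusing a single client instance so connection and thread pools are shared.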
diff --git a/cavis-native/cavis-native-blas/src/main/java/org/nd4j/nativeblas/NativeOpsGPUInfoProvider.java b/cavis-native/cavis-native-blas/src/main/java/org/nd4j/nativeblas/NativeOpsGPUInfoProvider.java
index 1a8d3950b..654825a4c 100644
--- a/cavis-native/cavis-native-blas/src/main/java/org/nd4j/nativeblas/NativeOpsGPUInfoProvider.java
+++ b/cavis-native/cavis-native-blas/src/main/java/org/nd4j/nativeblas/NativeOpsGPUInfoProvider.java
@@ -31,31 +31,30 @@ import java.util.List;
 @Slf4j
 public class NativeOpsGPUInfoProvider implements GPUInfoProvider {
 
-    @Override
-    public List<GPUInfo> getGPUs() {
-        NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps();
+  @Override
+  public List<GPUInfo> getGPUs() {
+    NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps();
 
-        List<GPUInfo> gpus = new ArrayList<>();
+    List<GPUInfo> gpus = new ArrayList<>();
 
+    int nDevices = nativeOps.getAvailableDevices();
+    if (nDevices > 0) {
+      for (int i = 0; i < nDevices; i++) {
+        try {
+          String name = nativeOps.getDeviceName(i);
+          long total = nativeOps.getDeviceTotalMemory(i);
+          long free = nativeOps.getDeviceFreeMemory(i);
+          int major = nativeOps.getDeviceMajor(i);
+          int minor = nativeOps.getDeviceMinor(i);
 
-        int nDevices = nativeOps.getAvailableDevices();
-        if (nDevices > 0) {
-            for (int i = 0; i < nDevices; i++) {
-                try {
-                    String name = nativeOps.getDeviceName(i);
-                    long total = nativeOps.getDeviceTotalMemory(i);
-                    long free = nativeOps.getDeviceFreeMemory(i);
-                    int major = nativeOps.getDeviceMajor(i);
-                    int minor = nativeOps.getDeviceMinor(i);
-
-                    gpus.add(new GPUInfo(name, total, free, major, minor));
-                } catch (Exception e) {
-                    log.info("Can't add GPU", e);
-                }
-            }
+          gpus.add(new GPUInfo(name, total, free, major, minor));
+        } catch (Exception e) {
+          log.info("Can't add GPU", e);
         }
-
-        return gpus;
+      }
     }
+    return gpus;
+  }
+
 }
diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/AtomicAllocator.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/AtomicAllocator.java
index e0102fcac..337cbc23e 100644
--- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/AtomicAllocator.java
+++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/jita/allocator/impl/AtomicAllocator.java
@@ -83,7 +83,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
  */
 @Slf4j
 public class AtomicAllocator implements Allocator {
-    private static final AtomicAllocator INSTANCE = new AtomicAllocator();
+    private static AtomicAllocator INSTANCE = new AtomicAllocator();
 
     private Configuration configuration;
 
@@ -122,6 +122,7 @@ public class AtomicAllocator {
     private final AtomicLong useTracker = new AtomicLong(System.currentTimeMillis());
 
     public static AtomicAllocator getInstance() {
+        if (INSTANCE == null) INSTANCE = new AtomicAllocator();
         if (INSTANCE == null)
             throw new RuntimeException("AtomicAllocator is NULL");
         return INSTANCE;
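The lazy re-initialization added to getInstance() above is not thread-safe: two threads can race past the null check and construct separate instances. If lazy re-creation is genuinely needed, the standard double-checked-locking idiom is a safer shape (the class name here is illustrative, not the patch's code):

```java
public final class LazySingleton {
    private static volatile LazySingleton INSTANCE;

    private LazySingleton() { }

    // Double-checked locking: the volatile field plus the second null
    // check inside the lock make one-time construction race-free.
    public static LazySingleton getInstance() {
        LazySingleton local = INSTANCE;
        if (local == null) {
            synchronized (LazySingleton.class) {
                local = INSTANCE;
                if (local == null) {
                    INSTANCE = local = new LazySingleton();
                }
            }
        }
        return local;
    }
}
```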
diff --git a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/buffer/BaseCudaDataBuffer.java b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/buffer/BaseCudaDataBuffer.java
index 6b4793704..59d775ec6 100644
--- a/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/buffer/BaseCudaDataBuffer.java
+++ b/cavis-native/cavis-native-jcublas/src/main/java/org/nd4j/linalg/jcublas/buffer/BaseCudaDataBuffer.java
@@ -402,6 +402,10 @@ public abstract class BaseCudaDataBuffer extends BaseDataBuffer implements JCudaBuffer {
         val ctx = AtomicAllocator.getInstance().getDeviceContext();
         val devicePtr = allocationPoint.getDevicePointer();
         NativeOpsHolder.getInstance().getDeviceNativeOps().memsetAsync(devicePtr, 0, length * elementSize, 0, ctx.getSpecialStream());
+        int ec = NativeOpsHolder.getInstance().getDeviceNativeOps().lastErrorCode();
+        if (ec != 0) {
+            throw new RuntimeException(NativeOpsHolder.getInstance().getDeviceNativeOps().lastErrorMessage());
+        }
         ctx.getSpecialStream().synchronize();
     }
diff --git a/cavis-native/cavis-native-jcublas/src/test/java/org/nd4j/linalg/jcublas/buffer/BaseCudaDataBufferTest.java b/cavis-native/cavis-native-jcublas/src/test/java/org/nd4j/linalg/jcublas/buffer/BaseCudaDataBufferTest.java
new file mode 100644
index 000000000..39843d68f
--- /dev/null
+++ b/cavis-native/cavis-native-jcublas/src/test/java/org/nd4j/linalg/jcublas/buffer/BaseCudaDataBufferTest.java
@@ -0,0 +1,55 @@
+/*
+ *
+ * ******************************************************************************
+ * *
+ * * This program and the accompanying materials are made available under the
+ * * terms of the Apache License, Version 2.0 which is available at
+ * * https://www.apache.org/licenses/LICENSE-2.0.
+ * *
+ * * See the NOTICE file distributed with this work for additional
+ * * information regarding copyright ownership.
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * * License for the specific language governing permissions and limitations
+ * * under the License.
+ * *
+ * * SPDX-License-Identifier: Apache-2.0
+ * *****************************************************************************
+ *
+ */
+
+package org.nd4j.linalg.jcublas.buffer;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+import lombok.extern.slf4j.Slf4j;
+import org.bytedeco.javacpp.BytePointer;
+import org.bytedeco.javacpp.Pointer;
+import org.junit.jupiter.api.Test;
+import org.nd4j.linalg.api.buffer.DataBuffer;
+import org.nd4j.linalg.api.environment.Nd4jEnvironment;
+import org.nd4j.linalg.factory.Nd4j;
+import org.nd4j.nativeblas.NativeOpsHolder;
+
+@Slf4j
+class BaseCudaDataBufferTest {
+
+    @Test
+    public void testMemoryAlloc() throws InterruptedException {
+        BaseCudaDataBuffer cuBuffer = new CudaLongDataBuffer(16L);
+        log.info("Allocation Status: " + cuBuffer.getAllocationPoint().getAllocationStatus().toString());
+        Thread.sleep(3000);
+        cuBuffer.getAllocationPoint().tickDeviceWrite();
+        DataBuffer buf = Nd4j.rand(8, 1).shapeInfoDataBuffer();
+        NativeOpsHolder.getInstance().getDeviceNativeOps().memcpySync(cuBuffer.pointer(), buf.pointer(), 8, 0, new Pointer());
+
+        log.info("Allocation Status: " + cuBuffer.getAllocationPoint().getAllocationStatus().toString());
+
+        cuBuffer.release();
+    }
+
+}
\ No newline at end of file
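The lastErrorCode()/lastErrorMessage() pair added in BaseCudaDataBuffer above is how asynchronous native failures surface on the Java side: the code checks immediately after launching memsetAsync. The same check factored into a helper, as a sketch (the helper class is illustrative):

```java
import org.nd4j.nativeblas.NativeOps;
import org.nd4j.nativeblas.NativeOpsHolder;

public class NativeErrors {
    // Call after launching any native/CUDA operation: turns a pending
    // native error code into a Java exception carrying the native message.
    static void checkLastError() {
        NativeOps ops = NativeOpsHolder.getInstance().getDeviceNativeOps();
        if (ops.lastErrorCode() != 0) {
            throw new RuntimeException(ops.lastErrorMessage());
        }
    }
}
```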