From 29104083cc5893fbc5c05822d7808c4e6c64bcf6 Mon Sep 17 00:00:00 2001 From: Alex Black Date: Sat, 4 Jan 2020 13:45:07 +1100 Subject: [PATCH] Various fixes (#143) * #8568 ArrayUtil optimization Signed-off-by: AlexDBlack * #6171 Keras ReLU and ELU support Signed-off-by: AlexDBlack * Keras softmax layer import Signed-off-by: AlexDBlack * #8549 Webjars dependency management Signed-off-by: AlexDBlack * Fix for TF import names ':0' suffix issue / NPE Signed-off-by: AlexDBlack * BiasAdd: fix default data format for TF import Signed-off-by: AlexDBlack * Update zoo test ignores Signed-off-by: AlexDBlack * #8509 SameDiff Listener API - provide frame + iteration Signed-off-by: AlexDBlack * #8520 ND4J Environment Signed-off-by: AlexDBlack * Deconv3d Signed-off-by: AlexDBlack * Deconv3d fixes + gradient check Signed-off-by: AlexDBlack * Conv3d fixes + deconv3d DType test Signed-off-by: AlexDBlack * Fix issue with deconv3d gradient check weight init Signed-off-by: AlexDBlack * #8579 Fix BaseCudaDataBuffer constructor for UINT16 Signed-off-by: AlexDBlack * DataType.isNumerical() returns false for BOOL type Signed-off-by: AlexDBlack * #8504 Reduce Spark log spam for tests Signed-off-by: AlexDBlack * Clean up DL4J gradient check test spam Signed-off-by: AlexDBlack * More Gradient check spam reduction Signed-off-by: AlexDBlack * SameDiff test spam reduction Signed-off-by: AlexDBlack * Fixes for FlatBuffers mapping Signed-off-by: AlexDBlack * SameDiff log spam cleanup Signed-off-by: AlexDBlack * Tests should extend BaseNd4jTest Signed-off-by: AlexDBlack * Remove debug line in c++ op Signed-off-by: AlexDBlack * ND4J test spam cleanup Signed-off-by: AlexDBlack * DL4J test spam reduction Signed-off-by: AlexDBlack * More DL4J and datavec test spam cleanup Signed-off-by: AlexDBlack * Fix for bad conv3d test Signed-off-by: AlexDBlack * Additional test Signed-off-by: AlexDBlack * Embedding layers: don't inherit global default activation function Signed-off-by: AlexDBlack * Trigger CI 
Signed-off-by: AlexDBlack * Consolidate all BaseDL4JTest classes to single class used everywhere; make timeout configurable per class Signed-off-by: AlexDBlack * Test fixes and timeout increases Signed-off-by: AlexDBlack * Timeouts and PReLU fixes Signed-off-by: AlexDBlack * Restore libnd4j build threads arg for CUDA build Signed-off-by: AlexDBlack * Increase timeouts on a few tests to avoid spurious failures on some CI machines Signed-off-by: AlexDBlack * More timeout fixes Signed-off-by: AlexDBlack * More test timeout fixes Signed-off-by: AlexDBlack * Tweak timeout for one more test Signed-off-by: AlexDBlack * Final tweaks Signed-off-by: AlexDBlack * One more ignore Signed-off-by: AlexDBlack --- .../reader/impl/CSVRecordReaderTest.java | 6 +- .../records/reader/impl/LineReaderTest.java | 22 +- .../datavec/image/transform/JsonYamlTest.java | 6 +- .../transform/CSVSparkTransformTest.java | 7 +- .../transform/ImageSparkTransformTest.java | 4 +- .../ImageSparkTransformServerTest.java | 6 +- .../transform/analysis/TestAnalysis.java | 12 +- .../deeplearning4j-common-tests/pom.xml | 68 ++++ .../java/org/deeplearning4j/BaseDL4JTest.java | 22 +- deeplearning4j/deeplearning4j-core/pom.xml | 6 + .../java/org/deeplearning4j/TestUtils.java | 1 - .../RecordReaderDataSetiteratorTest.java | 4 +- .../iterator/AsyncDataSetIteratorTest.java | 4 +- .../iterator/DataSetIteratorTest.java | 9 +- .../VariableMultiTimeseriesGenerator.java | 4 +- .../tools/VariableTimeseriesGenerator.java | 4 +- .../org/deeplearning4j/eval/EvalJsonTest.java | 4 +- .../org/deeplearning4j/eval/EvalTest.java | 34 +- .../eval/EvaluationToolsTests.java | 2 +- .../gradientcheck/AttentionLayerTest.java | 31 +- .../gradientcheck/BNGradientCheckTest.java | 101 ++--- .../gradientcheck/CNN1DGradientCheckTest.java | 24 +- .../gradientcheck/CNN3DGradientCheckTest.java | 125 +++++- .../gradientcheck/CNNGradientCheckTest.java | 57 ++- .../CapsnetGradientCheckTest.java | 6 +- .../gradientcheck/DropoutGradientCheck.java | 
12 +- .../GlobalPoolingGradientCheckTests.java | 24 +- .../gradientcheck/GradientCheckTests.java | 36 +- .../GradientCheckTestsComputationGraph.java | 246 +++++------- .../GradientCheckTestsMasking.java | 38 +- .../gradientcheck/LRNGradientCheckTests.java | 8 +- .../gradientcheck/LSTMGradientCheckTests.java | 24 +- .../LossFunctionGradientCheck.java | 23 +- .../OutputLayerGradientChecks.java | 24 +- .../gradientcheck/RnnGradientChecks.java | 16 +- .../UtilLayerGradientChecks.java | 16 +- .../gradientcheck/VaeGradientCheckTests.java | 18 +- .../gradientcheck/YoloGradientCheckTests.java | 8 +- .../ComputationGraphConfigurationTest.java | 4 +- .../preprocessor/CustomPreprocessorTest.java | 2 +- .../deeplearning4j/nn/dtypes/DTypeTests.java | 6 + .../nn/graph/TestComputationGraphNetwork.java | 56 ++- .../layers/custom/TestCustomActivation.java | 2 +- .../nn/layers/custom/TestCustomLayers.java | 8 +- .../embedding/EmbeddingLayerTest.java | 36 +- .../normalization/BatchNormalizationTest.java | 21 +- .../objdetect/TestYolo2OutputLayer.java | 2 +- .../nn/layers/recurrent/GravesLSTMTest.java | 20 +- .../nn/layers/samediff/TestSameDiffConv.java | 4 +- .../samediff/TestSameDiffDenseVertex.java | 2 +- .../TestReconstructionDistributions.java | 4 +- .../nn/misc/WorkspaceTests.java | 2 +- .../nn/multilayer/BackPropMLPTest.java | 8 +- .../nn/multilayer/MultiLayerTest.java | 8 +- .../nn/multilayer/MultiLayerTestRNN.java | 4 +- .../nn/multilayer/TestVariableLengthTS.java | 10 +- .../nn/transferlearning/TestFrozenLayers.java | 1 - .../TransferLearningComplex.java | 7 +- .../TransferLearningHelperTest.java | 7 +- .../nn/util/TestDataSetConsumer.java | 4 +- .../solver/BackTrackLineSearchTest.java | 6 +- .../optimize/solver/TestOptimizers.java | 2 +- ...TestParamAndGradientIterationListener.java | 79 ---- .../plot/BarnesHutTsneTest.java | 16 +- .../CompareTrainingImplementations.java | 2 +- .../util/CrashReportingUtilTest.java | 5 + .../util/ModelValidatorTests.java | 32 +- 
deeplearning4j/deeplearning4j-cuda/pom.xml | 6 + .../java/org/deeplearning4j/BaseDL4JTest.java | 141 ------- .../gradientcheck/CNNGradientCheckTest.java | 31 +- deeplearning4j/deeplearning4j-graph/pom.xml | 7 + .../deeplearning4j/graph/BaseDL4JTest.java | 140 ------- .../graph/data/TestGraphLoading.java | 2 +- .../graph/data/TestGraphLoadingWeighted.java | 2 +- .../deeplearning4j/graph/graph/TestGraph.java | 2 +- .../deepwalk/DeepWalkGradientCheck.java | 2 +- .../graph/models/deepwalk/TestDeepWalk.java | 2 +- .../models/deepwalk/TestGraphHuffman.java | 2 +- .../deeplearning4j-tsne/pom.xml | 7 + .../org/deeplearning4j/plot/Test6058.java | 3 +- .../deeplearning4j-modelimport/pom.xml | 6 + .../keras/config/KerasLayerConfiguration.java | 3 + .../layers/advanced/activations/KerasELU.java | 95 +++++ .../advanced/activations/KerasReLU.java | 99 +++++ .../advanced/activations/KerasSoftmax.java | 85 ++++ .../keras/utils/KerasLayerUtils.java | 10 +- .../nn/modelimport/keras/BaseDL4JTest.java | 140 ------- .../nn/modelimport/keras/MiscTests.java | 1 + .../configurations/FullModelComparisons.java | 2 +- .../keras/configurations/JsonTest.java | 2 +- .../Keras1ModelConfigurationTest.java | 2 +- .../Keras2ModelConfigurationTest.java | 2 +- .../KerasInitilizationTest.java | 2 +- .../configurations/KerasModelImportTest.java | 2 +- .../keras/e2e/KerasCustomLayerTest.java | 2 +- .../keras/e2e/KerasLambdaTest.java | 2 +- .../keras/e2e/KerasModelEndToEndTest.java | 29 +- .../keras/e2e/KerasYolo9000PredictTest.java | 2 +- .../keras/e2e/KerasYolo9000Test.java | 2 +- .../activation/KerasLeakyReLUTest.java | 2 +- .../advanced/activation/KerasPReLUTest.java | 2 +- .../activation/KerasThresholdedReLUTest.java | 2 +- .../KerasAtrousConvolution1DTest.java | 2 +- .../KerasAtrousConvolution2DTest.java | 2 +- .../convolution/KerasConvolution1DTest.java | 2 +- .../convolution/KerasConvolution2DTest.java | 2 +- .../convolution/KerasConvolution3DTest.java | 2 +- 
.../convolution/KerasCropping1DTest.java | 2 +- .../convolution/KerasCropping2DTest.java | 2 +- .../convolution/KerasCropping3DTest.java | 2 +- .../convolution/KerasDeconvolution2DTest.java | 2 +- .../KerasDepthwiseConvolution2DTest.java | 2 +- .../KerasSeparableConvolution2DTest.java | 2 +- .../convolution/KerasUpsampling1DTest.java | 2 +- .../convolution/KerasUpsampling2DTest.java | 2 +- .../convolution/KerasUpsampling3DTest.java | 2 +- .../convolution/KerasZeroPadding1DTest.java | 2 +- .../convolution/KerasZeroPadding2DTest.java | 2 +- .../convolution/KerasZeroPadding3DTest.java | 2 +- .../layers/core/KerasActivationLayer.java | 2 +- .../keras/layers/core/KerasDenseTest.java | 2 +- .../keras/layers/core/KerasDropoutTest.java | 2 +- .../keras/layers/core/KerasMaskingTest.java | 2 +- .../keras/layers/core/KerasPermuteTest.java | 2 +- .../layers/core/KerasRepeatVectorTest.java | 2 +- .../keras/layers/core/KerasReshapeTest.java | 2 +- .../core/KerasSpatialDropout2DTest.java | 2 +- .../layers/embeddings/KerasEmbeddingTest.java | 2 +- .../local/KerasLocallyConnected1DTest.java | 2 +- .../local/KerasLocallyConnected2DTest.java | 2 +- .../layers/noise/KerasAlphaDropoutTest.java | 2 +- .../noise/KerasGaussianDropoutTest.java | 2 +- .../layers/noise/KerasGaussianNoiseTest.java | 2 +- .../KerasBatchNormalizationTest.java | 2 +- .../layers/pooling/KerasPooling1DTest.java | 2 +- .../layers/pooling/KerasPooling2DTest.java | 2 +- .../layers/pooling/KerasPooling3DTest.java | 2 +- .../keras/layers/recurrent/KerasLSTMTest.java | 2 +- .../layers/recurrent/KerasSimpleRnnTest.java | 2 +- .../wrappers/KerasBidirectionalTest.java | 2 +- .../keras/optimizers/OptimizerImport.java | 2 +- .../TimeSeriesGeneratorImportTest.java | 2 +- .../sequence/TimeSeriesGeneratorTest.java | 2 +- .../text/TokenizerImportTest.java | 2 +- .../preprocessing/text/TokenizerTest.java | 2 +- .../weights/KerasWeightSettingTests.java | 2 +- .../pom.xml | 6 + .../nearestneighbor/server/BaseDL4JTest.java | 140 
------- .../server/NearestNeighborTest.java | 3 +- .../nearestneighbor-core/pom.xml | 6 + .../clustering/BaseDL4JTest.java | 140 ------- .../clustering/kdtree/KDTreeTest.java | 13 +- .../clustering/kmeans/KMeansTest.java | 2 +- .../lsh/RandomProjectionLSHTest.java | 2 +- .../clustering/quadtree/QuadTreeTest.java | 2 +- .../randomprojection/RPTreeTest.java | 2 +- .../randomprojection/RPUtilsTest.java | 2 +- .../clustering/sptree/SPTreeTest.java | 12 +- .../vptree/VPTreeSerializationTests.java | 2 +- .../clustering/vptree/VpTreeNodeTest.java | 2 +- .../deeplearning4j-nlp-chinese/pom.xml | 6 + .../tokenizer/ChineseTokenizerTest.java | 3 +- .../deeplearning4j-nlp-japanese/pom.xml | 7 + .../kuromoji/CommonCornerCasesTest.java | 4 +- .../buffer/StringValueMapBufferTest.java | 3 +- .../CharacterDefinitionsCompilerTest.java | 3 +- .../compile/ConnectionCostsCompilerTest.java | 3 +- .../compile/TokenInfoBufferCompilerTest.java | 3 +- .../UnknownDictionaryCompilerTest.java | 3 +- .../compile/WordIdMapCompilerTest.java | 3 +- .../kuromoji/dict/InsertedDictionaryTest.java | 3 +- .../kuromoji/dict/UserDictionaryTest.java | 3 +- .../ipadic/MultiThreadedTokenizerTest.java | 3 +- .../kuromoji/ipadic/SearchTokenizerTest.java | 3 +- .../kuromoji/ipadic/TokenizerTest.java | 3 +- .../ipadic/UserDictionaryTokenizerTest.java | 3 +- .../kuromoji/trie/DoubleArrayTrieTest.java | 3 +- .../com/atilika/kuromoji/trie/NodeTest.java | 3 +- .../kuromoji/trie/PatriciaTrieTest.java | 3 +- .../com/atilika/kuromoji/trie/TrieTest.java | 3 +- .../util/DictionaryEntryLineParserTest.java | 3 +- .../tokenizer/JapaneseTokenizerTest.java | 3 +- .../deeplearning4j-nlp-korean/pom.xml | 6 + .../tokenizer/KoreanTokenizerTest.java | 3 +- .../tokenizer/PerformanceTests.java | 3 +- .../deeplearning4j-nlp-uima/pom.xml | 24 ++ .../java/org/deeplearning4j/BaseDL4JTest.java | 140 ------- .../deeplearning4j-nlp/pom.xml | 6 + .../java/org/deeplearning4j/BaseDL4JTest.java | 140 ------- 
.../java/org/deeplearning4j/TsneTest.java | 11 +- .../ParagraphVectorsTest.java | 5 + .../iterator/Word2VecDataSetIteratorTest.java | 7 +- deeplearning4j/deeplearning4j-nn/pom.xml | 6 + .../gradientcheck/GradientCheckUtil.java | 380 ++++++++---------- .../nn/conf/layers/Convolution3D.java | 2 +- .../nn/conf/layers/ConvolutionLayer.java | 9 +- .../nn/conf/layers/Deconvolution3D.java | 219 ++++++++++ .../nn/conf/layers/EmbeddingLayer.java | 9 +- .../conf/layers/EmbeddingSequenceLayer.java | 8 +- .../nn/conf/layers/FeedForwardLayer.java | 3 +- .../nn/conf/layers/InputTypeUtil.java | 61 ++- .../nn/conf/layers/PReLULayer.java | 6 +- .../nn/conf/layers/Subsampling3DLayer.java | 2 +- .../Cnn3DToFeedForwardPreProcessor.java | 2 +- .../layers/convolution/ConvolutionLayer.java | 3 +- .../convolution/Deconvolution2DLayer.java | 2 +- .../convolution/Deconvolution3DLayer.java | 231 +++++++++++ .../nn/layers/feedforward/PReLU.java | 8 +- .../Deconvolution3DParamInitializer.java | 146 +++++++ .../ParamAndGradientIterationListener.java | 235 ----------- .../deeplearning4j/util/ConvolutionUtils.java | 55 ++- .../java/org/deeplearning4j/BaseDL4JTest.java | 140 ------- .../EncodedGradientsAccumulatorTest.java | 5 + .../SmartFancyBlockingQueueTest.java | 4 +- .../deeplearning4j-json-server/pom.xml | 7 + .../remote/BinaryModelServerTest.java | 3 +- .../remote/JsonModelServerTest.java | 3 +- .../deeplearning4j/remote/ServletTest.java | 3 +- .../pom.xml | 7 + .../parameterserver/BaseDL4JTest.java | 144 ------- .../ParameterServerParallelWrapperTest.java | 1 + .../pom.xml | 7 + .../parallelism/BaseDL4JTest.java | 140 ------- .../InplaceParallelInferenceTest.java | 1 + .../parallelism/ParallelInferenceTest.java | 1 + .../parallelism/ParallelWrapperTest.java | 1 + .../parallelism/TestListeners.java | 1 + .../TestParallelEarlyStopping.java | 1 + .../TestParallelEarlyStoppingUI.java | 1 + .../factory/DefaultTrainerContextTest.java | 2 +- .../factory/SymmetricTrainerContextTest.java | 2 +- 
.../BatchedInferenceObservableTest.java | 2 +- .../main/ParallelWrapperMainTest.java | 2 +- .../spark/dl4j-spark-nlp-java8/pom.xml | 7 + .../SparkSequenceVectorsTest.java | 3 +- .../export/ExportContainerTest.java | 3 +- .../models/word2vec/SparkWord2VecTest.java | 3 +- .../spark/dl4j-spark-nlp/pom.xml | 7 + .../spark/text/BaseSparkTest.java | 3 +- .../spark/dl4j-spark-parameterserver/pom.xml | 7 + .../spark/parameterserver/BaseSparkTest.java | 3 +- .../src/test/resources/log4j.properties | 5 +- .../src/test/resources/logback.xml | 5 +- .../spark/dl4j-spark/pom.xml | 7 + .../deeplearning4j/spark/util/SparkUtils.java | 2 +- .../deeplearning4j/spark/BaseSparkTest.java | 3 +- .../src/test/resources/log4j.properties | 2 + .../dl4j-spark/src/test/resources/logback.xml | 1 + .../deeplearning4j-ui-components/pom.xml | 7 + .../ui/TestComponentSerialization.java | 3 +- .../org/deeplearning4j/ui/TestRendering.java | 3 +- .../org/deeplearning4j/ui/TestStandAlone.java | 3 +- .../deeplearning4j-ui-model/pom.xml | 7 + .../org/deeplearning4j/ui/BaseDL4JTest.java | 140 ------- .../ui/TestStorageMetaData.java | 1 + .../ui/stats/TestStatsClasses.java | 2 +- .../ui/stats/TestStatsListener.java | 2 +- .../ui/stats/TestTransferStatsCollection.java | 2 +- .../ui/storage/TestStatsStorage.java | 2 +- .../deeplearning4j-vertx/pom.xml | 17 + .../deeplearning4j/ui/TestRemoteReceiver.java | 3 +- .../org/deeplearning4j/ui/TestSameDiffUI.java | 3 +- .../org/deeplearning4j/ui/TestVertxUI.java | 3 +- .../ui/TestVertxUIMultiSession.java | 3 +- deeplearning4j/deeplearning4j-zoo/pom.xml | 7 + .../org/deeplearning4j/zoo/BaseDL4JTest.java | 145 ------- .../org/deeplearning4j/zoo/MiscTests.java | 1 + .../org/deeplearning4j/zoo/TestDownload.java | 1 + .../org/deeplearning4j/zoo/TestImageNet.java | 1 + .../deeplearning4j/zoo/TestInstantiation.java | 1 + deeplearning4j/dl4j-integration-tests/pom.xml | 7 + .../integration/BaseDL4JTest.java | 141 ------- .../integration/IntegrationTests.java | 1 + 
deeplearning4j/dl4j-perf/pom.xml | 7 + .../perf/listener/BaseDL4JTest.java | 140 ------- .../perf/listener/SystemPollingTest.java | 1 + .../perf/listener/TestHardWareMetric.java | 1 + .../listener/TestSystemInfoPrintListener.java | 1 + deeplearning4j/pom.xml | 1 + .../declarable/helpers/cpu/compare_elem.cpp | 2 +- libnd4j/pom.xml | 2 + .../java/org/nd4j/autodiff/listeners/At.java | 10 +- .../org/nd4j/autodiff/samediff/SameDiff.java | 2 +- .../samediff/internal/AbstractSession.java | 26 -- .../autodiff/samediff/internal/FrameIter.java | 46 +++ .../samediff/internal/InferenceSession.java | 1 + .../samediff/serde/FlatBuffersMapper.java | 5 +- .../autodiff/validation/GradCheckUtil.java | 19 +- .../nd4j/autodiff/validation/TestCase.java | 2 +- .../imports/graphmapper/tf/TFGraphMapper.java | 11 + .../activations/impl/ActivationPReLU.java | 10 +- .../activations/impl/ActivationReLU.java | 66 ++- .../api/ops/impl/broadcast/BiasAdd.java | 2 + .../convolution/config/Conv1DConfig.java | 2 +- .../scalar/RectifiedLinearDerivative.java | 7 +- .../api/ops/impl/summarystats/Variance.java | 2 + .../java/org/nd4j/linalg/factory/Nd4j.java | 8 + .../java/org/nd4j/nativeblas/Nd4jBlas.java | 6 +- .../nd4j/linalg/jcublas/JCublasBackend.java | 7 + .../jcublas/buffer/BaseCudaDataBuffer.java | 3 + .../org/nd4j/nativeblas/CudaEnvironment.java | 170 ++++++++ .../nd4j/linalg/cpu/nativecpu/CpuBackend.java | 6 + .../linalg/cpu/nativecpu/CpuEnvironment.java | 170 ++++++++ .../cpu/nativecpu/CpuNDArrayFactory.java | 11 +- .../java/org/nd4j/autodiff/TestSessions.java | 18 +- .../internal/TestDependencyTracker.java | 13 +- .../opvalidation/LayerOpValidation.java | 10 +- .../opvalidation/LossOpValidation.java | 5 + .../opvalidation/MiscOpValidation.java | 4 +- .../opvalidation/ReductionBpOpValidation.java | 4 +- .../opvalidation/ReductionOpValidation.java | 8 +- .../opvalidation/RnnOpValidation.java | 6 +- .../opvalidation/ShapeOpValidation.java | 18 +- .../autodiff/samediff/ConvConfigTests.java | 13 
+- .../samediff/LogisticPredictions.java | 40 -- .../SameDiffSpecifiedLossVarsTests.java | 3 +- .../nd4j/autodiff/samediff/SameDiffTests.java | 48 ++- .../listeners/CheckpointListenerTest.java | 4 +- .../listeners/ProfilingListenerTest.java | 4 +- .../org/nd4j/autodiff/ui/UIListenerTest.java | 13 +- .../org/nd4j/evaluation/EvalJsonTest.java | 4 +- .../java/org/nd4j/evaluation/EvalTest.java | 41 +- .../nd4j/evaluation/EvaluationBinaryTest.java | 3 +- .../nd4j/evaluation/RegressionEvalTest.java | 3 +- .../java/org/nd4j/imports/ExecutionTests.java | 2 +- .../TFGraphs/TFGraphTestAllHelper.java | 5 +- .../TFGraphs/TFGraphTestAllSameDiff.java | 5 +- .../TFGraphs/TFGraphTestZooModels.java | 15 +- .../nd4j/imports/TensorFlowImportTest.java | 3 +- .../java/org/nd4j/linalg/BaseNd4jTest.java | 10 +- .../test/java/org/nd4j/linalg/LoneTest.java | 68 ++-- .../org/nd4j/linalg/NDArrayTestsFortran.java | 34 +- .../test/java/org/nd4j/linalg/Nd4jTestsC.java | 293 +++++++------- .../java/org/nd4j/linalg/ShufflesTests.java | 60 +-- ...ctivationJson.java => TestActivation.java} | 65 ++- .../org/nd4j/linalg/api/TestEnvironment.java | 48 +++ .../org/nd4j/linalg/api/TestNamespaces.java | 20 +- .../linalg/api/buffer/DataBufferTests.java | 2 +- .../linalg/api/indexing/IndexingTests.java | 4 +- .../linalg/api/indexing/IndexingTestsC.java | 16 +- .../api/ndarray/TestNdArrReadWriteTxt.java | 6 +- .../org/nd4j/linalg/crash/SpecialTests.java | 34 +- .../nd4j/linalg/custom/CustomOpsTests.java | 26 +- .../org/nd4j/linalg/dataset/DataSetTest.java | 10 +- .../dimensionalityreduction/TestPCA.java | 11 +- .../linalg/indexing/BooleanIndexingTest.java | 13 +- .../linalg/mixed/MixedDataTypesTests.java | 32 +- .../nd4j/linalg/nativ/NativeBlasTests.java | 4 +- .../nd4j/linalg/nativ/OpsMappingTests.java | 6 +- .../org/nd4j/linalg/ops/DerivativeTests.java | 6 +- .../nd4j/linalg/ops/OpExecutionerTests.java | 34 +- .../nd4j/linalg/ops/OpExecutionerTestsC.java | 41 +- .../profiling/OperationProfilerTests.java | 
24 +- .../java/org/nd4j/linalg/rng/RandomTests.java | 38 +- .../nd4j/linalg/serde/NumpyFormatTests.java | 12 +- .../nd4j/linalg/shape/NDArrayMathTests.java | 5 +- .../org/nd4j/linalg/shape/ShapeTestsC.java | 8 +- .../java/org/nd4j/linalg/shape/TADTests.java | 14 +- .../nd4j/linalg/shape/concat/ConcatTests.java | 6 +- .../linalg/shape/concat/ConcatTestsC.java | 10 +- .../shape/concat/padding/PaddingTestsC.java | 2 +- .../linalg/shape/indexing/IndexingTests.java | 8 +- .../linalg/shape/indexing/IndexingTestsC.java | 8 +- .../shape/ones/LeadingAndTrailingOnes.java | 2 +- .../shape/ones/LeadingAndTrailingOnesC.java | 22 +- .../nd4j/linalg/slicing/SlicingTestsC.java | 27 +- .../nd4j/linalg/specials/SortCooTests.java | 4 +- .../nd4j/linalg/util/ValidationUtilTests.java | 78 ++-- .../workspace/CyclicWorkspaceTests.java | 2 +- .../workspace/SpecialWorkspaceTests.java | 4 +- .../org/nd4j/linalg/api/buffer/DataType.java | 2 +- .../org/nd4j/config/ND4JSystemProperties.java | 7 + .../java/org/nd4j/linalg/util/ArrayUtil.java | 231 ++++++----- .../org/nd4j/linalg/factory/Environment.java | 85 ++++ .../org/nd4j/linalg/factory/Nd4jBackend.java | 5 +- 375 files changed, 3944 insertions(+), 4078 deletions(-) create mode 100644 deeplearning4j/deeplearning4j-common-tests/pom.xml rename deeplearning4j/{deeplearning4j-core/src/test => deeplearning4j-common-tests/src/main}/java/org/deeplearning4j/BaseDL4JTest.java (88%) delete mode 100644 deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/optimizer/listener/TestParamAndGradientIterationListener.java delete mode 100644 deeplearning4j/deeplearning4j-cuda/src/test/java/org/deeplearning4j/BaseDL4JTest.java delete mode 100644 deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/BaseDL4JTest.java create mode 100644 deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activations/KerasELU.java create mode 100644 
deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activations/KerasReLU.java create mode 100644 deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activations/KerasSoftmax.java delete mode 100644 deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/BaseDL4JTest.java delete mode 100644 deeplearning4j/deeplearning4j-nearestneighbors-parent/deeplearning4j-nearestneighbor-server/src/test/java/org/deeplearning4j/nearestneighbor/server/BaseDL4JTest.java delete mode 100644 deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/BaseDL4JTest.java delete mode 100644 deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-uima/src/test/java/org/deeplearning4j/BaseDL4JTest.java delete mode 100644 deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/test/java/org/deeplearning4j/BaseDL4JTest.java create mode 100644 deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Deconvolution3D.java create mode 100644 deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/Deconvolution3DLayer.java create mode 100644 deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/params/Deconvolution3DParamInitializer.java delete mode 100644 deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/optimize/listeners/ParamAndGradientIterationListener.java delete mode 100644 deeplearning4j/deeplearning4j-nn/src/test/java/org/deeplearning4j/BaseDL4JTest.java delete mode 100644 deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/src/test/java/org/deeplearning4j/parallelism/parameterserver/BaseDL4JTest.java delete mode 100644 
deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/BaseDL4JTest.java delete mode 100644 deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-ui-model/src/test/java/org/deeplearning4j/ui/BaseDL4JTest.java delete mode 100644 deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/BaseDL4JTest.java delete mode 100644 deeplearning4j/dl4j-integration-tests/src/test/java/org/deeplearning4j/integration/BaseDL4JTest.java delete mode 100644 deeplearning4j/dl4j-perf/src/test/java/org/deeplearning4j/perf/listener/BaseDL4JTest.java create mode 100644 nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/internal/FrameIter.java create mode 100644 nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/nativeblas/CudaEnvironment.java create mode 100644 nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuEnvironment.java delete mode 100644 nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/LogisticPredictions.java rename nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/activations/{TestActivationJson.java => TestActivation.java} (63%) create mode 100644 nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/api/TestEnvironment.java create mode 100644 nd4j/nd4j-context/src/main/java/org/nd4j/linalg/factory/Environment.java diff --git a/datavec/datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVRecordReaderTest.java b/datavec/datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVRecordReaderTest.java index 8a26e0d5c..534cc986e 100644 --- a/datavec/datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVRecordReaderTest.java +++ b/datavec/datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVRecordReaderTest.java @@ -311,7 +311,11 @@ public class CSVRecordReaderTest { rr.reset(); fail("Expected exception"); } catch (Exception 
e){ - e.printStackTrace(); + String msg = e.getMessage(); + String msg2 = e.getCause().getMessage(); + assertTrue(msg, msg.contains("Error during LineRecordReader reset")); + assertTrue(msg2, msg2.contains("Reset not supported from streams")); +// e.printStackTrace(); } } diff --git a/datavec/datavec-api/src/test/java/org/datavec/api/records/reader/impl/LineReaderTest.java b/datavec/datavec-api/src/test/java/org/datavec/api/records/reader/impl/LineReaderTest.java index a209abb91..5027357eb 100644 --- a/datavec/datavec-api/src/test/java/org/datavec/api/records/reader/impl/LineReaderTest.java +++ b/datavec/datavec-api/src/test/java/org/datavec/api/records/reader/impl/LineReaderTest.java @@ -55,8 +55,7 @@ public class LineReaderTest { @Test public void testLineReader() throws Exception { - String tempDir = System.getProperty("java.io.tmpdir"); - File tmpdir = new File(tempDir, "tmpdir-testLineReader"); + File tmpdir = testDir.newFolder(); if (tmpdir.exists()) tmpdir.delete(); tmpdir.mkdir(); @@ -84,12 +83,6 @@ public class LineReaderTest { } assertEquals(9, count); - - try { - FileUtils.deleteDirectory(tmpdir); - } catch (Exception e) { - e.printStackTrace(); - } } @Test @@ -145,13 +138,6 @@ public class LineReaderTest { assertEquals(2, subset.size()); assertEquals(out3.get(4), subset.get(0)); assertEquals(out3.get(7), subset.get(1)); - - - try { - FileUtils.deleteDirectory(tmpdir); - } catch (Exception e) { - e.printStackTrace(); - } } @Test @@ -177,11 +163,5 @@ public class LineReaderTest { } assertEquals(9, count); - - try { - FileUtils.deleteDirectory(tmpdir); - } catch (Exception e) { - e.printStackTrace(); - } } } diff --git a/datavec/datavec-data/datavec-data-image/src/test/java/org/datavec/image/transform/JsonYamlTest.java b/datavec/datavec-data/datavec-data-image/src/test/java/org/datavec/image/transform/JsonYamlTest.java index 3f0155097..9825e6899 100644 --- a/datavec/datavec-data/datavec-data-image/src/test/java/org/datavec/image/transform/JsonYamlTest.java 
+++ b/datavec/datavec-data/datavec-data-image/src/test/java/org/datavec/image/transform/JsonYamlTest.java @@ -66,9 +66,9 @@ public class JsonYamlTest { String asJson = itp.toJson(); String asYaml = itp.toYaml(); - System.out.println(asJson); - System.out.println("\n\n\n"); - System.out.println(asYaml); +// System.out.println(asJson); +// System.out.println("\n\n\n"); +// System.out.println(asYaml); ImageWritable img = TestImageTransform.makeRandomImage(0, 0, 3); ImageWritable imgJson = new ImageWritable(img.getFrame().clone()); diff --git a/datavec/datavec-spark-inference-parent/datavec-spark-inference-model/src/test/java/org/datavec/spark/transform/CSVSparkTransformTest.java b/datavec/datavec-spark-inference-parent/datavec-spark-inference-model/src/test/java/org/datavec/spark/transform/CSVSparkTransformTest.java index 64bb2cc89..9bc445a49 100644 --- a/datavec/datavec-spark-inference-parent/datavec-spark-inference-model/src/test/java/org/datavec/spark/transform/CSVSparkTransformTest.java +++ b/datavec/datavec-spark-inference-parent/datavec-spark-inference-model/src/test/java/org/datavec/spark/transform/CSVSparkTransformTest.java @@ -60,7 +60,7 @@ public class CSVSparkTransformTest { Base64NDArrayBody body = csvSparkTransform.toArray(new SingleCSVRecord(values)); INDArray fromBase64 = Nd4jBase64.fromBase64(body.getNdarray()); assertTrue(fromBase64.isVector()); - System.out.println("Base 64ed array " + fromBase64); +// System.out.println("Base 64ed array " + fromBase64); } @Test @@ -125,7 +125,7 @@ public class CSVSparkTransformTest { SequenceBatchCSVRecord transformed = csvSparkTransform.transformSequence(sequenceBatchCSVRecord); assertNotNull(transformed.getRecords()); - System.out.println(transformed); +// System.out.println(transformed); } @@ -153,7 +153,8 @@ public class CSVSparkTransformTest { new SingleCSVRecord(data2))); final CSVSparkTransform transform = new CSVSparkTransform(transformProcess); - 
System.out.println(transform.transformSequenceIncremental(batchCsvRecord)); +// System.out.println(transform.transformSequenceIncremental(batchCsvRecord)); + transform.transformSequenceIncremental(batchCsvRecord); assertEquals(3,Nd4jBase64.fromBase64(transform.transformSequenceArrayIncremental(batchCsvRecord).getNdarray()).rank()); } diff --git a/datavec/datavec-spark-inference-parent/datavec-spark-inference-model/src/test/java/org/datavec/spark/transform/ImageSparkTransformTest.java b/datavec/datavec-spark-inference-parent/datavec-spark-inference-model/src/test/java/org/datavec/spark/transform/ImageSparkTransformTest.java index 34075006c..c3474ab85 100644 --- a/datavec/datavec-spark-inference-parent/datavec-spark-inference-model/src/test/java/org/datavec/spark/transform/ImageSparkTransformTest.java +++ b/datavec/datavec-spark-inference-parent/datavec-spark-inference-model/src/test/java/org/datavec/spark/transform/ImageSparkTransformTest.java @@ -54,7 +54,7 @@ public class ImageSparkTransformTest { Base64NDArrayBody body = imgSparkTransform.toArray(imgRecord); INDArray fromBase64 = Nd4jBase64.fromBase64(body.getNdarray()); - System.out.println("Base 64ed array " + fromBase64); +// System.out.println("Base 64ed array " + fromBase64); assertEquals(1, fromBase64.size(0)); } @@ -78,7 +78,7 @@ public class ImageSparkTransformTest { Base64NDArrayBody body = imgSparkTransform.toArray(batch); INDArray fromBase64 = Nd4jBase64.fromBase64(body.getNdarray()); - System.out.println("Base 64ed array " + fromBase64); +// System.out.println("Base 64ed array " + fromBase64); assertEquals(3, fromBase64.size(0)); } } diff --git a/datavec/datavec-spark-inference-parent/datavec-spark-inference-server/src/test/java/org/datavec/spark/transform/ImageSparkTransformServerTest.java b/datavec/datavec-spark-inference-parent/datavec-spark-inference-server/src/test/java/org/datavec/spark/transform/ImageSparkTransformServerTest.java index bfae23358..049e08e47 100644 --- 
a/datavec/datavec-spark-inference-parent/datavec-spark-inference-server/src/test/java/org/datavec/spark/transform/ImageSparkTransformServerTest.java +++ b/datavec/datavec-spark-inference-parent/datavec-spark-inference-server/src/test/java/org/datavec/spark/transform/ImageSparkTransformServerTest.java @@ -120,7 +120,7 @@ public class ImageSparkTransformServerTest { INDArray batchResult = getNDArray(jsonNodeBatch); assertEquals(3, batchResult.size(0)); - System.out.println(array); +// System.out.println(array); } @Test @@ -136,7 +136,7 @@ public class ImageSparkTransformServerTest { INDArray batchResult = getNDArray(jsonNode); assertEquals(3, batchResult.size(0)); - System.out.println(batchResult); +// System.out.println(batchResult); } @Test @@ -153,7 +153,7 @@ public class ImageSparkTransformServerTest { INDArray result = getNDArray(jsonNode); assertEquals(1, result.size(0)); - System.out.println(result); +// System.out.println(result); } public INDArray getNDArray(JsonNode node) throws IOException { diff --git a/datavec/datavec-spark/src/test/java/org/datavec/spark/transform/analysis/TestAnalysis.java b/datavec/datavec-spark/src/test/java/org/datavec/spark/transform/analysis/TestAnalysis.java index 05058fea8..d251d18d1 100644 --- a/datavec/datavec-spark/src/test/java/org/datavec/spark/transform/analysis/TestAnalysis.java +++ b/datavec/datavec-spark/src/test/java/org/datavec/spark/transform/analysis/TestAnalysis.java @@ -72,7 +72,9 @@ public class TestAnalysis extends BaseSparkTest { DataAnalysis da = AnalyzeSpark.analyze(schema, rdd); String daString = da.toString(); - System.out.println(da); +// System.out.println(da); + da.toJson(); + da.toString(); List ca = da.getColumnAnalysis(); assertEquals(5, ca.size()); @@ -151,7 +153,7 @@ public class TestAnalysis extends BaseSparkTest { assertEquals(1, countD[countD.length - 1]); File f = Files.createTempFile("datavec_spark_analysis_UITest", ".html").toFile(); - System.out.println(f.getAbsolutePath()); +// 
System.out.println(f.getAbsolutePath()); f.deleteOnExit(); HtmlAnalysis.createHtmlAnalysisFile(da, f); } @@ -210,7 +212,7 @@ public class TestAnalysis extends BaseSparkTest { for( int i=1; i<10; i++ ){ counter.merge(counters.get(i)); sparkCounter.merge(sparkCounters.get(i)); - System.out.println(); +// System.out.println(); } assertEquals(sc1.sampleStdev(), counter.getStddev(false), 1e-6); assertEquals(sparkCounter.sampleStdev(), counter.getStddev(false), 1e-6); @@ -356,7 +358,9 @@ public class TestAnalysis extends BaseSparkTest { JavaRDD> rdd = sc.parallelize(data); DataAnalysis da = AnalyzeSpark.analyze(s, rdd); - System.out.println(da); +// System.out.println(da); + da.toString(); + da.toJson(); } } diff --git a/deeplearning4j/deeplearning4j-common-tests/pom.xml b/deeplearning4j/deeplearning4j-common-tests/pom.xml new file mode 100644 index 000000000..825c55ca5 --- /dev/null +++ b/deeplearning4j/deeplearning4j-common-tests/pom.xml @@ -0,0 +1,68 @@ + + + + + deeplearning4j-parent + org.deeplearning4j + 1.0.0-SNAPSHOT + + 4.0.0 + + deeplearning4j-common-tests + + + + junit + junit + provided + + + org.nd4j + nd4j-api + ${project.version} + + + + + + test-nd4j-native + + + org.nd4j + nd4j-native + ${project.version} + test + + + + + + test-nd4j-cuda-10.2 + + + org.nd4j + nd4j-cuda-10.2 + ${project.version} + test + + + + + + \ No newline at end of file diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/BaseDL4JTest.java b/deeplearning4j/deeplearning4j-common-tests/src/main/java/org/deeplearning4j/BaseDL4JTest.java similarity index 88% rename from deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/BaseDL4JTest.java rename to deeplearning4j/deeplearning4j-common-tests/src/main/java/org/deeplearning4j/BaseDL4JTest.java index 0da356677..466a68e6a 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/BaseDL4JTest.java +++ 
b/deeplearning4j/deeplearning4j-common-tests/src/main/java/org/deeplearning4j/BaseDL4JTest.java @@ -1,5 +1,6 @@ -/******************************************************************************* +/* ****************************************************************************** * Copyright (c) 2015-2018 Skymind, Inc. + * Copyright (c) 2019-2020 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at @@ -23,7 +24,7 @@ import org.junit.Before; import org.junit.Rule; import org.junit.rules.TestName; import org.junit.rules.Timeout; -import org.nd4j.linalg.api.buffer.DataBuffer; +import org.nd4j.config.ND4JSystemProperties; import org.nd4j.linalg.api.buffer.DataType; import org.nd4j.linalg.api.memory.MemoryWorkspace; import org.nd4j.linalg.api.ops.executioner.OpExecutioner; @@ -31,24 +32,28 @@ import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.profiler.ProfilerConfig; import java.lang.management.ManagementFactory; -import java.lang.management.ThreadMXBean; import java.util.List; import java.util.Map; import java.util.Properties; -import static org.junit.Assert.assertNull; - @Slf4j -public class BaseDL4JTest { +public abstract class BaseDL4JTest { @Rule public TestName name = new TestName(); @Rule - public Timeout timeout = Timeout.seconds(30); + public Timeout timeout = Timeout.millis(getTimeoutMilliseconds()); protected long startTime; protected int threadCountBefore; + /** + * Override this method to set the default timeout for methods in the test class + */ + public long getTimeoutMilliseconds(){ + return 30000; + } + /** * Override this to set the profiling mode for the tests defined in the child class */ @@ -70,6 +75,9 @@ public class BaseDL4JTest { @Before public void beforeTest(){ log.info("{}.{}", getClass().getSimpleName(), name.getMethodName()); + //Suppress ND4J initialization - don't need this logged for every test... 
+ System.setProperty(ND4JSystemProperties.LOG_INITIALIZATION, "false"); + System.setProperty(ND4JSystemProperties.ND4J_IGNORE_AVX, "true"); Nd4j.getExecutioner().setProfilingMode(getProfilingMode()); Nd4j.getExecutioner().setProfilingConfig(ProfilerConfig.builder().build()); Nd4j.setDefaultDataTypes(getDataType(), getDefaultFPDataType()); diff --git a/deeplearning4j/deeplearning4j-core/pom.xml b/deeplearning4j/deeplearning4j-core/pom.xml index 65830d341..728ce9598 100644 --- a/deeplearning4j/deeplearning4j-core/pom.xml +++ b/deeplearning4j/deeplearning4j-core/pom.xml @@ -95,6 +95,12 @@ junit test + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + org.nd4j diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/TestUtils.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/TestUtils.java index df072b64f..d90ce628b 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/TestUtils.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/TestUtils.java @@ -75,7 +75,6 @@ public class TestUtils { } public static ComputationGraph testModelSerialization(ComputationGraph net){ - ComputationGraph restored; try { ByteArrayOutputStream baos = new ByteArrayOutputStream(); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/datavec/RecordReaderDataSetiteratorTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/datavec/RecordReaderDataSetiteratorTest.java index 7dfd46a8d..6b3047aa5 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/datavec/RecordReaderDataSetiteratorTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/datavec/RecordReaderDataSetiteratorTest.java @@ -1006,7 +1006,9 @@ public class RecordReaderDataSetiteratorTest extends BaseDL4JTest { for (RecordMetaData m : meta) { Record r = 
csv.loadFromMetaData(m); INDArray row = ds.getFeatures().getRow(i); - System.out.println(m.getLocation() + "\t" + r.getRecord() + "\t" + row); + if(i <= 3) { + System.out.println(m.getLocation() + "\t" + r.getRecord() + "\t" + row); + } for (int j = 0; j < 4; j++) { double exp = r.getRecord().get(j).toDouble(); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/AsyncDataSetIteratorTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/AsyncDataSetIteratorTest.java index 5a73e8ba8..8d8db1b8d 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/AsyncDataSetIteratorTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/AsyncDataSetIteratorTest.java @@ -183,7 +183,7 @@ public class AsyncDataSetIteratorTest extends BaseDL4JTest { } adsi.reset(); - log.info("Epoch {} finished...", e); +// log.info("Epoch {} finished...", e); } } @@ -215,7 +215,7 @@ public class AsyncDataSetIteratorTest extends BaseDL4JTest { } adsi.reset(); - log.info("Epoch {} finished...", e); +// log.info("Epoch {} finished...", e); } } } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/DataSetIteratorTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/DataSetIteratorTest.java index 6ed7819df..5201b3f56 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/DataSetIteratorTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/DataSetIteratorTest.java @@ -57,6 +57,11 @@ import static org.junit.Assert.*; public class DataSetIteratorTest extends BaseDL4JTest { + @Override + public long getTimeoutMilliseconds() { + return 90000; + } + @Test public void testBatchSizeOfOneIris() throws Exception { //Test for (a) iterators returning correct 
number of examples, and @@ -190,7 +195,7 @@ public class DataSetIteratorTest extends BaseDL4JTest { INDArray output = model.output(dataTest.getFeatures()); Evaluation eval = new Evaluation(outputNum); eval.eval(dataTest.getLabels(), output); - System.out.println(eval.stats()); +// System.out.println(eval.stats()); } @Test @@ -257,7 +262,7 @@ public class DataSetIteratorTest extends BaseDL4JTest { INDArray output = model.output(testDS.getFeatures()); eval.eval(testDS.getLabels(), output); } - System.out.println(eval.stats(true)); +// System.out.println(eval.stats(true)); listener.exportScores(System.out); } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/tools/VariableMultiTimeseriesGenerator.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/tools/VariableMultiTimeseriesGenerator.java index 04ad55bc1..17642b74c 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/tools/VariableMultiTimeseriesGenerator.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/tools/VariableMultiTimeseriesGenerator.java @@ -68,8 +68,8 @@ public class VariableMultiTimeseriesGenerator implements MultiDataSetIterator { int localMaxima = isFirst && firstMaxima > 0 ? firstMaxima : minTS == maxTS ? 
minTS : rng.nextInt(maxTS - minTS) + minTS; - if (isFirst) - log.info("Local maxima: {}", localMaxima); +// if (isFirst) +// log.info("Local maxima: {}", localMaxima); isFirst = false; diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/tools/VariableTimeseriesGenerator.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/tools/VariableTimeseriesGenerator.java index 9d2eb17ff..46dbbac9c 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/tools/VariableTimeseriesGenerator.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/tools/VariableTimeseriesGenerator.java @@ -69,8 +69,8 @@ public class VariableTimeseriesGenerator implements DataSetIterator { int localMaxima = isFirst && firstMaxima > 0 ? firstMaxima : minTS == maxTS ? minTS : rng.nextInt(maxTS - minTS) + minTS; - if (isFirst) - log.info("Local maxima: {}", localMaxima); +// if (isFirst) +// log.info("Local maxima: {}", localMaxima); isFirst = false; diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/eval/EvalJsonTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/eval/EvalJsonTest.java index 74cecae86..db86ad617 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/eval/EvalJsonTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/eval/EvalJsonTest.java @@ -54,7 +54,7 @@ public class EvalJsonTest extends BaseDL4JTest { @Test public void testSerde() { - boolean print = true; + boolean print = false; Nd4j.getRandom().setSeed(12345); Evaluation evaluation = new Evaluation(); @@ -105,7 +105,7 @@ public class EvalJsonTest extends BaseDL4JTest { @Test public void testSerdeExactRoc() { Nd4j.getRandom().setSeed(12345); - boolean print = true; + boolean print = false; ROC roc = new ROC(0); ROCBinary roc2 = new ROCBinary(0); diff 
--git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/eval/EvalTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/eval/EvalTest.java index 90d9a37c9..43370548f 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/eval/EvalTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/eval/EvalTest.java @@ -131,11 +131,15 @@ public class EvalTest extends BaseDL4JTest { org.nd4j.evaluation.classification.Evaluation evalViaMethod = model.evaluate(new ListDataSetIterator<>(Collections.singletonList(test))); checkEvaluationEquality(eval, evalViaMethod); - System.out.println(eval.getConfusionMatrix().toString()); - System.out.println(eval.getConfusionMatrix().toCSV()); - System.out.println(eval.getConfusionMatrix().toHTML()); +// System.out.println(eval.getConfusionMatrix().toString()); +// System.out.println(eval.getConfusionMatrix().toCSV()); +// System.out.println(eval.getConfusionMatrix().toHTML()); +// System.out.println(eval.confusionToString()); - System.out.println(eval.confusionToString()); + eval.getConfusionMatrix().toString(); + eval.getConfusionMatrix().toCSV(); + eval.getConfusionMatrix().toHTML(); + eval.confusionToString(); } private static void assertMapEquals(Map first, Map second) { @@ -205,9 +209,10 @@ public class EvalTest extends BaseDL4JTest { e.eval(ds.getLabels(), out, meta); //*** New - evaluate and also store metadata *** } - System.out.println(e.stats()); +// System.out.println(e.stats()); + e.stats(); - System.out.println("\n\n*** Prediction Errors: ***"); +// System.out.println("\n\n*** Prediction Errors: ***"); List errors = e.getPredictionErrors(); //*** New - get list of prediction errors from evaluation *** List metaForErrors = new ArrayList<>(); @@ -219,10 +224,11 @@ public class EvalTest extends BaseDL4JTest { int count = 0; for (org.nd4j.evaluation.meta.Prediction t : errors) { - System.out.println(t + "\t\tRaw Data: " - + 
csv.loadFromMetaData((RecordMetaData) t.getRecordMetaData()).getRecord() //*** New - load subset of data from MetaData object (usually batched for efficiency) *** - + "\tNormalized: " + ds.getFeatures().getRow(count) + "\tLabels: " - + ds.getLabels().getRow(count) + "\tNetwork predictions: " + output.getRow(count)); + String s = t + "\t\tRaw Data: " + + csv.loadFromMetaData((RecordMetaData) t.getRecordMetaData()).getRecord() //*** New - load subset of data from MetaData object (usually batched for efficiency) *** + + "\tNormalized: " + ds.getFeatures().getRow(count) + "\tLabels: " + + ds.getLabels().getRow(count) + "\tNetwork predictions: " + output.getRow(count); +// System.out.println(s); count++; } @@ -322,9 +328,9 @@ public class EvalTest extends BaseDL4JTest { List l = Arrays.asList(new DataSet(in1, out1, null, lMask1), new DataSet(in2, out2, null, lMask2)); DataSetIterator iter = new ExistingDataSetIterator(l); - System.out.println("Net 1 eval"); +// System.out.println("Net 1 eval"); org.nd4j.evaluation.IEvaluation[] e1 = net1.doEvaluation(iter, new org.nd4j.evaluation.classification.Evaluation(), new org.nd4j.evaluation.classification.ROCMultiClass(), new org.nd4j.evaluation.regression.RegressionEvaluation()); - System.out.println("Net 2 eval"); +// System.out.println("Net 2 eval"); org.nd4j.evaluation.IEvaluation[] e2 = net2.doEvaluation(iter, new org.nd4j.evaluation.classification.Evaluation(), new org.nd4j.evaluation.classification.ROCMultiClass(), new org.nd4j.evaluation.regression.RegressionEvaluation()); assertEquals(e1[0], e2[0]); @@ -403,9 +409,9 @@ public class EvalTest extends BaseDL4JTest { List l = Arrays.asList(new DataSet(in1, out1), new DataSet(in2, out2)); DataSetIterator iter = new ExistingDataSetIterator(l); - System.out.println("Eval net 1"); +// System.out.println("Eval net 1"); org.nd4j.evaluation.IEvaluation[] e1 = net1.doEvaluation(iter, new org.nd4j.evaluation.classification.Evaluation(), new 
org.nd4j.evaluation.classification.ROCMultiClass(), new org.nd4j.evaluation.regression.RegressionEvaluation()); - System.out.println("Eval net 2"); +// System.out.println("Eval net 2"); org.nd4j.evaluation.IEvaluation[] e2 = net2.doEvaluation(iter, new org.nd4j.evaluation.classification.Evaluation(), new org.nd4j.evaluation.classification.ROCMultiClass(), new org.nd4j.evaluation.regression.RegressionEvaluation()); assertEquals(e1[0], e2[0]); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/eval/EvaluationToolsTests.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/eval/EvaluationToolsTests.java index e9fcc2fff..032b06ed0 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/eval/EvaluationToolsTests.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/eval/EvaluationToolsTests.java @@ -117,7 +117,7 @@ public class EvaluationToolsTests extends BaseDL4JTest { String str = EvaluationTools.rocChartToHtml(roc, Arrays.asList("setosa", "versicolor", "virginica")); - System.out.println(str); +// System.out.println(str); } } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/AttentionLayerTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/AttentionLayerTest.java index e3380337b..8a41b614f 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/AttentionLayerTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/AttentionLayerTest.java @@ -46,12 +46,6 @@ public class AttentionLayerTest extends BaseDL4JTest { @Rule public ExpectedException exceptionRule = ExpectedException.none(); - private static final boolean PRINT_RESULTS = true; - private static final boolean RETURN_ON_FIRST_FAILURE = false; - private static final double DEFAULT_EPS = 1e-6; - private static final double DEFAULT_MAX_REL_ERROR = 
1e-3; - private static final double DEFAULT_MIN_ABS_ERROR = 1e-8; - @Test public void testSelfAttentionLayer() { int nIn = 3; @@ -104,8 +98,8 @@ public class AttentionLayerTest extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, in, labels, inMask, null, true, 100); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(in) + .labels(labels).inputMask(inMask).subset(true).maxPerParam(100)); assertTrue(name, gradOK); } } @@ -165,8 +159,8 @@ public class AttentionLayerTest extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, in, labels, inMask, null, true, 100); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(in) + .labels(labels).inputMask(inMask).subset(true).maxPerParam(100)); assertTrue(name, gradOK); } } @@ -226,8 +220,8 @@ public class AttentionLayerTest extends BaseDL4JTest { String name = "testLearnedSelfAttentionLayer() - mb=" + mb + ", tsLength = " + tsLength + ", maskType=" + maskType + ", projectInput = " + projectInput; System.out.println("Starting test: " + name); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, in, labels, inMask, null, true, 100); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(in) + .labels(labels).inputMask(inMask).subset(true).maxPerParam(100)); assertTrue(name, gradOK); } } @@ -320,8 +314,8 @@ public class AttentionLayerTest extends BaseDL4JTest { net.init(); //System.out.println("Original"); - 
boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, in, labels, inMask, null, true, 100, null); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(in) + .labels(labels).inputMask(inMask).subset(true).maxPerParam(100)); assertTrue(name, gradOK); } } @@ -383,8 +377,8 @@ public class AttentionLayerTest extends BaseDL4JTest { ComputationGraph net = new ComputationGraph(graph); net.init(); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[]{in}, new INDArray[]{labels}, inMask != null ? new INDArray[]{inMask} : null, null); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(net).inputs(new INDArray[]{in}) + .labels(new INDArray[]{labels}).inputMask(inMask != null ? new INDArray[]{inMask} : null).subset(true).maxPerParam(100)); assertTrue(name, gradOK); } } @@ -445,9 +439,8 @@ public class AttentionLayerTest extends BaseDL4JTest { ComputationGraph net = new ComputationGraph(graph); net.init(); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[]{in}, - new INDArray[]{labels}, inMask != null ? new INDArray[]{inMask} : null, null); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(net).inputs(new INDArray[]{in}) + .labels(new INDArray[]{labels}).inputMask(inMask != null ? 
new INDArray[]{inMask} : null)); assertTrue(name, gradOK); } } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/BNGradientCheckTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/BNGradientCheckTest.java index 5bafc81b0..eac917e13 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/BNGradientCheckTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/BNGradientCheckTest.java @@ -56,11 +56,6 @@ import static org.junit.Assert.assertTrue; * */ public class BNGradientCheckTest extends BaseDL4JTest { - private static final boolean PRINT_RESULTS = true; - private static final boolean RETURN_ON_FIRST_FAILURE = false; - private static final double DEFAULT_EPS = 1e-5; - private static final double DEFAULT_MAX_REL_ERROR = 1e-5; - private static final double DEFAULT_MIN_ABS_ERROR = 1e-9; static { Nd4j.setDataType(DataType.DOUBLE); @@ -93,17 +88,15 @@ public class BNGradientCheckTest extends BaseDL4JTest { MultiLayerNetwork mln = new MultiLayerNetwork(builder.build()); mln.init(); - if (PRINT_RESULTS) { - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); - } +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); //Mean and variance vars are not gradient checkable; mean/variance "gradient" is used to implement running mean/variance calc //i.e., runningMean = decay * runningMean + (1-decay) * batchMean //However, numerical gradient will be 0 as forward pass doesn't depend on this "parameter" Set excludeParams = new HashSet<>(Arrays.asList("1_mean", "1_var", "1_log10stdev")); - boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, excludeParams); 
+ boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input) + .labels(labels).excludeParams(excludeParams)); assertTrue(gradOK); TestUtils.testModelSerialization(mln); @@ -140,17 +133,15 @@ public class BNGradientCheckTest extends BaseDL4JTest { MultiLayerNetwork mln = new MultiLayerNetwork(builder.build()); mln.init(); - if (PRINT_RESULTS) { - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); - } +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); //Mean and variance vars are not gradient checkable; mean/variance "gradient" is used to implement running mean/variance calc //i.e., runningMean = decay * runningMean + (1-decay) * batchMean //However, numerical gradient will be 0 as forward pass doesn't depend on this "parameter" Set excludeParams = new HashSet<>(Arrays.asList("1_mean", "1_var", "1_log10stdev")); - boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, excludeParams); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input) + .labels(labels).excludeParams(excludeParams)); assertTrue(gradOK); TestUtils.testModelSerialization(mln); @@ -220,7 +211,7 @@ public class BNGradientCheckTest extends BaseDL4JTest { String name = new Object() { }.getClass().getEnclosingMethod().getName(); - System.out.println("Num params: " + mln.numParams()); +// System.out.println("Num params: " + mln.numParams()); if (doLearningFirst) { //Run a number of iterations of learning @@ -241,20 +232,18 @@ public class BNGradientCheckTest extends BaseDL4JTest { assertTrue(msg, scoreAfter < 0.9 * scoreBefore); } - if (PRINT_RESULTS) { - System.out.println(name + " - activationFn=" + afn + ", lossFn=" + lf - + ", 
outputActivation=" + outputActivation + ", doLearningFirst=" - + doLearningFirst + ", l1=" + l1vals[j] + ", l2=" + l2vals[j]); - for (int k = 0; k < mln.getnLayers(); k++) - System.out.println("Layer " + k + " # params: " + mln.getLayer(k).numParams()); - } + System.out.println(name + " - activationFn=" + afn + ", lossFn=" + lf + + ", outputActivation=" + outputActivation + ", doLearningFirst=" + + doLearningFirst + ", l1=" + l1vals[j] + ", l2=" + l2vals[j]); +// for (int k = 0; k < mln.getnLayers(); k++) +// System.out.println("Layer " + k + " # params: " + mln.getLayer(k).numParams()); //Mean and variance vars are not gradient checkable; mean/variance "gradient" is used to implement running mean/variance calc //i.e., runningMean = decay * runningMean + (1-decay) * batchMean //However, numerical gradient will be 0 as forward pass doesn't depend on this "parameter" Set excludeParams = new HashSet<>(Arrays.asList("1_mean", "1_var", "3_mean", "3_var", "1_log10stdev", "3_log10stdev")); - boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, null, true, 25, excludeParams); //Most params are in output layer, only these should be skipped with this threshold + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input) + .labels(labels).excludeParams(excludeParams).subset(true).maxPerParam(25)); //Most params are in output layer, only these should be skipped with this threshold assertTrue(gradOK); TestUtils.testModelSerialization(mln); @@ -347,20 +336,18 @@ public class BNGradientCheckTest extends BaseDL4JTest { assertTrue(msg, scoreAfter < 0.8 * scoreBefore); } - if (PRINT_RESULTS) { - System.out.println(name + " - activationFn=" + afn + ", lossFn=" + lf - + ", outputActivation=" + outputActivation + ", doLearningFirst=" - + doLearningFirst + ", l1=" + l1vals[j] + ", l2=" + l2vals[j]); - for (int k = 0; k 
< mln.getnLayers(); k++) - System.out.println("Layer " + k + " # params: " + mln.getLayer(k).numParams()); - } + System.out.println(name + " - activationFn=" + afn + ", lossFn=" + lf + + ", outputActivation=" + outputActivation + ", doLearningFirst=" + + doLearningFirst + ", l1=" + l1vals[j] + ", l2=" + l2vals[j]); +// for (int k = 0; k < mln.getnLayers(); k++) +// System.out.println("Layer " + k + " # params: " + mln.getLayer(k).numParams()); //Mean and variance vars are not gradient checkable; mean/variance "gradient" is used to implement running mean/variance calc //i.e., runningMean = decay * runningMean + (1-decay) * batchMean //However, numerical gradient will be 0 as forward pass doesn't depend on this "parameter" Set excludeParams = new HashSet<>(Arrays.asList("1_mean", "1_var", "3_mean", "3_var", "1_log10stdev", "3_log10stdev")); - boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, excludeParams); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input) + .labels(labels).excludeParams(excludeParams)); assertTrue(gradOK); TestUtils.testModelSerialization(mln); @@ -396,17 +383,15 @@ public class BNGradientCheckTest extends BaseDL4JTest { MultiLayerNetwork mln = new MultiLayerNetwork(builder.build()); mln.init(); - if (PRINT_RESULTS) { - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); - } +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); //Mean and variance vars are not gradient checkable; mean/variance "gradient" is used to implement running mean/variance calc //i.e., runningMean = decay * runningMean + (1-decay) * batchMean //However, numerical gradient will be 0 as forward pass doesn't depend on this "parameter" Set excludeParams = 
new HashSet<>(Arrays.asList("1_mean", "1_var", "1_log10stdev")); - boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, excludeParams); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input) + .labels(labels).excludeParams(excludeParams)); assertTrue(gradOK); TestUtils.testModelSerialization(mln); @@ -443,17 +428,15 @@ public class BNGradientCheckTest extends BaseDL4JTest { MultiLayerNetwork mln = new MultiLayerNetwork(builder.build()); mln.init(); - if (PRINT_RESULTS) { - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); - } +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); //Mean and variance vars are not gradient checkable; mean/variance "gradient" is used to implement running mean/variance calc //i.e., runningMean = decay * runningMean + (1-decay) * batchMean //However, numerical gradient will be 0 as forward pass doesn't depend on this "parameter" Set excludeParams = new HashSet<>(Arrays.asList("1_mean", "1_var", "1_log10stdev")); - boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, excludeParams); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input) + .labels(labels).excludeParams(excludeParams)); assertTrue(gradOK); TestUtils.testModelSerialization(mln); @@ -496,9 +479,8 @@ public class BNGradientCheckTest extends BaseDL4JTest { //i.e., runningMean = decay * runningMean + (1-decay) * batchMean //However, numerical gradient will be 0 as forward pass doesn't depend on this "parameter" Set excludeParams = new HashSet<>(Arrays.asList("bn_mean", "bn_var")); - boolean 
gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[]{input}, - new INDArray[]{labels}, null, null, excludeParams); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(net).inputs(new INDArray[]{input}) + .labels(new INDArray[]{labels}).excludeParams(excludeParams)); assertTrue(gradOK); TestUtils.testModelSerialization(net); @@ -585,21 +567,18 @@ public class BNGradientCheckTest extends BaseDL4JTest { assertTrue(msg, scoreAfter < 0.9 * scoreBefore); } - if (PRINT_RESULTS) { - System.out.println(name + " - activationFn=" + afn + ", lossFn=" + lf - + ", outputActivation=" + outputActivation + ", doLearningFirst=" - + doLearningFirst + ", l1=" + l1vals[j] + ", l2=" + l2vals[j]); - for (int k = 0; k < net.getNumLayers(); k++) - System.out.println("Layer " + k + " # params: " + net.getLayer(k).numParams()); - } + System.out.println(name + " - activationFn=" + afn + ", lossFn=" + lf + + ", outputActivation=" + outputActivation + ", doLearningFirst=" + + doLearningFirst + ", l1=" + l1vals[j] + ", l2=" + l2vals[j]); +// for (int k = 0; k < net.getNumLayers(); k++) +// System.out.println("Layer " + k + " # params: " + net.getLayer(k).numParams()); //Mean and variance vars are not gradient checkable; mean/variance "gradient" is used to implement running mean/variance calc //i.e., runningMean = decay * runningMean + (1-decay) * batchMean //However, numerical gradient will be 0 as forward pass doesn't depend on this "parameter" Set excludeParams = new HashSet<>(Arrays.asList("1_mean", "1_var", "3_mean", "3_var", "1_log10stdev", "3_log10stdev")); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, - new INDArray[]{input}, new INDArray[]{labels}, null, null, excludeParams); + boolean gradOK = GradientCheckUtil.checkGradients(new 
GradientCheckUtil.GraphConfig().net(net).inputs(new INDArray[]{input}) + .labels(new INDArray[]{labels}).excludeParams(excludeParams)); assertTrue(gradOK); TestUtils.testModelSerialization(net); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/CNN1DGradientCheckTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/CNN1DGradientCheckTest.java index a0a109cb1..a2b29e06c 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/CNN1DGradientCheckTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/CNN1DGradientCheckTest.java @@ -108,8 +108,8 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < net.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); +// for (int j = 0; j < net.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -188,8 +188,8 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < net.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); +// for (int j = 0; j < net.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -272,8 +272,8 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < net.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); +// for (int j = 0; j < net.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " 
+ net.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -349,8 +349,8 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < net.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); +// for (int j = 0; j < net.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -414,8 +414,8 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest { INDArray label = TestUtils.randomOneHot(2, finalNOut); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, f, label, fm, null); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(f) + .labels(label).inputMask(fm)); assertTrue(s, gradOK); TestUtils.testModelSerialization(net); @@ -509,8 +509,8 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest { INDArray label = TestUtils.randomOneHotTimeSeries(2, finalNOut, (int)outSize2); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, f, label, fm, null); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(f) + .labels(label).inputMask(fm)); assertTrue(s, gradOK); TestUtils.testModelSerialization(net); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/CNN3DGradientCheckTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/CNN3DGradientCheckTest.java index 13cc11e80..cbad1adbb 100644 --- 
a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/CNN3DGradientCheckTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/CNN3DGradientCheckTest.java @@ -144,14 +144,13 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest { if (PRINT_RESULTS) { log.info(msg); - for (int j = 0; j < net.getnLayers(); j++) { - log.info("Layer " + j + " # params: " + net.getLayer(j).numParams()); - } +// for (int j = 0; j < net.getnLayers(); j++) { +// log.info("Layer " + j + " # params: " + net.getLayer(j).numParams()); +// } } - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, - DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, - RETURN_ON_FIRST_FAILURE, input, labels, null, null, true, 128); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(input) + .labels(labels).subset(true).maxPerParam(128)); assertTrue(msg, gradOK); @@ -248,14 +247,13 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest { if (PRINT_RESULTS) { log.info(msg); - for (int j = 0; j < net.getnLayers(); j++) { - log.info("Layer " + j + " # params: " + net.getLayer(j).numParams()); - } +// for (int j = 0; j < net.getnLayers(); j++) { +// log.info("Layer " + j + " # params: " + net.getLayer(j).numParams()); +// } } - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, - DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, - RETURN_ON_FIRST_FAILURE, input, labels, null, null, true, 512); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(input) + .labels(labels).subset(true).maxPerParam(512)); assertTrue(msg, gradOK); @@ -431,9 +429,9 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest { if (PRINT_RESULTS) { log.info(msg); - for (int j = 0; j < net.getnLayers(); j++) { - log.info("Layer " + j + " # params: " + net.getLayer(j).numParams()); - } +// for (int j = 0; j < 
net.getnLayers(); j++) { +// log.info("Layer " + j + " # params: " + net.getLayer(j).numParams()); +// } } boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, @@ -530,9 +528,9 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest { if (PRINT_RESULTS) { log.info(msg); - for (int j = 0; j < net.getnLayers(); j++) { - log.info("Layer " + j + " # params: " + net.getLayer(j).numParams()); - } +// for (int j = 0; j < net.getnLayers(); j++) { +// log.info("Layer " + j + " # params: " + net.getLayer(j).numParams()); +// } } boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, @@ -547,4 +545,95 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest { } } } + + @Test + public void testDeconv3d() { + Nd4j.getRandom().setSeed(12345); + // Note: we checked this with a variety of parameters, but it takes a lot of time. + int[] depths = {8, 8, 9}; + int[] heights = {8, 9, 9}; + int[] widths = {8, 8, 9}; + + + int[][] kernels = {{2, 2, 2}, {3, 3, 3}, {2, 3, 2}}; + int[][] strides = {{1, 1, 1}, {1, 1, 1}, {2, 2, 2}}; + + Activation[] activations = {Activation.SIGMOID, Activation.TANH, Activation.IDENTITY}; + + ConvolutionMode[] modes = {ConvolutionMode.Truncate, ConvolutionMode.Same, ConvolutionMode.Same}; + int[] mbs = {1, 3, 2}; + Convolution3D.DataFormat[] dataFormats = new Convolution3D.DataFormat[]{Convolution3D.DataFormat.NCDHW, Convolution3D.DataFormat.NDHWC, Convolution3D.DataFormat.NCDHW}; + + int convNIn = 2; + int finalNOut = 2; + int[] deconvOut = {2, 3, 4}; + + for (int i = 0; i < activations.length; i++) { + Activation afn = activations[i]; + int miniBatchSize = mbs[i]; + int depth = depths[i]; + int height = heights[i]; + int width = widths[i]; + ConvolutionMode mode = modes[i]; + int[] kernel = kernels[i]; + int[] stride = strides[i]; + Convolution3D.DataFormat df = dataFormats[i]; + int dOut = deconvOut[i]; + + INDArray input; + if (df == Convolution3D.DataFormat.NDHWC) { + input = Nd4j.rand(new int[]{miniBatchSize, depth, 
height, width, convNIn}); + } else { + input = Nd4j.rand(new int[]{miniBatchSize, convNIn, depth, height, width}); + } + INDArray labels = Nd4j.zeros(miniBatchSize, finalNOut); + for (int j = 0; j < miniBatchSize; j++) { + labels.putScalar(new int[]{j, j % finalNOut}, 1.0); + } + + MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() + .dataType(DataType.DOUBLE) + .updater(new NoOp()) + .weightInit(new NormalDistribution(0, 0.1)) + .list() + .layer(0, new Convolution3D.Builder().activation(afn).kernelSize(kernel) + .stride(stride).nIn(convNIn).nOut(dOut).hasBias(false) + .convolutionMode(mode).dataFormat(df) + .build()) + .layer(1, new Deconvolution3D.Builder().activation(afn).kernelSize(kernel) + .stride(stride).nOut(dOut).hasBias(false) + .convolutionMode(mode).dataFormat(df) + .build()) + .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) + .activation(Activation.SOFTMAX).nOut(finalNOut).build()) + .setInputType(InputType.convolutional3D(df, depth, height, width, convNIn)).build(); + + String json = conf.toJson(); + MultiLayerConfiguration c2 = MultiLayerConfiguration.fromJson(json); + assertEquals(conf, c2); + + MultiLayerNetwork net = new MultiLayerNetwork(conf); + net.init(); + + String msg = "DataFormat = " + df + ", minibatch size = " + miniBatchSize + ", activationFn=" + afn + + ", kernel = " + Arrays.toString(kernel) + ", stride = " + + Arrays.toString(stride) + ", mode = " + mode.toString() + + ", input depth " + depth + ", input height " + height + + ", input width " + width; + + if (PRINT_RESULTS) { + log.info(msg); +// for (int j = 0; j < net.getnLayers(); j++) { +// log.info("Layer " + j + " # params: " + net.getLayer(j).numParams()); +// } + } + + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(input) + .labels(labels).subset(true).maxPerParam(128)); + + assertTrue(msg, gradOK); + + TestUtils.testModelSerialization(net); + } + } } diff --git 
a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/CNNGradientCheckTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/CNNGradientCheckTest.java index decb81bb0..4ddb1ad40 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/CNNGradientCheckTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/CNNGradientCheckTest.java @@ -122,8 +122,8 @@ public class CNNGradientCheckTest extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(name + " - activationFn=" + afn + ", lossFn=" + lf + ", outputActivation=" + outputActivation + ", doLearningFirst=" + doLearningFirst); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -213,8 +213,8 @@ public class CNNGradientCheckTest extends BaseDL4JTest { System.out.println(testName + "- activationFn=" + afn + ", lossFn=" + lf + ", outputActivation=" + outputActivation + ", doLearningFirst=" + doLearningFirst); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -275,8 +275,8 @@ public class CNNGradientCheckTest extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < net.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); +// for (int j = 0; j < net.getnLayers(); j++) +// 
System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels); @@ -336,8 +336,8 @@ public class CNNGradientCheckTest extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < net.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); +// for (int j = 0; j < net.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels); @@ -346,8 +346,8 @@ public class CNNGradientCheckTest extends BaseDL4JTest { //Also check compgraph: ComputationGraph cg = net.toComputationGraph(); - gradOK = GradientCheckUtil.checkGradients(cg, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[]{input}, new INDArray[]{labels}); + gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(cg).inputs(new INDArray[]{input}) + .labels(new INDArray[]{labels})); assertTrue(msg + " - compgraph", gradOK); TestUtils.testModelSerialization(net); @@ -399,8 +399,8 @@ public class CNNGradientCheckTest extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < net.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); +// for (int j = 0; j < net.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -468,8 +468,8 @@ public class CNNGradientCheckTest extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 
0; j < net.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); +// for (int j = 0; j < net.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -793,9 +793,8 @@ public class CNNGradientCheckTest extends BaseDL4JTest { + convFirst; System.out.println(msg); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, - labels, null, null, true, 128); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(input) + .labels(labels).subset(true).maxPerParam(128)); assertTrue(msg, gradOK); @@ -863,8 +862,8 @@ public class CNNGradientCheckTest extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < net.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); +// for (int j = 0; j < net.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -937,8 +936,8 @@ public class CNNGradientCheckTest extends BaseDL4JTest { + k + ", s=" + s + ", d=" + d + ", cm=" + cm; System.out.println(msg); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, null, true, 100); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(input) + .labels(labels).subset(true).maxPerParam(100)); assertTrue(msg, gradOK); @@ -1009,8 +1008,8 @@ public class CNNGradientCheckTest extends BaseDL4JTest { + k + ", s=" + s + ", d=" + d + ", cm=" + cm; System.out.println(msg); - boolean 
gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, null, true, 50); //Most params are in output layer + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(input) + .labels(labels).subset(true).maxPerParam(50)); //Most params are in output layer assertTrue(msg, gradOK); @@ -1160,12 +1159,12 @@ public class CNNGradientCheckTest extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < net.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); +// for (int j = 0; j < net.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, null, true, 160); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(input) + .labels(labels).subset(true).maxPerParam(160)); assertTrue(msg, gradOK); @@ -1235,8 +1234,8 @@ public class CNNGradientCheckTest extends BaseDL4JTest { + k + ", nIn=" + nIn + ", depthMul=" + depthMultiplier + ", s=" + s + ", cm=" + cm; System.out.println(msg); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, null, true, 256); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(input) + .labels(labels).subset(true).maxPerParam(256)); assertTrue(msg, gradOK); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/CapsnetGradientCheckTest.java 
b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/CapsnetGradientCheckTest.java index 3aa437381..e69766677 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/CapsnetGradientCheckTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/CapsnetGradientCheckTest.java @@ -110,10 +110,8 @@ public class CapsnetGradientCheckTest extends BaseDL4JTest { " capsules with " + capsuleDim + " dimensions and " + routing + " routings"; System.out.println(msg); - boolean gradOK = GradientCheckUtil - .checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, - labels, null, null, true, 100); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(input) + .labels(labels).subset(true).maxPerParam(100)); assertTrue(msg, gradOK); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/DropoutGradientCheck.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/DropoutGradientCheck.java index 1fe67b30c..48db906f5 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/DropoutGradientCheck.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/DropoutGradientCheck.java @@ -36,6 +36,7 @@ import org.nd4j.linalg.activations.Activation; import org.nd4j.linalg.api.buffer.DataType; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; +import org.nd4j.linalg.function.Consumer; import org.nd4j.linalg.learning.config.NoOp; import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction; @@ -171,10 +172,15 @@ public class DropoutGradientCheck extends BaseDL4JTest { INDArray[] in = new INDArray[]{Nd4j.rand(mb, 5)}; INDArray[] l = new INDArray[]{TestUtils.randomOneHot(mb, 5)}; - boolean ok = 
GradientCheckUtil.checkGradients(cg, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, in, l, null, null, null, 12345); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(cg).inputs(in) + .labels(l).callEachIter(new Consumer() { + @Override + public void accept(ComputationGraph net) { + Nd4j.getRandom().setSeed(12345); + } + })); - assertTrue(ok); + assertTrue(gradOK); } } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GlobalPoolingGradientCheckTests.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GlobalPoolingGradientCheckTests.java index b141323a9..d4e3d3089 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GlobalPoolingGradientCheckTests.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GlobalPoolingGradientCheckTests.java @@ -92,8 +92,8 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testLSTMGlobalPoolingBasicMultiLayer() - " + pt + ", minibatch = " + miniBatchSize); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -150,8 +150,8 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println( "testCnnGlobalPoolingBasicMultiLayer() - " + pt + ", minibatch = " + miniBatchSize); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # 
params: " + mln.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -209,12 +209,12 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testLSTMGlobalPoolingBasicMultiLayer() - " + pt + ", minibatch = " + miniBatchSize); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, featuresMask, null); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input) + .labels(labels).inputMask(featuresMask)); assertTrue(gradOK); TestUtils.testModelSerialization(mln); @@ -292,12 +292,12 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testCnnGlobalPoolingBasicMultiLayer() - " + pt + ", minibatch = " + miniBatchSize); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, inputMask, null); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input) + .labels(labels).inputMask(inputMask)); assertTrue(gradOK); TestUtils.testModelSerialization(mln); diff --git 
a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTests.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTests.java index d506bb233..cd3e1d2e3 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTests.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTests.java @@ -120,8 +120,8 @@ public class GradientCheckTests extends BaseDL4JTest { System.out.println("testMinibatchApplication() - activationFn=" + afn + ", lossFn=" + lf + ", outputActivation=" + outputActivation + ", doLearningFirst=" + doLearningFirst); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -200,8 +200,8 @@ public class GradientCheckTests extends BaseDL4JTest { System.out.println("testGradientMLP2LayerIrisSimpleRandom() - activationFn=" + afn + ", lossFn=" + lf + ", outputActivation=" + outputActivation + ", doLearningFirst=" + doLearningFirst); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -294,8 +294,8 @@ public class GradientCheckTests extends BaseDL4JTest { System.out.println("testGradientMLP2LayerIrisSimpleRandom() - activationFn=" + afn + ", lossFn=" + lf + ", outputActivation=" + outputActivation + ", doLearningFirst=" + doLearningFirst + ", l2=" + l2 + ", l1=" + l1); - for (int j = 0; 
j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -339,8 +339,8 @@ public class GradientCheckTests extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testEmbeddingLayerSimple"); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -379,8 +379,8 @@ public class GradientCheckTests extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testEmbeddingLayerSimple"); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -469,8 +469,8 @@ public class GradientCheckTests extends BaseDL4JTest { + doLearningFirst + ", l2=" + l2 + ", l1=" + l1; if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -539,8 +539,8 @@ public class GradientCheckTests extends BaseDL4JTest { // expectation in case linear regression(with only element wise multiplication 
layer): large weight for the fourth weight log.info("params after learning: " + netGraph.getLayer(1).paramTable()); - boolean gradOK = checkGradients(netGraph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[]{features}, new INDArray[]{labels}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(netGraph).inputs(new INDArray[]{features}) + .labels(new INDArray[]{labels})); msg = "elementWiseMultiplicationLayerTest() - activationFn=" + "ID" + ", lossFn=" + "Cos-sim" + ", outputActivation=" + "Id" + ", doLearningFirst=" + "true"; @@ -592,8 +592,8 @@ public class GradientCheckTests extends BaseDL4JTest { } String msg = "mask=" + maskArray + ", inputRank=" + inputRank; - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, in, label, fMask, null); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(in) + .labels(label).inputMask(fMask)); assertTrue(msg, gradOK); TestUtils.testModelSerialization(net); @@ -767,8 +767,8 @@ public class GradientCheckTests extends BaseDL4JTest { System.out.println("testGradientMLP2LayerIrisSimpleRandom() - activationFn=" + afn + ", lossFn=" + lf + ", outputActivation=" + outputActivation + ", doLearningFirst=" + doLearningFirst + ", layerNorm=" + layerNorm); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTestsComputationGraph.java 
b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTestsComputationGraph.java index 623158c68..b702520e4 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTestsComputationGraph.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTestsComputationGraph.java @@ -103,13 +103,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testBasicIris()"); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input}, - new INDArray[] {labels}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input}) + .labels(new INDArray[]{labels})); String msg = "testBasicIris()"; assertTrue(msg, gradOK); @@ -155,13 +154,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testBasicIrisWithMerging()"); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input}, - new INDArray[] {labels}); + boolean gradOK = GradientCheckUtil.checkGradients(new 
GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input}) + .labels(new INDArray[]{labels})); String msg = "testBasicIrisWithMerging()"; assertTrue(msg, gradOK); @@ -213,13 +211,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testBasicIrisWithElementWiseVertex(op=" + op + ")"); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input}, - new INDArray[] {labels}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input}) + .labels(new INDArray[]{labels})); String msg = "testBasicIrisWithElementWiseVertex(op=" + op + ")"; assertTrue(msg, gradOK); @@ -274,13 +271,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testBasicIrisWithElementWiseVertex(op=" + op + ")"); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input}, - new INDArray[] {labels}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input}) + .labels(new INDArray[]{labels})); String msg = "testBasicIrisWithElementWiseVertex(op=" + op 
+ ")"; assertTrue(msg, gradOK); @@ -328,9 +324,8 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { graph.fit(new DataSet(in, labels)); - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[]{in}, - new INDArray[]{labels}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{in}) + .labels(new INDArray[]{labels})); assertTrue(msg, gradOK); TestUtils.testModelSerialization(graph); } @@ -372,13 +367,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testCnnDepthMerge()"); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input}, - new INDArray[] {labels}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input}) + .labels(new INDArray[]{labels})); String msg = "testCnnDepthMerge()"; assertTrue(msg, gradOK); @@ -430,13 +424,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testLSTMWithMerging()"); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - 
DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input}, - new INDArray[] {labels}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input}) + .labels(new INDArray[]{labels})); String msg = "testLSTMWithMerging()"; assertTrue(msg, gradOK); @@ -466,13 +459,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testLSTMWithSubset()"); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input}, - new INDArray[] {labels}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input}) + .labels(new INDArray[]{labels})); String msg = "testLSTMWithSubset()"; assertTrue(msg, gradOK); @@ -504,26 +496,24 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testLSTMWithLastTimeStepVertex()"); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } //First: test with no input mask array - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input}, - new INDArray[] {labels}); + boolean gradOK = GradientCheckUtil.checkGradients(new 
GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input}) + .labels(new INDArray[]{labels})); String msg = "testLSTMWithLastTimeStepVertex()"; assertTrue(msg, gradOK); //Second: test with input mask arrays. - INDArray inMask = Nd4j.zeros(3, 5); - inMask.putRow(0, Nd4j.create(new double[] {1, 1, 1, 0, 0})); - inMask.putRow(1, Nd4j.create(new double[] {1, 1, 1, 1, 0})); - inMask.putRow(2, Nd4j.create(new double[] {1, 1, 1, 1, 1})); - graph.setLayerMaskArrays(new INDArray[] {inMask}, null); - gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, - PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input}, new INDArray[] {labels}); + INDArray inMask = Nd4j.zeros(3, 4); + inMask.putRow(0, Nd4j.create(new double[] {1, 1, 0, 0})); + inMask.putRow(1, Nd4j.create(new double[] {1, 1, 1, 0})); + inMask.putRow(2, Nd4j.create(new double[] {1, 1, 1, 1})); + gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input}) + .labels(new INDArray[]{labels}).inputMask(new INDArray[]{inMask})); assertTrue(msg, gradOK); TestUtils.testModelSerialization(graph); @@ -566,13 +556,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testLSTMWithDuplicateToTimeSeries()"); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input1, input2}, - new INDArray[] {labels}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input1, input2}) + 
.labels(new INDArray[]{labels})); String msg = "testLSTMWithDuplicateToTimeSeries()"; assertTrue(msg, gradOK); @@ -615,13 +604,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testLSTMWithReverseTimeSeriesVertex()"); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input}, - new INDArray[] {labels}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input}) + .labels(new INDArray[]{labels})); String msg = "testLSTMWithDuplicateToTimeSeries()"; assertTrue(msg, gradOK); @@ -632,8 +620,8 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { inMask.putRow(1, Nd4j.create(new double[] {1, 1, 0, 1, 0})); inMask.putRow(2, Nd4j.create(new double[] {1, 1, 1, 1, 1})); graph.setLayerMaskArrays(new INDArray[] {inMask}, null); - gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, - PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input}, new INDArray[] {labels}); + gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input}) + .labels(new INDArray[]{labels})); assertTrue(msg, gradOK); TestUtils.testModelSerialization(graph); @@ -671,13 +659,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { String msg = "testMultipleInputsLayer() - minibatchSize = " + mb; if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " 
# params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, inputs, - new INDArray[] {out}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(inputs) + .labels(new INDArray[]{out})); assertTrue(msg, gradOK); TestUtils.testModelSerialization(graph); @@ -712,13 +699,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { String msg = "testMultipleOutputsLayer() - minibatchSize = " + mb; if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input}, - new INDArray[] {out}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input}) + .labels(new INDArray[]{out})); assertTrue(msg, gradOK); TestUtils.testModelSerialization(graph); @@ -759,12 +745,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { String msg = "testMultipleOutputsMergeVertex() - minibatchSize = " + mb; if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean 
gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, new INDArray[] {out}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(input) + .labels(new INDArray[]{out})); assertTrue(msg, gradOK); TestUtils.testModelSerialization(graph); @@ -810,13 +796,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { String msg = "testMultipleOutputsMergeVertex() - minibatchSize = " + mb; if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input}, - new INDArray[] {out}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input}) + .labels(new INDArray[]{out})); assertTrue(msg, gradOK); TestUtils.testModelSerialization(graph); @@ -873,19 +858,18 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { Map out = graph.feedForward(new INDArray[] {pos, anc, neg}, true); - for (String s : out.keySet()) { - System.out.println(s + "\t" + Arrays.toString(out.get(s).shape())); - } +// for (String s : out.keySet()) { +// System.out.println(s + "\t" + Arrays.toString(out.get(s).shape())); +// } if (PRINT_RESULTS) { System.out.println("testBasicIrisTripletStackingL2Loss()"); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " 
# params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {pos, anc, neg}, - new INDArray[] {labels}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{pos, anc, neg}) + .labels(new INDArray[]{labels})); String msg = "testBasicIrisTripletStackingL2Loss()"; assertTrue(msg, gradOK); @@ -941,13 +925,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { String msg = "testBasicCenterLoss() - lambda = " + lambda + ", trainFirst = " + train; if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {example}, - new INDArray[] {labels}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{example}) + .labels(new INDArray[]{labels})); assertTrue(msg, gradOK); TestUtils.testModelSerialization(graph); @@ -1007,8 +990,8 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { String msg = "testBasicCenterLoss() - trainFirst = " + train; if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < net.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); +// for (int j = 0; j < net.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, 
DEFAULT_MAX_REL_ERROR, @@ -1056,13 +1039,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(testName); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {in1, in2}, - new INDArray[] {labels}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{in1, in2}) + .labels(new INDArray[]{labels})); assertTrue(testName, gradOK); TestUtils.testModelSerialization(graph); @@ -1115,13 +1097,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(testName); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {in1, in2}, - new INDArray[] {labels1, labels2}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{in1, in2}) + .labels(new INDArray[]{labels1, labels2})); assertTrue(testName, gradOK); TestUtils.testModelSerialization(graph); @@ -1174,13 +1155,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(testName); - for (int j = 0; j < graph.getNumLayers(); j++) - 
System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {in1, in2}, - new INDArray[] {labels1, labels2}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{in1, in2}) + .labels(new INDArray[]{labels1, labels2})); assertTrue(testName, gradOK); TestUtils.testModelSerialization(graph); @@ -1238,15 +1218,14 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(testName); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } graph.setLayerMaskArrays(new INDArray[] {inMask1, inMask2}, null); - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {in1, in2}, - new INDArray[] {labels1, labels2}, new INDArray[] {inMask1, inMask2}, null); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{in1, in2}) + .labels(new INDArray[]{labels1, labels2}).inputMask(new INDArray[]{inMask1, inMask2})); assertTrue(testName, gradOK); TestUtils.testModelSerialization(graph); @@ -1298,13 +1277,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(testName); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + 
graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {in1, in2}, - new INDArray[] {labels1, labels2}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{in1, in2}) + .labels(new INDArray[]{labels1, labels2})); assertTrue(testName, gradOK); TestUtils.testModelSerialization(graph); } @@ -1341,13 +1319,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(testName); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {in1}, - new INDArray[] {labels1}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{in1}) + .labels(new INDArray[]{labels1})); assertTrue(testName, gradOK); TestUtils.testModelSerialization(graph); @@ -1391,13 +1368,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println(testName); - for (int j = 0; j < graph.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); +// for (int j = 0; j < graph.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(graph, 
DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {in1}, - new INDArray[] {labels1}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{in1}) + .labels(new INDArray[]{labels1})); assertTrue(testName, gradOK); TestUtils.testModelSerialization(graph); @@ -1430,12 +1406,12 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testGraphEmbeddingLayerSimple"); - for (int j = 0; j < cg.getNumLayers(); j++) - System.out.println("Layer " + j + " # params: " + cg.getLayer(j).numParams()); +// for (int j = 0; j < cg.getNumLayers(); j++) +// System.out.println("Layer " + j + " # params: " + cg.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(cg, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, - PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] {input}, new INDArray[] {labels}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(cg).inputs(new INDArray[]{input}) + .labels(new INDArray[]{labels})); String msg = "testGraphEmbeddingLayerSimple"; assertTrue(msg, gradOK); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTestsMasking.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTestsMasking.java index c1e97a385..a6dd288fe 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTestsMasking.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GradientCheckTestsMasking.java @@ -51,10 +51,6 @@ import static org.nd4j.linalg.indexing.NDArrayIndex.*; public class GradientCheckTestsMasking extends BaseDL4JTest { private static final boolean PRINT_RESULTS = true; - private static final boolean 
RETURN_ON_FIRST_FAILURE = false; - private static final double DEFAULT_EPS = 1e-6; - private static final double DEFAULT_MAX_REL_ERROR = 1e-3; - private static final double DEFAULT_MIN_ABS_ERROR = 1e-7; static { Nd4j.setDataType(DataType.DOUBLE); @@ -130,8 +126,8 @@ public class GradientCheckTestsMasking extends BaseDL4JTest { MultiLayerNetwork mln = new MultiLayerNetwork(conf); mln.init(); - boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, maskArr); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input) + .labels(labels).labelMask(maskArr)); String msg = "gradientCheckMaskingOutputSimple() - timeSeriesLength=" + timeSeriesLength + ", miniBatchSize=" + 1; @@ -186,12 +182,12 @@ public class GradientCheckTestsMasking extends BaseDL4JTest { if (PRINT_RESULTS) { System.out.println("testBidirectionalLSTMMasking() - testNum = " + testNum++); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, mask, mask, true, 16); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input) + .labels(labels).inputMask(mask).labelMask(mask).subset(true).maxPerParam(16)); assertTrue(gradOK); TestUtils.testModelSerialization(mln); @@ -271,8 +267,8 @@ public class GradientCheckTestsMasking extends BaseDL4JTest { System.out.println(msg); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, 
features, labels, null, labelMask); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(features) + .labels(labels).labelMask(labelMask)); assertTrue(msg, gradOK); TestUtils.testModelSerialization(net); @@ -366,8 +362,8 @@ public class GradientCheckTestsMasking extends BaseDL4JTest { System.out.println(msg); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, features, labels, null, labelMask); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(features) + .labels(labels).labelMask(labelMask)); assertTrue(msg, gradOK); @@ -387,9 +383,8 @@ public class GradientCheckTestsMasking extends BaseDL4JTest { ComputationGraph graph = new ComputationGraph(cg); graph.init(); - gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, - new INDArray[] {features}, new INDArray[] {labels}, null, new INDArray[]{labelMask}, null); + gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{features}) + .labels(new INDArray[]{labels}).labelMask(new INDArray[]{labelMask})); assertTrue(msg + " (compgraph)", gradOK); TestUtils.testModelSerialization(graph); @@ -425,8 +420,8 @@ public class GradientCheckTestsMasking extends BaseDL4JTest { assertTrue(lm.sumNumber().intValue() > 0); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, f, l, null, lm); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(f) + .labels(l).labelMask(lm)); assertTrue(gradOK); //Also ensure score doesn't depend on masked feature or label values @@ -478,9 +473,8 @@ public class GradientCheckTestsMasking extends 
BaseDL4JTest { assertTrue(lm.sumNumber().intValue() > 0); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[]{f}, new INDArray[]{l}, - null, new INDArray[]{lm}); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(net).inputs(new INDArray[]{f}) + .labels(new INDArray[]{l}).labelMask(new INDArray[]{lm})); assertTrue(gradOK); //Also ensure score doesn't depend on masked feature or label values diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/LRNGradientCheckTests.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/LRNGradientCheckTests.java index 18fbcce45..a2bb1989c 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/LRNGradientCheckTests.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/LRNGradientCheckTests.java @@ -82,10 +82,10 @@ public class LRNGradientCheckTests extends BaseDL4JTest { MultiLayerNetwork mln = new MultiLayerNetwork(builder.build()); mln.init(); - if (PRINT_RESULTS) { - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); - } +// if (PRINT_RESULTS) { +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// } boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/LSTMGradientCheckTests.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/LSTMGradientCheckTests.java index caa52f4c9..1e673b936 100644 --- 
a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/LSTMGradientCheckTests.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/LSTMGradientCheckTests.java @@ -124,8 +124,8 @@ public class LSTMGradientCheckTests extends BaseDL4JTest { String testName = "testLSTMBasic(" + (graves ? "GravesLSTM" : "LSTM") + ")"; if (PRINT_RESULTS) { System.out.println(testName); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -213,12 +213,12 @@ public class LSTMGradientCheckTests extends BaseDL4JTest { + outputActivation + ", l2=" + l2 + ", l1=" + l1; if (PRINT_RESULTS) { System.out.println(testName); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, null, true, 128); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input) + .labels(labels).subset(true).maxPerParam(128)); assertTrue(testName, gradOK); TestUtils.testModelSerialization(mln); @@ -341,8 +341,8 @@ public class LSTMGradientCheckTests extends BaseDL4JTest { System.out.println("testGradientGravesBidirectionalLSTMFull() - activationFn=" + afn + ", lossFn=" + lf + ", outputActivation=" + outputActivation + ", l2=" + l2 + ", l1=" + l1); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # 
params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -394,8 +394,8 @@ public class LSTMGradientCheckTests extends BaseDL4JTest { MultiLayerNetwork mln = new MultiLayerNetwork(conf); mln.init(); - boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, null, true, 128); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input) + .labels(labels).subset(true).maxPerParam(128)); String msg = "testGradientGravesLSTMEdgeCases() - timeSeriesLength=" + timeSeriesLength[i] + ", miniBatchSize=" + miniBatchSize[i]; @@ -452,8 +452,8 @@ public class LSTMGradientCheckTests extends BaseDL4JTest { System.out.println("layer " + i + "\t" + mln.getLayer(i).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, null, true, 32); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input) + .labels(labels).subset(true).maxPerParam(32)); assertTrue(gradOK); TestUtils.testModelSerialization(mln); } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/LossFunctionGradientCheck.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/LossFunctionGradientCheck.java index fa06ff8f7..632a85e22 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/LossFunctionGradientCheck.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/LossFunctionGradientCheck.java @@ -206,21 +206,19 @@ 
public class LossFunctionGradientCheck extends BaseDL4JTest { } else { failed.add(testName); } - - System.out.println("\n\n"); TestUtils.testModelSerialization(net); } } - - System.out.println("---- Passed ----"); - for (String s : passed) { - System.out.println(s); - } - - System.out.println("---- Failed ----"); - for (String s : failed) { - System.out.println(s); + if(failed.size() > 0) { + System.out.println("---- Passed ----"); + for (String s : passed) { + System.out.println(s); + } + System.out.println("---- Failed ----"); + for (String s : failed) { + System.out.println(s); + } } assertEquals("Tests failed", 0, failed.size()); @@ -376,7 +374,6 @@ public class LossFunctionGradientCheck extends BaseDL4JTest { failed.add(testName); } - System.out.println("\n\n"); TestUtils.testModelSerialization(net); } } @@ -684,8 +681,6 @@ public class LossFunctionGradientCheck extends BaseDL4JTest { } else { failed.add(testName); } - - System.out.println("\n\n"); } } } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/OutputLayerGradientChecks.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/OutputLayerGradientChecks.java index 32a229101..67fc4c11c 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/OutputLayerGradientChecks.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/OutputLayerGradientChecks.java @@ -136,13 +136,13 @@ public class OutputLayerGradientChecks extends BaseDL4JTest { String testName = "testRnnLossLayer(lf=" + lf + ", maskType=" + mt + ", outputActivation = " + oa + ")"; if (PRINT_RESULTS) { System.out.println(testName); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } 
System.out.println("Starting test: " + testName); - boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, labelMask); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input) + .labels(labels).labelMask(labelMask)); assertTrue(testName, gradOK); TestUtils.testModelSerialization(mln); @@ -243,13 +243,13 @@ public class OutputLayerGradientChecks extends BaseDL4JTest { String testName = "testCnnLossLayer(lf=" + lf + ", maskType=" + mt + ", outputActivation = " + oa + ")"; if (PRINT_RESULTS) { System.out.println(testName); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } System.out.println("Starting test: " + testName); - boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, labelMask); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input) + .labels(labels).labelMask(labelMask)); assertTrue(testName, gradOK); TestUtils.testModelSerialization(mln); @@ -392,13 +392,13 @@ public class OutputLayerGradientChecks extends BaseDL4JTest { String testName = "testCnn3dLossLayer(dataFormat=" + dataFormat + ",lf=" + lf + ", maskType=" + mt + ", outputActivation = " + oa + ")"; if (PRINT_RESULTS) { System.out.println(testName); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } System.out.println("Starting test: " + testName); 
- boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, labelMask); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input) + .labels(labels).labelMask(labelMask)); assertTrue(testName, gradOK); TestUtils.testModelSerialization(mln); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/RnnGradientChecks.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/RnnGradientChecks.java index 98385de17..2980cad7c 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/RnnGradientChecks.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/RnnGradientChecks.java @@ -127,8 +127,8 @@ public class RnnGradientChecks extends BaseDL4JTest { net.init(); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, in, labels, inMask, null); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(in) + .labels(labels).inputMask(inMask)); assertTrue(gradOK); @@ -207,8 +207,8 @@ public class RnnGradientChecks extends BaseDL4JTest { net.init(); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, in, labels, inMask, null); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(in) + .labels(labels).inputMask(inMask)); assertTrue(gradOK); TestUtils.testModelSerialization(net); } @@ -282,8 +282,8 @@ public class RnnGradientChecks extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - boolean gradOK = GradientCheckUtil.checkGradients(net, 
DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, in, labels, inMask, null, true, 16); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(in) + .labels(labels).inputMask(inMask).subset(true).maxPerParam(16)); assertTrue(name, gradOK); TestUtils.testModelSerialization(net); } @@ -346,8 +346,8 @@ public class RnnGradientChecks extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, in, labels, inMask, null, true, 16); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(in) + .labels(labels).inputMask(inMask).subset(true).maxPerParam(16)); assertTrue(name, gradOK); TestUtils.testModelSerialization(net); } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/UtilLayerGradientChecks.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/UtilLayerGradientChecks.java index 8349b732d..2d889a6a1 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/UtilLayerGradientChecks.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/UtilLayerGradientChecks.java @@ -182,9 +182,9 @@ public class UtilLayerGradientChecks extends BaseDL4JTest { MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); - - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, label, inMask, null); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(input) + .minAbsoluteError(1e-7) + .labels(label).inputMask(inMask)); assertTrue(gradOK); 
TestUtils.testModelSerialization(net); @@ -223,9 +223,8 @@ public class UtilLayerGradientChecks extends BaseDL4JTest { Set excludeParams = new HashSet<>(); excludeParams.addAll(Arrays.asList("1_W", "1_b", "2_W", "2_b")); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, in, labels, null, null, - false, -1, excludeParams); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(in) + .labels(labels).excludeParams(excludeParams)); assertTrue(gradOK); TestUtils.testModelSerialization(net); @@ -234,9 +233,8 @@ public class UtilLayerGradientChecks extends BaseDL4JTest { //Test ComputationGraph equivalent: ComputationGraph g = net.toComputationGraph(); - boolean gradOKCG = GradientCheckUtil.checkGradients(g, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[]{in}, new INDArray[]{labels}, - null, null, excludeParams); + boolean gradOKCG = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(g).inputs(new INDArray[]{in}) + .labels(new INDArray[]{labels}).excludeParams(excludeParams)); assertTrue(gradOKCG); TestUtils.testModelSerialization(g); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/VaeGradientCheckTests.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/VaeGradientCheckTests.java index cbf662987..6d1903579 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/VaeGradientCheckTests.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/VaeGradientCheckTests.java @@ -46,7 +46,7 @@ import static org.junit.Assert.assertTrue; */ public class VaeGradientCheckTests extends BaseDL4JTest { - private static final boolean PRINT_RESULTS = true; + private static final boolean 
PRINT_RESULTS = false; private static final boolean RETURN_ON_FIRST_FAILURE = false; private static final double DEFAULT_EPS = 1e-6; private static final double DEFAULT_MAX_REL_ERROR = 1e-3; @@ -122,8 +122,8 @@ public class VaeGradientCheckTests extends BaseDL4JTest { + Arrays.toString(decoderSizes) + ", l2=" + l2 + ", l1=" + l1; if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, @@ -193,8 +193,8 @@ public class VaeGradientCheckTests extends BaseDL4JTest { + l1; if (PRINT_RESULTS) { System.out.println(msg); - for (int l = 0; l < mln.getnLayers(); l++) - System.out.println("Layer " + l + " # params: " + mln.getLayer(l).numParams()); +// for (int l = 0; l < mln.getnLayers(); l++) +// System.out.println("Layer " + l + " # params: " + mln.getLayer(l).numParams()); } boolean gradOK = GradientCheckUtil.checkGradientsPretrainLayer(layer, DEFAULT_EPS, @@ -281,8 +281,8 @@ public class VaeGradientCheckTests extends BaseDL4JTest { String msg = "testVaePretrainReconstructionDistributions() - " + reconstructionDistributions[i]; if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradientsPretrainLayer(layer, DEFAULT_EPS, @@ -323,8 +323,8 @@ public class VaeGradientCheckTests extends BaseDL4JTest { String msg = "testVaePretrainMultipleSamples() - numSamples = " + numSamples; if (PRINT_RESULTS) { System.out.println(msg); - for (int j = 0; j < 
mln.getnLayers(); j++) - System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); +// for (int j = 0; j < mln.getnLayers(); j++) +// System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams()); } boolean gradOK = GradientCheckUtil.checkGradientsPretrainLayer(layer, DEFAULT_EPS, diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/YoloGradientCheckTests.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/YoloGradientCheckTests.java index 0b95dc3b6..147150aa8 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/YoloGradientCheckTests.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/YoloGradientCheckTests.java @@ -120,8 +120,8 @@ public class YoloGradientCheckTests extends BaseDL4JTest { String msg = "testYoloOutputLayer() - minibatch = " + mb + ", w=" + w + ", h=" + h + ", l1=" + l1[i] + ", l2=" + l2[i]; System.out.println(msg); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, null, true, 100); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(input) + .labels(labels).subset(true).maxPerParam(100)); assertTrue(msg, gradOK); TestUtils.testModelSerialization(net); @@ -228,8 +228,8 @@ public class YoloGradientCheckTests extends BaseDL4JTest { INDArray f = ds.getFeatures(); INDArray l = ds.getLabels(); - boolean ok = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, f, l, null, null, true, 64); + boolean ok = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(f) + .labels(l).inputMask(null).subset(true).maxPerParam(64)); assertTrue(ok); 
TestUtils.testModelSerialization(net); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/conf/ComputationGraphConfigurationTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/conf/ComputationGraphConfigurationTest.java index 06f4d36b7..cf972bac3 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/conf/ComputationGraphConfigurationTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/conf/ComputationGraphConfigurationTest.java @@ -130,7 +130,7 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest { .setOutputs("out").build(); String json = conf.toJson(); - System.out.println(json); +// System.out.println(json); ComputationGraphConfiguration conf2 = ComputationGraphConfiguration.fromJson(json); @@ -258,7 +258,7 @@ public class ComputationGraphConfigurationTest extends BaseDL4JTest { .addVertex("test2", new StaticInnerGraphVertex(4, 5), "in").setOutputs("test", "test2").build(); String json = conf.toJson(); - System.out.println(json); +// System.out.println(json); ComputationGraphConfiguration conf2 = ComputationGraphConfiguration.fromJson(json); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/conf/preprocessor/CustomPreprocessorTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/conf/preprocessor/CustomPreprocessorTest.java index 1f6bc9816..8e726b869 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/conf/preprocessor/CustomPreprocessorTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/conf/preprocessor/CustomPreprocessorTest.java @@ -54,7 +54,7 @@ public class CustomPreprocessorTest extends BaseDL4JTest { String json = conf.toJson(); String yaml = conf.toYaml(); - System.out.println(json); +// System.out.println(json); MultiLayerConfiguration confFromJson = 
MultiLayerConfiguration.fromJson(json); assertEquals(conf, confFromJson); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/dtypes/DTypeTests.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/dtypes/DTypeTests.java index 013738476..d9da12b62 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/dtypes/DTypeTests.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/dtypes/DTypeTests.java @@ -99,6 +99,11 @@ public class DTypeTests extends BaseDL4JTest { Convolution1D.class //Alias for Convolution1DLayer )); + @Override + public long getTimeoutMilliseconds() { + return 90000L; + } + @AfterClass public static void after() { ImmutableSet info; @@ -545,6 +550,7 @@ public class DTypeTests extends BaseDL4JTest { .layer(new Convolution3D.Builder().kernelSize(2, 2, 2).stride(1, 1, 1).nOut(3).activation(Activation.TANH).build()) .layer(new Convolution3D.Builder().kernelSize(2, 2, 2).stride(1, 1, 1).nOut(3).activation(Activation.TANH).build()) .layer(new Subsampling3DLayer.Builder().poolingType(PoolingType.AVG).kernelSize(2, 2, 2).stride(2, 2, 2).build()) + .layer(new Deconvolution3D.Builder().kernelSize(2,2,2).stride(1,1,1).nIn(3).nOut(3).activation(Activation.TANH).build()) .layer(new Cropping3D.Builder(1, 1, 1, 1, 1, 1).build()) .layer(new ZeroPadding3DLayer.Builder(1, 1, 1, 1, 1, 1).build()) .layer(new ActivationLayer(Activation.LEAKYRELU)) diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/graph/TestComputationGraphNetwork.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/graph/TestComputationGraphNetwork.java index 0aecb4c94..28bc42983 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/graph/TestComputationGraphNetwork.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/graph/TestComputationGraphNetwork.java @@ -531,28 +531,38 @@ 
public class TestComputationGraphNetwork extends BaseDL4JTest { ComputationGraph graph = new ComputationGraph(conf1); graph.init(); - System.out.println(graph.summary()); - System.out.println(graph.summary(InputType.feedForward(5))); +// System.out.println(graph.summary()); +// System.out.println(graph.summary(InputType.feedForward(5))); + graph.summary(); + graph.summary(InputType.feedForward(5)); graph = new ComputationGraph(conf2); graph.init(); - System.out.println(graph.summary()); - System.out.println(graph.summary(InputType.recurrent(5))); +// System.out.println(graph.summary()); +// System.out.println(graph.summary(InputType.recurrent(5))); + graph.summary(); + graph.summary(InputType.recurrent(5)); graph = new ComputationGraph(conf3); graph.init(); - System.out.println(graph.summary()); - System.out.println(graph.summary(InputType.convolutional(28, 28, 1))); +// System.out.println(graph.summary()); +// System.out.println(graph.summary(InputType.convolutional(28, 28, 1))); + graph.summary(); + graph.summary(InputType.convolutional(28, 28, 1)); graph = new ComputationGraph(conf4); graph.init(); - System.out.println(graph.summary()); - System.out.println(graph.summary(InputType.convolutional(28, 28, 1), InputType.recurrent(5))); +// System.out.println(graph.summary()); +// System.out.println(graph.summary(InputType.convolutional(28, 28, 1), InputType.recurrent(5))); + graph.summary(); + graph.summary(InputType.convolutional(28, 28, 1), InputType.recurrent(5)); graph = new ComputationGraph(conf5); graph.init(); - System.out.println(graph.summary()); - System.out.println(graph.summary(InputType.convolutional(28, 28, 1))); +// System.out.println(graph.summary()); +// System.out.println(graph.summary(InputType.convolutional(28, 28, 1))); + graph.summary(); + graph.summary(InputType.convolutional(28, 28, 1)); } @Test @@ -753,7 +763,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest { int nOut = 3; for(WorkspaceMode ws : WorkspaceMode.values()) { - 
System.out.println("***** WORKSPACE: " + ws); +// System.out.println("***** WORKSPACE: " + ws); ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder() .updater(new Adam(0.01)) @@ -981,7 +991,7 @@ public class TestComputationGraphNetwork extends BaseDL4JTest { OptimizationAlgorithm.LBFGS}; for (OptimizationAlgorithm oa : oas) { - System.out.println(oa); +// System.out.println(oa); ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().optimizationAlgo(oa).graphBuilder() .addInputs("input") @@ -1065,12 +1075,15 @@ public class TestComputationGraphNetwork extends BaseDL4JTest { ComputationGraph modelToTune = new ComputationGraph(conf); modelToTune.init(); - System.out.println(modelToTune.summary()); +// System.out.println(modelToTune.summary()); + modelToTune.summary(); ComputationGraph modelNow = new TransferLearning.GraphBuilder(modelToTune).setFeatureExtractor("denseCentre2").build(); - System.out.println(modelNow.summary()); - System.out.println(modelNow.summary(InputType.feedForward(10),InputType.feedForward(2))); +// System.out.println(modelNow.summary()); +// System.out.println(modelNow.summary(InputType.feedForward(10),InputType.feedForward(2))); + modelNow.summary(); + modelNow.summary(InputType.feedForward(10),InputType.feedForward(2)); } @Test @@ -1315,9 +1328,12 @@ public class TestComputationGraphNetwork extends BaseDL4JTest { ComputationGraph modelExpectedArch = new ComputationGraph(confForArchitecture); modelExpectedArch.init(); ComputationGraph modelMow = new TransferLearning.GraphBuilder(modelExpectedArch).setFeatureExtractor("layer2").build(); - System.out.println(modelExpectedArch.summary()); - System.out.println(modelMow.summary()); - System.out.println(modelExpectedArch.summary(InputType.recurrent(V_HEIGHT* V_WIDTH* 3))); +// System.out.println(modelExpectedArch.summary()); +// System.out.println(modelMow.summary()); +// System.out.println(modelExpectedArch.summary(InputType.recurrent(V_HEIGHT* V_WIDTH* 3))); 
+ modelExpectedArch.summary(); + modelMow.summary(); + modelExpectedArch.summary(InputType.recurrent(V_HEIGHT* V_WIDTH* 3)); } @Test @@ -2117,8 +2133,8 @@ public class TestComputationGraphNetwork extends BaseDL4JTest { INDArray features = Nd4j.rand(new int[] {dataSize, inputSize}); INDArray labels = Nd4j.rand(new int[] {dataSize, outputSize}); - boolean gradOK = GradientCheckUtil.checkGradients(net, 1e-6, 1e-3, - 1e-8, false, true, new INDArray[]{features}, new INDArray[]{labels}, null, null); + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(net).inputs(new INDArray[]{features}) + .labels(new INDArray[]{labels})); assertTrue(gradOK); } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/custom/TestCustomActivation.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/custom/TestCustomActivation.java index dee8efac4..69b15951e 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/custom/TestCustomActivation.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/custom/TestCustomActivation.java @@ -53,7 +53,7 @@ public class TestCustomActivation extends BaseDL4JTest { String json = conf.toJson(); String yaml = conf.toYaml(); - System.out.println(json); +// System.out.println(json); MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json); assertEquals(conf, confFromJson); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/custom/TestCustomLayers.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/custom/TestCustomLayers.java index f5b131600..5ead0e4b1 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/custom/TestCustomLayers.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/custom/TestCustomLayers.java @@ -64,7 +64,7 
@@ public class TestCustomLayers extends BaseDL4JTest { String json = conf.toJson(); String yaml = conf.toYaml(); - System.out.println(json); +// System.out.println(json); MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json); assertEquals(conf, confFromJson); @@ -88,7 +88,7 @@ public class TestCustomLayers extends BaseDL4JTest { String json = conf.toJson(); String yaml = conf.toYaml(); - System.out.println(json); +// System.out.println(json); ComputationGraphConfiguration confFromJson = ComputationGraphConfiguration.fromJson(json); assertEquals(conf, confFromJson); @@ -135,7 +135,7 @@ public class TestCustomLayers extends BaseDL4JTest { String json = conf.toJson(); String yaml = conf.toYaml(); - System.out.println(json); +// System.out.println(json); MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json); assertEquals(conf, confFromJson); @@ -188,7 +188,7 @@ public class TestCustomLayers extends BaseDL4JTest { String json = conf.toJson(); String yaml = conf.toYaml(); - System.out.println(json); +// System.out.println(json); ComputationGraphConfiguration confFromJson = ComputationGraphConfiguration.fromJson(json); assertEquals(conf, confFromJson); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/embedding/EmbeddingLayerTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/embedding/EmbeddingLayerTest.java index 20a6b34cf..972302d85 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/embedding/EmbeddingLayerTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/feedforward/embedding/EmbeddingLayerTest.java @@ -35,6 +35,7 @@ import org.deeplearning4j.nn.weights.WeightInit; import org.deeplearning4j.nn.weights.embeddings.EmbeddingInitializer; import org.junit.Test; import org.nd4j.linalg.activations.Activation; +import 
org.nd4j.linalg.activations.impl.ActivationIdentity; import org.nd4j.linalg.api.buffer.DataType; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; @@ -156,7 +157,7 @@ public class EmbeddingLayerTest extends BaseDL4JTest { .layer(new RnnOutputLayer.Builder().nIn(embeddingDim).nOut(nOut).activation(Activation.SOFTMAX).build()) .build(); MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder().activation(Activation.TANH).list() - .layer(0, new DenseLayer.Builder().nIn(nClassesIn).nOut(5).build()) + .layer(0, new DenseLayer.Builder().nIn(nClassesIn).nOut(5).activation(Activation.IDENTITY).build()) .layer(1, new OutputLayer.Builder().nIn(5).nOut(4).activation(Activation.SOFTMAX).build()) .inputPreProcessor(0, new RnnToFeedForwardPreProcessor()) .build(); @@ -204,7 +205,7 @@ public class EmbeddingLayerTest extends BaseDL4JTest { .layer(1, new OutputLayer.Builder().nIn(5).nOut(4).activation(Activation.SOFTMAX).build()) .build(); MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder().activation(Activation.TANH).list() - .layer(0, new DenseLayer.Builder().nIn(nClassesIn).nOut(5).build()) + .layer(0, new DenseLayer.Builder().nIn(nClassesIn).nOut(5).activation(Activation.IDENTITY).build()) .layer(1, new OutputLayer.Builder().nIn(5).nOut(4).activation(Activation.SOFTMAX).build()) .build(); @@ -249,8 +250,8 @@ public class EmbeddingLayerTest extends BaseDL4JTest { .build(); MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder().activation(Activation.TANH) .weightInit(WeightInit.XAVIER).list() - .layer(0, new DenseLayer.Builder().nIn(nClassesIn).nOut(5).build()).layer(1, - new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(5).nOut(4) + .layer(new DenseLayer.Builder().nIn(nClassesIn).nOut(5).activation(Activation.IDENTITY).build()) + .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(5).nOut(4) .activation(Activation.SOFTMAX).build()) .build(); @@ -309,7 +310,7 @@ public class 
EmbeddingLayerTest extends BaseDL4JTest { .layer(new RnnOutputLayer.Builder().nIn(embeddingDim).nOut(nOut).activation(Activation.SOFTMAX).build()) .build(); MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder().activation(Activation.TANH).list() - .layer(new DenseLayer.Builder().nIn(nClassesIn).nOut(embeddingDim).build()) + .layer(new DenseLayer.Builder().nIn(nClassesIn).nOut(embeddingDim).activation(Activation.IDENTITY).build()) .layer(new RnnOutputLayer.Builder().nIn(embeddingDim).nOut(nOut).activation(Activation.SOFTMAX).build()) .setInputType(InputType.recurrent(nClassesIn)) .build(); @@ -344,7 +345,7 @@ public class EmbeddingLayerTest extends BaseDL4JTest { net.computeGradientAndScore(); net2.computeGradientAndScore(); - System.out.println(net.score() + "\t" + net2.score()); +// System.out.println(net.score() + "\t" + net2.score()); assertEquals(net2.score(), net.score(), 1e-6); Map gradient = net.gradient().gradientForVariable(); @@ -375,7 +376,7 @@ public class EmbeddingLayerTest extends BaseDL4JTest { .weightInit(WeightInit.XAVIER) .dataType(DataType.DOUBLE) .list() - .layer(0, new DenseLayer.Builder().nIn(nClassesIn).nOut(5).build()) + .layer(0, new DenseLayer.Builder().nIn(nClassesIn).nOut(5).activation(Activation.IDENTITY).build()) .layer(1, new GravesLSTM.Builder().nIn(5).nOut(7).activation(Activation.SOFTSIGN).build()) .layer(2, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(7).nOut(4) .activation(Activation.SOFTMAX).build()) @@ -416,7 +417,7 @@ public class EmbeddingLayerTest extends BaseDL4JTest { net.computeGradientAndScore(); net2.computeGradientAndScore(); - System.out.println(net.score() + "\t" + net2.score()); +// System.out.println(net.score() + "\t" + net2.score()); assertEquals(net2.score(), net.score(), 1e-5); Map gradient = net.gradient().gradientForVariable(); @@ -513,7 +514,7 @@ public class EmbeddingLayerTest extends BaseDL4JTest { net.computeGradientAndScore(); net2.computeGradientAndScore(); - 
System.out.println(net.score() + "\t" + net2.score()); +// System.out.println(net.score() + "\t" + net2.score()); assertEquals(net2.score(), net.score(), 1e-5); Map gradients = net.gradient().gradientForVariable(); @@ -707,4 +708,21 @@ public class EmbeddingLayerTest extends BaseDL4JTest { return true; } } + + @Test + public void testEmbeddingDefaultActivation(){ + + MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() + .list() + .layer(new EmbeddingLayer.Builder().nIn(10).nOut(10).build()) + .layer(new EmbeddingSequenceLayer.Builder().nIn(10).nOut(10).build()) + .build(); + + EmbeddingLayer l = (EmbeddingLayer) conf.getConf(0).getLayer(); + assertEquals(new ActivationIdentity(), l.getActivationFn()); + + EmbeddingSequenceLayer l2 = (EmbeddingSequenceLayer) conf.getConf(1).getLayer(); + assertEquals(new ActivationIdentity(), l2.getActivationFn()); + + } } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/normalization/BatchNormalizationTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/normalization/BatchNormalizationTest.java index 2acb555a6..2c88cef3c 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/normalization/BatchNormalizationTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/normalization/BatchNormalizationTest.java @@ -90,6 +90,11 @@ public class BatchNormalizationTest extends BaseDL4JTest { public void doBefore() { } + @Override + public long getTimeoutMilliseconds() { + return 90000L; + } + @Test public void testDnnForwardPass() { int nOut = 10; @@ -102,7 +107,7 @@ public class BatchNormalizationTest extends BaseDL4JTest { INDArray mean = output.mean(0); INDArray stdev = output.std(false, 0); - System.out.println(Arrays.toString(mean.data().asFloat())); +// System.out.println(Arrays.toString(mean.data().asFloat())); assertArrayEquals(new float[nOut], mean.data().asFloat(), 
1e-6f); assertEquals(Nd4j.ones(nOut), stdev); @@ -161,8 +166,8 @@ public class BatchNormalizationTest extends BaseDL4JTest { INDArray out = l.activate(input, true, LayerWorkspaceMgr.noWorkspaces()); - System.out.println(Arrays.toString(outExpected.data().asDouble())); - System.out.println(Arrays.toString(out.data().asDouble())); +// System.out.println(Arrays.toString(outExpected.data().asDouble())); +// System.out.println(Arrays.toString(out.data().asDouble())); assertEquals(outExpected, out); @@ -190,9 +195,9 @@ public class BatchNormalizationTest extends BaseDL4JTest { assertEquals(dldgammaExp, dldgamma); assertEquals(dldbetaExp, dldbeta); - System.out.println("EPSILONS"); - System.out.println(Arrays.toString(dldinExp.data().asDouble())); - System.out.println(Arrays.toString(p.getSecond().dup().data().asDouble())); +// System.out.println("EPSILONS"); +// System.out.println(Arrays.toString(dldinExp.data().asDouble())); +// System.out.println(Arrays.toString(p.getSecond().dup().data().asDouble())); assertEquals(dldinExp, p.getSecond()); } @@ -303,8 +308,8 @@ public class BatchNormalizationTest extends BaseDL4JTest { INDArray out = l.activate(input, true, LayerWorkspaceMgr.noWorkspaces()); - System.out.println(Arrays.toString(outExpected.data().asDouble())); - System.out.println(Arrays.toString(out.data().asDouble())); +// System.out.println(Arrays.toString(outExpected.data().asDouble())); +// System.out.println(Arrays.toString(out.data().asDouble())); assertEquals(outExpected, out); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/objdetect/TestYolo2OutputLayer.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/objdetect/TestYolo2OutputLayer.java index 0a004bbae..14f3ee6c0 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/objdetect/TestYolo2OutputLayer.java +++ 
b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/objdetect/TestYolo2OutputLayer.java @@ -140,7 +140,7 @@ public class TestYolo2OutputLayer extends BaseDL4JTest { y2impl.setLabels(labels); double score = y2impl.computeScore(0.0, true, LayerWorkspaceMgr.noWorkspaces()); - System.out.println("SCORE: " + score); +// System.out.println("SCORE: " + score); assertTrue(score > 0.0); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesLSTMTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesLSTMTest.java index a0fc0f99d..66b4c8eab 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesLSTMTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/recurrent/GravesLSTMTest.java @@ -220,20 +220,20 @@ public class GravesLSTMTest extends BaseDL4JTest { INDArray out1 = net.output(in1); INDArray out2 = net.output(in2); - System.out.println(Arrays.toString(net.output(in1).data().asFloat())); - System.out.println(Arrays.toString(net.output(in2).data().asFloat())); +// System.out.println(Arrays.toString(net.output(in1).data().asFloat())); +// System.out.println(Arrays.toString(net.output(in2).data().asFloat())); List activations1 = net.feedForward(in1); List activations2 = net.feedForward(in2); - for (int i = 0; i < 3; i++) { - System.out.println("-----\n" + i); - System.out.println(Arrays.toString(activations1.get(i).dup().data().asDouble())); - System.out.println(Arrays.toString(activations2.get(i).dup().data().asDouble())); - - System.out.println(activations1.get(i)); - System.out.println(activations2.get(i)); - } +// for (int i = 0; i < 3; i++) { +// System.out.println("-----\n" + i); +// System.out.println(Arrays.toString(activations1.get(i).dup().data().asDouble())); +// System.out.println(Arrays.toString(activations2.get(i).dup().data().asDouble())); +// 
+// System.out.println(activations1.get(i)); +// System.out.println(activations2.get(i)); +// } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffConv.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffConv.java index d45195870..317dca24d 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffConv.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffConv.java @@ -306,8 +306,8 @@ public class TestSameDiffConv extends BaseDL4JTest { INDArray l = TestUtils.randomOneHot(minibatch, nOut); log.info("Starting: " + msg); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, f, l, null, null, true, 50); //Most of weights are in output layer + boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(f) + .labels(l).subset(true).maxPerParam(50)); assertTrue(msg, gradOK); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffDenseVertex.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffDenseVertex.java index 7f9a54f8e..4e923bf4a 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffDenseVertex.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/samediff/TestSameDiffDenseVertex.java @@ -135,7 +135,7 @@ public class TestSameDiffDenseVertex extends BaseDL4JTest { assertEquals(gStd.gradient(), gSD.gradient()); - System.out.println("========================================================================"); +// System.out.println("========================================================================"); 
//Sanity check: different minibatch size in = Nd4j.rand(2 * minibatch, nIn); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/variational/TestReconstructionDistributions.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/variational/TestReconstructionDistributions.java index 6a88cc550..aa7527841 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/variational/TestReconstructionDistributions.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/layers/variational/TestReconstructionDistributions.java @@ -317,7 +317,7 @@ public class TestReconstructionDistributions extends BaseDL4JTest { INDArray gradient = rd.gradient(x, distributionParams); String testName = "minibatch = " + minibatch + ", size = " + inputSize + ", Distribution = " + rd; - System.out.println("\n\n***** Starting test: " + testName + "*****"); + System.out.println("***** Starting test: " + testName + "*****"); int totalFailureCount = 0; for (int i = 0; i < distributionParams.size(1); i++) { @@ -349,7 +349,7 @@ public class TestReconstructionDistributions extends BaseDL4JTest { totalFailureCount++; } } else { - log.info("Input (" + j + "," + i + ") passed: grad= " + backpropGrad + ", numericalGrad= " + log.trace("Input (" + j + "," + i + ") passed: grad= " + backpropGrad + ", numericalGrad= " + numericalGrad + ", relError= " + relError); } } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/misc/WorkspaceTests.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/misc/WorkspaceTests.java index 077173a5a..d2bf06a56 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/misc/WorkspaceTests.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/misc/WorkspaceTests.java @@ -472,7 +472,7 @@ public class WorkspaceTests extends BaseDL4JTest { final 
ComputationGraph computationGraph = new ComputationGraph(config); computationGraph.init(); - computationGraph.setListeners(new ScoreIterationListener(1)); + computationGraph.setListeners(new ScoreIterationListener(3)); WSTestDataSetIterator iterator = new WSTestDataSetIterator(); computationGraph.fit(iterator); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/BackPropMLPTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/BackPropMLPTest.java index af15f3b45..e8236bf01 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/BackPropMLPTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/BackPropMLPTest.java @@ -66,7 +66,7 @@ public class BackPropMLPTest extends BaseDL4JTest { public void testMLP() { //Simple mini-batch test with multiple hidden layers MultiLayerConfiguration conf = getIrisMLPSimpleConfig(new int[] {5, 4, 3}, Activation.SIGMOID); - System.out.println(conf); +// System.out.println(conf); MultiLayerNetwork network = new MultiLayerNetwork(conf); network.init(); DataSetIterator iter = new IrisDataSetIterator(10, 100); @@ -80,7 +80,7 @@ public class BackPropMLPTest extends BaseDL4JTest { public void testMLP2() { //Simple mini-batch test with multiple hidden layers MultiLayerConfiguration conf = getIrisMLPSimpleConfig(new int[] {5, 15, 3}, Activation.TANH); - System.out.println(conf); +// System.out.println(conf); MultiLayerNetwork network = new MultiLayerNetwork(conf); network.init(); @@ -104,7 +104,7 @@ public class BackPropMLPTest extends BaseDL4JTest { Layer[] layers = network.getLayers(); - final boolean printCalculations = true; + final boolean printCalculations = false; while (iris.hasNext()) { DataSet data = iris.next(); @@ -212,7 +212,7 @@ public class BackPropMLPTest extends BaseDL4JTest { assertEquals(l1BiasFloatAfter,expectedL1BiasAfter,eps); 
assertArrayEquals(l2BiasFloatAfter,expectedL2BiasAfter,eps); */ - System.out.println("\n\n--------------"); +// System.out.println("\n\n--------------"); } } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java index 73ebf1ccd..ac1656e92 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java @@ -922,9 +922,9 @@ public class MultiLayerTest extends BaseDL4JTest { MultiLayerNetwork modelExpectedArch = new MultiLayerNetwork(confForArchitecture); modelExpectedArch.init(); MultiLayerNetwork modelMow = new TransferLearning.Builder(modelExpectedArch).setFeatureExtractor(2).build(); - System.out.println(modelExpectedArch.summary()); - System.out.println(modelMow.summary()); - System.out.println(modelMow.summary(InputType.recurrent(V_HEIGHT*V_WIDTH*3))); +// System.out.println(modelExpectedArch.summary()); +// System.out.println(modelMow.summary()); +// System.out.println(modelMow.summary(InputType.recurrent(V_HEIGHT*V_WIDTH*3))); } @Test(expected = DL4JException.class) @@ -1149,7 +1149,7 @@ public class MultiLayerTest extends BaseDL4JTest { int nOut = 3; for(WorkspaceMode ws : WorkspaceMode.values()) { - System.out.println("***** WORKSPACE: " + ws); +// System.out.println("***** WORKSPACE: " + ws); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .updater(new Adam(0.01)) diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTestRNN.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTestRNN.java index 93e9bb9c7..5da79bc58 100644 --- 
a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTestRNN.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTestRNN.java @@ -570,8 +570,8 @@ public class MultiLayerTestRNN extends BaseDL4JTest { for (int j = 0; j < expOut.size(); j++) { INDArray exp = expOut.get(j); INDArray act = outSlice.get(j); - System.out.println(j); - System.out.println(exp.sub(act)); +// System.out.println(j); +// System.out.println(exp.sub(act)); assertEquals(exp, act); } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/TestVariableLengthTS.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/TestVariableLengthTS.java index 959ccbd22..2feb7792c 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/TestVariableLengthTS.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/TestVariableLengthTS.java @@ -219,10 +219,10 @@ public class TestVariableLengthTS extends BaseDL4JTest { INDArray g1s = g1map.get(s); INDArray g2s = g2map.get(s); - System.out.println("-------"); - System.out.println("Variable: " + s); - System.out.println(Arrays.toString(g1s.dup().data().asFloat())); - System.out.println(Arrays.toString(g2s.dup().data().asFloat())); +// System.out.println("-------"); +// System.out.println("Variable: " + s); +// System.out.println(Arrays.toString(g1s.dup().data().asFloat())); +// System.out.println(Arrays.toString(g2s.dup().data().asFloat())); assertNotEquals(s, g1s, g2s); } @@ -507,7 +507,7 @@ public class TestVariableLengthTS extends BaseDL4JTest { for (boolean bidirectional : isBidirectional) { for (PoolingType pt : poolingTypes) { - System.out.println("Starting test: bidirectional = " + bidirectional + ", poolingType = " + pt); +// System.out.println("Starting test: bidirectional = " + bidirectional + ", poolingType = " + pt); 
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().weightInit(WeightInit.XAVIER) .activation(Activation.TANH).list().layer(0, bidirectional diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/transferlearning/TestFrozenLayers.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/transferlearning/TestFrozenLayers.java index 3a2153e1e..b9a15ccb2 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/transferlearning/TestFrozenLayers.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/transferlearning/TestFrozenLayers.java @@ -51,7 +51,6 @@ public class TestFrozenLayers extends BaseDL4JTest { for(double l1 : new double[]{0.0, 0.3}){ for( double l2 : new double[]{0.0, 0.4}){ - System.out.println("--------------------"); String msg = "l1=" + l1 + ", l2=" + l2; FineTuneConfiguration ftc = new FineTuneConfiguration.Builder() diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/transferlearning/TransferLearningComplex.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/transferlearning/TransferLearningComplex.java index 8a15197f1..f0fadc968 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/transferlearning/TransferLearningComplex.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/transferlearning/TransferLearningComplex.java @@ -273,8 +273,9 @@ public class TransferLearningComplex extends BaseDL4JTest { MultiDataSet rand = new MultiDataSet(new INDArray[] {Nd4j.rand(2, 2), Nd4j.rand(2, 2)}, new INDArray[] {Nd4j.rand(2, 2), Nd4j.rand(2, 3)}); modelNow.fit(rand); - log.info(modelNow.summary()); - log.info(modelNow.summary(InputType.feedForward(2),InputType.feedForward(2))); - +// log.info(modelNow.summary()); +// log.info(modelNow.summary(InputType.feedForward(2),InputType.feedForward(2))); + modelNow.summary(); + 
modelNow.summary(InputType.feedForward(2),InputType.feedForward(2)); } } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/transferlearning/TransferLearningHelperTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/transferlearning/TransferLearningHelperTest.java index 22521b23b..75b30ffd7 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/transferlearning/TransferLearningHelperTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/transferlearning/TransferLearningHelperTest.java @@ -195,9 +195,10 @@ public class TransferLearningHelperTest extends BaseDL4JTest { assertEquals(modelIdentical.getLayer("denseLeft0").params(), modelToTune.getLayer("denseLeft0").params()); assertEquals(modelIdentical.getLayer("outLeft").params(), modelToTune.getLayer("outLeft").params()); - log.info(modelIdentical.summary()); - log.info(helper.unfrozenGraph().summary()); - +// log.info(modelIdentical.summary()); +// log.info(helper.unfrozenGraph().summary()); + modelIdentical.summary(); + helper.unfrozenGraph().summary(); } @Test diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/util/TestDataSetConsumer.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/util/TestDataSetConsumer.java index 232751bb4..7439c5ad2 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/util/TestDataSetConsumer.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/util/TestDataSetConsumer.java @@ -84,8 +84,8 @@ public class TestDataSetConsumer { count.incrementAndGet(); - if (count.get() % 100 == 0) - logger.info("Passed {} datasets...", count.get()); +// if (count.get() % 100 == 0) +// logger.info("Passed {} datasets...", count.get()); return count.get(); } diff --git 
a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/optimize/solver/BackTrackLineSearchTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/optimize/solver/BackTrackLineSearchTest.java index 6975de250..e68cf133b 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/optimize/solver/BackTrackLineSearchTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/optimize/solver/BackTrackLineSearchTest.java @@ -186,7 +186,7 @@ public class BackTrackLineSearchTest extends BaseDL4JTest { MultiLayerNetwork network = new MultiLayerNetwork(getIrisMultiLayerConfig(Activation.SIGMOID, optimizer)); network.init(); - TrainingListener listener = new ScoreIterationListener(1); + TrainingListener listener = new ScoreIterationListener(10); network.setListeners(Collections.singletonList(listener)); double oldScore = network.score(data); for( int i=0; i<100; i++ ) { @@ -204,7 +204,7 @@ public class BackTrackLineSearchTest extends BaseDL4JTest { data.normalizeZeroMeanZeroUnitVariance(); MultiLayerNetwork network = new MultiLayerNetwork(getIrisMultiLayerConfig(Activation.RELU, optimizer)); network.init(); - TrainingListener listener = new ScoreIterationListener(1); + TrainingListener listener = new ScoreIterationListener(10); network.setListeners(Collections.singletonList(listener)); double firstScore = network.score(data); @@ -223,7 +223,7 @@ public class BackTrackLineSearchTest extends BaseDL4JTest { data.normalizeZeroMeanZeroUnitVariance(); MultiLayerNetwork network = new MultiLayerNetwork(getIrisMultiLayerConfig(Activation.RELU, optimizer)); network.init(); - TrainingListener listener = new ScoreIterationListener(1); + TrainingListener listener = new ScoreIterationListener(10); network.setListeners(Collections.singletonList(listener)); double oldScore = network.score(data); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/optimize/solver/TestOptimizers.java 
b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/optimize/solver/TestOptimizers.java index a7ce1622f..c2f5cd595 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/optimize/solver/TestOptimizers.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/optimize/solver/TestOptimizers.java @@ -66,7 +66,7 @@ import static org.junit.Assert.assertTrue; public class TestOptimizers extends BaseDL4JTest { //For debugging. - private static final boolean PRINT_OPT_RESULTS = true; + private static final boolean PRINT_OPT_RESULTS = false; @Test public void testOptimizersBasicMLPBackprop() { diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/optimizer/listener/TestParamAndGradientIterationListener.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/optimizer/listener/TestParamAndGradientIterationListener.java deleted file mode 100644 index 797be51cc..000000000 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/optimizer/listener/TestParamAndGradientIterationListener.java +++ /dev/null @@ -1,79 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.deeplearning4j.optimizer.listener; - - -import org.deeplearning4j.BaseDL4JTest; -import org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator; -import org.deeplearning4j.nn.api.OptimizationAlgorithm; -import org.deeplearning4j.nn.conf.MultiLayerConfiguration; -import org.deeplearning4j.nn.conf.NeuralNetConfiguration; -import org.deeplearning4j.nn.conf.layers.DenseLayer; -import org.deeplearning4j.nn.conf.layers.OutputLayer; -import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; -import org.deeplearning4j.optimize.api.TrainingListener; -import org.deeplearning4j.optimize.listeners.ParamAndGradientIterationListener; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; -import org.nd4j.linalg.activations.Activation; -import org.nd4j.linalg.learning.config.Sgd; -import org.nd4j.linalg.lossfunctions.LossFunctions; - -import java.io.File; - -import static org.junit.Assert.assertEquals; - -public class TestParamAndGradientIterationListener extends BaseDL4JTest { - - @Rule - public TemporaryFolder testDir = new TemporaryFolder(); - - @Test - public void test() throws Exception { - - IrisDataSetIterator iter = new IrisDataSetIterator(30, 150); - - MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() - .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(new Sgd(1e-5)) - .list().layer(0, new DenseLayer.Builder().nIn(4).nOut(20).build()) - .layer(1, new DenseLayer.Builder().nIn(20).nOut(30).build()) - .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) - .activation(Activation.SOFTMAX).nIn(30).nOut(3).build()) - .build(); - - MultiLayerNetwork net = new MultiLayerNetwork(conf); - net.init(); - - File f = testDir.newFile("paramAndGradTest.txt"); - TrainingListener listener = 
ParamAndGradientIterationListener.builder().outputToFile(true) - .file(f) - .outputToConsole(true).outputToLogger(false).iterations(2).printHeader(true).printMean(false) - .printMinMax(false).printMeanAbsValue(true).delimiter("\t").build(); - net.setListeners(listener); - - for (int i = 0; i < 2; i++) { - net.fit(iter); - } - - - } - - - - -} diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/plot/BarnesHutTsneTest.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/plot/BarnesHutTsneTest.java index 1c64c1fd1..7a818810a 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/plot/BarnesHutTsneTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/plot/BarnesHutTsneTest.java @@ -91,7 +91,7 @@ public class BarnesHutTsneTest extends BaseDL4JTest { .useAdaGrad(false).build(); b.fit(data); - log.info("Result: {}", b.getData()); +// log.info("Result: {}", b.getData()); val exp = Nd4j.createFromArray(new double[]{-3.5318212819287327, 35.40331834897696, 3.890809489531651, -1.291195609955519, -42.854099388207466, 7.8761368019456635, 28.798057251442877, 7.1456564000935225, 2.9518396278984786, -42.860181054199636, -34.989343304202, -108.99770355680282, 31.78123839126566, -29.322118879730205, 163.87558311206212, 2.9538984612478396, 31.419519824305546, 13.105400907817279, 25.46987139120746, -43.27317406736858, 32.455151773056144, 25.28067703547214, 0.005442008567682552, 21.005029233370358, -61.71390311950051, 5.218417653362599, 47.15762099517554, 8.834739256343404, 17.845790108867153, -54.31654219224107, -18.71285871476804, -16.446982180909007, -71.22568781913213, -12.339975548387091, 70.49096598213703, 25.022454385237456, -14.572652938207126, -5.320080866729078, 1.5874449933639676, -40.60960510287835, -31.98564381157643, -95.40875746933808, 19.196346639002364, -38.80930682421929, 135.00454225923906, 5.277879540549592, 30.79963767087089, -0.007276462027131683, 
31.278796123365815, -38.47381680049993, 10.415728497075905, 36.567265019013085, -7.406587944733211, -18.376174615781114, -45.26976962854271}).reshape(-1, 5); @@ -178,7 +178,7 @@ public class BarnesHutTsneTest extends BaseDL4JTest { INDArray data = iter.next().getFeatures(); INDArray perplexityOutput = b.computeGaussianPerplexity(data, 30.0); - System.out.println(perplexityOutput); +// System.out.println(perplexityOutput); } @Test @@ -217,17 +217,17 @@ public class BarnesHutTsneTest extends BaseDL4JTest { StopWatch watch = new StopWatch(); watch.start(); b.fit(data); - System.out.println(b.getData()); +// System.out.println(b.getData()); watch.stop(); File outDir = testDir.newFolder(); ClassPathResource labels = new ClassPathResource("mnist2500_labels.txt"); List labelsList = IOUtils.readLines(labels.getInputStream()); b.saveAsFile(/*labelsList,*/ new File(outDir, "raw.txt").getAbsolutePath()); - System.out.println(b.getData()); +// System.out.println(b.getData()); System.out.println("Fit done in " + watch); assertEquals(2500, b.getData().size(0)); - System.out.println(b.getData()); +// System.out.println(b.getData()); INDArray a1 = b.getData().getRow(0); INDArray a2 = b.getData().getRow(1); @@ -338,7 +338,7 @@ public class BarnesHutTsneTest extends BaseDL4JTest { double[] dC = {-0.0618386320333619, -0.06266654959379839, 0.029998268806149204, 0.10780566335888186, -0.19449543068355346, -0.14763764361792697, 0.17493572758118422, 0.1926109839221966, -0.15176648259935419, 0.10974665709698186, 0.13102419155322598, 0.004941641352409449, 0.19159764518354974, -0.26332838053474944, -0.023631441261541583, 0.09838669432305949, 0.09709129638394683, -0.01605053000727605, 0.06566171635025217, -0.17325078066035252, -0.1090854255505605, 0.023350644966904276, 0.075192354899586, -0.08278373866517603, 0.18431338134579323, 0.2766031655578053, -0.17557907233268688, 0.10616148241800637, -0.09999024423215641, -0.017181932145255287, 0.06711331400576945, -0.01388231800826619, 
-0.10248189290485302, 0.20786521034824304, 0.11254913977572988, -0.289564646781519, 0.13491805919337516, -0.07504249344962562, 0.004154656287570634, -0.10516715438388784, -0.27984655075804576, 0.09811828071286613, 0.03684521473995052, -0.054645216532387256, -0.18147132772800725, 0.027588750493223044, 0.214734364419479, -0.026729138234415008, -0.28410504978879136, 0.007015481601883835, 0.04427981739424874, -0.059253265830134655, -0.05325479031206952, -0.11319889109674944, 0.1530133971867549}; INDArray actual = gradient.getGradientFor("yIncs"); - System.out.println(actual); +// System.out.println(actual); assertArrayEquals(dC, actual.reshape(1,55).toDoubleVector(), 1e-05); } @@ -482,8 +482,8 @@ public class BarnesHutTsneTest extends BaseDL4JTest { List results = new ArrayList<>(); List distances = new ArrayList<>(); tree.search(target, 11, results, distances); - System.out.println("Results:" + results); - System.out.println("Distances:" + distances); +// System.out.println("Results:" + results); +// System.out.println("Distances:" + distances); } } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/samediff/CompareTrainingImplementations.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/samediff/CompareTrainingImplementations.java index fa0fc335f..cf75700f8 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/samediff/CompareTrainingImplementations.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/samediff/CompareTrainingImplementations.java @@ -250,7 +250,7 @@ public class CompareTrainingImplementations extends BaseDL4JTest { sd.evaluate(iter, "softmax", rEvalSd); assertEquals(rEvalDl4j, rEvalSd); - System.out.println("---------------------------------"); +// System.out.println("---------------------------------"); } } } diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/util/CrashReportingUtilTest.java 
b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/util/CrashReportingUtilTest.java index bdb72d4b6..5975d1c1e 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/util/CrashReportingUtilTest.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/util/CrashReportingUtilTest.java @@ -47,6 +47,11 @@ import static org.junit.Assert.*; public class CrashReportingUtilTest extends BaseDL4JTest { + @Override + public long getTimeoutMilliseconds() { + return 120000; + } + @Rule public TemporaryFolder testDir = new TemporaryFolder(); diff --git a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/util/ModelValidatorTests.java b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/util/ModelValidatorTests.java index a66207cd2..a704a1899 100644 --- a/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/util/ModelValidatorTests.java +++ b/deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/util/ModelValidatorTests.java @@ -51,7 +51,7 @@ public class ModelValidatorTests extends BaseDL4JTest { assertEquals("MultiLayerNetwork", vr0.getFormatType()); assertEquals(MultiLayerNetwork.class, vr0.getFormatClass()); assertNull(vr0.getException()); - System.out.println(vr0.toString()); +// System.out.println(vr0.toString()); //Test empty file File f1 = new File(f, "empty.bin"); @@ -63,7 +63,7 @@ public class ModelValidatorTests extends BaseDL4JTest { assertEquals("MultiLayerNetwork", vr1.getFormatType()); assertEquals(MultiLayerNetwork.class, vr1.getFormatClass()); assertNull(vr1.getException()); - System.out.println(vr1.toString()); +// System.out.println(vr1.toString()); //Test invalid zip file File f2 = new File(f, "notReallyZip.zip"); @@ -75,7 +75,7 @@ public class ModelValidatorTests extends BaseDL4JTest { assertEquals("MultiLayerNetwork", vr2.getFormatType()); assertEquals(MultiLayerNetwork.class, vr2.getFormatClass()); 
assertNotNull(vr2.getException()); - System.out.println(vr2.toString()); +// System.out.println(vr2.toString()); //Test valid zip, but missing configuration File f3 = new File(f, "modelNoConfig.zip"); @@ -92,7 +92,7 @@ public class ModelValidatorTests extends BaseDL4JTest { assertEquals("MultiLayerNetwork", vr3.getFormatType()); assertEquals(MultiLayerNetwork.class, vr3.getFormatClass()); assertNull(vr3.getException()); - System.out.println(vr3.toString()); +// System.out.println(vr3.toString()); //Test valid sip, but missing params @@ -110,7 +110,7 @@ public class ModelValidatorTests extends BaseDL4JTest { assertEquals("MultiLayerNetwork", vr4.getFormatType()); assertEquals(MultiLayerNetwork.class, vr4.getFormatClass()); assertNull(vr4.getException()); - System.out.println(vr4.toString()); +// System.out.println(vr4.toString()); //Test valid model @@ -122,7 +122,7 @@ public class ModelValidatorTests extends BaseDL4JTest { assertEquals("MultiLayerNetwork", vr5.getFormatType()); assertEquals(MultiLayerNetwork.class, vr5.getFormatClass()); assertNull(vr5.getException()); - System.out.println(vr5.toString()); +// System.out.println(vr5.toString()); //Test valid model with corrupted JSON @@ -141,7 +141,7 @@ public class ModelValidatorTests extends BaseDL4JTest { bytes = IOUtils.toByteArray(zis); } zo.write(bytes); - System.out.println("WROTE: " + ze.getName()); +// System.out.println("WROTE: " + ze.getName()); } } } @@ -153,7 +153,7 @@ public class ModelValidatorTests extends BaseDL4JTest { assertEquals("MultiLayerNetwork", vr6.getFormatType()); assertEquals(MultiLayerNetwork.class, vr6.getFormatClass()); assertNotNull(vr6.getException()); - System.out.println(vr6.toString()); +// System.out.println(vr6.toString()); } @@ -169,7 +169,7 @@ public class ModelValidatorTests extends BaseDL4JTest { assertEquals("ComputationGraph", vr0.getFormatType()); assertEquals(ComputationGraph.class, vr0.getFormatClass()); assertNull(vr0.getException()); - 
System.out.println(vr0.toString()); +// System.out.println(vr0.toString()); //Test empty file File f1 = new File(f, "empty.bin"); @@ -181,7 +181,7 @@ public class ModelValidatorTests extends BaseDL4JTest { assertEquals("ComputationGraph", vr1.getFormatType()); assertEquals(ComputationGraph.class, vr1.getFormatClass()); assertNull(vr1.getException()); - System.out.println(vr1.toString()); +// System.out.println(vr1.toString()); //Test invalid zip file File f2 = new File(f, "notReallyZip.zip"); @@ -193,7 +193,7 @@ public class ModelValidatorTests extends BaseDL4JTest { assertEquals("ComputationGraph", vr2.getFormatType()); assertEquals(ComputationGraph.class, vr2.getFormatClass()); assertNotNull(vr2.getException()); - System.out.println(vr2.toString()); +// System.out.println(vr2.toString()); //Test valid zip, but missing configuration File f3 = new File(f, "modelNoConfig.zip"); @@ -210,7 +210,7 @@ public class ModelValidatorTests extends BaseDL4JTest { assertEquals("ComputationGraph", vr3.getFormatType()); assertEquals(ComputationGraph.class, vr3.getFormatClass()); assertNull(vr3.getException()); - System.out.println(vr3.toString()); +// System.out.println(vr3.toString()); //Test valid sip, but missing params @@ -228,7 +228,7 @@ public class ModelValidatorTests extends BaseDL4JTest { assertEquals("ComputationGraph", vr4.getFormatType()); assertEquals(ComputationGraph.class, vr4.getFormatClass()); assertNull(vr4.getException()); - System.out.println(vr4.toString()); +// System.out.println(vr4.toString()); //Test valid model @@ -240,7 +240,7 @@ public class ModelValidatorTests extends BaseDL4JTest { assertEquals("ComputationGraph", vr5.getFormatType()); assertEquals(ComputationGraph.class, vr5.getFormatClass()); assertNull(vr5.getException()); - System.out.println(vr5.toString()); +// System.out.println(vr5.toString()); //Test valid model with corrupted JSON @@ -259,7 +259,7 @@ public class ModelValidatorTests extends BaseDL4JTest { bytes = IOUtils.toByteArray(zis); } 
zo.write(bytes); - System.out.println("WROTE: " + ze.getName()); +// System.out.println("WROTE: " + ze.getName()); } } } @@ -271,7 +271,7 @@ public class ModelValidatorTests extends BaseDL4JTest { assertEquals("ComputationGraph", vr6.getFormatType()); assertEquals(ComputationGraph.class, vr6.getFormatClass()); assertNotNull(vr6.getException()); - System.out.println(vr6.toString()); +// System.out.println(vr6.toString()); } diff --git a/deeplearning4j/deeplearning4j-cuda/pom.xml b/deeplearning4j/deeplearning4j-cuda/pom.xml index 95c5f5deb..dfdc76efb 100644 --- a/deeplearning4j/deeplearning4j-cuda/pom.xml +++ b/deeplearning4j/deeplearning4j-cuda/pom.xml @@ -83,6 +83,12 @@ junit test + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-cuda/src/test/java/org/deeplearning4j/BaseDL4JTest.java b/deeplearning4j/deeplearning4j-cuda/src/test/java/org/deeplearning4j/BaseDL4JTest.java deleted file mode 100644 index ef26f2848..000000000 --- a/deeplearning4j/deeplearning4j-cuda/src/test/java/org/deeplearning4j/BaseDL4JTest.java +++ /dev/null @@ -1,141 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.deeplearning4j; - -import lombok.extern.slf4j.Slf4j; -import org.bytedeco.javacpp.Pointer; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TestName; -import org.nd4j.linalg.api.buffer.DataBuffer; -import org.nd4j.linalg.api.buffer.DataType; -import org.nd4j.linalg.api.memory.MemoryWorkspace; -import org.nd4j.linalg.api.ops.executioner.OpExecutioner; -import org.nd4j.linalg.factory.Nd4j; -import org.nd4j.linalg.profiler.ProfilerConfig; - -import java.lang.management.ManagementFactory; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -@Slf4j -public class BaseDL4JTest { - - @Rule - public TestName name = new TestName(); - - protected long startTime; - protected int threadCountBefore; - - /** - * Override this to set the profiling mode for the tests defined in the child class - */ - public OpExecutioner.ProfilingMode getProfilingMode(){ - return OpExecutioner.ProfilingMode.SCOPE_PANIC; - } - - /** - * Override this to set the datatype of the tests defined in the child class - */ - public DataType getDataType(){ - return DataType.DOUBLE; - } - - public DataType getDefaultFPDataType(){ - return getDataType(); - } - - @Before - public void beforeTest(){ - log.info("{}.{}", getClass().getSimpleName(), name.getMethodName()); - Nd4j.getExecutioner().setProfilingMode(getProfilingMode()); - Nd4j.getExecutioner().setProfilingConfig(ProfilerConfig.builder().build()); - Nd4j.setDefaultDataTypes(getDataType(), getDefaultFPDataType()); - startTime = System.currentTimeMillis(); - threadCountBefore = ManagementFactory.getThreadMXBean().getThreadCount(); - } - - @After - public void afterTest(){ - //Attempt to keep workspaces isolated between tests - Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread(); - MemoryWorkspace currWS = 
Nd4j.getMemoryManager().getCurrentWorkspace(); - Nd4j.getMemoryManager().setCurrentWorkspace(null); - if(currWS != null){ - //Not really safe to continue testing under this situation... other tests will likely fail with obscure - // errors that are hard to track back to this - log.error("Open workspace leaked from test! Exiting - {}, isOpen = {} - {}", currWS.getId(), currWS.isScopeActive(), currWS); - System.exit(1); - } - - StringBuilder sb = new StringBuilder(); - long maxPhys = Pointer.maxPhysicalBytes(); - long maxBytes = Pointer.maxBytes(); - long currPhys = Pointer.physicalBytes(); - long currBytes = Pointer.totalBytes(); - - long jvmTotal = Runtime.getRuntime().totalMemory(); - long jvmMax = Runtime.getRuntime().maxMemory(); - - int threadsAfter = ManagementFactory.getThreadMXBean().getThreadCount(); - - long duration = System.currentTimeMillis() - startTime; - sb.append(getClass().getSimpleName()).append(".").append(name.getMethodName()) - .append(": ").append(duration).append(" ms") - .append(", threadCount: (").append(threadCountBefore).append("->").append(threadsAfter).append(")") - .append(", jvmTotal=").append(jvmTotal) - .append(", jvmMax=").append(jvmMax) - .append(", totalBytes=").append(currBytes).append(", maxBytes=").append(maxBytes) - .append(", currPhys=").append(currPhys).append(", maxPhys=").append(maxPhys); - - List ws = Nd4j.getWorkspaceManager().getAllWorkspacesForCurrentThread(); - if(ws != null && ws.size() > 0){ - long currSize = 0; - for(MemoryWorkspace w : ws){ - currSize += w.getCurrentSize(); - } - if(currSize > 0){ - sb.append(", threadWSSize=").append(currSize) - .append(" (").append(ws.size()).append(" WSs)"); - } - } - - - Properties p = Nd4j.getExecutioner().getEnvironmentInformation(); - Object o = p.get("cuda.devicesInformation"); - if(o instanceof List){ - List> l = (List>) o; - if(l.size() > 0) { - - sb.append(" [").append(l.size()) - .append(" GPUs: "); - - for (int i = 0; i < l.size(); i++) { - Map m = l.get(i); - if(i > 
0) - sb.append(","); - sb.append("(").append(m.get("cuda.freeMemory")).append(" free, ") - .append(m.get("cuda.totalMemory")).append(" total)"); - } - sb.append("]"); - } - } - log.info(sb.toString()); - } -} diff --git a/deeplearning4j/deeplearning4j-cuda/src/test/java/org/deeplearning4j/gradientcheck/CNNGradientCheckTest.java b/deeplearning4j/deeplearning4j-cuda/src/test/java/org/deeplearning4j/gradientcheck/CNNGradientCheckTest.java index 2f4532823..e98ef1e58 100644 --- a/deeplearning4j/deeplearning4j-cuda/src/test/java/org/deeplearning4j/gradientcheck/CNNGradientCheckTest.java +++ b/deeplearning4j/deeplearning4j-cuda/src/test/java/org/deeplearning4j/gradientcheck/CNNGradientCheckTest.java @@ -735,9 +735,10 @@ public class CNNGradientCheckTest extends BaseDL4JTest { + convFirst; System.out.println(msg); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, - labels, null, null, true, 128); + boolean gradOK = GradientCheckUtil.checkGradients( + new GradientCheckUtil.MLNConfig().net(net) + .input(input).labels(labels) + .subset(true).maxPerParam(128)); assertTrue(msg, gradOK); @@ -879,8 +880,10 @@ public class CNNGradientCheckTest extends BaseDL4JTest { + k + ", s=" + s + ", d=" + d + ", cm=" + cm; System.out.println(msg); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, null, true, 100); + boolean gradOK = GradientCheckUtil.checkGradients( + new GradientCheckUtil.MLNConfig().net(net) + .input(input).labels(labels) + .subset(true).maxPerParam(100)); assertTrue(msg, gradOK); @@ -948,8 +951,10 @@ public class CNNGradientCheckTest extends BaseDL4JTest { + k + ", nIn=" + nIn + ", depthMul=" + depthMultiplier + ", s=" + s + ", cm=" + cm; System.out.println(msg); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, 
DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, null, true, 256); + boolean gradOK = GradientCheckUtil.checkGradients( + new GradientCheckUtil.MLNConfig().net(net) + .input(input).labels(labels) + .subset(true).maxPerParam(256)); assertTrue(msg, gradOK); @@ -1021,8 +1026,10 @@ public class CNNGradientCheckTest extends BaseDL4JTest { + k + ", s=" + s + ", d=" + d + ", cm=" + cm; System.out.println(msg); - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, null, true, 50); //Most params are in output layer + boolean gradOK = GradientCheckUtil.checkGradients( + new GradientCheckUtil.MLNConfig().net(net) + .input(input).labels(labels) + .subset(true).maxPerParam(50)); assertTrue(msg, gradOK); @@ -1176,8 +1183,10 @@ public class CNNGradientCheckTest extends BaseDL4JTest { System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams()); } - boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, - DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels, null, null, true, 160); + boolean gradOK = GradientCheckUtil.checkGradients( + new GradientCheckUtil.MLNConfig().net(net) + .input(input).labels(labels) + .subset(true).maxPerParam(160)); assertTrue(msg, gradOK); diff --git a/deeplearning4j/deeplearning4j-graph/pom.xml b/deeplearning4j/deeplearning4j-graph/pom.xml index 9c4b25ac3..ebc6740d9 100644 --- a/deeplearning4j/deeplearning4j-graph/pom.xml +++ b/deeplearning4j/deeplearning4j-graph/pom.xml @@ -51,6 +51,13 @@ test + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + + diff --git a/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/BaseDL4JTest.java b/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/BaseDL4JTest.java deleted file mode 
100644 index b1b6df5dd..000000000 --- a/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/BaseDL4JTest.java +++ /dev/null @@ -1,140 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.deeplearning4j.graph; - -import lombok.extern.slf4j.Slf4j; -import org.bytedeco.javacpp.Pointer; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TestName; -import org.nd4j.linalg.api.buffer.DataType; -import org.nd4j.linalg.api.memory.MemoryWorkspace; -import org.nd4j.linalg.api.ops.executioner.OpExecutioner; -import org.nd4j.linalg.factory.Nd4j; -import org.nd4j.linalg.profiler.ProfilerConfig; - -import java.lang.management.ManagementFactory; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -@Slf4j -public class BaseDL4JTest { - - @Rule - public TestName name = new TestName(); - - protected long startTime; - protected int threadCountBefore; - - /** - * Override this to set the profiling mode for the tests defined in the child class - */ - public OpExecutioner.ProfilingMode getProfilingMode(){ - return OpExecutioner.ProfilingMode.SCOPE_PANIC; - } - - /** - * Override this to set the datatype of the tests defined in the child class - */ - public 
DataType getDataType(){ - return DataType.DOUBLE; - } - - public DataType getDefaultFPDataType(){ - return getDataType(); - } - - @Before - public void beforeTest(){ - log.info("{}.{}", getClass().getSimpleName(), name.getMethodName()); - Nd4j.getExecutioner().setProfilingMode(getProfilingMode()); - Nd4j.getExecutioner().setProfilingConfig(ProfilerConfig.builder().build()); - Nd4j.setDefaultDataTypes(getDataType(), getDefaultFPDataType()); - startTime = System.currentTimeMillis(); - threadCountBefore = ManagementFactory.getThreadMXBean().getThreadCount(); - } - - @After - public void afterTest(){ - //Attempt to keep workspaces isolated between tests - Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread(); - MemoryWorkspace currWS = Nd4j.getMemoryManager().getCurrentWorkspace(); - Nd4j.getMemoryManager().setCurrentWorkspace(null); - if(currWS != null){ - //Not really safe to continue testing under this situation... other tests will likely fail with obscure - // errors that are hard to track back to this - log.error("Open workspace leaked from test! 
Exiting - {}, isOpen = {} - {}", currWS.getId(), currWS.isScopeActive(), currWS); - System.exit(1); - } - - StringBuilder sb = new StringBuilder(); - long maxPhys = Pointer.maxPhysicalBytes(); - long maxBytes = Pointer.maxBytes(); - long currPhys = Pointer.physicalBytes(); - long currBytes = Pointer.totalBytes(); - - long jvmTotal = Runtime.getRuntime().totalMemory(); - long jvmMax = Runtime.getRuntime().maxMemory(); - - int threadsAfter = ManagementFactory.getThreadMXBean().getThreadCount(); - - long duration = System.currentTimeMillis() - startTime; - sb.append(getClass().getSimpleName()).append(".").append(name.getMethodName()) - .append(": ").append(duration).append(" ms") - .append(", threadCount: (").append(threadCountBefore).append("->").append(threadsAfter).append(")") - .append(", jvmTotal=").append(jvmTotal) - .append(", jvmMax=").append(jvmMax) - .append(", totalBytes=").append(currBytes).append(", maxBytes=").append(maxBytes) - .append(", currPhys=").append(currPhys).append(", maxPhys=").append(maxPhys); - - List ws = Nd4j.getWorkspaceManager().getAllWorkspacesForCurrentThread(); - if(ws != null && ws.size() > 0){ - long currSize = 0; - for(MemoryWorkspace w : ws){ - currSize += w.getCurrentSize(); - } - if(currSize > 0){ - sb.append(", threadWSSize=").append(currSize) - .append(" (").append(ws.size()).append(" WSs)"); - } - } - - - Properties p = Nd4j.getExecutioner().getEnvironmentInformation(); - Object o = p.get("cuda.devicesInformation"); - if(o instanceof List){ - List> l = (List>) o; - if(l.size() > 0) { - - sb.append(" [").append(l.size()) - .append(" GPUs: "); - - for (int i = 0; i < l.size(); i++) { - Map m = l.get(i); - if(i > 0) - sb.append(","); - sb.append("(").append(m.get("cuda.freeMemory")).append(" free, ") - .append(m.get("cuda.totalMemory")).append(" total)"); - } - sb.append("]"); - } - } - log.info(sb.toString()); - } -} diff --git 
a/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/data/TestGraphLoading.java b/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/data/TestGraphLoading.java index 551750f7c..1a5a27918 100644 --- a/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/data/TestGraphLoading.java +++ b/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/data/TestGraphLoading.java @@ -17,7 +17,7 @@ package org.deeplearning4j.graph.data; import org.apache.commons.lang3.ArrayUtils; -import org.deeplearning4j.graph.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.graph.api.Edge; import org.deeplearning4j.graph.api.IGraph; import org.deeplearning4j.graph.data.impl.DelimitedEdgeLineProcessor; diff --git a/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/data/TestGraphLoadingWeighted.java b/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/data/TestGraphLoadingWeighted.java index a06f40248..94e1a20bf 100644 --- a/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/data/TestGraphLoadingWeighted.java +++ b/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/data/TestGraphLoadingWeighted.java @@ -17,7 +17,7 @@ package org.deeplearning4j.graph.data; import org.apache.commons.lang3.ArrayUtils; -import org.deeplearning4j.graph.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.graph.api.Edge; import org.deeplearning4j.graph.api.IGraph; import org.deeplearning4j.graph.data.impl.WeightedEdgeLineProcessor; diff --git a/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/graph/TestGraph.java b/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/graph/TestGraph.java index 74c7f7dc2..0dc456107 100644 --- a/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/graph/TestGraph.java +++ 
b/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/graph/TestGraph.java @@ -17,7 +17,7 @@ package org.deeplearning4j.graph.graph; import org.apache.commons.lang3.ArrayUtils; -import org.deeplearning4j.graph.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.graph.api.*; import org.deeplearning4j.graph.data.GraphLoader; import org.deeplearning4j.graph.iterator.RandomWalkIterator; diff --git a/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/models/deepwalk/DeepWalkGradientCheck.java b/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/models/deepwalk/DeepWalkGradientCheck.java index 951a4c50b..39e91921a 100644 --- a/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/models/deepwalk/DeepWalkGradientCheck.java +++ b/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/models/deepwalk/DeepWalkGradientCheck.java @@ -16,7 +16,7 @@ package org.deeplearning4j.graph.models.deepwalk; -import org.deeplearning4j.graph.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.graph.data.GraphLoader; import org.deeplearning4j.graph.graph.Graph; import org.deeplearning4j.graph.iterator.GraphWalkIterator; diff --git a/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/models/deepwalk/TestDeepWalk.java b/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/models/deepwalk/TestDeepWalk.java index 82d94fc46..d92c3bec1 100644 --- a/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/models/deepwalk/TestDeepWalk.java +++ b/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/models/deepwalk/TestDeepWalk.java @@ -17,7 +17,7 @@ package org.deeplearning4j.graph.models.deepwalk; import org.apache.commons.io.FilenameUtils; -import org.deeplearning4j.graph.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import 
org.deeplearning4j.graph.api.Edge; import org.deeplearning4j.graph.api.IGraph; import org.deeplearning4j.graph.data.GraphLoader; diff --git a/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/models/deepwalk/TestGraphHuffman.java b/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/models/deepwalk/TestGraphHuffman.java index 5651eec2e..763aae822 100644 --- a/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/models/deepwalk/TestGraphHuffman.java +++ b/deeplearning4j/deeplearning4j-graph/src/test/java/org/deeplearning4j/graph/models/deepwalk/TestGraphHuffman.java @@ -16,7 +16,7 @@ package org.deeplearning4j.graph.models.deepwalk; -import org.deeplearning4j.graph.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Test; import java.util.Arrays; diff --git a/deeplearning4j/deeplearning4j-manifold/deeplearning4j-tsne/pom.xml b/deeplearning4j/deeplearning4j-manifold/deeplearning4j-tsne/pom.xml index 37002b5e1..7ebb82e75 100644 --- a/deeplearning4j/deeplearning4j-manifold/deeplearning4j-tsne/pom.xml +++ b/deeplearning4j/deeplearning4j-manifold/deeplearning4j-tsne/pom.xml @@ -55,6 +55,13 @@ nd4j-api ${nd4j.version} + + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-manifold/deeplearning4j-tsne/src/test/java/org/deeplearning4j/plot/Test6058.java b/deeplearning4j/deeplearning4j-manifold/deeplearning4j-tsne/src/test/java/org/deeplearning4j/plot/Test6058.java index af544b36e..3359e729f 100644 --- a/deeplearning4j/deeplearning4j-manifold/deeplearning4j-tsne/src/test/java/org/deeplearning4j/plot/Test6058.java +++ b/deeplearning4j/deeplearning4j-manifold/deeplearning4j-tsne/src/test/java/org/deeplearning4j/plot/Test6058.java @@ -17,6 +17,7 @@ package org.deeplearning4j.plot; import lombok.val; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Test; import org.nd4j.linalg.api.ndarray.INDArray; import 
org.nd4j.linalg.factory.Nd4j; @@ -25,7 +26,7 @@ import java.util.ArrayList; import static org.junit.Assert.assertTrue; -public class Test6058 { +public class Test6058 extends BaseDL4JTest { @Test public void test() throws Exception { diff --git a/deeplearning4j/deeplearning4j-modelimport/pom.xml b/deeplearning4j/deeplearning4j-modelimport/pom.xml index 223aebdaa..566bf6012 100644 --- a/deeplearning4j/deeplearning4j-modelimport/pom.xml +++ b/deeplearning4j/deeplearning4j-modelimport/pom.xml @@ -86,6 +86,12 @@ junit test + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + ch.qos.logback diff --git a/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/config/KerasLayerConfiguration.java b/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/config/KerasLayerConfiguration.java index 6d6fc42c9..7841fdf27 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/config/KerasLayerConfiguration.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/config/KerasLayerConfiguration.java @@ -108,6 +108,9 @@ public class KerasLayerConfiguration { private final String LAYER_CLASS_NAME_LEAKY_RELU = "LeakyReLU"; private final String LAYER_CLASS_NAME_PRELU = "PReLU"; private final String LAYER_CLASS_NAME_THRESHOLDED_RELU = "ThresholdedReLU"; + private final String LAYER_CLASS_NAME_RELU = "ReLU"; + private final String LAYER_CLASS_NAME_ELU = "ELU"; + private final String LAYER_CLASS_NAME_SOFTMAX = "Softmax"; private final String LAYER_CLASS_NAME_UPSAMPLING_1D = "UpSampling1D"; private final String LAYER_CLASS_NAME_UPSAMPLING_2D = "UpSampling2D"; private final String LAYER_CLASS_NAME_UPSAMPLING_3D = "UpSampling3D"; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activations/KerasELU.java 
b/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activations/KerasELU.java new file mode 100644 index 000000000..2517ae0ac --- /dev/null +++ b/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activations/KerasELU.java @@ -0,0 +1,95 @@ +/* ****************************************************************************** + * Copyright (c) 2019 Konduit K.K. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0. + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + ******************************************************************************/ + +package org.deeplearning4j.nn.modelimport.keras.layers.advanced.activations; + +import org.deeplearning4j.nn.conf.inputs.InputType; +import org.deeplearning4j.nn.conf.layers.ActivationLayer; +import org.deeplearning4j.nn.modelimport.keras.KerasLayer; +import org.deeplearning4j.nn.modelimport.keras.exceptions.InvalidKerasConfigurationException; +import org.deeplearning4j.nn.modelimport.keras.exceptions.UnsupportedKerasConfigurationException; +import org.deeplearning4j.nn.modelimport.keras.utils.KerasLayerUtils; +import org.nd4j.linalg.activations.IActivation; +import org.nd4j.linalg.activations.impl.ActivationELU; +import org.nd4j.linalg.activations.impl.ActivationLReLU; + +import java.util.Map; + +/** + * Imports ELU layer from Keras + * + * @author Alex Black + */ +public class KerasELU extends KerasLayer { + + + /** + * Constructor from parsed Keras layer configuration dictionary. + * + * @param layerConfig dictionary containing Keras layer configuration + * @throws InvalidKerasConfigurationException Invalid Keras config + * @throws UnsupportedKerasConfigurationException Unsupported Invalid Keras config + */ + public KerasELU(Map layerConfig) + throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { + this(layerConfig, true); + } + + /** + * Constructor from parsed Keras layer configuration dictionary. 
+ * + * @param layerConfig dictionary containing Keras layer configuration + * @param enforceTrainingConfig whether to enforce training-related configuration options + * @throws InvalidKerasConfigurationException Invalid Keras config + * @throws UnsupportedKerasConfigurationException Invalid Keras config + */ + public KerasELU(Map layerConfig, boolean enforceTrainingConfig) + throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { + super(layerConfig, enforceTrainingConfig); + Map innerConfig = KerasLayerUtils.getInnerLayerConfigFromConfig(layerConfig, conf); + double alpha = 1.0; // Set default alpha to default in nd4j + String layerFieldLeakyReluAlpha = "alpha"; + if (innerConfig.containsKey(layerFieldLeakyReluAlpha)) { + alpha = (double) innerConfig.get(layerFieldLeakyReluAlpha); + } + IActivation leakyReLU = new ActivationELU(alpha); + this.layer = new ActivationLayer.Builder().name(this.layerName).activation(leakyReLU).build(); + } + + /** + * Get layer output type. + * + * @param inputType Array of InputTypes + * @return output type as InputType + * @throws InvalidKerasConfigurationException Invalid Keras config + */ + public InputType getOutputType(InputType... inputType) throws InvalidKerasConfigurationException { + if (inputType.length > 1) + throw new InvalidKerasConfigurationException( + "Keras Activation layer accepts only one input (received " + inputType.length + ")"); + return this.getActivationLayer().getOutputType(-1, inputType[0]); + } + + /** + * Get DL4J ActivationLayer. 
+ * + * @return ActivationLayer + */ + public ActivationLayer getActivationLayer() { + return (ActivationLayer) this.layer; + } + +} diff --git a/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activations/KerasReLU.java b/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activations/KerasReLU.java new file mode 100644 index 000000000..14c4b3d73 --- /dev/null +++ b/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activations/KerasReLU.java @@ -0,0 +1,99 @@ +/* ****************************************************************************** + * Copyright (c) 2019 Konduit K.K. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0. + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + ******************************************************************************/ + +package org.deeplearning4j.nn.modelimport.keras.layers.advanced.activations; + +import org.deeplearning4j.nn.conf.inputs.InputType; +import org.deeplearning4j.nn.conf.layers.ActivationLayer; +import org.deeplearning4j.nn.modelimport.keras.KerasLayer; +import org.deeplearning4j.nn.modelimport.keras.exceptions.InvalidKerasConfigurationException; +import org.deeplearning4j.nn.modelimport.keras.exceptions.UnsupportedKerasConfigurationException; +import org.deeplearning4j.nn.modelimport.keras.utils.KerasLayerUtils; +import org.nd4j.linalg.activations.IActivation; +import org.nd4j.linalg.activations.impl.ActivationLReLU; +import org.nd4j.linalg.activations.impl.ActivationReLU; + +import java.util.Map; + +/** + * Imports ReLU layer from Keras + * + * @author Alex Black + */ +public class KerasReLU extends KerasLayer { + + /** + * Constructor from parsed Keras layer configuration dictionary. + * + * @param layerConfig dictionary containing Keras layer configuration + * @throws InvalidKerasConfigurationException Invalid Keras config + * @throws UnsupportedKerasConfigurationException Unsupported Invalid Keras config + */ + public KerasReLU(Map layerConfig) + throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { + this(layerConfig, true); + } + + /** + * Constructor from parsed Keras layer configuration dictionary. 
+ * + * @param layerConfig dictionary containing Keras layer configuration + * @param enforceTrainingConfig whether to enforce training-related configuration options + * @throws InvalidKerasConfigurationException Invalid Keras config + * @throws UnsupportedKerasConfigurationException Invalid Keras config + */ + public KerasReLU(Map layerConfig, boolean enforceTrainingConfig) + throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { + super(layerConfig, enforceTrainingConfig); + Map innerConfig = KerasLayerUtils.getInnerLayerConfigFromConfig(layerConfig, conf); + Double maxValue = (Double) innerConfig.get("max_value"); + double negativeSlope = 0.0; + double threshold = 0.0; + if (innerConfig.containsKey("negative_slope")) { + negativeSlope = (double) innerConfig.get("negative_slope"); + } + if (innerConfig.containsKey("threshold")) { + threshold = (double) innerConfig.get("threshold"); + } + + this.layer = new ActivationLayer.Builder().name(this.layerName) + .activation(new ActivationReLU(maxValue, threshold, negativeSlope)).build(); + } + + /** + * Get layer output type. + * + * @param inputType Array of InputTypes + * @return output type as InputType + * @throws InvalidKerasConfigurationException Invalid Keras config + */ + public InputType getOutputType(InputType... inputType) throws InvalidKerasConfigurationException { + if (inputType.length > 1) + throw new InvalidKerasConfigurationException( + "Keras Activation layer accepts only one input (received " + inputType.length + ")"); + return this.getActivationLayer().getOutputType(-1, inputType[0]); + } + + /** + * Get DL4J ActivationLayer. 
+ * + * @return ActivationLayer + */ + public ActivationLayer getActivationLayer() { + return (ActivationLayer) this.layer; + } + +} diff --git a/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activations/KerasSoftmax.java b/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activations/KerasSoftmax.java new file mode 100644 index 000000000..884c55ef1 --- /dev/null +++ b/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activations/KerasSoftmax.java @@ -0,0 +1,85 @@ +/* ****************************************************************************** + * Copyright (c) 2019 Konduit K.K. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0. + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + ******************************************************************************/ + +package org.deeplearning4j.nn.modelimport.keras.layers.advanced.activations; + +import org.deeplearning4j.nn.conf.inputs.InputType; +import org.deeplearning4j.nn.conf.layers.ActivationLayer; +import org.deeplearning4j.nn.modelimport.keras.KerasLayer; +import org.deeplearning4j.nn.modelimport.keras.exceptions.InvalidKerasConfigurationException; +import org.deeplearning4j.nn.modelimport.keras.exceptions.UnsupportedKerasConfigurationException; +import org.nd4j.linalg.activations.impl.ActivationSoftmax; + +import java.util.Map; + +/** + * Imports Softmax layer from Keras + * + * @author Alex Black + */ +public class KerasSoftmax extends KerasLayer { + + /** + * Constructor from parsed Keras layer configuration dictionary. + * + * @param layerConfig dictionary containing Keras layer configuration + * @throws InvalidKerasConfigurationException Invalid Keras config + * @throws UnsupportedKerasConfigurationException Unsupported Invalid Keras config + */ + public KerasSoftmax(Map layerConfig) + throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { + this(layerConfig, true); + } + + /** + * Constructor from parsed Keras layer configuration dictionary. + * + * @param layerConfig dictionary containing Keras layer configuration + * @param enforceTrainingConfig whether to enforce training-related configuration options + * @throws InvalidKerasConfigurationException Invalid Keras config + * @throws UnsupportedKerasConfigurationException Invalid Keras config + */ + public KerasSoftmax(Map layerConfig, boolean enforceTrainingConfig) + throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { + super(layerConfig, enforceTrainingConfig); + + this.layer = new ActivationLayer.Builder().name(this.layerName).activation(new ActivationSoftmax()).build(); + } + + /** + * Get layer output type. 
+ * + * @param inputType Array of InputTypes + * @return output type as InputType + * @throws InvalidKerasConfigurationException Invalid Keras config + */ + public InputType getOutputType(InputType... inputType) throws InvalidKerasConfigurationException { + if (inputType.length > 1) + throw new InvalidKerasConfigurationException( + "Keras Activation layer accepts only one input (received " + inputType.length + ")"); + return this.getActivationLayer().getOutputType(-1, inputType[0]); + } + + /** + * Get DL4J ActivationLayer. + * + * @return ActivationLayer + */ + public ActivationLayer getActivationLayer() { + return (ActivationLayer) this.layer; + } + +} diff --git a/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/utils/KerasLayerUtils.java b/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/utils/KerasLayerUtils.java index 3494ecf49..1428b6322 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/utils/KerasLayerUtils.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/utils/KerasLayerUtils.java @@ -25,9 +25,7 @@ import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.exceptions.InvalidKerasConfigurationException; import org.deeplearning4j.nn.modelimport.keras.exceptions.UnsupportedKerasConfigurationException; import org.deeplearning4j.nn.modelimport.keras.layers.KerasInput; -import org.deeplearning4j.nn.modelimport.keras.layers.advanced.activations.KerasLeakyReLU; -import org.deeplearning4j.nn.modelimport.keras.layers.advanced.activations.KerasPReLU; -import org.deeplearning4j.nn.modelimport.keras.layers.advanced.activations.KerasThresholdedReLU; +import org.deeplearning4j.nn.modelimport.keras.layers.advanced.activations.*; import org.deeplearning4j.nn.modelimport.keras.layers.convolutional.*; 
import org.deeplearning4j.nn.modelimport.keras.layers.core.*; import org.deeplearning4j.nn.modelimport.keras.layers.embeddings.KerasEmbedding; @@ -313,6 +311,12 @@ public class KerasLayerUtils { if (lambdaLayer != null){ layer = new KerasLambda(layerConfig, enforceTrainingConfig, lambdaLayer); } + } else if(layerClassName.equals(conf.getLAYER_CLASS_NAME_RELU())){ + layer = new KerasReLU(layerConfig, enforceTrainingConfig); + } else if(layerClassName.equals(conf.getLAYER_CLASS_NAME_ELU())){ + layer = new KerasELU(layerConfig, enforceTrainingConfig); + } else if(layerClassName.equals(conf.getLAYER_CLASS_NAME_SOFTMAX())){ + layer = new KerasSoftmax(layerConfig, enforceTrainingConfig); } if (layer == null){ Class customConfig = customLayers.get(layerClassName); diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/BaseDL4JTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/BaseDL4JTest.java deleted file mode 100644 index d7ae7e2ca..000000000 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/BaseDL4JTest.java +++ /dev/null @@ -1,140 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.deeplearning4j.nn.modelimport.keras; - -import lombok.extern.slf4j.Slf4j; -import org.bytedeco.javacpp.Pointer; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TestName; -import org.nd4j.linalg.api.buffer.DataType; -import org.nd4j.linalg.api.memory.MemoryWorkspace; -import org.nd4j.linalg.api.ops.executioner.OpExecutioner; -import org.nd4j.linalg.factory.Nd4j; -import org.nd4j.linalg.profiler.ProfilerConfig; - -import java.lang.management.ManagementFactory; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -@Slf4j -public class BaseDL4JTest { - - @Rule - public TestName name = new TestName(); - - protected long startTime; - protected int threadCountBefore; - - /** - * Override this to set the profiling mode for the tests defined in the child class - */ - public OpExecutioner.ProfilingMode getProfilingMode(){ - return OpExecutioner.ProfilingMode.SCOPE_PANIC; - } - - /** - * Override this to set the datatype of the tests defined in the child class - */ - public DataType getDataType(){ - return DataType.DOUBLE; - } - - public DataType getDefaultFPDataType(){ - return getDataType(); - } - - @Before - public void beforeTest(){ - log.info("{}.{}", getClass().getSimpleName(), name.getMethodName()); - Nd4j.getExecutioner().setProfilingMode(getProfilingMode()); - Nd4j.getExecutioner().setProfilingConfig(ProfilerConfig.builder().build()); - Nd4j.setDefaultDataTypes(getDataType(), getDefaultFPDataType()); - startTime = System.currentTimeMillis(); - threadCountBefore = ManagementFactory.getThreadMXBean().getThreadCount(); - } - - @After - public void afterTest(){ - //Attempt to keep workspaces isolated between tests - Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread(); - MemoryWorkspace currWS = Nd4j.getMemoryManager().getCurrentWorkspace(); - 
Nd4j.getMemoryManager().setCurrentWorkspace(null); - if(currWS != null){ - //Not really safe to continue testing under this situation... other tests will likely fail with obscure - // errors that are hard to track back to this - log.error("Open workspace leaked from test! Exiting - {}, isOpen = {} - {}", currWS.getId(), currWS.isScopeActive(), currWS); - System.exit(1); - } - - StringBuilder sb = new StringBuilder(); - long maxPhys = Pointer.maxPhysicalBytes(); - long maxBytes = Pointer.maxBytes(); - long currPhys = Pointer.physicalBytes(); - long currBytes = Pointer.totalBytes(); - - long jvmTotal = Runtime.getRuntime().totalMemory(); - long jvmMax = Runtime.getRuntime().maxMemory(); - - int threadsAfter = ManagementFactory.getThreadMXBean().getThreadCount(); - - long duration = System.currentTimeMillis() - startTime; - sb.append(getClass().getSimpleName()).append(".").append(name.getMethodName()) - .append(": ").append(duration).append(" ms") - .append(", threadCount: (").append(threadCountBefore).append("->").append(threadsAfter).append(")") - .append(", jvmTotal=").append(jvmTotal) - .append(", jvmMax=").append(jvmMax) - .append(", totalBytes=").append(currBytes).append(", maxBytes=").append(maxBytes) - .append(", currPhys=").append(currPhys).append(", maxPhys=").append(maxPhys); - - List ws = Nd4j.getWorkspaceManager().getAllWorkspacesForCurrentThread(); - if(ws != null && ws.size() > 0){ - long currSize = 0; - for(MemoryWorkspace w : ws){ - currSize += w.getCurrentSize(); - } - if(currSize > 0){ - sb.append(", threadWSSize=").append(currSize) - .append(" (").append(ws.size()).append(" WSs)"); - } - } - - - Properties p = Nd4j.getExecutioner().getEnvironmentInformation(); - Object o = p.get("cuda.devicesInformation"); - if(o instanceof List){ - List> l = (List>) o; - if(l.size() > 0) { - - sb.append(" [").append(l.size()) - .append(" GPUs: "); - - for (int i = 0; i < l.size(); i++) { - Map m = l.get(i); - if(i > 0) - sb.append(","); - 
sb.append("(").append(m.get("cuda.freeMemory")).append(" free, ") - .append(m.get("cuda.totalMemory")).append(" total)"); - } - sb.append("]"); - } - } - log.info(sb.toString()); - } -} diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/MiscTests.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/MiscTests.java index dcfd53518..92c55c891 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/MiscTests.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/MiscTests.java @@ -17,6 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras; import org.apache.commons.io.FileUtils; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.utils.DL4JKerasModelValidator; import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; import org.junit.Rule; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/FullModelComparisons.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/FullModelComparisons.java index 2fb99dd2e..ee5129b84 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/FullModelComparisons.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/FullModelComparisons.java @@ -23,7 +23,7 @@ import org.datavec.api.split.NumberedFileInputSplit; import org.deeplearning4j.datasets.datavec.SequenceRecordReaderDataSetIterator; import org.deeplearning4j.nn.layers.recurrent.LSTM; import org.deeplearning4j.nn.layers.recurrent.LastTimeStepLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import 
org.deeplearning4j.nn.modelimport.keras.KerasModel; import org.deeplearning4j.nn.modelimport.keras.KerasSequentialModel; import org.deeplearning4j.nn.modelimport.keras.exceptions.InvalidKerasConfigurationException; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/JsonTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/JsonTest.java index 20b80d30f..4aae27af3 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/JsonTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/JsonTest.java @@ -18,7 +18,7 @@ package org.deeplearning4j.nn.modelimport.keras.configurations; import org.deeplearning4j.nn.conf.InputPreProcessor; import org.deeplearning4j.nn.conf.NeuralNetConfiguration; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.preprocessors.KerasFlattenRnnPreprocessor; import org.deeplearning4j.nn.modelimport.keras.preprocessors.PermutePreprocessor; import org.deeplearning4j.nn.modelimport.keras.preprocessors.ReshapePreprocessor; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/Keras1ModelConfigurationTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/Keras1ModelConfigurationTest.java index 554a2c2d1..d4b8e453a 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/Keras1ModelConfigurationTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/Keras1ModelConfigurationTest.java @@ -20,7 +20,7 @@ import 
lombok.extern.slf4j.Slf4j; import org.deeplearning4j.nn.conf.ComputationGraphConfiguration; import org.deeplearning4j.nn.conf.MultiLayerConfiguration; import org.deeplearning4j.nn.graph.ComputationGraph; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasModel; import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; import org.junit.Test; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/Keras2ModelConfigurationTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/Keras2ModelConfigurationTest.java index cb3c40a1e..4d4bf067e 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/Keras2ModelConfigurationTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/Keras2ModelConfigurationTest.java @@ -21,7 +21,7 @@ import lombok.val; import org.deeplearning4j.nn.conf.ComputationGraphConfiguration; import org.deeplearning4j.nn.conf.MultiLayerConfiguration; import org.deeplearning4j.nn.graph.ComputationGraph; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasLayer; import org.deeplearning4j.nn.modelimport.keras.KerasModel; import org.deeplearning4j.nn.modelimport.keras.KerasModelImport; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/KerasInitilizationTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/KerasInitilizationTest.java index 8ac231e12..583634264 100644 --- 
a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/KerasInitilizationTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/KerasInitilizationTest.java @@ -18,7 +18,7 @@ package org.deeplearning4j.nn.modelimport.keras.configurations; import org.deeplearning4j.nn.conf.distribution.*; import org.deeplearning4j.nn.conf.layers.DenseLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/KerasModelImportTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/KerasModelImportTest.java index b5d3c9ab6..cf51831a2 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/KerasModelImportTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/configurations/KerasModelImportTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.configurations; import lombok.extern.slf4j.Slf4j; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasModelImport; import org.deeplearning4j.nn.modelimport.keras.exceptions.InvalidKerasConfigurationException; import org.deeplearning4j.nn.modelimport.keras.exceptions.UnsupportedKerasConfigurationException; diff --git 
a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasCustomLayerTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasCustomLayerTest.java index c14377b31..cdf5faca3 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasCustomLayerTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasCustomLayerTest.java @@ -20,7 +20,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.FileUtils; import org.deeplearning4j.common.resources.DL4JResources; import org.deeplearning4j.nn.graph.ComputationGraph; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasLayer; import org.deeplearning4j.nn.modelimport.keras.KerasModelImport; import org.deeplearning4j.nn.modelimport.keras.layers.custom.KerasLRN; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasLambdaTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasLambdaTest.java index 97ae4318f..1d55a5d2c 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasLambdaTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasLambdaTest.java @@ -19,7 +19,7 @@ package org.deeplearning4j.nn.modelimport.keras.e2e; import org.deeplearning4j.nn.conf.inputs.InputType; import org.deeplearning4j.nn.conf.layers.samediff.SameDiffLambdaLayer; import org.deeplearning4j.nn.graph.ComputationGraph; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import 
org.deeplearning4j.nn.modelimport.keras.KerasLayer; import org.deeplearning4j.nn.modelimport.keras.KerasModel; import org.deeplearning4j.nn.modelimport.keras.KerasSequentialModel; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasModelEndToEndTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasModelEndToEndTest.java index d4f458a39..b17c215cb 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasModelEndToEndTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasModelEndToEndTest.java @@ -30,7 +30,7 @@ import org.deeplearning4j.nn.conf.layers.FeedForwardLayer; import org.deeplearning4j.nn.conf.layers.LossLayer; import org.deeplearning4j.nn.conf.layers.RnnOutputLayer; import org.deeplearning4j.nn.graph.ComputationGraph; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.Hdf5Archive; import org.deeplearning4j.nn.modelimport.keras.KerasModel; import org.deeplearning4j.nn.modelimport.keras.KerasSequentialModel; @@ -724,6 +724,29 @@ public class KerasModelEndToEndTest extends BaseDL4JTest { } } + @Test + public void testActivationLayers() throws Exception { + String[] names = new String[]{ + "ELU_0_model.h5", + "LeakyReLU_0_model.h5", + "ReLU_0_model.h5", + "ReLU_1_model.h5", + "ReLU_2_model.h5", + "ReLU_3_model.h5", + "Softmax_0_model.h5", + "ThresholdReLU_0_model.h5", + }; + + for(String name : names ){ + System.out.println("Starting test: " + name); + String modelPath = "modelimport/keras/examples/activations/" + name; + String inputsOutputPath = "modelimport/keras/examples/activations/" + (name.substring(0,name.length()-"model.h5".length()) + "inputs_and_outputs.h5"); + + importEndModelTest(modelPath, 
inputsOutputPath, true, true, + true, true, false, null, null); + } + } + private ComputationGraph importFunctionalModelH5Test(String modelPath) throws Exception { return importFunctionalModelH5Test(modelPath, null, false); } @@ -991,8 +1014,8 @@ public class KerasModelEndToEndTest extends BaseDL4JTest { } Nd4j.setDataType(DataType.DOUBLE); - boolean passed = GradientCheckUtil.checkGradients(netToTest, eps, max_rel_error, min_abs_error, true, false, - input, labels, null, null, true, 9); + boolean passed = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(netToTest).input(input) + .labels(labels).subset(true).maxPerParam(9)); assertTrue("Gradient check failed", passed); } diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasYolo9000PredictTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasYolo9000PredictTest.java index 8bd6e779d..1da4bf5cc 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasYolo9000PredictTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasYolo9000PredictTest.java @@ -19,7 +19,7 @@ package org.deeplearning4j.nn.modelimport.keras.e2e; import lombok.extern.slf4j.Slf4j; import org.deeplearning4j.nn.conf.inputs.InputType; import org.deeplearning4j.nn.graph.ComputationGraph; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasLayer; import org.deeplearning4j.nn.modelimport.keras.KerasModelImport; import org.deeplearning4j.nn.modelimport.keras.layers.convolutional.KerasSpaceToDepth; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasYolo9000Test.java 
b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasYolo9000Test.java index dcfe7bfda..f428a0dbd 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasYolo9000Test.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/e2e/KerasYolo9000Test.java @@ -18,7 +18,7 @@ package org.deeplearning4j.nn.modelimport.keras.e2e; import lombok.extern.slf4j.Slf4j; import org.deeplearning4j.nn.graph.ComputationGraph; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasLayer; import org.deeplearning4j.nn.modelimport.keras.KerasModel; import org.deeplearning4j.nn.modelimport.keras.layers.convolutional.KerasSpaceToDepth; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasLeakyReLUTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasLeakyReLUTest.java index 7770e7816..e16da1bfa 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasLeakyReLUTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasLeakyReLUTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.advanced.activation; import org.deeplearning4j.nn.conf.layers.ActivationLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import 
org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasPReLUTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasPReLUTest.java index b22963e2a..ee7c0ab48 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasPReLUTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasPReLUTest.java @@ -18,7 +18,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.advanced.activation; import org.deeplearning4j.nn.conf.inputs.InputType; import org.deeplearning4j.nn.conf.layers.PReLULayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasThresholdedReLUTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasThresholdedReLUTest.java index 02bf24e1d..1b24c1ff2 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasThresholdedReLUTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/advanced/activation/KerasThresholdedReLUTest.java @@ -17,7 +17,7 @@ package 
org.deeplearning4j.nn.modelimport.keras.layers.advanced.activation; import org.deeplearning4j.nn.conf.layers.ActivationLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasAtrousConvolution1DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasAtrousConvolution1DTest.java index eccaeb536..1b3c98f60 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasAtrousConvolution1DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasAtrousConvolution1DTest.java @@ -19,7 +19,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.ConvolutionMode; import org.deeplearning4j.nn.conf.dropout.Dropout; import org.deeplearning4j.nn.conf.layers.Convolution1DLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasTestUtils; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasAtrousConvolution2DTest.java 
b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasAtrousConvolution2DTest.java index 2a01a1d8b..411127bd7 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasAtrousConvolution2DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasAtrousConvolution2DTest.java @@ -19,7 +19,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.ConvolutionMode; import org.deeplearning4j.nn.conf.dropout.Dropout; import org.deeplearning4j.nn.conf.layers.ConvolutionLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasTestUtils; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution1DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution1DTest.java index 449dc10cc..f12d66f56 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution1DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution1DTest.java @@ -19,7 +19,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.ConvolutionMode; import org.deeplearning4j.nn.conf.dropout.Dropout; import org.deeplearning4j.nn.conf.layers.Convolution1DLayer; -import 
org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasTestUtils; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution2DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution2DTest.java index bd0d6e012..8f61d7038 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution2DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution2DTest.java @@ -19,7 +19,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.ConvolutionMode; import org.deeplearning4j.nn.conf.dropout.Dropout; import org.deeplearning4j.nn.conf.layers.ConvolutionLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasTestUtils; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution3DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution3DTest.java index ff0ba8f3d..11177c5dd 100644 --- 
a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution3DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasConvolution3DTest.java @@ -19,7 +19,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.ConvolutionMode; import org.deeplearning4j.nn.conf.dropout.Dropout; import org.deeplearning4j.nn.conf.layers.ConvolutionLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasTestUtils; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping1DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping1DTest.java index 1676f6136..86fb5591b 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping1DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping1DTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.layers.convolutional.Cropping1D; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git 
a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping2DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping2DTest.java index 95f5d7485..88226704e 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping2DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping2DTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.layers.convolutional.Cropping2D; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping3DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping3DTest.java index 6ae3065b6..392195e0e 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping3DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasCropping3DTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.layers.convolutional.Cropping3D; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import 
org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasDeconvolution2DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasDeconvolution2DTest.java index 3675d46a8..177e2e717 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasDeconvolution2DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasDeconvolution2DTest.java @@ -19,7 +19,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.ConvolutionMode; import org.deeplearning4j.nn.conf.dropout.Dropout; import org.deeplearning4j.nn.conf.layers.Deconvolution2D; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasTestUtils; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasDepthwiseConvolution2DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasDepthwiseConvolution2DTest.java index 364c50e72..6b173ba8e 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasDepthwiseConvolution2DTest.java +++ 
b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasDepthwiseConvolution2DTest.java @@ -19,7 +19,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.ConvolutionMode; import org.deeplearning4j.nn.conf.dropout.Dropout; import org.deeplearning4j.nn.conf.layers.DepthwiseConvolution2D; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasLayer; import org.deeplearning4j.nn.modelimport.keras.KerasTestUtils; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasSeparableConvolution2DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasSeparableConvolution2DTest.java index 7d05a1b67..f8ec7e163 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasSeparableConvolution2DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasSeparableConvolution2DTest.java @@ -19,7 +19,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.ConvolutionMode; import org.deeplearning4j.nn.conf.dropout.Dropout; import org.deeplearning4j.nn.conf.layers.SeparableConvolution2D; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasTestUtils; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; diff --git 
a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling1DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling1DTest.java index aec4278e2..c93a0fe32 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling1DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling1DTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.layers.Upsampling1D; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling2DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling2DTest.java index cea117f8f..8033f24e7 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling2DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling2DTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.layers.Upsampling2D; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import 
org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling3DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling3DTest.java index a8e564340..578d92276 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling3DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasUpsampling3DTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.layers.Upsampling3D; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding1DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding1DTest.java index 4cc9cc2cb..aa2d96653 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding1DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding1DTest.java @@ -17,7 +17,7 @@ 
package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.layers.ZeroPadding1DLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding2DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding2DTest.java index 5f72cbcf2..08d6e57a9 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding2DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding2DTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.layers.ZeroPaddingLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding3DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding3DTest.java index c0a60defd..960a0194e 100644 --- 
a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding3DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/convolution/KerasZeroPadding3DTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.convolution; import org.deeplearning4j.nn.conf.layers.ZeroPadding3DLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasActivationLayer.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasActivationLayer.java index d7f4d8ad9..e19178ea3 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasActivationLayer.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasActivationLayer.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.core; import org.deeplearning4j.nn.conf.layers.ActivationLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasDenseTest.java 
b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasDenseTest.java index cca2515a8..f2ad5c242 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasDenseTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasDenseTest.java @@ -18,7 +18,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.core; import org.deeplearning4j.nn.conf.dropout.Dropout; import org.deeplearning4j.nn.conf.layers.DenseLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasTestUtils; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasDropoutTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasDropoutTest.java index ff5c49cc5..943a76b26 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasDropoutTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasDropoutTest.java @@ -18,7 +18,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.core; import org.deeplearning4j.nn.conf.dropout.Dropout; import org.deeplearning4j.nn.conf.layers.DropoutLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import 
org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasMaskingTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasMaskingTest.java index 144d24ab8..76e5c6239 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasMaskingTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasMaskingTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.core; import org.deeplearning4j.nn.conf.layers.util.MaskZeroLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasPermuteTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasPermuteTest.java index 1f2400426..218e2fc7c 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasPermuteTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasPermuteTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.core; import org.deeplearning4j.nn.conf.inputs.InputType; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import 
org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasRepeatVectorTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasRepeatVectorTest.java index 6c439fb95..2a448bf4c 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasRepeatVectorTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasRepeatVectorTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.core; import org.deeplearning4j.nn.conf.layers.misc.RepeatVector; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasReshapeTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasReshapeTest.java index 19d5ce623..7adfa09c7 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasReshapeTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasReshapeTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.core; import 
org.deeplearning4j.nn.conf.inputs.InputType; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasSpatialDropout2DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasSpatialDropout2DTest.java index 71ec2f468..dc17a4946 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasSpatialDropout2DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/core/KerasSpatialDropout2DTest.java @@ -18,7 +18,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.core; import org.deeplearning4j.nn.conf.dropout.SpatialDropout; import org.deeplearning4j.nn.conf.layers.DropoutLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/embeddings/KerasEmbeddingTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/embeddings/KerasEmbeddingTest.java index b171e063f..55274209d 100644 --- 
a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/embeddings/KerasEmbeddingTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/embeddings/KerasEmbeddingTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.embeddings; import org.deeplearning4j.nn.conf.layers.EmbeddingSequenceLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/local/KerasLocallyConnected1DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/local/KerasLocallyConnected1DTest.java index 428d5d99e..cc91c89bb 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/local/KerasLocallyConnected1DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/local/KerasLocallyConnected1DTest.java @@ -20,7 +20,7 @@ import org.deeplearning4j.nn.conf.ConvolutionMode; import org.deeplearning4j.nn.conf.dropout.Dropout; import org.deeplearning4j.nn.conf.inputs.InputType; import org.deeplearning4j.nn.conf.layers.LocallyConnected1D; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasTestUtils; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; diff --git 
a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/local/KerasLocallyConnected2DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/local/KerasLocallyConnected2DTest.java index 1ea69e06a..cb05d4597 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/local/KerasLocallyConnected2DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/local/KerasLocallyConnected2DTest.java @@ -20,7 +20,7 @@ import org.deeplearning4j.nn.conf.ConvolutionMode; import org.deeplearning4j.nn.conf.dropout.Dropout; import org.deeplearning4j.nn.conf.inputs.InputType; import org.deeplearning4j.nn.conf.layers.LocallyConnected2D; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasTestUtils; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasAlphaDropoutTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasAlphaDropoutTest.java index 6f34e1684..3a632f2b4 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasAlphaDropoutTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasAlphaDropoutTest.java @@ -18,7 +18,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.noise; import org.deeplearning4j.nn.conf.dropout.AlphaDropout; import org.deeplearning4j.nn.conf.layers.DropoutLayer; -import 
org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasGaussianDropoutTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasGaussianDropoutTest.java index 7ca51b37d..b759dd370 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasGaussianDropoutTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasGaussianDropoutTest.java @@ -18,7 +18,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.noise; import org.deeplearning4j.nn.conf.dropout.GaussianDropout; import org.deeplearning4j.nn.conf.layers.DropoutLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasGaussianNoiseTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasGaussianNoiseTest.java index 58abe77a4..be01a06c5 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasGaussianNoiseTest.java +++ 
b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/noise/KerasGaussianNoiseTest.java @@ -18,7 +18,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.noise; import org.deeplearning4j.nn.conf.dropout.GaussianNoise; import org.deeplearning4j.nn.conf.layers.DropoutLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/normalization/KerasBatchNormalizationTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/normalization/KerasBatchNormalizationTest.java index 6fbf9ec43..e240b19f0 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/normalization/KerasBatchNormalizationTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/normalization/KerasBatchNormalizationTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.normalization; import org.deeplearning4j.nn.conf.layers.BatchNormalization; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling1DTest.java 
b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling1DTest.java index 3b2716cd8..25acee41a 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling1DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling1DTest.java @@ -19,7 +19,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.pooling; import org.deeplearning4j.nn.conf.ConvolutionMode; import org.deeplearning4j.nn.conf.layers.PoolingType; import org.deeplearning4j.nn.conf.layers.Subsampling1DLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling2DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling2DTest.java index f4852d89a..137f8ca00 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling2DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling2DTest.java @@ -19,7 +19,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.pooling; import org.deeplearning4j.nn.conf.ConvolutionMode; import org.deeplearning4j.nn.conf.layers.PoolingType; import org.deeplearning4j.nn.conf.layers.SubsamplingLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import 
org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling3DTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling3DTest.java index 9026c7308..153e41103 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling3DTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/pooling/KerasPooling3DTest.java @@ -19,7 +19,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.pooling; import org.deeplearning4j.nn.conf.ConvolutionMode; import org.deeplearning4j.nn.conf.layers.PoolingType; import org.deeplearning4j.nn.conf.layers.Subsampling3DLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasLSTMTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasLSTMTest.java index 3b82f14ae..60b1044d9 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasLSTMTest.java +++ 
b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasLSTMTest.java @@ -21,7 +21,7 @@ import org.deeplearning4j.nn.conf.inputs.InputType; import org.deeplearning4j.nn.conf.layers.LSTM; import org.deeplearning4j.nn.conf.layers.recurrent.LastTimeStep; import org.deeplearning4j.nn.conf.layers.util.MaskZeroLayer; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasTestUtils; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasSimpleRnnTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasSimpleRnnTest.java index b760a90e1..2abcd3e2a 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasSimpleRnnTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/recurrent/KerasSimpleRnnTest.java @@ -19,7 +19,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.recurrent; import org.deeplearning4j.nn.conf.dropout.Dropout; import org.deeplearning4j.nn.conf.layers.recurrent.LastTimeStep; import org.deeplearning4j.nn.conf.layers.recurrent.SimpleRnn; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasTestUtils; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; diff --git 
a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/wrappers/KerasBidirectionalTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/wrappers/KerasBidirectionalTest.java index 91969bc09..0613ecf67 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/wrappers/KerasBidirectionalTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/layers/wrappers/KerasBidirectionalTest.java @@ -18,7 +18,7 @@ package org.deeplearning4j.nn.modelimport.keras.layers.wrappers; import org.deeplearning4j.nn.conf.layers.LSTM; import org.deeplearning4j.nn.conf.layers.recurrent.Bidirectional; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.config.Keras1LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration; import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/optimizers/OptimizerImport.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/optimizers/OptimizerImport.java index f2a693d9a..d030158f0 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/optimizers/OptimizerImport.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/optimizers/OptimizerImport.java @@ -16,7 +16,7 @@ package org.deeplearning4j.nn.modelimport.keras.optimizers; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasModel; import 
org.deeplearning4j.nn.modelimport.keras.KerasSequentialModel; import org.deeplearning4j.nn.modelimport.keras.utils.KerasModelBuilder; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/preprocessing/sequence/TimeSeriesGeneratorImportTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/preprocessing/sequence/TimeSeriesGeneratorImportTest.java index 632bcc692..fc0cbfe9b 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/preprocessing/sequence/TimeSeriesGeneratorImportTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/preprocessing/sequence/TimeSeriesGeneratorImportTest.java @@ -16,7 +16,7 @@ package org.deeplearning4j.nn.modelimport.keras.preprocessing.sequence; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.exceptions.InvalidKerasConfigurationException; import org.junit.Test; import org.nd4j.resources.Resources; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/preprocessing/sequence/TimeSeriesGeneratorTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/preprocessing/sequence/TimeSeriesGeneratorTest.java index 3a2da91a9..b068881e1 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/preprocessing/sequence/TimeSeriesGeneratorTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/preprocessing/sequence/TimeSeriesGeneratorTest.java @@ -16,7 +16,7 @@ package org.deeplearning4j.nn.modelimport.keras.preprocessing.sequence; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import 
org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.exceptions.InvalidKerasConfigurationException; import org.junit.Test; import org.nd4j.linalg.api.ndarray.INDArray; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/preprocessing/text/TokenizerImportTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/preprocessing/text/TokenizerImportTest.java index f79ef60a5..935be4fbe 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/preprocessing/text/TokenizerImportTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/preprocessing/text/TokenizerImportTest.java @@ -16,7 +16,7 @@ package org.deeplearning4j.nn.modelimport.keras.preprocessing.text; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.exceptions.InvalidKerasConfigurationException; import org.junit.Test; import org.nd4j.resources.Resources; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/preprocessing/text/TokenizerTest.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/preprocessing/text/TokenizerTest.java index a4fb6994b..cebb22fb4 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/preprocessing/text/TokenizerTest.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/preprocessing/text/TokenizerTest.java @@ -16,7 +16,7 @@ package org.deeplearning4j.nn.modelimport.keras.preprocessing.text; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Test; import 
org.nd4j.linalg.api.ndarray.INDArray; diff --git a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/weights/KerasWeightSettingTests.java b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/weights/KerasWeightSettingTests.java index 7791e3417..75334bcd0 100644 --- a/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/weights/KerasWeightSettingTests.java +++ b/deeplearning4j/deeplearning4j-modelimport/src/test/java/org/deeplearning4j/nn/modelimport/keras/weights/KerasWeightSettingTests.java @@ -19,7 +19,7 @@ package org.deeplearning4j.nn.modelimport.keras.weights; import lombok.extern.slf4j.Slf4j; import lombok.val; import org.deeplearning4j.nn.graph.ComputationGraph; -import org.deeplearning4j.nn.modelimport.keras.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.modelimport.keras.KerasLayer; import org.deeplearning4j.nn.modelimport.keras.KerasModel; import org.deeplearning4j.nn.modelimport.keras.layers.convolutional.KerasSpaceToDepth; diff --git a/deeplearning4j/deeplearning4j-nearestneighbors-parent/deeplearning4j-nearestneighbor-server/pom.xml b/deeplearning4j/deeplearning4j-nearestneighbors-parent/deeplearning4j-nearestneighbor-server/pom.xml index ab28d78c4..2d4a4da14 100644 --- a/deeplearning4j/deeplearning4j-nearestneighbors-parent/deeplearning4j-nearestneighbor-server/pom.xml +++ b/deeplearning4j/deeplearning4j-nearestneighbors-parent/deeplearning4j-nearestneighbor-server/pom.xml @@ -103,6 +103,12 @@ logback-classic test + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-nearestneighbors-parent/deeplearning4j-nearestneighbor-server/src/test/java/org/deeplearning4j/nearestneighbor/server/BaseDL4JTest.java 
b/deeplearning4j/deeplearning4j-nearestneighbors-parent/deeplearning4j-nearestneighbor-server/src/test/java/org/deeplearning4j/nearestneighbor/server/BaseDL4JTest.java deleted file mode 100644 index 41107decf..000000000 --- a/deeplearning4j/deeplearning4j-nearestneighbors-parent/deeplearning4j-nearestneighbor-server/src/test/java/org/deeplearning4j/nearestneighbor/server/BaseDL4JTest.java +++ /dev/null @@ -1,140 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.deeplearning4j.nearestneighbor.server; - -import lombok.extern.slf4j.Slf4j; -import org.bytedeco.javacpp.Pointer; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TestName; -import org.nd4j.linalg.api.buffer.DataType; -import org.nd4j.linalg.api.memory.MemoryWorkspace; -import org.nd4j.linalg.api.ops.executioner.OpExecutioner; -import org.nd4j.linalg.factory.Nd4j; -import org.nd4j.linalg.profiler.ProfilerConfig; - -import java.lang.management.ManagementFactory; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -@Slf4j -public class BaseDL4JTest { - - @Rule - public TestName name = new TestName(); - - protected long startTime; - protected int threadCountBefore; - - /** - * Override this to set the profiling mode for the tests defined in the child class - */ - public OpExecutioner.ProfilingMode getProfilingMode(){ - return OpExecutioner.ProfilingMode.SCOPE_PANIC; - } - - /** - * Override this to set the datatype of the tests defined in the child class - */ - public DataType getDataType(){ - return DataType.DOUBLE; - } - - public DataType getDefaultFPDataType(){ - return getDataType(); - } - - @Before - public void beforeTest(){ - log.info("{}.{}", getClass().getSimpleName(), name.getMethodName()); - Nd4j.getExecutioner().setProfilingMode(getProfilingMode()); - Nd4j.getExecutioner().setProfilingConfig(ProfilerConfig.builder().build()); - Nd4j.setDefaultDataTypes(getDataType(), getDefaultFPDataType()); - startTime = System.currentTimeMillis(); - threadCountBefore = ManagementFactory.getThreadMXBean().getThreadCount(); - } - - @After - public void afterTest(){ - //Attempt to keep workspaces isolated between tests - Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread(); - MemoryWorkspace currWS = Nd4j.getMemoryManager().getCurrentWorkspace(); 
- Nd4j.getMemoryManager().setCurrentWorkspace(null); - if(currWS != null){ - //Not really safe to continue testing under this situation... other tests will likely fail with obscure - // errors that are hard to track back to this - log.error("Open workspace leaked from test! Exiting - {}, isOpen = {} - {}", currWS.getId(), currWS.isScopeActive(), currWS); - System.exit(1); - } - - StringBuilder sb = new StringBuilder(); - long maxPhys = Pointer.maxPhysicalBytes(); - long maxBytes = Pointer.maxBytes(); - long currPhys = Pointer.physicalBytes(); - long currBytes = Pointer.totalBytes(); - - long jvmTotal = Runtime.getRuntime().totalMemory(); - long jvmMax = Runtime.getRuntime().maxMemory(); - - int threadsAfter = ManagementFactory.getThreadMXBean().getThreadCount(); - - long duration = System.currentTimeMillis() - startTime; - sb.append(getClass().getSimpleName()).append(".").append(name.getMethodName()) - .append(": ").append(duration).append(" ms") - .append(", threadCount: (").append(threadCountBefore).append("->").append(threadsAfter).append(")") - .append(", jvmTotal=").append(jvmTotal) - .append(", jvmMax=").append(jvmMax) - .append(", totalBytes=").append(currBytes).append(", maxBytes=").append(maxBytes) - .append(", currPhys=").append(currPhys).append(", maxPhys=").append(maxPhys); - - List ws = Nd4j.getWorkspaceManager().getAllWorkspacesForCurrentThread(); - if(ws != null && ws.size() > 0){ - long currSize = 0; - for(MemoryWorkspace w : ws){ - currSize += w.getCurrentSize(); - } - if(currSize > 0){ - sb.append(", threadWSSize=").append(currSize) - .append(" (").append(ws.size()).append(" WSs)"); - } - } - - - Properties p = Nd4j.getExecutioner().getEnvironmentInformation(); - Object o = p.get("cuda.devicesInformation"); - if(o instanceof List){ - List> l = (List>) o; - if(l.size() > 0) { - - sb.append(" [").append(l.size()) - .append(" GPUs: "); - - for (int i = 0; i < l.size(); i++) { - Map m = l.get(i); - if(i > 0) - sb.append(","); - 
sb.append("(").append(m.get("cuda.freeMemory")).append(" free, ") - .append(m.get("cuda.totalMemory")).append(" total)"); - } - sb.append("]"); - } - } - log.info(sb.toString()); - } -} diff --git a/deeplearning4j/deeplearning4j-nearestneighbors-parent/deeplearning4j-nearestneighbor-server/src/test/java/org/deeplearning4j/nearestneighbor/server/NearestNeighborTest.java b/deeplearning4j/deeplearning4j-nearestneighbors-parent/deeplearning4j-nearestneighbor-server/src/test/java/org/deeplearning4j/nearestneighbor/server/NearestNeighborTest.java index b42c407e5..4555511ce 100644 --- a/deeplearning4j/deeplearning4j-nearestneighbors-parent/deeplearning4j-nearestneighbor-server/src/test/java/org/deeplearning4j/nearestneighbor/server/NearestNeighborTest.java +++ b/deeplearning4j/deeplearning4j-nearestneighbors-parent/deeplearning4j-nearestneighbor-server/src/test/java/org/deeplearning4j/nearestneighbor/server/NearestNeighborTest.java @@ -17,6 +17,7 @@ package org.deeplearning4j.nearestneighbor.server; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.clustering.sptree.DataPoint; import org.deeplearning4j.clustering.vptree.VPTree; import org.deeplearning4j.clustering.vptree.VPTreeFillSearch; @@ -24,7 +25,6 @@ import org.deeplearning4j.nearestneighbor.client.NearestNeighborsClient; import org.deeplearning4j.nearestneighbor.model.NearestNeighborRequest; import org.deeplearning4j.nearestneighbor.model.NearestNeighborsResult; import org.deeplearning4j.nearestneighbor.model.NearestNeighborsResults; -import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -36,7 +36,6 @@ import java.io.File; import java.io.IOException; import java.net.ServerSocket; import java.util.List; -import java.util.UUID; import java.util.concurrent.Executor; import java.util.concurrent.Executors; diff --git a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/pom.xml 
b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/pom.xml index 87bb7e68e..fbe0ddccf 100644 --- a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/pom.xml +++ b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/pom.xml @@ -66,6 +66,12 @@ 2.10.3 test + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/BaseDL4JTest.java b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/BaseDL4JTest.java deleted file mode 100644 index 8b57f5dc0..000000000 --- a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/BaseDL4JTest.java +++ /dev/null @@ -1,140 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.deeplearning4j.clustering; - -import lombok.extern.slf4j.Slf4j; -import org.bytedeco.javacpp.Pointer; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TestName; -import org.nd4j.linalg.api.buffer.DataType; -import org.nd4j.linalg.api.memory.MemoryWorkspace; -import org.nd4j.linalg.api.ops.executioner.OpExecutioner; -import org.nd4j.linalg.factory.Nd4j; -import org.nd4j.linalg.profiler.ProfilerConfig; - -import java.lang.management.ManagementFactory; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -@Slf4j -public class BaseDL4JTest { - - @Rule - public TestName name = new TestName(); - - protected long startTime; - protected int threadCountBefore; - - /** - * Override this to set the profiling mode for the tests defined in the child class - */ - public OpExecutioner.ProfilingMode getProfilingMode(){ - return OpExecutioner.ProfilingMode.SCOPE_PANIC; - } - - /** - * Override this to set the datatype of the tests defined in the child class - */ - public DataType getDataType(){ - return DataType.DOUBLE; - } - - public DataType getDefaultFPDataType(){ - return getDataType(); - } - - @Before - public void beforeTest(){ - log.info("{}.{}", getClass().getSimpleName(), name.getMethodName()); - Nd4j.getExecutioner().setProfilingMode(getProfilingMode()); - Nd4j.getExecutioner().setProfilingConfig(ProfilerConfig.builder().build()); - Nd4j.setDefaultDataTypes(getDataType(), getDefaultFPDataType()); - startTime = System.currentTimeMillis(); - threadCountBefore = ManagementFactory.getThreadMXBean().getThreadCount(); - } - - @After - public void afterTest(){ - //Attempt to keep workspaces isolated between tests - Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread(); - MemoryWorkspace currWS = Nd4j.getMemoryManager().getCurrentWorkspace(); - 
Nd4j.getMemoryManager().setCurrentWorkspace(null); - if(currWS != null){ - //Not really safe to continue testing under this situation... other tests will likely fail with obscure - // errors that are hard to track back to this - log.error("Open workspace leaked from test! Exiting - {}, isOpen = {} - {}", currWS.getId(), currWS.isScopeActive(), currWS); - System.exit(1); - } - - StringBuilder sb = new StringBuilder(); - long maxPhys = Pointer.maxPhysicalBytes(); - long maxBytes = Pointer.maxBytes(); - long currPhys = Pointer.physicalBytes(); - long currBytes = Pointer.totalBytes(); - - long jvmTotal = Runtime.getRuntime().totalMemory(); - long jvmMax = Runtime.getRuntime().maxMemory(); - - int threadsAfter = ManagementFactory.getThreadMXBean().getThreadCount(); - - long duration = System.currentTimeMillis() - startTime; - sb.append(getClass().getSimpleName()).append(".").append(name.getMethodName()) - .append(": ").append(duration).append(" ms") - .append(", threadCount: (").append(threadCountBefore).append("->").append(threadsAfter).append(")") - .append(", jvmTotal=").append(jvmTotal) - .append(", jvmMax=").append(jvmMax) - .append(", totalBytes=").append(currBytes).append(", maxBytes=").append(maxBytes) - .append(", currPhys=").append(currPhys).append(", maxPhys=").append(maxPhys); - - List ws = Nd4j.getWorkspaceManager().getAllWorkspacesForCurrentThread(); - if(ws != null && ws.size() > 0){ - long currSize = 0; - for(MemoryWorkspace w : ws){ - currSize += w.getCurrentSize(); - } - if(currSize > 0){ - sb.append(", threadWSSize=").append(currSize) - .append(" (").append(ws.size()).append(" WSs)"); - } - } - - - Properties p = Nd4j.getExecutioner().getEnvironmentInformation(); - Object o = p.get("cuda.devicesInformation"); - if(o instanceof List){ - List> l = (List>) o; - if(l.size() > 0) { - - sb.append(" [").append(l.size()) - .append(" GPUs: "); - - for (int i = 0; i < l.size(); i++) { - Map m = l.get(i); - if(i > 0) - sb.append(","); - 
sb.append("(").append(m.get("cuda.freeMemory")).append(" free, ") - .append(m.get("cuda.totalMemory")).append(" total)"); - } - sb.append("]"); - } - } - log.info(sb.toString()); - } -} diff --git a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/kdtree/KDTreeTest.java b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/kdtree/KDTreeTest.java index 618ee0c94..89a46a3fe 100644 --- a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/kdtree/KDTreeTest.java +++ b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/kdtree/KDTreeTest.java @@ -16,11 +16,8 @@ package org.deeplearning4j.clustering.kdtree; -import org.joda.time.Instant; -import org.nd4j.shade.guava.base.Stopwatch; -import org.nd4j.shade.guava.primitives.Doubles; import lombok.val; -import org.deeplearning4j.clustering.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.joda.time.Duration; import org.junit.Before; import org.junit.BeforeClass; @@ -30,8 +27,9 @@ import org.nd4j.linalg.api.buffer.DataType; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.primitives.Pair; +import org.nd4j.shade.guava.base.Stopwatch; +import org.nd4j.shade.guava.primitives.Doubles; import org.nd4j.shade.guava.primitives.Floats; -import org.opencv.ml.KNearest; import java.util.ArrayList; import java.util.Arrays; @@ -48,6 +46,11 @@ import static org.junit.Assert.assertTrue; */ public class KDTreeTest extends BaseDL4JTest { + @Override + public long getTimeoutMilliseconds() { + return 120000L; + } + private KDTree kdTree; @BeforeClass diff --git a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/kmeans/KMeansTest.java 
b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/kmeans/KMeansTest.java index c9140942d..2f2619e78 100644 --- a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/kmeans/KMeansTest.java +++ b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/kmeans/KMeansTest.java @@ -18,7 +18,7 @@ package org.deeplearning4j.clustering.kmeans; import lombok.val; import org.apache.commons.lang3.time.StopWatch; -import org.deeplearning4j.clustering.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.clustering.algorithm.Distance; import org.deeplearning4j.clustering.cluster.*; import org.junit.Ignore; diff --git a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/lsh/RandomProjectionLSHTest.java b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/lsh/RandomProjectionLSHTest.java index be148c699..d9a041f0b 100644 --- a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/lsh/RandomProjectionLSHTest.java +++ b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/lsh/RandomProjectionLSHTest.java @@ -16,7 +16,7 @@ package org.deeplearning4j.clustering.lsh; -import org.deeplearning4j.clustering.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.junit.After; import org.junit.Before; import org.junit.Ignore; diff --git a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/quadtree/QuadTreeTest.java 
b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/quadtree/QuadTreeTest.java index aa0e4db40..ec304b0c1 100644 --- a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/quadtree/QuadTreeTest.java +++ b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/quadtree/QuadTreeTest.java @@ -16,7 +16,7 @@ package org.deeplearning4j.clustering.quadtree; -import org.deeplearning4j.clustering.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Test; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; diff --git a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/randomprojection/RPTreeTest.java b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/randomprojection/RPTreeTest.java index 30f4a841e..05bbb1cc9 100644 --- a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/randomprojection/RPTreeTest.java +++ b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/randomprojection/RPTreeTest.java @@ -16,7 +16,7 @@ package org.deeplearning4j.clustering.randomprojection; -import org.deeplearning4j.clustering.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator; import org.junit.Before; import org.junit.Test; diff --git a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/randomprojection/RPUtilsTest.java 
b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/randomprojection/RPUtilsTest.java index 96fbabd41..cb3af27bc 100644 --- a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/randomprojection/RPUtilsTest.java +++ b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/randomprojection/RPUtilsTest.java @@ -16,7 +16,7 @@ package org.deeplearning4j.clustering.randomprojection; -import org.deeplearning4j.clustering.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Test; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; diff --git a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/sptree/SPTreeTest.java b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/sptree/SPTreeTest.java index f5ee19403..132c8a960 100644 --- a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/sptree/SPTreeTest.java +++ b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/sptree/SPTreeTest.java @@ -16,18 +16,15 @@ package org.deeplearning4j.clustering.sptree; -import org.nd4j.shade.guava.util.concurrent.AtomicDouble; import org.apache.commons.lang3.time.StopWatch; -import org.deeplearning4j.clustering.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Before; -import org.junit.Ignore; import org.junit.Test; -import org.nd4j.linalg.api.buffer.DataBuffer; import org.nd4j.linalg.api.buffer.DataType; import org.nd4j.linalg.api.buffer.util.DataTypeUtil; -import org.nd4j.linalg.api.memory.MemoryWorkspace; import org.nd4j.linalg.api.ndarray.INDArray; import 
org.nd4j.linalg.factory.Nd4j; +import org.nd4j.shade.guava.util.concurrent.AtomicDouble; import static org.junit.Assert.*; @@ -36,6 +33,11 @@ import static org.junit.Assert.*; */ public class SPTreeTest extends BaseDL4JTest { + @Override + public long getTimeoutMilliseconds() { + return 120000L; + } + @Before public void setUp() { DataTypeUtil.setDTypeForContext(DataType.DOUBLE); diff --git a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/vptree/VPTreeSerializationTests.java b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/vptree/VPTreeSerializationTests.java index 12180a978..b67d5ccbf 100644 --- a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/vptree/VPTreeSerializationTests.java +++ b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/vptree/VPTreeSerializationTests.java @@ -19,7 +19,7 @@ package org.deeplearning4j.clustering.vptree; import lombok.extern.slf4j.Slf4j; import lombok.val; import org.apache.commons.lang3.SerializationUtils; -import org.deeplearning4j.clustering.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.clustering.sptree.DataPoint; import org.junit.Ignore; import org.junit.Test; diff --git a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/vptree/VpTreeNodeTest.java b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/vptree/VpTreeNodeTest.java index 5edb3926a..b7f254a63 100644 --- a/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/vptree/VpTreeNodeTest.java +++ 
b/deeplearning4j/deeplearning4j-nearestneighbors-parent/nearestneighbor-core/src/test/java/org/deeplearning4j/clustering/vptree/VpTreeNodeTest.java @@ -18,7 +18,7 @@ package org.deeplearning4j.clustering.vptree; import lombok.extern.slf4j.Slf4j; import lombok.val; -import org.deeplearning4j.clustering.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.clustering.sptree.DataPoint; import org.joda.time.Duration; import org.junit.BeforeClass; diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-chinese/pom.xml b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-chinese/pom.xml index 219301c56..23d863cdc 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-chinese/pom.xml +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-chinese/pom.xml @@ -59,6 +59,12 @@ compile + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-chinese/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/ChineseTokenizerTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-chinese/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/ChineseTokenizerTest.java index 0307e906e..aef6ed348 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-chinese/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/ChineseTokenizerTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-chinese/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/ChineseTokenizerTest.java @@ -17,6 +17,7 @@ package org.deeplearning4j.text.tokenization.tokenizer; import lombok.extern.slf4j.Slf4j; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.models.embeddings.loader.WordVectorSerializer; import org.deeplearning4j.models.word2vec.Word2Vec; import org.deeplearning4j.text.sentenceiterator.BasicLineIterator; @@ -37,7 +38,7 @@ import 
static org.junit.Assert.assertEquals; * */ @Slf4j -public class ChineseTokenizerTest { +public class ChineseTokenizerTest extends BaseDL4JTest { private final String toTokenize = "青山绿水和伟大的科学家让世界更美好和平"; private final String[] expect = {"青山绿水", "和", "伟大", "的", "科学家", "让", "世界", "更", "美好", "和平"}; diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/pom.xml b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/pom.xml index beeb07d34..a4fea6b07 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/pom.xml +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/pom.xml @@ -61,6 +61,13 @@ org.slf4j slf4j-api + + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/CommonCornerCasesTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/CommonCornerCasesTest.java index 43bdf1c7a..1d23baab6 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/CommonCornerCasesTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/CommonCornerCasesTest.java @@ -32,11 +32,13 @@ */ package com.atilika.kuromoji; +import org.deeplearning4j.BaseDL4JTest; + import java.util.Arrays; import static com.atilika.kuromoji.TestUtils.assertTokenSurfacesEquals; -public class CommonCornerCasesTest { +public class CommonCornerCasesTest extends BaseDL4JTest { public static void testPunctuation(TokenizerBase tokenizer) { String gerryNoHanaNoHanashi = "僕の鼻はちょっと\r\n長いよ。"; diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/buffer/StringValueMapBufferTest.java 
b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/buffer/StringValueMapBufferTest.java index 7e60f4544..265f4faf4 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/buffer/StringValueMapBufferTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/buffer/StringValueMapBufferTest.java @@ -32,13 +32,14 @@ */ package com.atilika.kuromoji.buffer; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Test; import java.util.TreeMap; import static org.junit.Assert.assertEquals; -public class StringValueMapBufferTest { +public class StringValueMapBufferTest extends BaseDL4JTest { @Test public void testInsertIntoMap() throws Exception { diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/CharacterDefinitionsCompilerTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/CharacterDefinitionsCompilerTest.java index 947007eeb..2e077eef6 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/CharacterDefinitionsCompilerTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/CharacterDefinitionsCompilerTest.java @@ -35,6 +35,7 @@ package com.atilika.kuromoji.compile; import com.atilika.kuromoji.dict.CharacterDefinitions; import com.atilika.kuromoji.io.IntegerArrayIO; import com.atilika.kuromoji.io.StringArrayIO; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Before; import org.junit.Test; @@ -46,7 +47,7 @@ import java.util.Map; import static org.junit.Assert.*; -public class CharacterDefinitionsCompilerTest { +public class CharacterDefinitionsCompilerTest extends BaseDL4JTest { private File charDef; diff 
--git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/ConnectionCostsCompilerTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/ConnectionCostsCompilerTest.java index 8bac040d4..516535e42 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/ConnectionCostsCompilerTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/ConnectionCostsCompilerTest.java @@ -34,6 +34,7 @@ package com.atilika.kuromoji.compile; import com.atilika.kuromoji.dict.ConnectionCosts; import com.atilika.kuromoji.io.ByteBufferIO; +import org.deeplearning4j.BaseDL4JTest; import org.junit.BeforeClass; import org.junit.Test; @@ -43,7 +44,7 @@ import java.nio.charset.StandardCharsets; import static org.junit.Assert.assertEquals; -public class ConnectionCostsCompilerTest { +public class ConnectionCostsCompilerTest extends BaseDL4JTest { private static ConnectionCosts connectionCosts; diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/TokenInfoBufferCompilerTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/TokenInfoBufferCompilerTest.java index 69f51d9c3..abda8710d 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/TokenInfoBufferCompilerTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/TokenInfoBufferCompilerTest.java @@ -34,6 +34,7 @@ package com.atilika.kuromoji.compile; import com.atilika.kuromoji.buffer.BufferEntry; import com.atilika.kuromoji.buffer.TokenInfoBuffer; +import org.deeplearning4j.BaseDL4JTest; import 
org.junit.Test; import java.io.File; @@ -47,7 +48,7 @@ import java.util.Map; import static org.junit.Assert.assertEquals; -public class TokenInfoBufferCompilerTest { +public class TokenInfoBufferCompilerTest extends BaseDL4JTest { @Test public void testReadAndWriteFromBuffer() throws Exception { diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/UnknownDictionaryCompilerTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/UnknownDictionaryCompilerTest.java index 14855bd7a..3156e47a3 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/UnknownDictionaryCompilerTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/UnknownDictionaryCompilerTest.java @@ -36,6 +36,7 @@ import com.atilika.kuromoji.dict.CharacterDefinitions; import com.atilika.kuromoji.dict.UnknownDictionary; import com.atilika.kuromoji.io.IntegerArrayIO; import com.atilika.kuromoji.io.StringArrayIO; +import org.deeplearning4j.BaseDL4JTest; import org.junit.BeforeClass; import org.junit.Test; @@ -45,7 +46,7 @@ import java.util.Map; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; -public class UnknownDictionaryCompilerTest { +public class UnknownDictionaryCompilerTest extends BaseDL4JTest { private static UnknownDictionary unknownDictionary; diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/WordIdMapCompilerTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/WordIdMapCompilerTest.java index 5b374d338..6edb8f541 100644 --- 
a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/WordIdMapCompilerTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/compile/WordIdMapCompilerTest.java @@ -33,6 +33,7 @@ package com.atilika.kuromoji.compile; import com.atilika.kuromoji.buffer.WordIdMap; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Test; import java.io.*; @@ -40,7 +41,7 @@ import java.util.Arrays; import static org.junit.Assert.assertEquals; -public class WordIdMapCompilerTest { +public class WordIdMapCompilerTest extends BaseDL4JTest { @Test public void testGrowableArray() { diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/dict/InsertedDictionaryTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/dict/InsertedDictionaryTest.java index 0144a3f3a..eae973831 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/dict/InsertedDictionaryTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/dict/InsertedDictionaryTest.java @@ -32,12 +32,13 @@ */ package com.atilika.kuromoji.dict; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Test; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; -public class InsertedDictionaryTest { +public class InsertedDictionaryTest extends BaseDL4JTest { @Test public void testFeatureSize() { diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/dict/UserDictionaryTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/dict/UserDictionaryTest.java index 6da0ddf20..cb6b503c0 100644 --- 
a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/dict/UserDictionaryTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/dict/UserDictionaryTest.java @@ -32,6 +32,7 @@ */ package com.atilika.kuromoji.dict; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Test; import org.nd4j.linalg.io.ClassPathResource; @@ -43,7 +44,7 @@ import java.util.List; import static org.junit.Assert.assertEquals; -public class UserDictionaryTest { +public class UserDictionaryTest extends BaseDL4JTest { @Test public void testLookup() throws IOException { diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/ipadic/MultiThreadedTokenizerTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/ipadic/MultiThreadedTokenizerTest.java index 4f766b4a9..05f0f9f8f 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/ipadic/MultiThreadedTokenizerTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/ipadic/MultiThreadedTokenizerTest.java @@ -32,6 +32,7 @@ */ package com.atilika.kuromoji.ipadic; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Test; import org.nd4j.linalg.io.ClassPathResource; @@ -39,7 +40,7 @@ import java.io.IOException; import static com.atilika.kuromoji.TestUtils.assertMultiThreadedTokenizedStreamEquals; -public class MultiThreadedTokenizerTest { +public class MultiThreadedTokenizerTest extends BaseDL4JTest { @Test public void testMultiThreadedBocchan() throws IOException, InterruptedException { diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/ipadic/SearchTokenizerTest.java 
b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/ipadic/SearchTokenizerTest.java index e5f57811e..4883ba0b3 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/ipadic/SearchTokenizerTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/ipadic/SearchTokenizerTest.java @@ -33,6 +33,7 @@ package com.atilika.kuromoji.ipadic; import com.atilika.kuromoji.TokenizerBase.Mode; +import org.deeplearning4j.BaseDL4JTest; import org.junit.BeforeClass; import org.junit.Test; import org.nd4j.linalg.io.ClassPathResource; @@ -47,7 +48,7 @@ import java.util.List; import static org.junit.Assert.assertEquals; -public class SearchTokenizerTest { +public class SearchTokenizerTest extends BaseDL4JTest { private static Tokenizer tokenizer; diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/ipadic/TokenizerTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/ipadic/TokenizerTest.java index d4fc66849..ce8bcb206 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/ipadic/TokenizerTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/ipadic/TokenizerTest.java @@ -33,6 +33,7 @@ package com.atilika.kuromoji.ipadic; import com.atilika.kuromoji.CommonCornerCasesTest; +import org.deeplearning4j.BaseDL4JTest; import org.junit.BeforeClass; import org.junit.Test; import org.nd4j.linalg.io.ClassPathResource; @@ -48,7 +49,7 @@ import java.util.List; import static com.atilika.kuromoji.TestUtils.*; import static org.junit.Assert.*; -public class TokenizerTest { +public class TokenizerTest extends BaseDL4JTest { private static Tokenizer tokenizer; diff --git 
a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/ipadic/UserDictionaryTokenizerTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/ipadic/UserDictionaryTokenizerTest.java index 586453aa3..204693e31 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/ipadic/UserDictionaryTokenizerTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/ipadic/UserDictionaryTokenizerTest.java @@ -32,6 +32,7 @@ */ package com.atilika.kuromoji.ipadic; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Ignore; import org.junit.Test; @@ -45,7 +46,7 @@ import java.util.List; import static com.atilika.kuromoji.TestUtils.assertTokenSurfacesEquals; import static org.junit.Assert.assertEquals; -public class UserDictionaryTokenizerTest { +public class UserDictionaryTokenizerTest extends BaseDL4JTest { private String userDictionary = "" + "クロ,クロ,クロ,カスタム名詞\n" + "真救世主,真救世主,シンキュウセイシュ,カスタム名詞\n" + "真救世主伝説,真救世主伝説,シンキュウセイシュデンセツ,カスタム名詞\n" + "北斗の拳,北斗の拳,ホクトノケン,カスタム名詞"; diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/trie/DoubleArrayTrieTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/trie/DoubleArrayTrieTest.java index cac7d2116..3f4e5763e 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/trie/DoubleArrayTrieTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/trie/DoubleArrayTrieTest.java @@ -32,6 +32,7 @@ */ package com.atilika.kuromoji.trie; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Test; import java.io.*; @@ -39,7 +40,7 @@ import java.io.*; import static 
org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -public class DoubleArrayTrieTest { +public class DoubleArrayTrieTest extends BaseDL4JTest { @Test public void testSparseTrie() throws IOException { diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/trie/NodeTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/trie/NodeTest.java index d36f5381e..474c073f2 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/trie/NodeTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/trie/NodeTest.java @@ -32,12 +32,13 @@ */ package com.atilika.kuromoji.trie; +import org.deeplearning4j.BaseDL4JTest; import org.junit.BeforeClass; import org.junit.Test; import static org.junit.Assert.assertEquals; -public class NodeTest { +public class NodeTest extends BaseDL4JTest { @BeforeClass public static void setUpBeforeClass() throws Exception {} diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/trie/PatriciaTrieTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/trie/PatriciaTrieTest.java index 7bfe62ac0..1f4bec6ad 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/trie/PatriciaTrieTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/trie/PatriciaTrieTest.java @@ -32,13 +32,14 @@ */ package com.atilika.kuromoji.trie; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Test; import java.util.*; import static org.junit.Assert.*; -public class PatriciaTrieTest { +public class PatriciaTrieTest extends BaseDL4JTest { @Test public void testRomaji() { 
diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/trie/TrieTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/trie/TrieTest.java index 976903a9d..a27131eb8 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/trie/TrieTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/trie/TrieTest.java @@ -33,11 +33,12 @@ package com.atilika.kuromoji.trie; import com.atilika.kuromoji.trie.Trie.Node; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Test; import static org.junit.Assert.*; -public class TrieTest { +public class TrieTest extends BaseDL4JTest { @Test public void testGetRoot() { diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/util/DictionaryEntryLineParserTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/util/DictionaryEntryLineParserTest.java index 41bb0bc52..f9ff1c060 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/util/DictionaryEntryLineParserTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/com/atilika/kuromoji/util/DictionaryEntryLineParserTest.java @@ -32,6 +32,7 @@ */ package com.atilika.kuromoji.util; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Test; import java.util.Arrays; @@ -39,7 +40,7 @@ import java.util.Arrays; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; -public class DictionaryEntryLineParserTest { +public class DictionaryEntryLineParserTest extends BaseDL4JTest { private DictionaryEntryLineParser parser = new DictionaryEntryLineParser(); diff --git 
a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/JapaneseTokenizerTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/JapaneseTokenizerTest.java index 5e5daf383..849009bdd 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/JapaneseTokenizerTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-japanese/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/JapaneseTokenizerTest.java @@ -16,6 +16,7 @@ package org.deeplearning4j.text.tokenization.tokenizer; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.text.tokenization.tokenizerfactory.JapaneseTokenizerFactory; import org.deeplearning4j.text.tokenization.tokenizerfactory.TokenizerFactory; import org.junit.Test; @@ -25,7 +26,7 @@ import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -public class JapaneseTokenizerTest { +public class JapaneseTokenizerTest extends BaseDL4JTest { private String toTokenize = "黒い瞳の綺麗な女の子"; private String[] expect = {"黒い", "瞳", "の", "綺麗", "な", "女の子"}; diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-korean/pom.xml b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-korean/pom.xml index e11e9044f..c0fcdb84a 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-korean/pom.xml +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-korean/pom.xml @@ -54,6 +54,12 @@ deeplearning4j-nlp ${project.version} + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-korean/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/KoreanTokenizerTest.java 
b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-korean/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/KoreanTokenizerTest.java index c60243b75..275515968 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-korean/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/KoreanTokenizerTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-korean/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/KoreanTokenizerTest.java @@ -16,6 +16,7 @@ package org.deeplearning4j.text.tokenization.tokenizer; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.text.tokenization.tokenizerfactory.KoreanTokenizerFactory; import org.deeplearning4j.text.tokenization.tokenizerfactory.TokenizerFactory; import org.junit.Test; @@ -25,7 +26,7 @@ import static org.junit.Assert.assertEquals; /** * Created by kepricon on 16. 10. 24. */ -public class KoreanTokenizerTest { +public class KoreanTokenizerTest extends BaseDL4JTest { @Test public void testKoreanTokenizer() throws Exception { String toTokenize = "세계 최초의 상용 수준 오픈소스 딥러닝 라이브러리입니다"; diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-korean/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/PerformanceTests.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-korean/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/PerformanceTests.java index b9e0c35dc..c9fece977 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-korean/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/PerformanceTests.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-korean/src/test/java/org/deeplearning4j/text/tokenization/tokenizer/PerformanceTests.java @@ -17,6 +17,7 @@ package org.deeplearning4j.text.tokenization.tokenizer; import lombok.extern.slf4j.Slf4j; +import org.deeplearning4j.BaseDL4JTest; import 
org.deeplearning4j.models.embeddings.learning.impl.elements.CBOW; import org.deeplearning4j.models.embeddings.reader.impl.BasicModelUtils; import org.deeplearning4j.models.word2vec.VocabWord; @@ -32,7 +33,7 @@ import org.junit.Test; * @author raver119@gmail.com */ @Slf4j -public class PerformanceTests { +public class PerformanceTests extends BaseDL4JTest { @Ignore diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-uima/pom.xml b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-uima/pom.xml index 39eda5e50..7aa6090e1 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-uima/pom.xml +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-uima/pom.xml @@ -32,6 +32,23 @@ UTF-8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 1.8 + 1.8 + 1.8 + 1.8 + + + + + + org.cleartk @@ -72,6 +89,13 @@ ${project.version} test + + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-uima/src/test/java/org/deeplearning4j/BaseDL4JTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-uima/src/test/java/org/deeplearning4j/BaseDL4JTest.java deleted file mode 100644 index 05d0957fb..000000000 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp-uima/src/test/java/org/deeplearning4j/BaseDL4JTest.java +++ /dev/null @@ -1,140 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the - * License for the specific language governing permissions and limitations - * under the License. - * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.deeplearning4j; - -import lombok.extern.slf4j.Slf4j; -import org.bytedeco.javacpp.Pointer; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TestName; -import org.nd4j.linalg.api.buffer.DataType; -import org.nd4j.linalg.api.memory.MemoryWorkspace; -import org.nd4j.linalg.api.ops.executioner.OpExecutioner; -import org.nd4j.linalg.factory.Nd4j; -import org.nd4j.linalg.profiler.ProfilerConfig; - -import java.lang.management.ManagementFactory; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -@Slf4j -public class BaseDL4JTest { - - @Rule - public TestName name = new TestName(); - - protected long startTime; - protected int threadCountBefore; - - /** - * Override this to set the profiling mode for the tests defined in the child class - */ - public OpExecutioner.ProfilingMode getProfilingMode(){ - return OpExecutioner.ProfilingMode.SCOPE_PANIC; - } - - /** - * Override this to set the datatype of the tests defined in the child class - */ - public DataType getDataType(){ - return DataType.DOUBLE; - } - - public DataType getDefaultFPDataType(){ - return getDataType(); - } - - @Before - public void beforeTest(){ - log.info("{}.{}", getClass().getSimpleName(), name.getMethodName()); - Nd4j.getExecutioner().setProfilingMode(getProfilingMode()); - Nd4j.getExecutioner().setProfilingConfig(ProfilerConfig.builder().build()); - Nd4j.setDefaultDataTypes(getDataType(), getDefaultFPDataType()); - startTime = System.currentTimeMillis(); - threadCountBefore = ManagementFactory.getThreadMXBean().getThreadCount(); - } - - @After - public void afterTest(){ - //Attempt to keep workspaces isolated between tests - 
Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread(); - MemoryWorkspace currWS = Nd4j.getMemoryManager().getCurrentWorkspace(); - Nd4j.getMemoryManager().setCurrentWorkspace(null); - if(currWS != null){ - //Not really safe to continue testing under this situation... other tests will likely fail with obscure - // errors that are hard to track back to this - log.error("Open workspace leaked from test! Exiting - {}, isOpen = {} - {}", currWS.getId(), currWS.isScopeActive(), currWS); - System.exit(1); - } - - StringBuilder sb = new StringBuilder(); - long maxPhys = Pointer.maxPhysicalBytes(); - long maxBytes = Pointer.maxBytes(); - long currPhys = Pointer.physicalBytes(); - long currBytes = Pointer.totalBytes(); - - long jvmTotal = Runtime.getRuntime().totalMemory(); - long jvmMax = Runtime.getRuntime().maxMemory(); - - int threadsAfter = ManagementFactory.getThreadMXBean().getThreadCount(); - - long duration = System.currentTimeMillis() - startTime; - sb.append(getClass().getSimpleName()).append(".").append(name.getMethodName()) - .append(": ").append(duration).append(" ms") - .append(", threadCount: (").append(threadCountBefore).append("->").append(threadsAfter).append(")") - .append(", jvmTotal=").append(jvmTotal) - .append(", jvmMax=").append(jvmMax) - .append(", totalBytes=").append(currBytes).append(", maxBytes=").append(maxBytes) - .append(", currPhys=").append(currPhys).append(", maxPhys=").append(maxPhys); - - List ws = Nd4j.getWorkspaceManager().getAllWorkspacesForCurrentThread(); - if(ws != null && ws.size() > 0){ - long currSize = 0; - for(MemoryWorkspace w : ws){ - currSize += w.getCurrentSize(); - } - if(currSize > 0){ - sb.append(", threadWSSize=").append(currSize) - .append(" (").append(ws.size()).append(" WSs)"); - } - } - - - Properties p = Nd4j.getExecutioner().getEnvironmentInformation(); - Object o = p.get("cuda.devicesInformation"); - if(o instanceof List){ - List> l = (List>) o; - if(l.size() > 0) { - - sb.append(" 
[").append(l.size()) - .append(" GPUs: "); - - for (int i = 0; i < l.size(); i++) { - Map m = l.get(i); - if(i > 0) - sb.append(","); - sb.append("(").append(m.get("cuda.freeMemory")).append(" free, ") - .append(m.get("cuda.totalMemory")).append(" total)"); - } - sb.append("]"); - } - } - log.info(sb.toString()); - } -} diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/pom.xml b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/pom.xml index da4fd9cba..f27fd7a94 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/pom.xml +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/pom.xml @@ -77,6 +77,12 @@ 0.4 + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/test/java/org/deeplearning4j/BaseDL4JTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/test/java/org/deeplearning4j/BaseDL4JTest.java deleted file mode 100644 index 05d0957fb..000000000 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/test/java/org/deeplearning4j/BaseDL4JTest.java +++ /dev/null @@ -1,140 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.deeplearning4j; - -import lombok.extern.slf4j.Slf4j; -import org.bytedeco.javacpp.Pointer; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TestName; -import org.nd4j.linalg.api.buffer.DataType; -import org.nd4j.linalg.api.memory.MemoryWorkspace; -import org.nd4j.linalg.api.ops.executioner.OpExecutioner; -import org.nd4j.linalg.factory.Nd4j; -import org.nd4j.linalg.profiler.ProfilerConfig; - -import java.lang.management.ManagementFactory; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -@Slf4j -public class BaseDL4JTest { - - @Rule - public TestName name = new TestName(); - - protected long startTime; - protected int threadCountBefore; - - /** - * Override this to set the profiling mode for the tests defined in the child class - */ - public OpExecutioner.ProfilingMode getProfilingMode(){ - return OpExecutioner.ProfilingMode.SCOPE_PANIC; - } - - /** - * Override this to set the datatype of the tests defined in the child class - */ - public DataType getDataType(){ - return DataType.DOUBLE; - } - - public DataType getDefaultFPDataType(){ - return getDataType(); - } - - @Before - public void beforeTest(){ - log.info("{}.{}", getClass().getSimpleName(), name.getMethodName()); - Nd4j.getExecutioner().setProfilingMode(getProfilingMode()); - Nd4j.getExecutioner().setProfilingConfig(ProfilerConfig.builder().build()); - Nd4j.setDefaultDataTypes(getDataType(), getDefaultFPDataType()); - startTime = System.currentTimeMillis(); - threadCountBefore = ManagementFactory.getThreadMXBean().getThreadCount(); - } - - @After - public void afterTest(){ - //Attempt to keep workspaces isolated between tests - Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread(); - MemoryWorkspace currWS = Nd4j.getMemoryManager().getCurrentWorkspace(); - 
Nd4j.getMemoryManager().setCurrentWorkspace(null); - if(currWS != null){ - //Not really safe to continue testing under this situation... other tests will likely fail with obscure - // errors that are hard to track back to this - log.error("Open workspace leaked from test! Exiting - {}, isOpen = {} - {}", currWS.getId(), currWS.isScopeActive(), currWS); - System.exit(1); - } - - StringBuilder sb = new StringBuilder(); - long maxPhys = Pointer.maxPhysicalBytes(); - long maxBytes = Pointer.maxBytes(); - long currPhys = Pointer.physicalBytes(); - long currBytes = Pointer.totalBytes(); - - long jvmTotal = Runtime.getRuntime().totalMemory(); - long jvmMax = Runtime.getRuntime().maxMemory(); - - int threadsAfter = ManagementFactory.getThreadMXBean().getThreadCount(); - - long duration = System.currentTimeMillis() - startTime; - sb.append(getClass().getSimpleName()).append(".").append(name.getMethodName()) - .append(": ").append(duration).append(" ms") - .append(", threadCount: (").append(threadCountBefore).append("->").append(threadsAfter).append(")") - .append(", jvmTotal=").append(jvmTotal) - .append(", jvmMax=").append(jvmMax) - .append(", totalBytes=").append(currBytes).append(", maxBytes=").append(maxBytes) - .append(", currPhys=").append(currPhys).append(", maxPhys=").append(maxPhys); - - List ws = Nd4j.getWorkspaceManager().getAllWorkspacesForCurrentThread(); - if(ws != null && ws.size() > 0){ - long currSize = 0; - for(MemoryWorkspace w : ws){ - currSize += w.getCurrentSize(); - } - if(currSize > 0){ - sb.append(", threadWSSize=").append(currSize) - .append(" (").append(ws.size()).append(" WSs)"); - } - } - - - Properties p = Nd4j.getExecutioner().getEnvironmentInformation(); - Object o = p.get("cuda.devicesInformation"); - if(o instanceof List){ - List> l = (List>) o; - if(l.size() > 0) { - - sb.append(" [").append(l.size()) - .append(" GPUs: "); - - for (int i = 0; i < l.size(); i++) { - Map m = l.get(i); - if(i > 0) - sb.append(","); - 
sb.append("(").append(m.get("cuda.freeMemory")).append(" free, ") - .append(m.get("cuda.totalMemory")).append(" total)"); - } - sb.append("]"); - } - } - log.info(sb.toString()); - } -} diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/test/java/org/deeplearning4j/TsneTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/test/java/org/deeplearning4j/TsneTest.java index 613090d8a..2c04da0e9 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/test/java/org/deeplearning4j/TsneTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/test/java/org/deeplearning4j/TsneTest.java @@ -46,6 +46,11 @@ import static org.junit.Assert.assertEquals; @Slf4j public class TsneTest extends BaseDL4JTest { + @Override + public long getTimeoutMilliseconds() { + return 120000L; + } + @Rule public TemporaryFolder testDir = new TemporaryFolder(); @@ -58,15 +63,15 @@ public class TsneTest extends BaseDL4JTest { log.info("Starting test: WSM={}, syntheticData={}", wsm, syntheticData); //STEP 1: Initialization - int iterations = 300; + int iterations = 50; //create an n-dimensional array of doubles - Nd4j.setDataType(DataType.DOUBLE); + Nd4j.setDefaultDataTypes(DataType.FLOAT, DataType.FLOAT); List cacheList = new ArrayList<>(); //cacheList is a dynamic array of strings used to hold all words //STEP 2: Turn text input into a list of words INDArray weights; if(syntheticData){ - weights = Nd4j.rand(5000, 200); + weights = Nd4j.rand(1000, 200); } else { log.info("Load & Vectorize data...."); File wordFile = new ClassPathResource("deeplearning4j-tsne/words.txt").getFile(); //Open the file diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/test/java/org/deeplearning4j/models/paragraphvectors/ParagraphVectorsTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/test/java/org/deeplearning4j/models/paragraphvectors/ParagraphVectorsTest.java index 
4750e8bfb..c3e8ac89b 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/test/java/org/deeplearning4j/models/paragraphvectors/ParagraphVectorsTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/test/java/org/deeplearning4j/models/paragraphvectors/ParagraphVectorsTest.java @@ -79,6 +79,11 @@ import static org.junit.Assert.*; @Slf4j public class ParagraphVectorsTest extends BaseDL4JTest { + @Override + public long getTimeoutMilliseconds() { + return 240000; + } + @Rule public TemporaryFolder testDir = new TemporaryFolder(); diff --git a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/test/java/org/deeplearning4j/models/word2vec/iterator/Word2VecDataSetIteratorTest.java b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/test/java/org/deeplearning4j/models/word2vec/iterator/Word2VecDataSetIteratorTest.java index b84ecf95c..44b098dc1 100644 --- a/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/test/java/org/deeplearning4j/models/word2vec/iterator/Word2VecDataSetIteratorTest.java +++ b/deeplearning4j/deeplearning4j-nlp-parent/deeplearning4j-nlp/src/test/java/org/deeplearning4j/models/word2vec/iterator/Word2VecDataSetIteratorTest.java @@ -47,10 +47,15 @@ import static org.junit.Assert.assertArrayEquals; */ public class Word2VecDataSetIteratorTest extends BaseDL4JTest { + @Override + public long getTimeoutMilliseconds() { + return 240000L; + } + /** * Basically all we want from this test - being able to finish without exceptions. 
*/ - @Test(timeout = 300000) + @Test public void testIterator1() throws Exception { File inputFile = Resources.asFile("big/raw_sentences.txt"); SentenceIterator iter = new BasicLineIterator(inputFile.getAbsolutePath()); diff --git a/deeplearning4j/deeplearning4j-nn/pom.xml b/deeplearning4j/deeplearning4j-nn/pom.xml index c1ff45a61..e92372fc8 100644 --- a/deeplearning4j/deeplearning4j-nn/pom.xml +++ b/deeplearning4j/deeplearning4j-nn/pom.xml @@ -117,6 +117,12 @@ test + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/gradientcheck/GradientCheckUtil.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/gradientcheck/GradientCheckUtil.java index d15e961b7..3cd169c59 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/gradientcheck/GradientCheckUtil.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/gradientcheck/GradientCheckUtil.java @@ -16,8 +16,9 @@ package org.deeplearning4j.gradientcheck; +import lombok.*; +import lombok.experimental.Accessors; import lombok.extern.slf4j.Slf4j; -import lombok.val; import org.deeplearning4j.nn.api.Model; import org.nd4j.linalg.api.buffer.DataType; import org.nd4j.linalg.exception.ND4JArraySizeException; @@ -113,6 +114,52 @@ public class GradientCheckUtil { } } + public enum PrintMode { + ALL, + ZEROS, + FAILURES_ONLY + } + + @Accessors(fluent = true) + @Data + @NoArgsConstructor + public static class MLNConfig { + private MultiLayerNetwork net; + private INDArray input; + private INDArray labels; + private INDArray inputMask; + private INDArray labelMask; + private double epsilon = 1e-6; + private double maxRelError = 1e-3; + private double minAbsoluteError = 1e-8; + private PrintMode print = PrintMode.ZEROS; + private boolean exitOnFirstError = false; + private boolean subset; + private int maxPerParam; + private Set excludeParams; + private Consumer 
callEachIter; + } + + @Accessors(fluent = true) + @Data + @NoArgsConstructor + public static class GraphConfig { + private ComputationGraph net; + private INDArray[] inputs; + private INDArray[] labels; + private INDArray[] inputMask; + private INDArray[] labelMask; + private double epsilon = 1e-6; + private double maxRelError = 1e-3; + private double minAbsoluteError = 1e-8; + private PrintMode print = PrintMode.ZEROS; + private boolean exitOnFirstError = false; + private boolean subset; + private int maxPerParam; + private Set excludeParams; + private Consumer callEachIter; + } + /** * Check backprop gradients for a MultiLayerNetwork. * @param mln MultiLayerNetwork to test. This must be initialized. @@ -127,46 +174,18 @@ public class GradientCheckUtil { * @param labels Labels/targets to use to calculate backprop gradient. May be mini-batch data. * @return true if gradients are passed, false otherwise. */ + @Deprecated public static boolean checkGradients(MultiLayerNetwork mln, double epsilon, double maxRelError, - double minAbsoluteError, boolean print, boolean exitOnFirstError, INDArray input, INDArray labels) { - return checkGradients(mln, epsilon, maxRelError, minAbsoluteError, print, exitOnFirstError, input, labels, null, null); - } - - public static boolean checkGradients(MultiLayerNetwork mln, double epsilon, double maxRelError, - double minAbsoluteError, boolean print, boolean exitOnFirstError, INDArray input, - INDArray labels, Set excludeParams) { - return checkGradients(mln, epsilon, maxRelError, minAbsoluteError, print, exitOnFirstError, input, labels, null, null, - false, -1, excludeParams, (Integer)null); - } - - public static boolean checkGradients(MultiLayerNetwork mln, double epsilon, double maxRelError, - double minAbsoluteError, boolean print, boolean exitOnFirstError, - INDArray input, INDArray labels, INDArray inputMask, INDArray labelMask) { - return checkGradients(mln, epsilon, maxRelError, minAbsoluteError, print, exitOnFirstError, - input, 
labels, inputMask, labelMask, false, -1); - } - - public static boolean checkGradients(MultiLayerNetwork mln, double epsilon, double maxRelError, - double minAbsoluteError, boolean print, boolean exitOnFirstError, - INDArray input, INDArray labels, INDArray inputMask, INDArray labelMask, - boolean subset, int maxPerParam) { - return checkGradients(mln, epsilon, maxRelError, minAbsoluteError, print, exitOnFirstError, input, - labels, inputMask, labelMask, subset, maxPerParam, null); - } - - public static boolean checkGradients(MultiLayerNetwork mln, double epsilon, double maxRelError, - double minAbsoluteError, boolean print, boolean exitOnFirstError, - INDArray input, INDArray labels, INDArray inputMask, INDArray labelMask, - boolean subset, int maxPerParam, Set excludeParams) { - return checkGradients(mln, epsilon, maxRelError, minAbsoluteError, print, exitOnFirstError, input, - labels, inputMask, labelMask, subset, maxPerParam, excludeParams, (Consumer)null); + double minAbsoluteError, boolean print, boolean exitOnFirstError, INDArray input, INDArray labels) { + return checkGradients(new MLNConfig().net(mln).epsilon(epsilon).maxRelError(maxRelError).minAbsoluteError(minAbsoluteError).print(PrintMode.FAILURES_ONLY) + .exitOnFirstError(exitOnFirstError).input(input).labels(labels)); } + @Deprecated public static boolean checkGradients(MultiLayerNetwork mln, double epsilon, double maxRelError, double minAbsoluteError, boolean print, boolean exitOnFirstError, INDArray input, INDArray labels, INDArray inputMask, INDArray labelMask, boolean subset, int maxPerParam, Set excludeParams, final Integer rngSeedResetEachIter) { - Consumer c = null; if(rngSeedResetEachIter != null){ c = new Consumer() { @@ -177,21 +196,18 @@ public class GradientCheckUtil { }; } - return checkGradients(mln, epsilon, maxRelError, minAbsoluteError, print, exitOnFirstError, input, - labels, inputMask, labelMask, subset, maxPerParam, excludeParams, c); + return checkGradients(new 
MLNConfig().net(mln).epsilon(epsilon).maxRelError(maxRelError).minAbsoluteError(minAbsoluteError).print(PrintMode.FAILURES_ONLY) + .exitOnFirstError(exitOnFirstError).input(input).labels(labels).inputMask(inputMask).labelMask(labelMask).subset(subset).maxPerParam(maxPerParam).excludeParams(excludeParams).callEachIter(c)); } - public static boolean checkGradients(MultiLayerNetwork mln, double epsilon, double maxRelError, - double minAbsoluteError, boolean print, boolean exitOnFirstError, - INDArray input, INDArray labels, INDArray inputMask, INDArray labelMask, - boolean subset, int maxPerParam, Set excludeParams, Consumer callEachIter) { + public static boolean checkGradients(MLNConfig c){ //Basic sanity checks on input: - if (epsilon <= 0.0 || epsilon > 0.1) + if (c.epsilon <= 0.0 || c.epsilon > 0.1) throw new IllegalArgumentException("Invalid epsilon: expect epsilon in range (0,0.1], usually 1e-4 or so"); - if (maxRelError <= 0.0 || maxRelError > 0.25) - throw new IllegalArgumentException("Invalid maxRelativeError: " + maxRelError); - if (!(mln.getOutputLayer() instanceof IOutputLayer)) + if (c.maxRelError <= 0.0 || c.maxRelError > 0.25) + throw new IllegalArgumentException("Invalid maxRelativeError: " + c.maxRelError); + if (!(c.net.getOutputLayer() instanceof IOutputLayer)) throw new IllegalArgumentException("Cannot check backprop gradients without OutputLayer"); DataType dataType = DataTypeUtil.getDtypeFromContext(); @@ -201,21 +217,21 @@ public class GradientCheckUtil { + "DataTypeUtil.setDTypeForContext(DataType.DOUBLE); before using GradientCheckUtil"); } - DataType netDataType = mln.getLayerWiseConfigurations().getDataType(); + DataType netDataType = c.net.getLayerWiseConfigurations().getDataType(); if (netDataType != DataType.DOUBLE) { throw new IllegalStateException("Cannot perform gradient check: Network datatype is not set to double precision (" + "is: " + netDataType + "). Double precision must be used for gradient checks. 
Create network with .dataType(DataType.DOUBLE) before using GradientCheckUtil"); } - if(netDataType != mln.params().dataType()){ + if(netDataType != c.net.params().dataType()){ throw new IllegalStateException("Parameters datatype does not match network configuration datatype (" - + "is: " + mln.params().dataType() + "). If network datatype is set to DOUBLE, parameters must also be DOUBLE."); + + "is: " + c.net.params().dataType() + "). If network datatype is set to DOUBLE, parameters must also be DOUBLE."); } //Check network configuration: int layerCount = 0; - for (NeuralNetConfiguration n : mln.getLayerWiseConfigurations().getConfs()) { + for (NeuralNetConfiguration n : c.net.getLayerWiseConfigurations().getConfs()) { if (n.getLayer() instanceof BaseLayer) { BaseLayer bl = (BaseLayer) n.getLayer(); IUpdater u = bl.getIUpdater(); @@ -243,7 +259,7 @@ public class GradientCheckUtil { } } - if (n.getLayer().getIDropout() != null && callEachIter == null) { + if (n.getLayer().getIDropout() != null && c.callEachIter == null) { throw new IllegalStateException("When gradient checking dropout, need to reset RNG seed each iter, or no" + " dropout should be present during gradient checks - got dropout = " + n.getLayer().getIDropout() + " for layer " + layerCount); @@ -251,45 +267,45 @@ public class GradientCheckUtil { } //Set softmax clipping to 0 if necessary, to avoid spurious failures due to clipping - for(Layer l : mln.getLayers()){ + for(Layer l : c.net.getLayers()){ if(l instanceof IOutputLayer){ configureLossFnClippingIfPresent((IOutputLayer) l); } } - mln.setInput(input); - mln.setLabels(labels); - mln.setLayerMaskArrays(inputMask, labelMask); - if(callEachIter != null){ - callEachIter.accept(mln); + c.net.setInput(c.input); + c.net.setLabels(c.labels); + c.net.setLayerMaskArrays(c.inputMask, c.labelMask); + if(c.callEachIter != null){ + c.callEachIter.accept(c.net); } - mln.computeGradientAndScore(); - Pair gradAndScore = mln.gradientAndScore(); + 
c.net.computeGradientAndScore(); + Pair gradAndScore = c.net.gradientAndScore(); - Updater updater = UpdaterCreator.getUpdater(mln); - updater.update(mln, gradAndScore.getFirst(), 0, 0, mln.batchSize(), LayerWorkspaceMgr.noWorkspaces()); + Updater updater = UpdaterCreator.getUpdater(c.net); + updater.update(c.net, gradAndScore.getFirst(), 0, 0, c.net.batchSize(), LayerWorkspaceMgr.noWorkspaces()); INDArray gradientToCheck = gradAndScore.getFirst().gradient().dup(); //need dup: gradients are a *view* of the full gradient array (which will change every time backprop is done) - INDArray originalParams = mln.params().dup(); //need dup: params are a *view* of full parameters + INDArray originalParams = c.net.params().dup(); //need dup: params are a *view* of full parameters val nParams = originalParams.length(); - Map paramTable = mln.paramTable(); + Map paramTable = c.net.paramTable(); List paramNames = new ArrayList<>(paramTable.keySet()); val paramEnds = new long[paramNames.size()]; paramEnds[0] = paramTable.get(paramNames.get(0)).length(); Map stepSizeForParam; - if(subset){ + if(c.subset){ stepSizeForParam = new HashMap<>(); - stepSizeForParam.put(paramNames.get(0), (int) Math.max(1, paramTable.get(paramNames.get(0)).length() / maxPerParam)); + stepSizeForParam.put(paramNames.get(0), (int) Math.max(1, paramTable.get(paramNames.get(0)).length() / c.maxPerParam)); } else { stepSizeForParam = null; } for (int i = 1; i < paramEnds.length; i++) { val n = paramTable.get(paramNames.get(i)).length(); paramEnds[i] = paramEnds[i - 1] + n; - if(subset){ - long ss = n / maxPerParam; + if(c.subset){ + long ss = n / c.maxPerParam; if(ss == 0){ ss = n; } @@ -300,9 +316,9 @@ public class GradientCheckUtil { } } - if(print) { + if(c.print == PrintMode.ALL) { int i=0; - for (Layer l : mln.getLayers()) { + for (Layer l : c.net.getLayers()) { Set s = l.paramTable().keySet(); log.info("Layer " + i + ": " + l.getClass().getSimpleName() + " - params " + s); i++; @@ -312,36 +328,40 @@ 
public class GradientCheckUtil { int totalNFailures = 0; double maxError = 0.0; - DataSet ds = new DataSet(input, labels, inputMask, labelMask); + DataSet ds = new DataSet(c.input, c.labels, c.inputMask, c.labelMask); int currParamNameIdx = 0; - INDArray params = mln.params(); //Assumption here: params is a view that we can modify in-place + if(c.excludeParams != null && !c.excludeParams.isEmpty()){ + log.info("NOTE: parameters will be skipped due to config: {}", c.excludeParams); + } + + INDArray params = c.net.params(); //Assumption here: params is a view that we can modify in-place for (long i = 0; i < nParams; ) { //Get param name if (i >= paramEnds[currParamNameIdx]) { currParamNameIdx++; } String paramName = paramNames.get(currParamNameIdx); - if(excludeParams != null && excludeParams.contains(paramName)){ - log.info("Skipping parameters for parameter name: {}", paramName); + if(c.excludeParams != null && c.excludeParams.contains(paramName)){ +// log.info("Skipping parameters for parameter name: {}", paramName); i = paramEnds[currParamNameIdx++]; continue; } //(w+epsilon): Do forward pass and score double origValue = params.getDouble(i); - params.putScalar(i, origValue + epsilon); - if(callEachIter != null){ - callEachIter.accept(mln); + params.putScalar(i, origValue + c.epsilon); + if(c.callEachIter != null){ + c.callEachIter.accept(c.net); } - double scorePlus = mln.score(ds, true); + double scorePlus = c.net.score(ds, true); //(w-epsilon): Do forward pass and score - params.putScalar(i, origValue - epsilon); - if(callEachIter != null){ - callEachIter.accept(mln); + params.putScalar(i, origValue - c.epsilon); + if(c.callEachIter != null){ + c.callEachIter.accept(c.net); } - double scoreMinus = mln.score(ds, true); + double scoreMinus = c.net.score(ds, true); //Reset original param value params.putScalar(i, origValue); @@ -349,7 +369,7 @@ public class GradientCheckUtil { //Calculate numerical parameter gradient: double scoreDelta = scorePlus - scoreMinus; - 
double numericalGradient = scoreDelta / (2 * epsilon); + double numericalGradient = scoreDelta / (2 * c.epsilon); if (Double.isNaN(numericalGradient)) throw new IllegalStateException("Numerical gradient was NaN for parameter " + i + " of " + nParams); @@ -363,30 +383,29 @@ public class GradientCheckUtil { if (relError > maxError) maxError = relError; - if (relError > maxRelError || Double.isNaN(relError)) { + if (relError > c.maxRelError || Double.isNaN(relError)) { double absError = Math.abs(backpropGradient - numericalGradient); - if (absError < minAbsoluteError) { - if(print) { + if (absError < c.minAbsoluteError) { + if(c.print == PrintMode.ALL || c.print == PrintMode.ZEROS && absError == 0.0) { log.info("Param " + i + " (" + paramName + ") passed: grad= " + backpropGradient + ", numericalGrad= " + numericalGradient + ", relError= " + relError - + "; absolute error = " + absError + " < minAbsoluteError = " + minAbsoluteError); + + "; absolute error = " + absError + " < minAbsoluteError = " + c.minAbsoluteError); } } else { - if (print) - log.info("Param " + i + " (" + paramName + ") FAILED: grad= " + backpropGradient - + ", numericalGrad= " + numericalGradient + ", relError= " + relError - + ", scorePlus=" + scorePlus + ", scoreMinus= " + scoreMinus + ", paramValue = " + origValue); - if (exitOnFirstError) + log.info("Param " + i + " (" + paramName + ") FAILED: grad= " + backpropGradient + + ", numericalGrad= " + numericalGradient + ", relError= " + relError + + ", scorePlus=" + scorePlus + ", scoreMinus= " + scoreMinus + ", paramValue = " + origValue); + if (c.exitOnFirstError) return false; totalNFailures++; } - } else if (print) { + } else if (c.print == PrintMode.ALL) { log.info("Param " + i + " (" + paramName + ") passed: grad= " + backpropGradient + ", numericalGrad= " + numericalGradient + ", relError= " + relError); } long step; - if(subset){ + if(c.subset){ step = stepSizeForParam.get(paramName); if(i + step > paramEnds[currParamNameIdx]+1){ step = 
paramEnds[currParamNameIdx]+1 - i; @@ -398,83 +417,25 @@ public class GradientCheckUtil { i += step; } - if (print) { - val nPass = nParams - totalNFailures; - log.info("GradientCheckUtil.checkGradients(): " + nParams + " params checked, " + nPass + " passed, " + val nPass = nParams - totalNFailures; + log.info("GradientCheckUtil.checkGradients(): " + nParams + " params checked, " + nPass + " passed, " + totalNFailures + " failed. Largest relative error = " + maxError); - } return totalNFailures == 0; } - - - /**Check backprop gradients for a ComputationGraph - * @param graph ComputationGraph to test. This must be initialized. - * @param epsilon Usually on the order of 1e-4 or so. - * @param maxRelError Maximum relative error. Usually < 0.01, though maybe more for deep networks - * @param minAbsoluteError Minimum absolute error to cause a failure. Numerical gradients can be non-zero due to precision issues. - * For example, 0.0 vs. 1e-18: relative error is 1.0, but not really a failure - * @param print Whether to print full pass/failure details for each parameter gradient - * @param exitOnFirstError If true: return upon first failure. If false: continue checking even if - * one parameter gradient has failed. Typically use false for debugging, true for unit tests. - * @param inputs Input arrays to use for forward pass. May be mini-batch data. - * @param labels Labels/targets (output) arrays to use to calculate backprop gradient. May be mini-batch data. - * @return true if gradients are passed, false otherwise. 
- */ - public static boolean checkGradients(ComputationGraph graph, double epsilon, double maxRelError, - double minAbsoluteError, boolean print, boolean exitOnFirstError, INDArray[] inputs, - INDArray[] labels) { - return checkGradients(graph, epsilon, maxRelError, minAbsoluteError, print, exitOnFirstError, inputs, labels, null, null, null); - } - - public static boolean checkGradients(ComputationGraph graph, double epsilon, double maxRelError, - double minAbsoluteError, boolean print, boolean exitOnFirstError, INDArray[] inputs, - INDArray[] labels, INDArray[] fMask, INDArray[] lMask) { - return checkGradients(graph, epsilon, maxRelError, minAbsoluteError, print, exitOnFirstError, inputs, - labels, fMask, lMask, null); - } - - public static boolean checkGradients(ComputationGraph graph, double epsilon, double maxRelError, - double minAbsoluteError, boolean print, boolean exitOnFirstError, INDArray[] inputs, - INDArray[] labels, INDArray[] fMask, INDArray[] lMask, Set excludeParams) { - return checkGradients(graph, epsilon, maxRelError, minAbsoluteError, print, exitOnFirstError, inputs, - labels, fMask, lMask, excludeParams, (Consumer)null); - } - - public static boolean checkGradients(ComputationGraph graph, double epsilon, double maxRelError, - double minAbsoluteError, boolean print, boolean exitOnFirstError, INDArray[] inputs, - INDArray[] labels, INDArray[] fMask, INDArray[] lMask, Set excludeParams, - final Integer rngSeedResetEachIter) { - Consumer c = null; - if(rngSeedResetEachIter != null){ - c = new Consumer() { - @Override - public void accept(ComputationGraph computationGraph) { - Nd4j.getRandom().setSeed(rngSeedResetEachIter); - } - }; - } - - return checkGradients(graph, epsilon, maxRelError, minAbsoluteError, print, exitOnFirstError, inputs, - labels, fMask, lMask, excludeParams, c); - } - - public static boolean checkGradients(ComputationGraph graph, double epsilon, double maxRelError, - double minAbsoluteError, boolean print, boolean 
exitOnFirstError, INDArray[] inputs, - INDArray[] labels, INDArray[] fMask, INDArray[] lMask, Set excludeParams, - Consumer callEachIter) { + public static boolean checkGradients(GraphConfig c){ //Basic sanity checks on input: - if (epsilon <= 0.0 || epsilon > 0.1) + if (c.epsilon <= 0.0 || c.epsilon > 0.1) throw new IllegalArgumentException("Invalid epsilon: expect epsilon in range (0,0.1], usually 1e-4 or so"); - if (maxRelError <= 0.0 || maxRelError > 0.25) - throw new IllegalArgumentException("Invalid maxRelativeError: " + maxRelError); + if (c.maxRelError <= 0.0 || c.maxRelError > 0.25) + throw new IllegalArgumentException("Invalid maxRelativeError: " + c.maxRelError); - if (graph.getNumInputArrays() != inputs.length) - throw new IllegalArgumentException("Invalid input arrays: expect " + graph.getNumInputArrays() + " inputs"); - if (graph.getNumOutputArrays() != labels.length) + if (c.net.getNumInputArrays() != c.inputs.length) + throw new IllegalArgumentException("Invalid input arrays: expect " + c.net.getNumInputArrays() + " inputs"); + if (c.net.getNumOutputArrays() != c.labels.length) throw new IllegalArgumentException( - "Invalid labels arrays: expect " + graph.getNumOutputArrays() + " outputs"); + "Invalid labels arrays: expect " + c.net.getNumOutputArrays() + " outputs"); DataType dataType = DataTypeUtil.getDtypeFromContext(); if (dataType != DataType.DOUBLE) { @@ -483,21 +444,21 @@ public class GradientCheckUtil { + "DataTypeUtil.setDTypeForContext(DataType.DOUBLE); before using GradientCheckUtil"); } - DataType netDataType = graph.getConfiguration().getDataType(); + DataType netDataType = c.net.getConfiguration().getDataType(); if (netDataType != DataType.DOUBLE) { throw new IllegalStateException("Cannot perform gradient check: Network datatype is not set to double precision (" + "is: " + netDataType + "). Double precision must be used for gradient checks. 
Create network with .dataType(DataType.DOUBLE) before using GradientCheckUtil"); } - if(netDataType != graph.params().dataType()){ + if(netDataType != c.net.params().dataType()){ throw new IllegalStateException("Parameters datatype does not match network configuration datatype (" - + "is: " + graph.params().dataType() + "). If network datatype is set to DOUBLE, parameters must also be DOUBLE."); + + "is: " + c.net.params().dataType() + "). If network datatype is set to DOUBLE, parameters must also be DOUBLE."); } //Check configuration int layerCount = 0; - for (String vertexName : graph.getConfiguration().getVertices().keySet()) { - GraphVertex gv = graph.getConfiguration().getVertices().get(vertexName); + for (String vertexName : c.net.getConfiguration().getVertices().keySet()) { + GraphVertex gv = c.net.getConfiguration().getVertices().get(vertexName); if (!(gv instanceof LayerVertex)) continue; LayerVertex lv = (LayerVertex) gv; @@ -529,7 +490,7 @@ public class GradientCheckUtil { } } - if (lv.getLayerConf().getLayer().getIDropout() != null && callEachIter == null) { + if (lv.getLayerConf().getLayer().getIDropout() != null && c.callEachIter == null) { throw new IllegalStateException("When gradient checking dropout, rng seed must be reset each iteration, or no" + " dropout should be present during gradient checks - got dropout = " + lv.getLayerConf().getLayer().getIDropout() + " for layer " + layerCount); @@ -537,34 +498,34 @@ public class GradientCheckUtil { } //Set softmax clipping to 0 if necessary, to avoid spurious failures due to clipping - for(Layer l : graph.getLayers()){ + for(Layer l : c.net.getLayers()){ if(l instanceof IOutputLayer){ configureLossFnClippingIfPresent((IOutputLayer) l); } } - for (int i = 0; i < inputs.length; i++) - graph.setInput(i, inputs[i]); - for (int i = 0; i < labels.length; i++) - graph.setLabel(i, labels[i]); + for (int i = 0; i < c.inputs.length; i++) + c.net.setInput(i, c.inputs[i]); + for (int i = 0; i < c.labels.length; 
i++) + c.net.setLabel(i, c.labels[i]); - graph.setLayerMaskArrays(fMask, lMask); + c.net.setLayerMaskArrays(c.inputMask, c.labelMask); - if(callEachIter != null){ - callEachIter.accept(graph); + if(c.callEachIter != null){ + c.callEachIter.accept(c.net); } - graph.computeGradientAndScore(); - Pair gradAndScore = graph.gradientAndScore(); + c.net.computeGradientAndScore(); + Pair gradAndScore = c.net.gradientAndScore(); - ComputationGraphUpdater updater = new ComputationGraphUpdater(graph); - updater.update(gradAndScore.getFirst(), 0, 0, graph.batchSize(), LayerWorkspaceMgr.noWorkspaces()); + ComputationGraphUpdater updater = new ComputationGraphUpdater(c.net); + updater.update(gradAndScore.getFirst(), 0, 0, c.net.batchSize(), LayerWorkspaceMgr.noWorkspaces()); INDArray gradientToCheck = gradAndScore.getFirst().gradient().dup(); //need dup: gradients are a *view* of the full gradient array (which will change every time backprop is done) - INDArray originalParams = graph.params().dup(); //need dup: params are a *view* of full parameters + INDArray originalParams = c.net.params().dup(); //need dup: params are a *view* of full parameters val nParams = originalParams.length(); - Map paramTable = graph.paramTable(); + Map paramTable = c.net.paramTable(); List paramNames = new ArrayList<>(paramTable.keySet()); val paramEnds = new long[paramNames.size()]; paramEnds[0] = paramTable.get(paramNames.get(0)).length(); @@ -572,19 +533,23 @@ public class GradientCheckUtil { paramEnds[i] = paramEnds[i - 1] + paramTable.get(paramNames.get(i)).length(); } + if(c.excludeParams != null && !c.excludeParams.isEmpty()){ + log.info("NOTE: parameters will be skipped due to config: {}", c.excludeParams); + } + int currParamNameIdx = 0; int totalNFailures = 0; double maxError = 0.0; - MultiDataSet mds = new MultiDataSet(inputs, labels, fMask, lMask); - INDArray params = graph.params(); //Assumption here: params is a view that we can modify in-place + MultiDataSet mds = new 
MultiDataSet(c.inputs, c.labels, c.inputMask, c.labelMask); + INDArray params = c.net.params(); //Assumption here: params is a view that we can modify in-place for (long i = 0; i < nParams; i++) { //Get param name if (i >= paramEnds[currParamNameIdx]) { currParamNameIdx++; } String paramName = paramNames.get(currParamNameIdx); - if(excludeParams != null && excludeParams.contains(paramName)){ - log.info("Skipping parameters for parameter name: {}", paramName); + if(c.excludeParams != null && c.excludeParams.contains(paramName)){ + //log.info("Skipping parameters for parameter name: {}", paramName); i = paramEnds[currParamNameIdx++]; continue; } @@ -592,18 +557,18 @@ public class GradientCheckUtil { //(w+epsilon): Do forward pass and score double origValue = params.getDouble(i); - params.putScalar(i, origValue + epsilon); - if(callEachIter != null){ - callEachIter.accept(graph); + params.putScalar(i, origValue + c.epsilon); + if(c.callEachIter != null){ + c.callEachIter.accept(c.net); } - double scorePlus = graph.score(mds, true); //training == true for batch norm, etc (scores and gradients need to be calculated on same thing) + double scorePlus = c.net.score(mds, true); //training == true for batch norm, etc (scores and gradients need to be calculated on same thing) //(w-epsilon): Do forward pass and score - params.putScalar(i, origValue - epsilon); - if(callEachIter != null){ - callEachIter.accept(graph); + params.putScalar(i, origValue - c.epsilon); + if(c.callEachIter != null){ + c.callEachIter.accept(c.net); } - double scoreMinus = graph.score(mds, true); + double scoreMinus = c.net.score(mds, true); //Reset original param value params.putScalar(i, origValue); @@ -611,7 +576,7 @@ public class GradientCheckUtil { //Calculate numerical parameter gradient: double scoreDelta = scorePlus - scoreMinus; - double numericalGradient = scoreDelta / (2 * epsilon); + double numericalGradient = scoreDelta / (2 * c.epsilon); if (Double.isNaN(numericalGradient)) throw new 
IllegalStateException("Numerical gradient was NaN for parameter " + i + " of " + nParams); @@ -625,32 +590,31 @@ public class GradientCheckUtil { if (relError > maxError) maxError = relError; - if (relError > maxRelError || Double.isNaN(relError)) { + if (relError > c.maxRelError || Double.isNaN(relError)) { double absError = Math.abs(backpropGradient - numericalGradient); - if (absError < minAbsoluteError) { - log.info("Param " + i + " (" + paramName + ") passed: grad= " + backpropGradient - + ", numericalGrad= " + numericalGradient + ", relError= " + relError - + "; absolute error = " + absError + " < minAbsoluteError = " + minAbsoluteError); + if (absError < c.minAbsoluteError) { + if(c.print == PrintMode.ALL || c.print == PrintMode.ZEROS && absError == 0.0) { + log.info("Param " + i + " (" + paramName + ") passed: grad= " + backpropGradient + + ", numericalGrad= " + numericalGradient + ", relError= " + relError + + "; absolute error = " + absError + " < minAbsoluteError = " + c.minAbsoluteError); + } } else { - if (print) - log.info("Param " + i + " (" + paramName + ") FAILED: grad= " + backpropGradient - + ", numericalGrad= " + numericalGradient + ", relError= " + relError - + ", scorePlus=" + scorePlus + ", scoreMinus= " + scoreMinus + ", paramValue = " + origValue); - if (exitOnFirstError) + log.info("Param " + i + " (" + paramName + ") FAILED: grad= " + backpropGradient + + ", numericalGrad= " + numericalGradient + ", relError= " + relError + + ", scorePlus=" + scorePlus + ", scoreMinus= " + scoreMinus + ", paramValue = " + origValue); + if (c.exitOnFirstError) return false; totalNFailures++; } - } else if (print) { + } else if (c.print == PrintMode.ALL) { log.info("Param " + i + " (" + paramName + ") passed: grad= " + backpropGradient + ", numericalGrad= " + numericalGradient + ", relError= " + relError); } } - if (print) { - val nPass = nParams - totalNFailures; - log.info("GradientCheckUtil.checkGradients(): " + nParams + " params checked, " + nPass + " 
passed, " - + totalNFailures + " failed. Largest relative error = " + maxError); - } + val nPass = nParams - totalNFailures; + log.info("GradientCheckUtil.checkGradients(): " + nParams + " params checked, " + nPass + " passed, " + + totalNFailures + " failed. Largest relative error = " + maxError); return totalNFailures == 0; } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Convolution3D.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Convolution3D.java index cc26169cf..dc88116e5 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Convolution3D.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Convolution3D.java @@ -119,7 +119,7 @@ public class Convolution3D extends ConvolutionLayer { throw new IllegalStateException("Invalid input for Convolution3D layer (layer name=\"" + getLayerName() + "\"): Expected CNN3D input, got " + inputType); } - return InputTypeUtil.getOutputTypeCnn3DLayers(inputType, kernelSize, stride, padding, dilation, convolutionMode, + return InputTypeUtil.getOutputTypeCnn3DLayers(inputType, dataFormat, kernelSize, stride, padding, dilation, convolutionMode, nOut, layerIndex, getLayerName(), Convolution3DLayer.class); } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/ConvolutionLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/ConvolutionLayer.java index b0c5bb3d4..9e52981e2 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/ConvolutionLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/ConvolutionLayer.java @@ -34,6 +34,7 @@ import org.nd4j.base.Preconditions; import org.nd4j.linalg.api.buffer.DataType; import org.nd4j.linalg.api.ndarray.INDArray; +import java.util.Arrays; import java.util.Collection; 
import java.util.HashMap; import java.util.Map; @@ -118,19 +119,19 @@ public class ConvolutionLayer extends FeedForwardLayer { this.convolutionMode = builder.convolutionMode; this.dilation = builder.dilation; if (builder.kernelSize.length != dim) { - throw new IllegalArgumentException("Kernel argument should be a " + dim + "d array"); + throw new IllegalArgumentException("Kernel argument should be a " + dim + "d array, got " + Arrays.toString(builder.kernelSize)); } this.kernelSize = builder.kernelSize; if (builder.stride.length != dim) { - throw new IllegalArgumentException("Strides argument should be a " + dim + "d array"); + throw new IllegalArgumentException("Strides argument should be a " + dim + "d array, got " + Arrays.toString(builder.stride)); } this.stride = builder.stride; if (builder.padding.length != dim) { - throw new IllegalArgumentException("Padding argument should be a " + dim + "d array"); + throw new IllegalArgumentException("Padding argument should be a " + dim + "d array, got " + Arrays.toString(builder.padding)); } this.padding = builder.padding; if (builder.dilation.length != dim) { - throw new IllegalArgumentException("Dilation argument should be a " + dim + "d array"); + throw new IllegalArgumentException("Dilation argument should be a " + dim + "d array, got " + Arrays.toString(builder.dilation)); } this.dilation = builder.dilation; this.cudnnAlgoMode = builder.cudnnAlgoMode; diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Deconvolution3D.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Deconvolution3D.java new file mode 100644 index 000000000..01bd3ca83 --- /dev/null +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Deconvolution3D.java @@ -0,0 +1,219 @@ +/* ****************************************************************************** + * Copyright (c) 2019 Konduit K.K. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0. + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + * + * SPDX-License-Identifier: Apache-2.0 + ******************************************************************************/ + +package org.deeplearning4j.nn.conf.layers; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.NoArgsConstructor; +import lombok.ToString; +import org.deeplearning4j.nn.api.Layer; +import org.deeplearning4j.nn.api.ParamInitializer; +import org.deeplearning4j.nn.conf.ConvolutionMode; +import org.deeplearning4j.nn.conf.InputPreProcessor; +import org.deeplearning4j.nn.conf.NeuralNetConfiguration; +import org.deeplearning4j.nn.conf.inputs.InputType; +import org.deeplearning4j.nn.layers.convolution.Deconvolution2DLayer; +import org.deeplearning4j.nn.layers.convolution.Deconvolution3DLayer; +import org.deeplearning4j.nn.params.Deconvolution3DParamInitializer; +import org.deeplearning4j.nn.params.DeconvolutionParamInitializer; +import org.deeplearning4j.optimize.api.TrainingListener; +import org.deeplearning4j.util.ValidationUtils; +import org.nd4j.linalg.api.buffer.DataType; +import org.nd4j.linalg.api.ndarray.INDArray; + +import java.util.Collection; +import java.util.Map; + +/** + * 3D deconvolution layer configuration
+ * + * Deconvolutions are also known as transpose convolutions or fractionally strided convolutions. In essence, + * deconvolutions swap forward and backward pass with regular 3D convolutions. + * + * See the paper by Matt Zeiler for details: http://www.matthewzeiler.com/wp-content/uploads/2017/07/cvpr2010.pdf + * + * For an intuitive guide to convolution arithmetic and shapes, see: + * https://arxiv.org/abs/1603.07285v1 + * + * @author Alex Black + */ +@Data +@NoArgsConstructor +@ToString(callSuper = true) +@EqualsAndHashCode(callSuper = true) +public class Deconvolution3D extends ConvolutionLayer { + + private Convolution3D.DataFormat dataFormat = Convolution3D.DataFormat.NCDHW; // in libnd4j: 1 - NCDHW, 0 - NDHWC + + /** + * Deconvolution3D layer nIn in the input layer is the number of channels nOut is the number of filters to be used + * in the net or in other words the channels The builder specifies the filter/kernel size, the stride and padding + * The pooling layer takes the kernel size + */ + protected Deconvolution3D(Builder builder) { + super(builder); + this.dataFormat = builder.dataFormat; + initializeConstraints(builder); + } + + public boolean hasBias() { + return hasBias; + } + + @Override + public Deconvolution3D clone() { + Deconvolution3D clone = (Deconvolution3D) super.clone(); + if (clone.kernelSize != null) { + clone.kernelSize = clone.kernelSize.clone(); + } + if (clone.stride != null) { + clone.stride = clone.stride.clone(); + } + if (clone.padding != null) { + clone.padding = clone.padding.clone(); + } + return clone; + } + + @Override + public Layer instantiate(NeuralNetConfiguration conf, Collection trainingListeners, + int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) { + LayerValidation.assertNInNOutSet("Deconvolution2D", getLayerName(), layerIndex, getNIn(), getNOut()); + + Deconvolution3DLayer ret = + new Deconvolution3DLayer(conf, networkDataType); + ret.setListeners(trainingListeners); + 
ret.setIndex(layerIndex); + ret.setParamsViewArray(layerParamsView); + Map paramTable = initializer().init(conf, layerParamsView, initializeParams); + ret.setParamTable(paramTable); + ret.setConf(conf); + return ret; + } + + @Override + public ParamInitializer initializer() { + return Deconvolution3DParamInitializer.getInstance(); + } + + @Override + public InputPreProcessor getPreProcessorForInputType(InputType inputType) { + if (inputType == null) { + throw new IllegalStateException("Invalid input for Deconvolution3D layer (layer name=\"" + getLayerName() + "\"): input is null"); + } + + return InputTypeUtil.getPreProcessorForInputTypeCnn3DLayers(inputType, getLayerName()); + } + + @Override + public void setNIn(InputType inputType, boolean override) { + if (inputType == null || inputType.getType() != InputType.Type.CNN3D) { + throw new IllegalStateException("Invalid input for Deconvolution 3D layer (layer name=\"" + getLayerName() + "\"): Expected CNN3D input, got " + inputType); + } + + if (nIn <= 0 || override) { + InputType.InputTypeConvolutional3D c = (InputType.InputTypeConvolutional3D) inputType; + this.nIn = c.getChannels(); + } + } + + @Override + public InputType getOutputType(int layerIndex, InputType inputType) { + if (inputType == null || inputType.getType() != InputType.Type.CNN3D) { + throw new IllegalStateException("Invalid input for Deconvolution layer (layer name=\"" + getLayerName() + + "\"): Expected CNN input, got " + inputType); + } + + return InputTypeUtil.getOutputTypeDeconv3dLayer(inputType, kernelSize, stride, padding, dilation, convolutionMode, + dataFormat, nOut, layerIndex, getLayerName(), Deconvolution3DLayer.class); + } + + public static class Builder extends BaseConvBuilder { + + private Convolution3D.DataFormat dataFormat = Convolution3D.DataFormat.NCDHW; // in libnd4j: 1 - NCDHW, 0 - NDHWC + + public Builder() { + super(new int[] {2, 2, 2}, new int[] {1, 1, 1}, new int[] {0, 0, 0}, new int[] {1, 1, 1}, 3); + } + + @Override + 
protected boolean allowCausal() { + //Causal convolution - allowed for 1D only + return false; + } + + /** + * Set the convolution mode for the Convolution layer. See {@link ConvolutionMode} for more details + * + * @param convolutionMode Convolution mode for layer + */ + public Builder convolutionMode(ConvolutionMode convolutionMode) { + return super.convolutionMode(convolutionMode); + } + + /** + * Size of the convolution rows/columns + * + * @param kernelSize the height and width of the kernel + */ + public Builder kernelSize(int... kernelSize) { + this.setKernelSize(kernelSize); + return this; + } + + public Builder stride(int... stride) { + this.setStride(stride); + return this; + } + + public Builder padding(int... padding) { + this.setPadding(padding); + return this; + } + + @Override + public void setKernelSize(int... kernelSize) { + this.kernelSize = ValidationUtils.validate3NonNegative(kernelSize, "kernelSize"); + } + + @Override + public void setStride(int... stride) { + this.stride = ValidationUtils.validate3NonNegative(stride, "stride"); + } + + @Override + public void setPadding(int... padding) { + this.padding = ValidationUtils.validate3NonNegative(padding, "padding"); + } + + @Override + public void setDilation(int... 
dilation) { + this.dilation = ValidationUtils.validate3NonNegative(dilation, "dilation"); + } + + public Builder dataFormat(Convolution3D.DataFormat dataFormat){ + this.dataFormat = dataFormat; + return this; + } + + @Override + public Deconvolution3D build() { + return new Deconvolution3D(this); + } + } + +} diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/EmbeddingLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/EmbeddingLayer.java index 0227fee23..6cd8630d0 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/EmbeddingLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/EmbeddingLayer.java @@ -29,6 +29,7 @@ import org.deeplearning4j.nn.weights.embeddings.ArrayEmbeddingInitializer; import org.deeplearning4j.nn.weights.embeddings.EmbeddingInitializer; import org.deeplearning4j.nn.weights.embeddings.WeightInitEmbedding; import org.deeplearning4j.optimize.api.TrainingListener; +import org.nd4j.linalg.activations.impl.ActivationIdentity; import org.nd4j.linalg.api.buffer.DataType; import org.nd4j.linalg.api.ndarray.INDArray; @@ -104,7 +105,6 @@ public class EmbeddingLayer extends FeedForwardLayer { return hasBias; } - @NoArgsConstructor @Getter @Setter public static class Builder extends FeedForwardLayer.Builder { @@ -115,6 +115,13 @@ public class EmbeddingLayer extends FeedForwardLayer { */ private boolean hasBias = false; + public Builder(){ + //Default to Identity activation - i.e., don't inherit. + //For example, if user sets ReLU as global default, they very likely don't intend to use it for Embedding layer also + this.activationFn = new ActivationIdentity(); + } + + /** * If true: include bias parameters in the layer. False (default): no bias. 
* diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/EmbeddingSequenceLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/EmbeddingSequenceLayer.java index 3e5766af2..93585a1d0 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/EmbeddingSequenceLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/EmbeddingSequenceLayer.java @@ -30,6 +30,7 @@ import org.deeplearning4j.nn.weights.embeddings.ArrayEmbeddingInitializer; import org.deeplearning4j.nn.weights.embeddings.EmbeddingInitializer; import org.deeplearning4j.nn.weights.embeddings.WeightInitEmbedding; import org.deeplearning4j.optimize.api.TrainingListener; +import org.nd4j.linalg.activations.impl.ActivationIdentity; import org.nd4j.linalg.api.buffer.DataType; import org.nd4j.linalg.api.ndarray.INDArray; @@ -138,11 +139,16 @@ public class EmbeddingSequenceLayer extends FeedForwardLayer { } - @NoArgsConstructor @Getter @Setter public static class Builder extends FeedForwardLayer.Builder { + public Builder(){ + //Default to Identity activation - i.e., don't inherit. + //For example, if user sets ReLU as global default, they very likely don't intend to use it for Embedding layer also + this.activationFn = new ActivationIdentity(); + } + /** * If true: include bias parameters in the layer. False (default): no bias. 
* diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/FeedForwardLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/FeedForwardLayer.java index 026f0d350..206071e38 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/FeedForwardLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/FeedForwardLayer.java @@ -95,9 +95,8 @@ public abstract class FeedForwardLayer extends BaseLayer { case CNN3D: //CNN3D -> FF InputType.InputTypeConvolutional3D c3d = (InputType.InputTypeConvolutional3D) inputType; - //TODO don't hardcode NCDHW return new Cnn3DToFeedForwardPreProcessor(c3d.getDepth(), c3d.getHeight(), c3d.getWidth(), - c3d.getChannels(), true); + c3d.getChannels(), c3d.getDataFormat() == Convolution3D.DataFormat.NCDHW); default: throw new RuntimeException("Unknown input type: " + inputType); } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/InputTypeUtil.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/InputTypeUtil.java index 7c97930ae..eb78323b6 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/InputTypeUtil.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/InputTypeUtil.java @@ -36,6 +36,8 @@ import java.util.Arrays; @Slf4j public class InputTypeUtil { + private InputTypeUtil(){ } + public static InputType getOutputTypeDeconvLayer(InputType inputType, int[] kernelSize, int[] stride, int[] padding, int[] dilation, ConvolutionMode convolutionMode, long outputDepth, long layerIdx, String layerName, Class layerClass) { @@ -77,9 +79,60 @@ public class InputTypeUtil { return InputType.convolutional(hOut, wOut, outputDepth); } - public static InputType getOutputTypeCnn3DLayers(InputType inputType, int[] kernelSize, int[] stride, int[] 
padding, - int[] dilation, ConvolutionMode convolutionMode, long outputChannels, long layerIdx, - String layerName, Class layerClass) { + public static InputType getOutputTypeDeconv3dLayer(InputType inputType, int[] kernelSize, int[] stride, int[] padding, + int[] dilation, ConvolutionMode convolutionMode, Convolution3D.DataFormat dataFormat, + long outputDepth, long layerIdx, String layerName, Class layerClass) { + InputType.InputTypeConvolutional3D i = (InputType.InputTypeConvolutional3D) inputType; + + long hIn = i.getHeight(); + long wIn = i.getWidth(); + long dIn = i.getDepth(); + + + int padH = (padding == null ? 0 : padding[0]); //May be null for ConvolutionMode.Same + int padW = (padding == null ? 0 : padding[1]); + int padD = (padding == null ? 0 : padding[2]); + int kH = kernelSize[0]; + int kW = kernelSize[1]; + int kD = kernelSize[2]; + if (dilation[0] != 1) { + kH = kH + (kH - 1) * (dilation[0] - 1); + } + if (dilation[1] != 1) { + kW = kW + (kW - 1) * (dilation[1] - 1); + } + if (dilation[2] != 1) { + kD = kD + (kD - 1) * (dilation[2] - 1); + } + + int sH = stride[0]; + int sW = stride[1]; + int sD = stride[2]; + + if (sH <= 0 || sW <= 0 || sD <= 0) { + throw new DL4JInvalidConfigException(getConfigErrorCommonLine(layerIdx, layerName, layerClass, sH <= 0) + + " Invalid strides: strides must be > 0 (strideH = " + sH + ", strideW = " + sW + ", stride = " + sD + ")" + + "\n" + getConfigErrorCommonLastLine(inputType, kernelSize, stride, padding, outputDepth, + convolutionMode)); + } + + if (convolutionMode == ConvolutionMode.Same) { + long hOut = stride[0] * hIn; + long wOut = stride[1] * wIn; + long dOut = stride[2] * dIn; + return InputType.convolutional3D(dataFormat, dOut, hOut, wOut, outputDepth); + } + + long hOut = sH * (hIn - 1) + kH - 2 * padH; + long wOut = sW * (wIn - 1) + kW - 2 * padW; + long dOut = sD * (dIn - 1) + kD - 2 * padD; + + return InputType.convolutional3D(dataFormat, dOut, hOut, wOut, outputDepth); + } + + public static InputType 
getOutputTypeCnn3DLayers(InputType inputType, Convolution3D.DataFormat dataFormat, int[] kernelSize, int[] stride, int[] padding, + int[] dilation, ConvolutionMode convolutionMode, long outputChannels, long layerIdx, + String layerName, Class layerClass) { if (convolutionMode == null) { String name = layerName == null ? "(not named)" : layerName; throw new DL4JInvalidConfigException("Invalid configuration: convolution mode is null for layer (idx=" @@ -204,7 +257,7 @@ public class InputTypeUtil { int outH = (int) Math.ceil(inHeight / ((double) sH)); int outW = (int) Math.ceil(inWidth / ((double) sW)); - return InputType.convolutional3D(outD, outH, outW, outputChannels); + return InputType.convolutional3D(dataFormat, outD, outH, outW, outputChannels); } long dOut = (inDepth - kD + 2 * padD) / sD + 1; diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/PReLULayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/PReLULayer.java index db9a19ecc..17275e574 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/PReLULayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/PReLULayer.java @@ -25,6 +25,7 @@ import org.deeplearning4j.nn.conf.inputs.InputType; import org.deeplearning4j.nn.conf.memory.LayerMemoryReport; import org.deeplearning4j.nn.conf.memory.MemoryReport; import org.deeplearning4j.nn.params.PReLUParamInitializer; +import org.deeplearning4j.nn.weights.WeightInitConstant; import org.deeplearning4j.optimize.api.TrainingListener; import org.nd4j.base.Preconditions; import org.nd4j.linalg.api.buffer.DataType; @@ -115,11 +116,14 @@ public class PReLULayer extends BaseLayer { .cacheMemory(MemoryReport.CACHE_MODE_ALL_ZEROS, MemoryReport.CACHE_MODE_ALL_ZEROS).build(); } - @NoArgsConstructor @Getter @Setter public static class Builder extends FeedForwardLayer.Builder { + public Builder(){ + this.weightInitFn = 
new WeightInitConstant(0); + } + /** * Explicitly set input shape of incoming activations so that parameters can be initialized properly. This * explicitly excludes the mini-batch dimension. diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Subsampling3DLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Subsampling3DLayer.java index 550e29e4f..b5d73bceb 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Subsampling3DLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/Subsampling3DLayer.java @@ -142,7 +142,7 @@ public class Subsampling3DLayer extends NoParamLayer { long inChannels = ((InputType.InputTypeConvolutional3D) inputType).getChannels(); if (inChannels > Integer.MAX_VALUE) throw new ND4JArraySizeException(); - return InputTypeUtil.getOutputTypeCnn3DLayers(inputType, kernelSize, stride, padding, new int[] {1, 1, 1}, // no dilation + return InputTypeUtil.getOutputTypeCnn3DLayers(inputType, dataFormat, kernelSize, stride, padding, new int[] {1, 1, 1}, // no dilation convolutionMode, (int) inChannels, layerIndex, getLayerName(), Subsampling3DLayer.class); } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/preprocessor/Cnn3DToFeedForwardPreProcessor.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/preprocessor/Cnn3DToFeedForwardPreProcessor.java index dba78df4a..185fd18d3 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/preprocessor/Cnn3DToFeedForwardPreProcessor.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/preprocessor/Cnn3DToFeedForwardPreProcessor.java @@ -101,7 +101,7 @@ public class Cnn3DToFeedForwardPreProcessor implements InputPreProcessor { throw new IllegalStateException("Invalid input array: expected shape in format " + "[minibatch, 
channels, channels, height, width] or " + "[minibatch, channels, height, width, channels]" - + "for numChannels: " + numChannels + ", inputDepth " + inputDepth + ", inputHeight " + inputHeight + + " for numChannels: " + numChannels + ", inputDepth " + inputDepth + ", inputHeight " + inputHeight + " and inputWidth " + inputWidth + ", but got " + Arrays.toString(input.shape())); } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/ConvolutionLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/ConvolutionLayer.java index 8be034735..8ae1a8531 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/ConvolutionLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/ConvolutionLayer.java @@ -17,6 +17,7 @@ package org.deeplearning4j.nn.layers.convolution; +import lombok.extern.slf4j.Slf4j; import org.deeplearning4j.exception.DL4JInvalidInputException; import org.deeplearning4j.nn.api.MaskState; import org.deeplearning4j.nn.conf.CacheMode; @@ -53,8 +54,8 @@ import java.util.Arrays; * * @author Adam Gibson (original impl), Alex Black (current version) */ +@Slf4j public class ConvolutionLayer extends BaseLayer { - protected static final Logger log = LoggerFactory.getLogger(ConvolutionLayer.class); protected INDArray i2d; protected ConvolutionHelper helper = null; diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/Deconvolution2DLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/Deconvolution2DLayer.java index b0db7a5ed..3cb34b6ab 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/Deconvolution2DLayer.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/Deconvolution2DLayer.java @@ -70,7 +70,7 
@@ public class Deconvolution2DLayer extends ConvolutionLayer { assertInputSet(true); if (input.rank() != 4) { throw new DL4JInvalidInputException("Got rank " + input.rank() - + " array as input to SubsamplingLayer with shape " + Arrays.toString(input.shape()) + + " array as input to Deconvolution2DLayer with shape " + Arrays.toString(input.shape()) + ". Expected rank 4 array with shape [minibatchSize, channels, inputHeight, inputWidth]. " + layerId()); } diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/Deconvolution3DLayer.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/Deconvolution3DLayer.java new file mode 100644 index 000000000..b9d9339ea --- /dev/null +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/convolution/Deconvolution3DLayer.java @@ -0,0 +1,231 @@ +/* ****************************************************************************** + * Copyright (c) 2019 Konduit K.K. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0. + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + ******************************************************************************/ + +package org.deeplearning4j.nn.layers.convolution; + +import lombok.val; +import org.deeplearning4j.exception.DL4JInvalidInputException; +import org.deeplearning4j.nn.conf.CacheMode; +import org.deeplearning4j.nn.conf.ConvolutionMode; +import org.deeplearning4j.nn.conf.NeuralNetConfiguration; +import org.deeplearning4j.nn.conf.layers.Convolution3D; +import org.deeplearning4j.nn.conf.layers.Deconvolution3D; +import org.deeplearning4j.nn.gradient.DefaultGradient; +import org.deeplearning4j.nn.gradient.Gradient; +import org.deeplearning4j.nn.layers.BaseLayer; +import org.deeplearning4j.nn.params.DeconvolutionParamInitializer; +import org.deeplearning4j.nn.workspace.ArrayType; +import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; +import org.deeplearning4j.util.ConvolutionUtils; +import org.nd4j.linalg.activations.IActivation; +import org.nd4j.linalg.api.buffer.DataType; +import org.nd4j.linalg.api.ndarray.INDArray; +import org.nd4j.linalg.api.ops.CustomOp; +import org.nd4j.linalg.api.ops.DynamicCustomOp; +import org.nd4j.linalg.api.shape.Shape; +import org.nd4j.linalg.factory.Nd4j; +import org.nd4j.linalg.primitives.Pair; +import org.nd4j.linalg.util.ArrayUtil; + +import java.util.Arrays; + +/** + * 3D deconvolution layer implementation. + * + * Deconvolutions are also known as transpose convolutions or fractionally strided convolutions. + * In essence, deconvolutions swap forward and backward pass with regular 3D convolutions. 
+ * + * See the paper by Matt Zeiler for details: + * http://www.matthewzeiler.com/wp-content/uploads/2017/07/cvpr2010.pdf + * + * For an intuitive guide to convolution arithmetic and shapes, see: + * https://arxiv.org/abs/1603.07285v1 + * + * + * @author Alex Black + */ +public class Deconvolution3DLayer extends BaseLayer { + + public Deconvolution3DLayer(NeuralNetConfiguration conf, DataType dataType) { + super(conf, dataType); + } + + @Override + public Pair backpropGradient(INDArray epsilon, LayerWorkspaceMgr workspaceMgr) { + assertInputSet(true); + if (input.rank() != 5) { + throw new DL4JInvalidInputException("Got rank " + input.rank() + + " array as input to Deconvolution3DLayer with shape " + Arrays.toString(input.shape()) + + ". Expected rank 5 array with shape [minibatchSize, channels, inputHeight, inputWidth, inputDepth] or" + + " [minibatchSize, inputHeight, inputWidth, inputDepth, channels]. " + layerId()); + } + + INDArray weights = getParamWithNoise(DeconvolutionParamInitializer.WEIGHT_KEY, true, workspaceMgr); + + Convolution3D.DataFormat df = layerConf().getDataFormat(); + ConvolutionMode cm = layerConf().getConvolutionMode(); + + int[] dilation = layerConf().getDilation(); + int[] kernel = layerConf().getKernelSize(); + int[] strides = layerConf().getStride(); + int[] pad = layerConf().getPadding(); + + INDArray biasGradView = gradientViews.get(DeconvolutionParamInitializer.BIAS_KEY); + INDArray weightGradView = gradientViews.get(DeconvolutionParamInitializer.WEIGHT_KEY); + + INDArray outEps = workspaceMgr.create(ArrayType.ACTIVATION_GRAD, weights.dataType(), input.shape(), 'c'); + + Integer sameMode = (layerConf().getConvolutionMode() == ConvolutionMode.Same) ? 1 : 0; + + int[] args = new int[] { + kernel[0], kernel[1], kernel[2], strides[0], strides[1], strides[2], + pad[0], pad[1], pad[2], dilation[0], dilation[1], dilation[2], sameMode, + df == Convolution3D.DataFormat.NCDHW ? 
0 : 1 + }; + + INDArray delta; + IActivation afn = layerConf().getActivationFn(); + INDArray preOutput = preOutput(true, workspaceMgr); + delta = afn.backprop(preOutput, epsilon).getFirst(); + + INDArray[] opInputs; + INDArray[] opOutputs; + if(layerConf().hasBias()){ + INDArray bias = getParamWithNoise(DeconvolutionParamInitializer.BIAS_KEY, true, workspaceMgr); + opInputs = new INDArray[]{input, weights, bias, delta}; + opOutputs = new INDArray[]{outEps, weightGradView, biasGradView}; + } else { + opInputs = new INDArray[]{input, weights, delta}; + opOutputs = new INDArray[]{outEps, weightGradView}; + } + CustomOp op = DynamicCustomOp.builder("deconv3d_bp") + .addInputs(opInputs) + .addIntegerArguments(args) + .addOutputs(opOutputs) + .callInplace(false) + .build(); + Nd4j.getExecutioner().exec(op); + + + Gradient retGradient = new DefaultGradient(); + if(layerConf().hasBias()){ + retGradient.setGradientFor(DeconvolutionParamInitializer.BIAS_KEY, biasGradView); + } + retGradient.setGradientFor(DeconvolutionParamInitializer.WEIGHT_KEY, weightGradView, 'c'); + weightNoiseParams.clear(); + + return new Pair<>(retGradient, outEps); + } + + protected INDArray preOutput(boolean training , LayerWorkspaceMgr workspaceMgr) { + + INDArray bias = getParamWithNoise(DeconvolutionParamInitializer.BIAS_KEY, training, workspaceMgr); + INDArray weights = getParamWithNoise(DeconvolutionParamInitializer.WEIGHT_KEY, training, workspaceMgr); + + //Input validation: expect rank 5 matrix + if (input.rank() != 5) { + throw new DL4JInvalidInputException("Got rank " + input.rank() + + " array as input to Deconvolution3DLayer with shape " + Arrays.toString(input.shape()) + + ". Expected rank 5 array with shape [minibatchSize, channels, inputHeight, inputWidth, inputDepth] or" + + " [minibatchSize, inputHeight, inputWidth, inputDepth, channels]. 
" + layerId()); + } + + Convolution3D.DataFormat df = layerConf().getDataFormat(); + boolean ncdhw = layerConf().getDataFormat() == Convolution3D.DataFormat.NCDHW; + int chDim = ncdhw ? 1 : 4; + if (input.size(chDim) != layerConf().getNIn() ) { + String layerName = conf.getLayer().getLayerName(); + if (layerName == null) + layerName = "(not named)"; + throw new DL4JInvalidInputException("Cannot do forward pass in Deconvolution3D layer (layer name = " + layerName + + ", layer index = " + index + "): input array channels does not match CNN layer configuration" + + " (data input channels = " + input.size(chDim) + ", " + (ncdhw ? "[minibatch,channels,height,width,depth]=" : "[minibatch,height,width,depth,channels]=") + + Arrays.toString(input.shape()) + "; expected" + " input channels = " + layerConf().getNIn() + ") " + + layerId()); + } + + int[] dilation = layerConf().getDilation(); + int[] kernel = layerConf().getKernelSize(); + int[] strides = layerConf().getStride(); + + int[] pad; + ConvolutionMode cm = layerConf().getConvolutionMode(); + long[] outSize; + int[] inSize = df == Convolution3D.DataFormat.NCDHW ? new int[]{(int)input.size(2), (int)input.size(3), (int)input.size(4)} : new int[]{(int)input.size(1), (int)input.size(2), (int)input.size(3)}; + if (cm == ConvolutionMode.Same) { + outSize = ConvolutionUtils.getDeconvolution3DOutputSize(input, kernel, strides, null, dilation, cm, layerConf().getDataFormat()); //Also performs validation + pad = ConvolutionUtils.getSameModeTopLeftPadding(ArrayUtil.toInts(outSize), inSize, kernel, strides, dilation ); + } else { + pad = layerConf().getPadding(); + outSize = ConvolutionUtils.getDeconvolution3DOutputSize(input, kernel, strides, pad, dilation, cm, layerConf().getDataFormat()); //Also performs validation + } + + long outH = outSize[0]; + long outW = outSize[1]; + long outD = outSize[2]; + + + val miniBatch = input.size(0); + long[] outShape = df == Convolution3D.DataFormat.NCDHW ? 
new long[]{miniBatch, layerConf().getNOut(), outH, outW, outD} : new long[]{miniBatch, outH, outW, outD, layerConf().getNOut()}; + INDArray output = workspaceMgr.create(ArrayType.ACTIVATIONS, input.dataType(), outShape, 'c'); + + int sameMode = (cm == ConvolutionMode.Same) ? 1 : 0; + + int[] args = new int[] { + kernel[0], kernel[1], kernel[2], strides[0], strides[1], strides[2], + pad[0], pad[1], pad[2], dilation[0], dilation[1], dilation[2], sameMode, + df == Convolution3D.DataFormat.NCDHW ? 0 : 1 + }; + + INDArray[] opInputs; + if (layerConf().hasBias()) { + opInputs = new INDArray[]{input, weights, bias}; + } else { + opInputs = new INDArray[]{input, weights}; + } + CustomOp op = DynamicCustomOp.builder("deconv3d") + .addInputs(opInputs) + .addIntegerArguments(args) + .addOutputs(output) + .callInplace(false) + .build(); + Nd4j.getExecutioner().exec(op); + + return output; + } + + @Override + public INDArray activate(boolean training, LayerWorkspaceMgr workspaceMgr) { + assertInputSet(false); + + if (cacheMode == null) + cacheMode = CacheMode.NONE; + + applyDropOutIfNecessary(training, workspaceMgr); + + INDArray z = preOutput(training, workspaceMgr); + + IActivation afn = layerConf().getActivationFn(); + + INDArray activation = afn.getActivation(z, training); + return activation; + } + + @Override + public boolean isPretrainLayer() { + return false; + } +} \ No newline at end of file diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/feedforward/PReLU.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/feedforward/PReLU.java index b35d946aa..4a0fc6aa0 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/feedforward/PReLU.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/layers/feedforward/PReLU.java @@ -1,5 +1,6 @@ /******************************************************************************* * Copyright (c) 2015-2018 
Skymind, Inc. + * Copyright (c) 2019-2020 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at @@ -82,7 +83,10 @@ public class PReLU extends BaseLayer deltas = prelu.backprop(layerInput, epsilon); INDArray delta = deltas.getFirst(); - INDArray weightGradView = deltas.getSecond(); + INDArray weightGrad = deltas.getSecond(); + INDArray weightGradView = gradientViews.get(PReLUParamInitializer.WEIGHT_KEY); + weightGradView.assign(weightGrad); + delta = workspaceMgr.leverageTo(ArrayType.ACTIVATION_GRAD, delta); //Usually a no-op (except for perhaps identity) delta = backpropDropOutIfPresent(delta); @@ -98,4 +102,4 @@ public class PReLU extends BaseLayer init(NeuralNetConfiguration conf, INDArray paramsView, boolean initializeParams) { + Deconvolution3D layer = (Deconvolution3D) conf.getLayer(); + if (layer.getKernelSize().length != 3) throw new IllegalArgumentException("Filter size must be == 3"); + + Map params = Collections.synchronizedMap(new LinkedHashMap()); + + Deconvolution3D layerConf = (Deconvolution3D) conf.getLayer(); + val nOut = layerConf.getNOut(); + + if (layer.hasBias()) { + INDArray biasView = paramsView.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(0, nOut)); + INDArray weightView = paramsView.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(nOut, numParams(conf))); + params.put(BIAS_KEY, createBias(conf, biasView, initializeParams)); + params.put(WEIGHT_KEY, createWeightMatrix(conf, weightView, initializeParams)); + conf.addVariable(WEIGHT_KEY); + conf.addVariable(BIAS_KEY); + } else { + INDArray weightView = paramsView; + params.put(WEIGHT_KEY, createWeightMatrix(conf, weightView, initializeParams)); + conf.addVariable(WEIGHT_KEY); + } + + return params; + } + + @Override + public Map getGradientsFromFlattened(NeuralNetConfiguration conf, INDArray gradientView) { + + Deconvolution3D layerConf = (Deconvolution3D) 
conf.getLayer(); + + int[] kernel = layerConf.getKernelSize(); + val nIn = layerConf.getNIn(); + val nOut = layerConf.getNOut(); + + Map out = new LinkedHashMap<>(); + if (layerConf.hasBias()) { + INDArray biasGradientView = gradientView.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(0, nOut)); + INDArray weightGradientView = + gradientView.get(NDArrayIndex.interval(0,0,true), NDArrayIndex.interval(nOut, numParams(conf))) + .reshape('c', kernel[0], kernel[1], kernel[2], nOut, nIn); + out.put(BIAS_KEY, biasGradientView); + out.put(WEIGHT_KEY, weightGradientView); + } else { + INDArray weightGradientView = gradientView.reshape('c', kernel[0], kernel[1], kernel[2], nOut, nIn); + out.put(WEIGHT_KEY, weightGradientView); + } + return out; + } + + + protected INDArray createWeightMatrix(NeuralNetConfiguration conf, INDArray weightView, boolean initializeParams) { + /* + Create a 5d weight matrix of: + (number of kernels, num input channels, kernel depth, kernel height, kernel width) + Note c order is used specifically for the CNN weights, as opposed to f order elsewhere + Inputs to the convolution layer are: + (batch size, num input feature maps, image depth, image height, image width) + */ + Deconvolution3D layerConf = (Deconvolution3D) conf.getLayer(); + + if (initializeParams) { + int[] kernel = layerConf.getKernelSize(); + int[] stride = layerConf.getStride(); + + val inputDepth = layerConf.getNIn(); + val outputDepth = layerConf.getNOut(); + + double fanIn = inputDepth * kernel[0] * kernel[1] * kernel[2]; + double fanOut = outputDepth * kernel[0] * kernel[1] * kernel[2] / + ((double) stride[0] * stride[1] * stride[2]); + + //libnd4j: [kD, kH, kW, oC, iC] + val weightsShape = new long[]{kernel[0], kernel[1], kernel[2], outputDepth, inputDepth}; + + return layerConf.getWeightInitFn().init(fanIn, fanOut, weightsShape, 'c', weightView); + } else { + int[] kernel = layerConf.getKernelSize(); + return WeightInitUtil.reshapeWeights( + new long[]{kernel[0], 
kernel[1], kernel[2], layerConf.getNOut(), layerConf.getNIn()}, weightView, 'c'); + } + } +} diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/optimize/listeners/ParamAndGradientIterationListener.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/optimize/listeners/ParamAndGradientIterationListener.java deleted file mode 100644 index fe4d01b1a..000000000 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/optimize/listeners/ParamAndGradientIterationListener.java +++ /dev/null @@ -1,235 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.deeplearning4j.optimize.listeners; - -import lombok.Builder; -import org.deeplearning4j.nn.api.Model; -import org.deeplearning4j.optimize.api.BaseTrainingListener; -import org.nd4j.linalg.api.ndarray.INDArray; -import org.nd4j.linalg.ops.transforms.Transforms; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardOpenOption; -import java.util.Map; - -/** - * An iteration listener that provides details on parameters and gradients at each iteration during traning. 
- * Attempts to provide much of the same information as the UI histogram iteration listener, but in a text-based - * format (for example, when learning on a system accessed via SSH etc). - * i.e., is intended to aid network tuning and debugging
- * This iteration listener is set up to calculate mean, min, max, and mean absolute value - * of each type of parameter and gradient in the network at each iteration.
- * - * @author Alex Black - * @deprecated StatsListener can be used instead, storing data using FileStatsStorage - UI is not required - */ -public class ParamAndGradientIterationListener extends BaseTrainingListener { - private static final int MAX_WRITE_FAILURE_MESSAGES = 10; - private static final Logger logger = LoggerFactory.getLogger(ParamAndGradientIterationListener.class); - - private int iterations; - private long totalIterationCount = 0; - private boolean printMean = true; - private boolean printHeader = true; - private boolean printMinMax = true; - private boolean printMeanAbsValue = true; - private File file; - private Path filePath; - private boolean outputToConsole; - private boolean outputToFile; - private boolean outputToLogger; - private String delimiter = "\t"; - - - private int writeFailureCount = 0; - - - /** Default constructor for output to console only every iteration, tab delimited */ - public ParamAndGradientIterationListener() { - this(1, true, true, true, true, true, false, false, null, "\t"); - } - - /**Full constructor with all options. - * Note also: ParamAndGradientIterationListener.builder() can be used instead of this constructor. - * @param iterations calculate and report values every 'iterations' iterations - * @param printHeader Whether to output a header row (i.e., names for each column) - * @param printMean Calculate and display the mean of parameters and gradients - * @param printMinMax Calculate and display the min/max of the parameters and gradients - * @param printMeanAbsValue Calculate and display the mean absolute value - * @param outputToConsole If true, display the values to the console (System.out.println()) - * @param outputToFile If true, write the values to a file, one per line - * @param outputToLogger If true, log the values - * @param file File to write values to. 
May be null, not used if outputToFile == false - * @param delimiter delimiter (for example, "\t" or "," etc) - */ - @Builder - public ParamAndGradientIterationListener(int iterations, boolean printHeader, boolean printMean, - boolean printMinMax, boolean printMeanAbsValue, boolean outputToConsole, boolean outputToFile, - boolean outputToLogger, File file, String delimiter) { - this.printHeader = printHeader; - this.printMean = printMean; - this.printMinMax = printMinMax; - this.printMeanAbsValue = printMeanAbsValue; - this.iterations = iterations; - this.file = file; - if (this.file != null) { - this.filePath = file.toPath(); - } - this.outputToConsole = outputToConsole; - this.outputToFile = outputToFile; - this.outputToLogger = outputToLogger; - this.delimiter = delimiter; - } - - @Override - public void iterationDone(Model model, int iteration, int epoch) { - totalIterationCount++; - - if (totalIterationCount == 1 && printHeader) { - Map params = model.paramTable(); - model.conf().getVariables(); - - StringBuilder sb = new StringBuilder(); - - sb.append("n"); - sb.append(delimiter); - sb.append("score"); - - for (String s : params.keySet()) { - //Parameters: - if (printMean) - sb.append(delimiter).append(s).append("_mean"); - //Min, max - if (printMinMax) { - sb.append(delimiter).append(s).append("_min").append(delimiter).append(s).append("_max"); - } - if (printMeanAbsValue) - sb.append(delimiter).append(s).append("_meanAbsValue"); - - //Gradients: - if (printMean) - sb.append(delimiter).append(s).append("_meanG"); - //Min, max - if (printMinMax) { - sb.append(delimiter).append(s).append("_minG").append(delimiter).append(s).append("_maxG"); - } - if (printMeanAbsValue) - sb.append(delimiter).append(s).append("_meanAbsValueG"); - } - sb.append("\n"); - - if (outputToFile) { - try { - Files.write(filePath, sb.toString().getBytes(), StandardOpenOption.CREATE, - StandardOpenOption.TRUNCATE_EXISTING); - } catch (IOException e) { - if (writeFailureCount++ < 
MAX_WRITE_FAILURE_MESSAGES) { - //Print error message - logger.warn("Error writing to file: {}", e); - } - if (writeFailureCount == MAX_WRITE_FAILURE_MESSAGES) { - logger.warn("Max file write messages displayed. No more failure messages will be printed"); - } - } - } - - if (outputToLogger) - logger.info(sb.toString()); - if (outputToConsole) - System.out.println(sb.toString()); - } - - if (totalIterationCount % iterations != 0) - return; //No op this iteration - - Map params = model.paramTable(); - Map grads = model.gradient().gradientForVariable(); - - StringBuilder sb = new StringBuilder(); - sb.append(totalIterationCount); - sb.append(delimiter); - sb.append(model.score()); - - - //Calculate actual values for parameters and gradients - for (Map.Entry entry : params.entrySet()) { - INDArray currParams = entry.getValue(); - INDArray currGrad = grads.get(entry.getKey()); - - //Parameters: - if (printMean) { - sb.append(delimiter); - sb.append(currParams.meanNumber().doubleValue()); - } - if (printMinMax) { - sb.append(delimiter); - sb.append(currParams.minNumber().doubleValue()); - sb.append(delimiter); - sb.append(currParams.maxNumber().doubleValue()); - } - if (printMeanAbsValue) { - sb.append(delimiter); - INDArray abs = Transforms.abs(currParams.dup()); - sb.append(abs.meanNumber().doubleValue()); - } - - //Gradients: - if (printMean) { - sb.append(delimiter); - sb.append(currGrad.meanNumber().doubleValue()); - } - if (printMinMax) { - sb.append(delimiter); - sb.append(currGrad.minNumber().doubleValue()); - sb.append(delimiter); - sb.append(currGrad.maxNumber().doubleValue()); - } - if (printMeanAbsValue) { - sb.append(delimiter); - INDArray abs = Transforms.abs(currGrad.dup()); - sb.append(abs.meanNumber().doubleValue()); - } - } - sb.append("\n"); - - String out = sb.toString(); - if (outputToLogger) - logger.info(out); - if (outputToConsole) - System.out.print(out); - - if (outputToFile) { - try { - Files.write(filePath, out.getBytes(), 
StandardOpenOption.CREATE, StandardOpenOption.APPEND); - } catch (IOException e) { - if (writeFailureCount++ < MAX_WRITE_FAILURE_MESSAGES) { - //Print error message - logger.warn("Error writing to file: {}", e); - } - if (writeFailureCount == MAX_WRITE_FAILURE_MESSAGES) { - logger.warn("Max file write messages displayed. No more failure messages will be printed"); - } - } - } - - } -} diff --git a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/ConvolutionUtils.java b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/ConvolutionUtils.java index 3a447c361..399af4b2d 100644 --- a/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/ConvolutionUtils.java +++ b/deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/util/ConvolutionUtils.java @@ -94,6 +94,49 @@ public class ConvolutionUtils { return new int[]{hOut, wOut}; } + /** + * Get the output size of a deconvolution operation for given input data. In deconvolution, we compute the inverse + * of the shape computation of a convolution. 
+ * + * @param inputData Input data + * @param kernel Kernel size (3 values: depth/height/width) + * @param strides Strides (3 values: depth/height/width) + * @param padding Padding (3 values; may be null when convolutionMode == Same, as it is not used in that branch) + * @param dilation Kernel dilation (3 values: depth/height/width) + * @param convolutionMode Convolution mode (Same, Strict, Truncate) + * @param dataFormat Data format (NCDHW or NDHWC) + * @return Output size: long[3] with the three spatial output dimensions + */ + public static long[] getDeconvolution3DOutputSize(INDArray inputData, int[] kernel, int[] strides, int[] padding, int[] dilation, + ConvolutionMode convolutionMode, Convolution3D.DataFormat dataFormat) { + + long hIn, wIn, dIn; + if(dataFormat == Convolution3D.DataFormat.NCDHW){ + hIn = inputData.size(2); + wIn = inputData.size(3); + dIn = inputData.size(4); + } else { + hIn = inputData.size(1); + wIn = inputData.size(2); + dIn = inputData.size(3); + } + + + int[] eKernel = effectiveKernelSize(kernel, dilation); + + if (convolutionMode == ConvolutionMode.Same) { + long hOut = strides[0] * hIn; + long wOut = strides[1] * wIn; + long dOut = strides[2] * dIn; + return new long[]{hOut, wOut, dOut}; + } + + long hOut = strides[0] * (hIn - 1) + eKernel[0] - 2 * padding[0]; + long wOut = strides[1] * (wIn - 1) + eKernel[1] - 2 * padding[1]; + long dOut = strides[2] * (dIn - 1) + eKernel[2] - 2 * padding[2]; + + return new long[]{hOut, wOut, dOut}; + } + /** * Get the output size (height/width) for the given input data and CNN configuration @@ -307,11 +350,15 @@ public class ConvolutionUtils { */ public static int[] getSameModeTopLeftPadding(int[] outSize, int[] inSize, int[] kernel, int[] strides, int[] dilation) { int[] eKernel = effectiveKernelSize(kernel, dilation); - int[] outPad = new int[2]; - outPad[0] = ((outSize[0] - 1) * strides[0] + eKernel[0] - inSize[0]) / 2; //Note that padBottom is 1 bigger than this if bracketed term is not divisible by 2 - outPad[1] = ((outSize[1] - 1) * strides[1] + eKernel[1] - inSize[1]) / 2; //As above - Preconditions.checkState(outPad[0] >= 0 && outPad[1] >= 0, "Invalid padding values 
calculated: %s - layer configuration is invalid? Input size %s, output size %s, kernel %s, strides %s, dilation %s", + int[] outPad = new int[kernel.length]; + boolean allGt0 = true; + for( int i=0; i= 0; + } + Preconditions.checkState(allGt0, "Invalid padding values calculated: %s - layer configuration is invalid? Input size %s, output size %s, kernel %s, strides %s, dilation %s", outPad, inSize, outSize, kernel, strides, dilation); + return outPad; } diff --git a/deeplearning4j/deeplearning4j-nn/src/test/java/org/deeplearning4j/BaseDL4JTest.java b/deeplearning4j/deeplearning4j-nn/src/test/java/org/deeplearning4j/BaseDL4JTest.java deleted file mode 100644 index 05d0957fb..000000000 --- a/deeplearning4j/deeplearning4j-nn/src/test/java/org/deeplearning4j/BaseDL4JTest.java +++ /dev/null @@ -1,140 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.deeplearning4j; - -import lombok.extern.slf4j.Slf4j; -import org.bytedeco.javacpp.Pointer; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TestName; -import org.nd4j.linalg.api.buffer.DataType; -import org.nd4j.linalg.api.memory.MemoryWorkspace; -import org.nd4j.linalg.api.ops.executioner.OpExecutioner; -import org.nd4j.linalg.factory.Nd4j; -import org.nd4j.linalg.profiler.ProfilerConfig; - -import java.lang.management.ManagementFactory; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -@Slf4j -public class BaseDL4JTest { - - @Rule - public TestName name = new TestName(); - - protected long startTime; - protected int threadCountBefore; - - /** - * Override this to set the profiling mode for the tests defined in the child class - */ - public OpExecutioner.ProfilingMode getProfilingMode(){ - return OpExecutioner.ProfilingMode.SCOPE_PANIC; - } - - /** - * Override this to set the datatype of the tests defined in the child class - */ - public DataType getDataType(){ - return DataType.DOUBLE; - } - - public DataType getDefaultFPDataType(){ - return getDataType(); - } - - @Before - public void beforeTest(){ - log.info("{}.{}", getClass().getSimpleName(), name.getMethodName()); - Nd4j.getExecutioner().setProfilingMode(getProfilingMode()); - Nd4j.getExecutioner().setProfilingConfig(ProfilerConfig.builder().build()); - Nd4j.setDefaultDataTypes(getDataType(), getDefaultFPDataType()); - startTime = System.currentTimeMillis(); - threadCountBefore = ManagementFactory.getThreadMXBean().getThreadCount(); - } - - @After - public void afterTest(){ - //Attempt to keep workspaces isolated between tests - Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread(); - MemoryWorkspace currWS = Nd4j.getMemoryManager().getCurrentWorkspace(); - 
Nd4j.getMemoryManager().setCurrentWorkspace(null); - if(currWS != null){ - //Not really safe to continue testing under this situation... other tests will likely fail with obscure - // errors that are hard to track back to this - log.error("Open workspace leaked from test! Exiting - {}, isOpen = {} - {}", currWS.getId(), currWS.isScopeActive(), currWS); - System.exit(1); - } - - StringBuilder sb = new StringBuilder(); - long maxPhys = Pointer.maxPhysicalBytes(); - long maxBytes = Pointer.maxBytes(); - long currPhys = Pointer.physicalBytes(); - long currBytes = Pointer.totalBytes(); - - long jvmTotal = Runtime.getRuntime().totalMemory(); - long jvmMax = Runtime.getRuntime().maxMemory(); - - int threadsAfter = ManagementFactory.getThreadMXBean().getThreadCount(); - - long duration = System.currentTimeMillis() - startTime; - sb.append(getClass().getSimpleName()).append(".").append(name.getMethodName()) - .append(": ").append(duration).append(" ms") - .append(", threadCount: (").append(threadCountBefore).append("->").append(threadsAfter).append(")") - .append(", jvmTotal=").append(jvmTotal) - .append(", jvmMax=").append(jvmMax) - .append(", totalBytes=").append(currBytes).append(", maxBytes=").append(maxBytes) - .append(", currPhys=").append(currPhys).append(", maxPhys=").append(maxPhys); - - List ws = Nd4j.getWorkspaceManager().getAllWorkspacesForCurrentThread(); - if(ws != null && ws.size() > 0){ - long currSize = 0; - for(MemoryWorkspace w : ws){ - currSize += w.getCurrentSize(); - } - if(currSize > 0){ - sb.append(", threadWSSize=").append(currSize) - .append(" (").append(ws.size()).append(" WSs)"); - } - } - - - Properties p = Nd4j.getExecutioner().getEnvironmentInformation(); - Object o = p.get("cuda.devicesInformation"); - if(o instanceof List){ - List> l = (List>) o; - if(l.size() > 0) { - - sb.append(" [").append(l.size()) - .append(" GPUs: "); - - for (int i = 0; i < l.size(); i++) { - Map m = l.get(i); - if(i > 0) - sb.append(","); - 
sb.append("(").append(m.get("cuda.freeMemory")).append(" free, ") - .append(m.get("cuda.totalMemory")).append(" total)"); - } - sb.append("]"); - } - } - log.info(sb.toString()); - } -} diff --git a/deeplearning4j/deeplearning4j-nn/src/test/java/org/deeplearning4j/optimize/solvers/accumulation/EncodedGradientsAccumulatorTest.java b/deeplearning4j/deeplearning4j-nn/src/test/java/org/deeplearning4j/optimize/solvers/accumulation/EncodedGradientsAccumulatorTest.java index a501d4e1f..380807c8d 100644 --- a/deeplearning4j/deeplearning4j-nn/src/test/java/org/deeplearning4j/optimize/solvers/accumulation/EncodedGradientsAccumulatorTest.java +++ b/deeplearning4j/deeplearning4j-nn/src/test/java/org/deeplearning4j/optimize/solvers/accumulation/EncodedGradientsAccumulatorTest.java @@ -34,6 +34,11 @@ import static org.junit.Assert.assertTrue; @Slf4j public class EncodedGradientsAccumulatorTest extends BaseDL4JTest { + @Override + public long getTimeoutMilliseconds() { + return 1200000L; + } + /** * This test ensures, that memory amount assigned to buffer is enough for any number of updates * @throws Exception diff --git a/deeplearning4j/deeplearning4j-nn/src/test/java/org/deeplearning4j/optimize/solvers/accumulation/SmartFancyBlockingQueueTest.java b/deeplearning4j/deeplearning4j-nn/src/test/java/org/deeplearning4j/optimize/solvers/accumulation/SmartFancyBlockingQueueTest.java index 725a9db8a..f5afb0e48 100644 --- a/deeplearning4j/deeplearning4j-nn/src/test/java/org/deeplearning4j/optimize/solvers/accumulation/SmartFancyBlockingQueueTest.java +++ b/deeplearning4j/deeplearning4j-nn/src/test/java/org/deeplearning4j/optimize/solvers/accumulation/SmartFancyBlockingQueueTest.java @@ -292,10 +292,10 @@ public class SmartFancyBlockingQueueTest extends BaseDL4JTest { } // each reader will read 250 updates. 
supposedly equal :) - val means = new long[4]; + final long[] means = new long[4]; val readers = new ArrayList(); for (int e = 0; e < 4; e++) { - val f = e; + final int f = e; means[f] = 0; val t = new Thread(new Runnable() { @Override diff --git a/deeplearning4j/deeplearning4j-remote/deeplearning4j-json-server/pom.xml b/deeplearning4j/deeplearning4j-remote/deeplearning4j-json-server/pom.xml index 62f95f736..004d998f3 100644 --- a/deeplearning4j/deeplearning4j-remote/deeplearning4j-json-server/pom.xml +++ b/deeplearning4j/deeplearning4j-remote/deeplearning4j-json-server/pom.xml @@ -73,6 +73,13 @@ ${logback.version} test + + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-remote/deeplearning4j-json-server/src/test/java/org/deeplearning4j/remote/BinaryModelServerTest.java b/deeplearning4j/deeplearning4j-remote/deeplearning4j-json-server/src/test/java/org/deeplearning4j/remote/BinaryModelServerTest.java index be2633b87..c57b0fa30 100644 --- a/deeplearning4j/deeplearning4j-remote/deeplearning4j-json-server/src/test/java/org/deeplearning4j/remote/BinaryModelServerTest.java +++ b/deeplearning4j/deeplearning4j-remote/deeplearning4j-json-server/src/test/java/org/deeplearning4j/remote/BinaryModelServerTest.java @@ -2,6 +2,7 @@ package org.deeplearning4j.remote; import lombok.val; import org.datavec.image.loader.Java2DNativeImageLoader; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.graph.ComputationGraph; import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; import org.deeplearning4j.remote.helpers.ImageConversionUtils; @@ -30,7 +31,7 @@ import java.util.concurrent.TimeUnit; import static org.deeplearning4j.parallelism.inference.InferenceMode.SEQUENTIAL; import static org.junit.Assert.*; -public class BinaryModelServerTest { +public class BinaryModelServerTest extends BaseDL4JTest { private final int PORT = 18080; @After diff --git 
a/deeplearning4j/deeplearning4j-remote/deeplearning4j-json-server/src/test/java/org/deeplearning4j/remote/JsonModelServerTest.java b/deeplearning4j/deeplearning4j-remote/deeplearning4j-json-server/src/test/java/org/deeplearning4j/remote/JsonModelServerTest.java index aa353f307..fd79cc780 100644 --- a/deeplearning4j/deeplearning4j-remote/deeplearning4j-json-server/src/test/java/org/deeplearning4j/remote/JsonModelServerTest.java +++ b/deeplearning4j/deeplearning4j-remote/deeplearning4j-json-server/src/test/java/org/deeplearning4j/remote/JsonModelServerTest.java @@ -21,6 +21,7 @@ import lombok.Data; import lombok.NoArgsConstructor; import lombok.extern.slf4j.Slf4j; import lombok.val; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.conf.ComputationGraphConfiguration; import org.deeplearning4j.nn.conf.MultiLayerConfiguration; import org.deeplearning4j.nn.conf.NeuralNetConfiguration; @@ -63,7 +64,7 @@ import static org.deeplearning4j.parallelism.inference.InferenceMode.SEQUENTIAL; import static org.junit.Assert.*; @Slf4j -public class JsonModelServerTest { +public class JsonModelServerTest extends BaseDL4JTest { private static final MultiLayerNetwork model; private final int PORT = 18080; diff --git a/deeplearning4j/deeplearning4j-remote/deeplearning4j-json-server/src/test/java/org/deeplearning4j/remote/ServletTest.java b/deeplearning4j/deeplearning4j-remote/deeplearning4j-json-server/src/test/java/org/deeplearning4j/remote/ServletTest.java index 1b347d112..ede253efa 100644 --- a/deeplearning4j/deeplearning4j-remote/deeplearning4j-json-server/src/test/java/org/deeplearning4j/remote/ServletTest.java +++ b/deeplearning4j/deeplearning4j-remote/deeplearning4j-json-server/src/test/java/org/deeplearning4j/remote/ServletTest.java @@ -20,6 +20,7 @@ import lombok.val; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.HttpClientBuilder; +import org.deeplearning4j.BaseDL4JTest; 
import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -34,7 +35,7 @@ import java.io.IOException; import static org.junit.Assert.assertEquals; -public class ServletTest { +public class ServletTest extends BaseDL4JTest { private JsonModelServer server; diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/pom.xml b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/pom.xml index 97515cf5e..2c7a94de8 100644 --- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/pom.xml +++ b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/pom.xml @@ -86,6 +86,13 @@ logback-classic test + + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/src/test/java/org/deeplearning4j/parallelism/parameterserver/BaseDL4JTest.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/src/test/java/org/deeplearning4j/parallelism/parameterserver/BaseDL4JTest.java deleted file mode 100644 index 6684c6384..000000000 --- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/src/test/java/org/deeplearning4j/parallelism/parameterserver/BaseDL4JTest.java +++ /dev/null @@ -1,144 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. 
- * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.deeplearning4j.parallelism.parameterserver; - -import lombok.extern.slf4j.Slf4j; -import org.bytedeco.javacpp.Pointer; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TestName; -import org.junit.rules.Timeout; -import org.nd4j.linalg.api.buffer.DataType; -import org.nd4j.linalg.api.memory.MemoryWorkspace; -import org.nd4j.linalg.api.ops.executioner.OpExecutioner; -import org.nd4j.linalg.factory.Nd4j; -import org.nd4j.linalg.profiler.ProfilerConfig; - -import java.lang.management.ManagementFactory; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -@Slf4j -public class BaseDL4JTest { - - @Rule - public Timeout timeout = Timeout.seconds(600); - - @Rule - public TestName name = new TestName(); - - protected long startTime; - protected int threadCountBefore; - - /** - * Override this to set the profiling mode for the tests defined in the child class - */ - public OpExecutioner.ProfilingMode getProfilingMode(){ - return OpExecutioner.ProfilingMode.SCOPE_PANIC; - } - - /** - * Override this to set the datatype of the tests defined in the child class - */ - public DataType getDataType(){ - return DataType.DOUBLE; - } - - public DataType getDefaultFPDataType(){ - return getDataType(); - } - - @Before - public void beforeTest(){ - log.info("{}.{}", getClass().getSimpleName(), name.getMethodName()); - Nd4j.getExecutioner().setProfilingMode(getProfilingMode()); - 
Nd4j.getExecutioner().setProfilingConfig(ProfilerConfig.builder().build()); - Nd4j.setDefaultDataTypes(getDataType(), getDefaultFPDataType()); - startTime = System.currentTimeMillis(); - threadCountBefore = ManagementFactory.getThreadMXBean().getThreadCount(); - } - - @After - public void afterTest(){ - //Attempt to keep workspaces isolated between tests - Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread(); - MemoryWorkspace currWS = Nd4j.getMemoryManager().getCurrentWorkspace(); - Nd4j.getMemoryManager().setCurrentWorkspace(null); - if(currWS != null){ - //Not really safe to continue testing under this situation... other tests will likely fail with obscure - // errors that are hard to track back to this - log.error("Open workspace leaked from test! Exiting - {}, isOpen = {} - {}", currWS.getId(), currWS.isScopeActive(), currWS); - System.exit(1); - } - - StringBuilder sb = new StringBuilder(); - long maxPhys = Pointer.maxPhysicalBytes(); - long maxBytes = Pointer.maxBytes(); - long currPhys = Pointer.physicalBytes(); - long currBytes = Pointer.totalBytes(); - - long jvmTotal = Runtime.getRuntime().totalMemory(); - long jvmMax = Runtime.getRuntime().maxMemory(); - - int threadsAfter = ManagementFactory.getThreadMXBean().getThreadCount(); - - long duration = System.currentTimeMillis() - startTime; - sb.append(getClass().getSimpleName()).append(".").append(name.getMethodName()) - .append(": ").append(duration).append(" ms") - .append(", threadCount: (").append(threadCountBefore).append("->").append(threadsAfter).append(")") - .append(", jvmTotal=").append(jvmTotal) - .append(", jvmMax=").append(jvmMax) - .append(", totalBytes=").append(currBytes).append(", maxBytes=").append(maxBytes) - .append(", currPhys=").append(currPhys).append(", maxPhys=").append(maxPhys); - - List ws = Nd4j.getWorkspaceManager().getAllWorkspacesForCurrentThread(); - if(ws != null && ws.size() > 0){ - long currSize = 0; - for(MemoryWorkspace w : ws){ - currSize += 
w.getCurrentSize(); - } - if(currSize > 0){ - sb.append(", threadWSSize=").append(currSize) - .append(" (").append(ws.size()).append(" WSs)"); - } - } - - - Properties p = Nd4j.getExecutioner().getEnvironmentInformation(); - Object o = p.get("cuda.devicesInformation"); - if(o instanceof List){ - List> l = (List>) o; - if(l.size() > 0) { - - sb.append(" [").append(l.size()) - .append(" GPUs: "); - - for (int i = 0; i < l.size(); i++) { - Map m = l.get(i); - if(i > 0) - sb.append(","); - sb.append("(").append(m.get("cuda.freeMemory")).append(" free, ") - .append(m.get("cuda.totalMemory")).append(" total)"); - } - sb.append("]"); - } - } - log.info(sb.toString()); - } -} diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/src/test/java/org/deeplearning4j/parallelism/parameterserver/ParameterServerParallelWrapperTest.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/src/test/java/org/deeplearning4j/parallelism/parameterserver/ParameterServerParallelWrapperTest.java index 07b5b41a7..beb9af5b4 100644 --- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/src/test/java/org/deeplearning4j/parallelism/parameterserver/ParameterServerParallelWrapperTest.java +++ b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper-parameter-server/src/test/java/org/deeplearning4j/parallelism/parameterserver/ParameterServerParallelWrapperTest.java @@ -17,6 +17,7 @@ package org.deeplearning4j.parallelism.parameterserver; import lombok.extern.slf4j.Slf4j; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator; import org.deeplearning4j.nn.conf.MultiLayerConfiguration; import org.deeplearning4j.nn.conf.NeuralNetConfiguration; diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/pom.xml 
b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/pom.xml index 08eed7f15..3c083d40d 100644 --- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/pom.xml +++ b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/pom.xml @@ -90,6 +90,13 @@ ${project.version} test + + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/BaseDL4JTest.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/BaseDL4JTest.java deleted file mode 100644 index f97073042..000000000 --- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/BaseDL4JTest.java +++ /dev/null @@ -1,140 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.deeplearning4j.parallelism; - -import lombok.extern.slf4j.Slf4j; -import org.bytedeco.javacpp.Pointer; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TestName; -import org.nd4j.linalg.api.buffer.DataType; -import org.nd4j.linalg.api.memory.MemoryWorkspace; -import org.nd4j.linalg.api.ops.executioner.OpExecutioner; -import org.nd4j.linalg.factory.Nd4j; -import org.nd4j.linalg.profiler.ProfilerConfig; - -import java.lang.management.ManagementFactory; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -@Slf4j -public class BaseDL4JTest { - - @Rule - public TestName name = new TestName(); - - protected long startTime; - protected int threadCountBefore; - - /** - * Override this to set the profiling mode for the tests defined in the child class - */ - public OpExecutioner.ProfilingMode getProfilingMode(){ - return OpExecutioner.ProfilingMode.SCOPE_PANIC; - } - - /** - * Override this to set the datatype of the tests defined in the child class - */ - public DataType getDataType(){ - return DataType.DOUBLE; - } - - public DataType getDefaultFPDataType(){ - return getDataType(); - } - - @Before - public void beforeTest(){ - log.info("{}.{}", getClass().getSimpleName(), name.getMethodName()); - Nd4j.getExecutioner().setProfilingMode(getProfilingMode()); - Nd4j.getExecutioner().setProfilingConfig(ProfilerConfig.builder().build()); - Nd4j.setDefaultDataTypes(getDataType(), getDefaultFPDataType()); - startTime = System.currentTimeMillis(); - threadCountBefore = ManagementFactory.getThreadMXBean().getThreadCount(); - } - - @After - public void afterTest(){ - //Attempt to keep workspaces isolated between tests - Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread(); - MemoryWorkspace currWS = Nd4j.getMemoryManager().getCurrentWorkspace(); - 
Nd4j.getMemoryManager().setCurrentWorkspace(null); - if(currWS != null){ - //Not really safe to continue testing under this situation... other tests will likely fail with obscure - // errors that are hard to track back to this - log.error("Open workspace leaked from test! Exiting - {}, isOpen = {} - {}", currWS.getId(), currWS.isScopeActive(), currWS); - System.exit(1); - } - - StringBuilder sb = new StringBuilder(); - long maxPhys = Pointer.maxPhysicalBytes(); - long maxBytes = Pointer.maxBytes(); - long currPhys = Pointer.physicalBytes(); - long currBytes = Pointer.totalBytes(); - - long jvmTotal = Runtime.getRuntime().totalMemory(); - long jvmMax = Runtime.getRuntime().maxMemory(); - - int threadsAfter = ManagementFactory.getThreadMXBean().getThreadCount(); - - long duration = System.currentTimeMillis() - startTime; - sb.append(getClass().getSimpleName()).append(".").append(name.getMethodName()) - .append(": ").append(duration).append(" ms") - .append(", threadCount: (").append(threadCountBefore).append("->").append(threadsAfter).append(")") - .append(", jvmTotal=").append(jvmTotal) - .append(", jvmMax=").append(jvmMax) - .append(", totalBytes=").append(currBytes).append(", maxBytes=").append(maxBytes) - .append(", currPhys=").append(currPhys).append(", maxPhys=").append(maxPhys); - - List ws = Nd4j.getWorkspaceManager().getAllWorkspacesForCurrentThread(); - if(ws != null && ws.size() > 0){ - long currSize = 0; - for(MemoryWorkspace w : ws){ - currSize += w.getCurrentSize(); - } - if(currSize > 0){ - sb.append(", threadWSSize=").append(currSize) - .append(" (").append(ws.size()).append(" WSs)"); - } - } - - - Properties p = Nd4j.getExecutioner().getEnvironmentInformation(); - Object o = p.get("cuda.devicesInformation"); - if(o instanceof List){ - List> l = (List>) o; - if(l.size() > 0) { - - sb.append(" [").append(l.size()) - .append(" GPUs: "); - - for (int i = 0; i < l.size(); i++) { - Map m = l.get(i); - if(i > 0) - sb.append(","); - 
sb.append("(").append(m.get("cuda.freeMemory")).append(" free, ") - .append(m.get("cuda.totalMemory")).append(" total)"); - } - sb.append("]"); - } - } - log.info(sb.toString()); - } -} diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/InplaceParallelInferenceTest.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/InplaceParallelInferenceTest.java index 0f3e95930..d089781f1 100644 --- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/InplaceParallelInferenceTest.java +++ b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/InplaceParallelInferenceTest.java @@ -17,6 +17,7 @@ package org.deeplearning4j.parallelism; import lombok.val; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.conf.NeuralNetConfiguration; import org.deeplearning4j.nn.conf.layers.OutputLayer; import org.deeplearning4j.nn.graph.ComputationGraph; diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/ParallelInferenceTest.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/ParallelInferenceTest.java index e4d48937e..42e8437e7 100644 --- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/ParallelInferenceTest.java +++ b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/ParallelInferenceTest.java @@ -19,6 +19,7 @@ package org.deeplearning4j.parallelism; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import lombok.val; +import 
org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.exception.DL4JInvalidInputException; import org.deeplearning4j.nn.api.Model; import org.deeplearning4j.nn.conf.ComputationGraphConfiguration; diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/ParallelWrapperTest.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/ParallelWrapperTest.java index 52a0c0109..32c70f3ff 100644 --- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/ParallelWrapperTest.java +++ b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/ParallelWrapperTest.java @@ -17,6 +17,7 @@ package org.deeplearning4j.parallelism; import lombok.val; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.datasets.iterator.EarlyTerminationDataSetIterator; import org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator; import org.deeplearning4j.eval.Evaluation; diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/TestListeners.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/TestListeners.java index ea48549ba..9593a0799 100644 --- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/TestListeners.java +++ b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/TestListeners.java @@ -16,6 +16,7 @@ package org.deeplearning4j.parallelism; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.api.storage.StatsStorage; import 
org.deeplearning4j.api.storage.StatsStorageRouter; import org.deeplearning4j.api.storage.listener.RoutingIterationListener; diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/TestParallelEarlyStopping.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/TestParallelEarlyStopping.java index 4aeb85acd..ac2b018e2 100644 --- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/TestParallelEarlyStopping.java +++ b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/TestParallelEarlyStopping.java @@ -16,6 +16,7 @@ package org.deeplearning4j.parallelism; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator; import org.deeplearning4j.earlystopping.EarlyStoppingConfiguration; import org.deeplearning4j.earlystopping.EarlyStoppingModelSaver; diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/TestParallelEarlyStoppingUI.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/TestParallelEarlyStoppingUI.java index cce4f490a..160d4df58 100644 --- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/TestParallelEarlyStoppingUI.java +++ b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/TestParallelEarlyStoppingUI.java @@ -16,6 +16,7 @@ package org.deeplearning4j.parallelism; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.api.storage.StatsStorage; import 
org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator; import org.deeplearning4j.earlystopping.EarlyStoppingConfiguration; diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/factory/DefaultTrainerContextTest.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/factory/DefaultTrainerContextTest.java index a8eca6a56..c96ca4a19 100644 --- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/factory/DefaultTrainerContextTest.java +++ b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/factory/DefaultTrainerContextTest.java @@ -27,7 +27,7 @@ import org.deeplearning4j.nn.conf.layers.OutputLayer; import org.deeplearning4j.nn.conf.layers.SubsamplingLayer; import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; import org.deeplearning4j.nn.weights.WeightInit; -import org.deeplearning4j.parallelism.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.parallelism.ParallelWrapper; import org.deeplearning4j.parallelism.trainer.SymmetricTrainer; import org.junit.Test; diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/factory/SymmetricTrainerContextTest.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/factory/SymmetricTrainerContextTest.java index cf6fe92de..0258caac9 100644 --- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/factory/SymmetricTrainerContextTest.java +++ 
b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/factory/SymmetricTrainerContextTest.java @@ -27,7 +27,7 @@ import org.deeplearning4j.nn.conf.layers.OutputLayer; import org.deeplearning4j.nn.conf.layers.SubsamplingLayer; import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; import org.deeplearning4j.nn.weights.WeightInit; -import org.deeplearning4j.parallelism.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.parallelism.ParallelWrapper; import org.deeplearning4j.parallelism.trainer.SymmetricTrainer; import org.junit.Test; diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/inference/observers/BatchedInferenceObservableTest.java b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/inference/observers/BatchedInferenceObservableTest.java index 5b5173b20..facf506d6 100644 --- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/inference/observers/BatchedInferenceObservableTest.java +++ b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/inference/observers/BatchedInferenceObservableTest.java @@ -17,7 +17,7 @@ package org.deeplearning4j.parallelism.inference.observers; import lombok.extern.slf4j.Slf4j; -import org.deeplearning4j.parallelism.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.junit.After; import org.junit.Before; import org.junit.Test; diff --git a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/main/ParallelWrapperMainTest.java 
b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/main/ParallelWrapperMainTest.java index 54bc7f4f6..ae6672b47 100644 --- a/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/main/ParallelWrapperMainTest.java +++ b/deeplearning4j/deeplearning4j-scaleout/deeplearning4j-scaleout-parallelwrapper/src/test/java/org/deeplearning4j/parallelism/main/ParallelWrapperMainTest.java @@ -27,7 +27,7 @@ import org.deeplearning4j.nn.conf.layers.OutputLayer; import org.deeplearning4j.nn.conf.layers.SubsamplingLayer; import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; import org.deeplearning4j.nn.weights.WeightInit; -import org.deeplearning4j.parallelism.BaseDL4JTest; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.util.ModelSerializer; import org.junit.Rule; import org.junit.Test; diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp-java8/pom.xml b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp-java8/pom.xml index 42e799b69..42b6e42cf 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp-java8/pom.xml +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp-java8/pom.xml @@ -60,6 +60,13 @@ ${spark.version} provided + + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp-java8/src/test/java/org/deeplearning4j/spark/models/sequencevectors/SparkSequenceVectorsTest.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp-java8/src/test/java/org/deeplearning4j/spark/models/sequencevectors/SparkSequenceVectorsTest.java index 5f71ce497..363b4e293 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp-java8/src/test/java/org/deeplearning4j/spark/models/sequencevectors/SparkSequenceVectorsTest.java +++ 
b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp-java8/src/test/java/org/deeplearning4j/spark/models/sequencevectors/SparkSequenceVectorsTest.java @@ -19,6 +19,7 @@ package org.deeplearning4j.spark.models.sequencevectors; import org.apache.spark.SparkConf; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaSparkContext; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.models.sequencevectors.sequence.Sequence; import org.deeplearning4j.models.sequencevectors.sequence.ShallowSequenceElement; import org.deeplearning4j.models.word2vec.VocabWord; @@ -41,7 +42,7 @@ import static org.junit.Assert.assertNotEquals; /** * @author raver119@gmail.com */ -public class SparkSequenceVectorsTest { +public class SparkSequenceVectorsTest extends BaseDL4JTest { protected static List> sequencesCyclic; private JavaSparkContext sc; diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp-java8/src/test/java/org/deeplearning4j/spark/models/sequencevectors/export/ExportContainerTest.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp-java8/src/test/java/org/deeplearning4j/spark/models/sequencevectors/export/ExportContainerTest.java index 70ecc0dbe..604181109 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp-java8/src/test/java/org/deeplearning4j/spark/models/sequencevectors/export/ExportContainerTest.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp-java8/src/test/java/org/deeplearning4j/spark/models/sequencevectors/export/ExportContainerTest.java @@ -16,6 +16,7 @@ package org.deeplearning4j.spark.models.sequencevectors.export; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.models.word2vec.VocabWord; import org.junit.Before; import org.junit.Test; @@ -26,7 +27,7 @@ import static org.junit.Assert.assertEquals; /** * @author raver119@gmail.com */ -public class ExportContainerTest { +public class ExportContainerTest extends BaseDL4JTest { 
@Before public void setUp() throws Exception { diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp-java8/src/test/java/org/deeplearning4j/spark/models/word2vec/SparkWord2VecTest.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp-java8/src/test/java/org/deeplearning4j/spark/models/word2vec/SparkWord2VecTest.java index 809d74138..82a04eab8 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp-java8/src/test/java/org/deeplearning4j/spark/models/word2vec/SparkWord2VecTest.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp-java8/src/test/java/org/deeplearning4j/spark/models/word2vec/SparkWord2VecTest.java @@ -20,6 +20,7 @@ import org.apache.spark.SparkConf; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.api.java.function.VoidFunction; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.models.embeddings.loader.VectorsConfiguration; import org.deeplearning4j.models.sequencevectors.sequence.SequenceElement; import org.deeplearning4j.models.sequencevectors.sequence.ShallowSequenceElement; @@ -46,7 +47,7 @@ import static org.junit.Assert.*; * * @author raver119@gmail.com */ -public class SparkWord2VecTest { +public class SparkWord2VecTest extends BaseDL4JTest { private static List sentences; private JavaSparkContext sc; diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/pom.xml b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/pom.xml index 0a92d19ab..c4e8dc7ab 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/pom.xml +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/pom.xml @@ -65,6 +65,13 @@ jackson-module-scala_2.11 2.6.7.1 + + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/test/java/org/deeplearning4j/spark/text/BaseSparkTest.java 
b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/test/java/org/deeplearning4j/spark/text/BaseSparkTest.java index 152ef4db5..475572edd 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/test/java/org/deeplearning4j/spark/text/BaseSparkTest.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/test/java/org/deeplearning4j/spark/text/BaseSparkTest.java @@ -18,6 +18,7 @@ package org.deeplearning4j.spark.text; import org.apache.spark.SparkConf; import org.apache.spark.api.java.JavaSparkContext; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.spark.models.embeddings.word2vec.Word2VecVariables; import org.junit.After; import org.junit.Before; @@ -30,7 +31,7 @@ import java.util.Map; /** * Created by agibsonccc on 1/23/15. */ -public abstract class BaseSparkTest implements Serializable { +public abstract class BaseSparkTest extends BaseDL4JTest implements Serializable { protected transient JavaSparkContext sc; @Before diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-parameterserver/pom.xml b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-parameterserver/pom.xml index fc1e96ec0..daf0dd9b7 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-parameterserver/pom.xml +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-parameterserver/pom.xml @@ -66,6 +66,13 @@ ${spark.version} provided + + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-parameterserver/src/test/java/org/deeplearning4j/spark/parameterserver/BaseSparkTest.java b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-parameterserver/src/test/java/org/deeplearning4j/spark/parameterserver/BaseSparkTest.java index a90ce4b8c..ccab68e9e 100644 --- 
a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-parameterserver/src/test/java/org/deeplearning4j/spark/parameterserver/BaseSparkTest.java +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-parameterserver/src/test/java/org/deeplearning4j/spark/parameterserver/BaseSparkTest.java @@ -19,6 +19,7 @@ package org.deeplearning4j.spark.parameterserver; import org.apache.spark.SparkConf; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaSparkContext; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.conf.MultiLayerConfiguration; import org.deeplearning4j.nn.conf.NeuralNetConfiguration; import org.deeplearning4j.spark.impl.multilayer.SparkDl4jMultiLayer; @@ -41,7 +42,7 @@ import java.util.Random; /** * Created by agibsonccc on 1/23/15. */ -public abstract class BaseSparkTest implements Serializable { +public abstract class BaseSparkTest extends BaseDL4JTest implements Serializable { protected transient JavaSparkContext sc; protected transient INDArray labels; protected transient INDArray input; diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-parameterserver/src/test/resources/log4j.properties b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-parameterserver/src/test/resources/log4j.properties index 29de0de02..5d1edb39f 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-parameterserver/src/test/resources/log4j.properties +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-parameterserver/src/test/resources/log4j.properties @@ -1,4 +1,3 @@ - ################################################################################ # Copyright (c) 2015-2019 Skymind, Inc. 
# @@ -21,10 +20,12 @@ log4j.appender.Console.layout=org.apache.log4j.PatternLayout log4j.appender.Console.layout.ConversionPattern=%d{ABSOLUTE} %-5p ~ %m%n log4j.appender.org.springframework=DEBUG -log4j.appender.org.deeplearning4j=INFO +log4j.appender.org.deeplearning4j=DEBUG log4j.appender.org.nd4j=DEBUG log4j.logger.org.springframework=INFO log4j.logger.org.deeplearning4j=DEBUG log4j.logger.org.nd4j=DEBUG +log4j.logger.org.apache.spark=WARN + diff --git a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-parameterserver/src/test/resources/logback.xml b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-parameterserver/src/test/resources/logback.xml index 47c108b71..9dec22fae 100644 --- a/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-parameterserver/src/test/resources/logback.xml +++ b/deeplearning4j/deeplearning4j-scaleout/spark/dl4j-spark-parameterserver/src/test/resources/logback.xml @@ -1,5 +1,5 @@ org.webjars.npm @@ -417,6 +424,16 @@ weaverjs 1.2.0 + + org.webjars + explorercanvas + r3-1 + + + org.webjars + bootstrap + 2.2.2-1 + diff --git a/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-vertx/src/test/java/org/deeplearning4j/ui/TestRemoteReceiver.java b/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-vertx/src/test/java/org/deeplearning4j/ui/TestRemoteReceiver.java index a5469bd1d..0b81e45b4 100644 --- a/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-vertx/src/test/java/org/deeplearning4j/ui/TestRemoteReceiver.java +++ b/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-vertx/src/test/java/org/deeplearning4j/ui/TestRemoteReceiver.java @@ -17,6 +17,7 @@ package org.deeplearning4j.ui; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.api.storage.Persistable; import org.deeplearning4j.api.storage.StorageMetaData; import org.deeplearning4j.api.storage.impl.CollectionStatsStorageRouter; @@ -51,7 +52,7 @@ import static org.junit.Assert.assertEquals; * Created by Alex on 10/11/2016. 
*/ @Ignore -public class TestRemoteReceiver { +public class TestRemoteReceiver extends BaseDL4JTest { @Test @Ignore diff --git a/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-vertx/src/test/java/org/deeplearning4j/ui/TestSameDiffUI.java b/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-vertx/src/test/java/org/deeplearning4j/ui/TestSameDiffUI.java index a9da39dbc..4ba24eafa 100644 --- a/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-vertx/src/test/java/org/deeplearning4j/ui/TestSameDiffUI.java +++ b/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-vertx/src/test/java/org/deeplearning4j/ui/TestSameDiffUI.java @@ -18,6 +18,7 @@ package org.deeplearning4j.ui; import lombok.extern.slf4j.Slf4j; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.ui.api.UIServer; import org.junit.Ignore; import org.junit.Rule; @@ -35,7 +36,7 @@ import java.util.Arrays; @Ignore @Slf4j -public class TestSameDiffUI { +public class TestSameDiffUI extends BaseDL4JTest { @Rule public TemporaryFolder testDir = new TemporaryFolder(); diff --git a/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-vertx/src/test/java/org/deeplearning4j/ui/TestVertxUI.java b/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-vertx/src/test/java/org/deeplearning4j/ui/TestVertxUI.java index fc5c3c4ac..43e6c76df 100644 --- a/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-vertx/src/test/java/org/deeplearning4j/ui/TestVertxUI.java +++ b/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-vertx/src/test/java/org/deeplearning4j/ui/TestVertxUI.java @@ -18,6 +18,7 @@ package org.deeplearning4j.ui; import org.apache.commons.io.IOUtils; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.api.storage.StatsStorage; import org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator; import org.deeplearning4j.nn.api.OptimizationAlgorithm; @@ -56,7 +57,7 @@ import static org.junit.Assert.*; * Created by Alex on 08/10/2016. 
*/ @Ignore -public class TestVertxUI { +public class TestVertxUI extends BaseDL4JTest { @Before public void setUp() throws Exception { UIServer.stopInstance(); diff --git a/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-vertx/src/test/java/org/deeplearning4j/ui/TestVertxUIMultiSession.java b/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-vertx/src/test/java/org/deeplearning4j/ui/TestVertxUIMultiSession.java index b640be8c7..1eeceb2aa 100644 --- a/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-vertx/src/test/java/org/deeplearning4j/ui/TestVertxUIMultiSession.java +++ b/deeplearning4j/deeplearning4j-ui-parent/deeplearning4j-vertx/src/test/java/org/deeplearning4j/ui/TestVertxUIMultiSession.java @@ -18,6 +18,7 @@ package org.deeplearning4j.ui; import io.netty.handler.codec.http.HttpResponseStatus; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.api.storage.StatsStorage; import org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator; import org.deeplearning4j.nn.api.OptimizationAlgorithm; @@ -52,7 +53,7 @@ import static org.junit.Assert.*; * @author Tamas Fenyvesi */ @Ignore -public class TestVertxUIMultiSession { +public class TestVertxUIMultiSession extends BaseDL4JTest { @Before public void setUp() throws Exception { UIServer.stopInstance(); diff --git a/deeplearning4j/deeplearning4j-zoo/pom.xml b/deeplearning4j/deeplearning4j-zoo/pom.xml index 976d7500b..bec71ec04 100644 --- a/deeplearning4j/deeplearning4j-zoo/pom.xml +++ b/deeplearning4j/deeplearning4j-zoo/pom.xml @@ -71,6 +71,13 @@ ${deeplearning4j.version} test + + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/BaseDL4JTest.java b/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/BaseDL4JTest.java deleted file mode 100644 index 5d5cbd8a8..000000000 --- 
a/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/BaseDL4JTest.java +++ /dev/null @@ -1,145 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.deeplearning4j.zoo; - -import lombok.extern.slf4j.Slf4j; -import org.bytedeco.javacpp.Pointer; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TestName; -import org.junit.rules.Timeout; -import org.nd4j.linalg.api.buffer.DataBuffer; -import org.nd4j.linalg.api.buffer.DataType; -import org.nd4j.linalg.api.memory.MemoryWorkspace; -import org.nd4j.linalg.api.ops.executioner.OpExecutioner; -import org.nd4j.linalg.factory.Nd4j; -import org.nd4j.linalg.profiler.ProfilerConfig; - -import java.lang.management.ManagementFactory; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -@Slf4j -public class BaseDL4JTest { - - @Rule - public Timeout timeout = Timeout.seconds(600); - - @Rule - public TestName name = new TestName(); - - protected long startTime; - protected int threadCountBefore; - - /** - * Override this to set the profiling mode for the tests defined in the child class - */ - public OpExecutioner.ProfilingMode getProfilingMode(){ - return OpExecutioner.ProfilingMode.SCOPE_PANIC; - } - - /** 
- * Override this to set the datatype of the tests defined in the child class - */ - public DataType getDataType(){ - return DataType.DOUBLE; - } - - public DataType getDefaultFPDataType(){ - return getDataType(); - } - - @Before - public void beforeTest(){ - log.info("{}.{}", getClass().getSimpleName(), name.getMethodName()); - Nd4j.getExecutioner().setProfilingMode(getProfilingMode()); - Nd4j.getExecutioner().setProfilingConfig(ProfilerConfig.builder().build()); - Nd4j.setDefaultDataTypes(getDataType(), getDefaultFPDataType()); - startTime = System.currentTimeMillis(); - threadCountBefore = ManagementFactory.getThreadMXBean().getThreadCount(); - } - - @After - public void afterTest(){ - //Attempt to keep workspaces isolated between tests - Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread(); - MemoryWorkspace currWS = Nd4j.getMemoryManager().getCurrentWorkspace(); - Nd4j.getMemoryManager().setCurrentWorkspace(null); - if(currWS != null){ - //Not really safe to continue testing under this situation... other tests will likely fail with obscure - // errors that are hard to track back to this - log.error("Open workspace leaked from test! 
Exiting - {}, isOpen = {} - {}", currWS.getId(), currWS.isScopeActive(), currWS); - System.exit(1); - } - - StringBuilder sb = new StringBuilder(); - long maxPhys = Pointer.maxPhysicalBytes(); - long maxBytes = Pointer.maxBytes(); - long currPhys = Pointer.physicalBytes(); - long currBytes = Pointer.totalBytes(); - - long jvmTotal = Runtime.getRuntime().totalMemory(); - long jvmMax = Runtime.getRuntime().maxMemory(); - - int threadsAfter = ManagementFactory.getThreadMXBean().getThreadCount(); - - long duration = System.currentTimeMillis() - startTime; - sb.append(getClass().getSimpleName()).append(".").append(name.getMethodName()) - .append(": ").append(duration).append(" ms") - .append(", threadCount: (").append(threadCountBefore).append("->").append(threadsAfter).append(")") - .append(", jvmTotal=").append(jvmTotal) - .append(", jvmMax=").append(jvmMax) - .append(", totalBytes=").append(currBytes).append(", maxBytes=").append(maxBytes) - .append(", currPhys=").append(currPhys).append(", maxPhys=").append(maxPhys); - - List ws = Nd4j.getWorkspaceManager().getAllWorkspacesForCurrentThread(); - if(ws != null && ws.size() > 0){ - long currSize = 0; - for(MemoryWorkspace w : ws){ - currSize += w.getCurrentSize(); - } - if(currSize > 0){ - sb.append(", threadWSSize=").append(currSize) - .append(" (").append(ws.size()).append(" WSs)"); - } - } - - - Properties p = Nd4j.getExecutioner().getEnvironmentInformation(); - Object o = p.get("cuda.devicesInformation"); - if(o instanceof List){ - List> l = (List>) o; - if(l.size() > 0) { - - sb.append(" [").append(l.size()) - .append(" GPUs: "); - - for (int i = 0; i < l.size(); i++) { - Map m = l.get(i); - if(i > 0) - sb.append(","); - sb.append("(").append(m.get("cuda.freeMemory")).append(" free, ") - .append(m.get("cuda.totalMemory")).append(" total)"); - } - sb.append("]"); - } - } - log.info(sb.toString()); - } -} diff --git a/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/MiscTests.java 
b/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/MiscTests.java index 9349f05a5..af0205f00 100644 --- a/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/MiscTests.java +++ b/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/MiscTests.java @@ -16,6 +16,7 @@ package org.deeplearning4j.zoo; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.conf.layers.OutputLayer; import org.deeplearning4j.nn.graph.ComputationGraph; import org.deeplearning4j.nn.transferlearning.TransferLearning; diff --git a/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/TestDownload.java b/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/TestDownload.java index 1b9853b58..bb41443bb 100644 --- a/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/TestDownload.java +++ b/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/TestDownload.java @@ -17,6 +17,7 @@ package org.deeplearning4j.zoo; import lombok.extern.slf4j.Slf4j; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.common.resources.DL4JResources; import org.deeplearning4j.nn.conf.WorkspaceMode; import org.deeplearning4j.zoo.model.LeNet; diff --git a/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/TestImageNet.java b/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/TestImageNet.java index 877c7699a..b1963b9b6 100644 --- a/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/TestImageNet.java +++ b/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/TestImageNet.java @@ -19,6 +19,7 @@ package org.deeplearning4j.zoo; import lombok.extern.slf4j.Slf4j; import org.datavec.image.loader.NativeImageLoader; import org.datavec.image.transform.ColorConversionTransform; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.nn.graph.ComputationGraph; import 
org.deeplearning4j.nn.layers.objdetect.DetectedObject; import org.deeplearning4j.nn.layers.objdetect.YoloUtils; diff --git a/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/TestInstantiation.java b/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/TestInstantiation.java index 4c8e66191..d70137775 100644 --- a/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/TestInstantiation.java +++ b/deeplearning4j/deeplearning4j-zoo/src/test/java/org/deeplearning4j/zoo/TestInstantiation.java @@ -17,6 +17,7 @@ package org.deeplearning4j.zoo; import lombok.extern.slf4j.Slf4j; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.datasets.iterator.AsyncDataSetIterator; import org.deeplearning4j.datasets.iterator.impl.BenchmarkDataSetIterator; import org.deeplearning4j.nn.api.Model; diff --git a/deeplearning4j/dl4j-integration-tests/pom.xml b/deeplearning4j/dl4j-integration-tests/pom.xml index 27461c923..43e6bfa60 100644 --- a/deeplearning4j/dl4j-integration-tests/pom.xml +++ b/deeplearning4j/dl4j-integration-tests/pom.xml @@ -68,6 +68,13 @@ test + + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/dl4j-integration-tests/src/test/java/org/deeplearning4j/integration/BaseDL4JTest.java b/deeplearning4j/dl4j-integration-tests/src/test/java/org/deeplearning4j/integration/BaseDL4JTest.java deleted file mode 100644 index f6294b9cf..000000000 --- a/deeplearning4j/dl4j-integration-tests/src/test/java/org/deeplearning4j/integration/BaseDL4JTest.java +++ /dev/null @@ -1,141 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. 
- * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.deeplearning4j.integration; - -import lombok.extern.slf4j.Slf4j; -import org.bytedeco.javacpp.Pointer; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TestName; -import org.nd4j.linalg.api.buffer.DataBuffer; -import org.nd4j.linalg.api.buffer.DataType; -import org.nd4j.linalg.api.memory.MemoryWorkspace; -import org.nd4j.linalg.api.ops.executioner.OpExecutioner; -import org.nd4j.linalg.factory.Nd4j; -import org.nd4j.linalg.profiler.ProfilerConfig; - -import java.lang.management.ManagementFactory; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -@Slf4j -public class BaseDL4JTest { - - @Rule - public TestName name = new TestName(); - - protected long startTime; - protected int threadCountBefore; - - /** - * Override this to set the profiling mode for the tests defined in the child class - */ - public OpExecutioner.ProfilingMode getProfilingMode(){ - return OpExecutioner.ProfilingMode.SCOPE_PANIC; - } - - /** - * Override this to set the datatype of the tests defined in the child class - */ - public DataType getDataType(){ - return DataType.DOUBLE; - } - - public DataType getDefaultFPDataType(){ - return getDataType(); - } - - @Before - public void beforeTest(){ - log.info("{}.{}", getClass().getSimpleName(), name.getMethodName()); - Nd4j.getExecutioner().setProfilingMode(getProfilingMode()); - Nd4j.getExecutioner().setProfilingConfig(ProfilerConfig.builder().build()); - Nd4j.setDefaultDataTypes(getDataType(), 
getDefaultFPDataType()); - startTime = System.currentTimeMillis(); - threadCountBefore = ManagementFactory.getThreadMXBean().getThreadCount(); - } - - @After - public void afterTest(){ - //Attempt to keep workspaces isolated between tests - Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread(); - MemoryWorkspace currWS = Nd4j.getMemoryManager().getCurrentWorkspace(); - Nd4j.getMemoryManager().setCurrentWorkspace(null); - if(currWS != null){ - //Not really safe to continue testing under this situation... other tests will likely fail with obscure - // errors that are hard to track back to this - log.error("Open workspace leaked from test! Exiting - {}, isOpen = {} - {}", currWS.getId(), currWS.isScopeActive(), currWS); - System.exit(1); - } - - StringBuilder sb = new StringBuilder(); - long maxPhys = Pointer.maxPhysicalBytes(); - long maxBytes = Pointer.maxBytes(); - long currPhys = Pointer.physicalBytes(); - long currBytes = Pointer.totalBytes(); - - long jvmTotal = Runtime.getRuntime().totalMemory(); - long jvmMax = Runtime.getRuntime().maxMemory(); - - int threadsAfter = ManagementFactory.getThreadMXBean().getThreadCount(); - - long duration = System.currentTimeMillis() - startTime; - sb.append(getClass().getSimpleName()).append(".").append(name.getMethodName()) - .append(": ").append(duration).append(" ms") - .append(", threadCount: (").append(threadCountBefore).append("->").append(threadsAfter).append(")") - .append(", jvmTotal=").append(jvmTotal) - .append(", jvmMax=").append(jvmMax) - .append(", totalBytes=").append(currBytes).append(", maxBytes=").append(maxBytes) - .append(", currPhys=").append(currPhys).append(", maxPhys=").append(maxPhys); - - List ws = Nd4j.getWorkspaceManager().getAllWorkspacesForCurrentThread(); - if(ws != null && ws.size() > 0){ - long currSize = 0; - for(MemoryWorkspace w : ws){ - currSize += w.getCurrentSize(); - } - if(currSize > 0){ - sb.append(", threadWSSize=").append(currSize) - .append(" 
(").append(ws.size()).append(" WSs)"); - } - } - - - Properties p = Nd4j.getExecutioner().getEnvironmentInformation(); - Object o = p.get("cuda.devicesInformation"); - if(o instanceof List){ - List> l = (List>) o; - if(l.size() > 0) { - - sb.append(" [").append(l.size()) - .append(" GPUs: "); - - for (int i = 0; i < l.size(); i++) { - Map m = l.get(i); - if(i > 0) - sb.append(","); - sb.append("(").append(m.get("cuda.freeMemory")).append(" free, ") - .append(m.get("cuda.totalMemory")).append(" total)"); - } - sb.append("]"); - } - } - log.info(sb.toString()); - } -} diff --git a/deeplearning4j/dl4j-integration-tests/src/test/java/org/deeplearning4j/integration/IntegrationTests.java b/deeplearning4j/dl4j-integration-tests/src/test/java/org/deeplearning4j/integration/IntegrationTests.java index 4e1cb95f5..8e2ceef79 100644 --- a/deeplearning4j/dl4j-integration-tests/src/test/java/org/deeplearning4j/integration/IntegrationTests.java +++ b/deeplearning4j/dl4j-integration-tests/src/test/java/org/deeplearning4j/integration/IntegrationTests.java @@ -16,6 +16,7 @@ package org.deeplearning4j.integration; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.integration.testcases.*; import org.junit.AfterClass; import org.junit.Ignore; diff --git a/deeplearning4j/dl4j-perf/pom.xml b/deeplearning4j/dl4j-perf/pom.xml index 239eead6b..1e1dd07f9 100644 --- a/deeplearning4j/dl4j-perf/pom.xml +++ b/deeplearning4j/dl4j-perf/pom.xml @@ -79,6 +79,13 @@ ${project.version} test + + + org.deeplearning4j + deeplearning4j-common-tests + ${project.version} + test + diff --git a/deeplearning4j/dl4j-perf/src/test/java/org/deeplearning4j/perf/listener/BaseDL4JTest.java b/deeplearning4j/dl4j-perf/src/test/java/org/deeplearning4j/perf/listener/BaseDL4JTest.java deleted file mode 100644 index 9ead56a7e..000000000 --- a/deeplearning4j/dl4j-perf/src/test/java/org/deeplearning4j/perf/listener/BaseDL4JTest.java +++ /dev/null @@ -1,140 +0,0 @@ 
-/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.deeplearning4j.perf.listener; - -import lombok.extern.slf4j.Slf4j; -import org.bytedeco.javacpp.Pointer; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TestName; -import org.nd4j.linalg.api.buffer.DataType; -import org.nd4j.linalg.api.memory.MemoryWorkspace; -import org.nd4j.linalg.api.ops.executioner.OpExecutioner; -import org.nd4j.linalg.factory.Nd4j; -import org.nd4j.linalg.profiler.ProfilerConfig; - -import java.lang.management.ManagementFactory; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -@Slf4j -public class BaseDL4JTest { - - @Rule - public TestName name = new TestName(); - - protected long startTime; - protected int threadCountBefore; - - /** - * Override this to set the profiling mode for the tests defined in the child class - */ - public OpExecutioner.ProfilingMode getProfilingMode(){ - return OpExecutioner.ProfilingMode.SCOPE_PANIC; - } - - /** - * Override this to set the datatype of the tests defined in the child class - */ - public DataType getDataType(){ - return DataType.DOUBLE; - } - - public DataType getDefaultFPDataType(){ - return getDataType(); - } - - @Before - public void 
beforeTest(){ - log.info("{}.{}", getClass().getSimpleName(), name.getMethodName()); - Nd4j.getExecutioner().setProfilingMode(getProfilingMode()); - Nd4j.getExecutioner().setProfilingConfig(ProfilerConfig.builder().build()); - Nd4j.setDefaultDataTypes(getDataType(), getDefaultFPDataType()); - startTime = System.currentTimeMillis(); - threadCountBefore = ManagementFactory.getThreadMXBean().getThreadCount(); - } - - @After - public void afterTest(){ - //Attempt to keep workspaces isolated between tests - Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread(); - MemoryWorkspace currWS = Nd4j.getMemoryManager().getCurrentWorkspace(); - Nd4j.getMemoryManager().setCurrentWorkspace(null); - if(currWS != null){ - //Not really safe to continue testing under this situation... other tests will likely fail with obscure - // errors that are hard to track back to this - log.error("Open workspace leaked from test! Exiting - {}, isOpen = {} - {}", currWS.getId(), currWS.isScopeActive(), currWS); - System.exit(1); - } - - StringBuilder sb = new StringBuilder(); - long maxPhys = Pointer.maxPhysicalBytes(); - long maxBytes = Pointer.maxBytes(); - long currPhys = Pointer.physicalBytes(); - long currBytes = Pointer.totalBytes(); - - long jvmTotal = Runtime.getRuntime().totalMemory(); - long jvmMax = Runtime.getRuntime().maxMemory(); - - int threadsAfter = ManagementFactory.getThreadMXBean().getThreadCount(); - - long duration = System.currentTimeMillis() - startTime; - sb.append(getClass().getSimpleName()).append(".").append(name.getMethodName()) - .append(": ").append(duration).append(" ms") - .append(", threadCount: (").append(threadCountBefore).append("->").append(threadsAfter).append(")") - .append(", jvmTotal=").append(jvmTotal) - .append(", jvmMax=").append(jvmMax) - .append(", totalBytes=").append(currBytes).append(", maxBytes=").append(maxBytes) - .append(", currPhys=").append(currPhys).append(", maxPhys=").append(maxPhys); - - List ws = 
Nd4j.getWorkspaceManager().getAllWorkspacesForCurrentThread(); - if(ws != null && ws.size() > 0){ - long currSize = 0; - for(MemoryWorkspace w : ws){ - currSize += w.getCurrentSize(); - } - if(currSize > 0){ - sb.append(", threadWSSize=").append(currSize) - .append(" (").append(ws.size()).append(" WSs)"); - } - } - - - Properties p = Nd4j.getExecutioner().getEnvironmentInformation(); - Object o = p.get("cuda.devicesInformation"); - if(o instanceof List){ - List> l = (List>) o; - if(l.size() > 0) { - - sb.append(" [").append(l.size()) - .append(" GPUs: "); - - for (int i = 0; i < l.size(); i++) { - Map m = l.get(i); - if(i > 0) - sb.append(","); - sb.append("(").append(m.get("cuda.freeMemory")).append(" free, ") - .append(m.get("cuda.totalMemory")).append(" total)"); - } - sb.append("]"); - } - } - log.info(sb.toString()); - } -} diff --git a/deeplearning4j/dl4j-perf/src/test/java/org/deeplearning4j/perf/listener/SystemPollingTest.java b/deeplearning4j/dl4j-perf/src/test/java/org/deeplearning4j/perf/listener/SystemPollingTest.java index f781914ec..ae46692fc 100644 --- a/deeplearning4j/dl4j-perf/src/test/java/org/deeplearning4j/perf/listener/SystemPollingTest.java +++ b/deeplearning4j/dl4j-perf/src/test/java/org/deeplearning4j/perf/listener/SystemPollingTest.java @@ -17,6 +17,7 @@ package org.deeplearning4j.perf.listener; import org.apache.commons.io.FileUtils; +import org.deeplearning4j.BaseDL4JTest; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; diff --git a/deeplearning4j/dl4j-perf/src/test/java/org/deeplearning4j/perf/listener/TestHardWareMetric.java b/deeplearning4j/dl4j-perf/src/test/java/org/deeplearning4j/perf/listener/TestHardWareMetric.java index 19436ce9d..b9589398b 100644 --- a/deeplearning4j/dl4j-perf/src/test/java/org/deeplearning4j/perf/listener/TestHardWareMetric.java +++ b/deeplearning4j/dl4j-perf/src/test/java/org/deeplearning4j/perf/listener/TestHardWareMetric.java @@ -16,6 +16,7 @@ package org.deeplearning4j.perf.listener; 
+import org.deeplearning4j.BaseDL4JTest; import org.junit.Ignore; import org.junit.Test; import oshi.json.SystemInfo; diff --git a/deeplearning4j/dl4j-perf/src/test/java/org/deeplearning4j/perf/listener/TestSystemInfoPrintListener.java b/deeplearning4j/dl4j-perf/src/test/java/org/deeplearning4j/perf/listener/TestSystemInfoPrintListener.java index 69edb363b..6ce531f12 100644 --- a/deeplearning4j/dl4j-perf/src/test/java/org/deeplearning4j/perf/listener/TestSystemInfoPrintListener.java +++ b/deeplearning4j/dl4j-perf/src/test/java/org/deeplearning4j/perf/listener/TestSystemInfoPrintListener.java @@ -16,6 +16,7 @@ package org.deeplearning4j.perf.listener; +import org.deeplearning4j.BaseDL4JTest; import org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator; import org.deeplearning4j.nn.conf.MultiLayerConfiguration; import org.deeplearning4j.nn.conf.NeuralNetConfiguration; diff --git a/deeplearning4j/pom.xml b/deeplearning4j/pom.xml index f9b1eecce..15be211de 100644 --- a/deeplearning4j/pom.xml +++ b/deeplearning4j/pom.xml @@ -145,6 +145,7 @@ dl4j-integration-tests deeplearning4j-common deeplearning4j-remote + deeplearning4j-common-tests diff --git a/libnd4j/include/ops/declarable/helpers/cpu/compare_elem.cpp b/libnd4j/include/ops/declarable/helpers/cpu/compare_elem.cpp index 5f7fbf694..e5e51d38f 100644 --- a/libnd4j/include/ops/declarable/helpers/cpu/compare_elem.cpp +++ b/libnd4j/include/ops/declarable/helpers/cpu/compare_elem.cpp @@ -56,7 +56,7 @@ namespace nd4j { sumt = samediff::Threads::parallel_long(func, LAMBDA_SUML, 0, length - 1); } - nd4j_printf("Sum: %lld\n", sumt) + //nd4j_printf("Sum: %lld\n", sumt) output = (sumt > -1); diff --git a/libnd4j/pom.xml b/libnd4j/pom.xml index 374bc5640..20b9d6562 100644 --- a/libnd4j/pom.xml +++ b/libnd4j/pom.xml @@ -326,6 +326,8 @@ --compute ${libnd4j.compute} ${libnd4j.tests} + -j + ${libnd4j.buildthreads} ${project.basedir} diff --git 
a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/listeners/At.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/listeners/At.java index 5427b4cd7..e05d067c6 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/listeners/At.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/listeners/At.java @@ -1,6 +1,7 @@ package org.nd4j.autodiff.listeners; import lombok.*; +import org.nd4j.autodiff.samediff.internal.FrameIter; /** * @@ -20,13 +21,14 @@ public class At { private int iteration; private int trainingThreadNum; private long javaThreadNum; + private FrameIter frameIter; private Operation operation; /** * @return A new instance with everything set to 0, and operation set to INFERENCE */ public static At defaultAt(){ - return new At(0, 0, 0, 0, Operation.INFERENCE); + return new At(0, 0, 0, 0, null, Operation.INFERENCE); } /** @@ -34,7 +36,7 @@ public class At { * @return A new instance with everything set to 0, except for the specified operation */ public static At defaultAt(@NonNull Operation op){ - return new At(0, 0, 0, 0, op); + return new At(0, 0, 0, 0, null, op); } /** @@ -76,7 +78,7 @@ public class At { * @return A copy of the current At instance */ public At copy(){ - return new At(epoch, iteration, trainingThreadNum, javaThreadNum, operation); + return new At(epoch, iteration, trainingThreadNum, javaThreadNum, frameIter, operation); } /** @@ -84,6 +86,6 @@ public class At { * @return A copy of the current instance, but with the specified operation */ public At copy(Operation operation){ - return new At(epoch, iteration, trainingThreadNum, javaThreadNum, operation); + return new At(epoch, iteration, trainingThreadNum, javaThreadNum, frameIter, operation); } } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/SameDiff.java 
b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/SameDiff.java index 449c2ef78..ceccdae65 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/SameDiff.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/SameDiff.java @@ -1804,7 +1804,7 @@ public class SameDiff extends SDBaseOps { if (validationData != null && (validationFrequency <= 0 || i % validationFrequency == 0)) { long validationStart = System.currentTimeMillis(); - outputHelper(validationData, new At(at.epoch(), 0, 0, 0, Operation.TRAINING_VALIDATION), + outputHelper(validationData, new At(at.epoch(), 0, 0, 0, null, Operation.TRAINING_VALIDATION), listenersWitHistory); long validationTime = System.currentTimeMillis() - validationStart; diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/internal/AbstractSession.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/internal/AbstractSession.java index 1f93dbe94..c95f26b1f 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/internal/AbstractSession.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/internal/AbstractSession.java @@ -921,32 +921,6 @@ public abstract class AbstractSession { } } - /** - * FrameIter: Identifies a frame + iteration (but not a specific op or variable).
- * Note that frames can be nested - which generally represents nested loop situations. - */ - @Data - @AllArgsConstructor - public static class FrameIter { - private String frame; - private int iteration; - private FrameIter parentFrame; - - @Override - public String toString() { - return "(\"" + frame + "\"," + iteration + (parentFrame == null ? "" : ",parent=" + parentFrame.toString()) + ")"; - } - - @Override - public FrameIter clone() { - return new FrameIter(frame, iteration, (parentFrame == null ? null : parentFrame.clone())); - } - - public VarId toVarId(String name) { - return new VarId(name, frame, iteration, parentFrame); - } - } - /** * ExecType: Execution type, as used in ExecStep
* OP: Operation execution
diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/internal/FrameIter.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/internal/FrameIter.java new file mode 100644 index 000000000..4ca555327 --- /dev/null +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/internal/FrameIter.java @@ -0,0 +1,46 @@ +/* ****************************************************************************** + * Copyright (c) 2019-2020 Konduit K.K. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0. + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + * + * SPDX-License-Identifier: Apache-2.0 + ******************************************************************************/ + +package org.nd4j.autodiff.samediff.internal; + +import lombok.AllArgsConstructor; +import lombok.Data; + +/** + * FrameIter: Identifies a frame + iteration (but not a specific op or variable).
+ * Note that frames can be nested - which generally represents nested loop situations. + */ +@Data +@AllArgsConstructor +public class FrameIter { + private String frame; + private int iteration; + private FrameIter parentFrame; + + @Override + public String toString() { + return "(\"" + frame + "\"," + iteration + (parentFrame == null ? "" : ",parent=" + parentFrame.toString()) + ")"; + } + + @Override + public FrameIter clone() { + return new FrameIter(frame, iteration, (parentFrame == null ? null : parentFrame.clone())); + } + + public AbstractSession.VarId toVarId(String name) { + return new AbstractSession.VarId(name, frame, iteration, parentFrame); + } +} diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/internal/InferenceSession.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/internal/InferenceSession.java index 4a6a5ce53..7640d450c 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/internal/InferenceSession.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/internal/InferenceSession.java @@ -206,6 +206,7 @@ public class InferenceSession extends AbstractSession { @Override public INDArray[] getOutputs(SameDiffOp op, FrameIter outputFrameIter, Set opInputs, Set allIterInputs, Set constAndPhInputs, List listeners, At at, MultiDataSet batch, Set allReqVariables) { + at.setFrameIter(outputFrameIter); if (listeners != null && listeners.size() > 0) { SameDiffOp sdOp = sameDiff.getOps().get(op.getOp().getOwnName()); for (Listener l : listeners) { diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/serde/FlatBuffersMapper.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/serde/FlatBuffersMapper.java index 5b4cb497b..a88a9c84f 100644 --- 
a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/serde/FlatBuffersMapper.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/samediff/serde/FlatBuffersMapper.java @@ -274,7 +274,6 @@ public class FlatBuffersMapper { return OpType.TRANSFORM_STRICT; case SPECIAL: return OpType.TRANSFORM_STRICT; - case VARIANCE: case REDUCE_FLOAT: return OpType.REDUCE_FLOAT; case REDUCE_BOOL: @@ -302,6 +301,7 @@ public class FlatBuffersMapper { case PAIRWISE_BOOL: return OpType.PAIRWISE_BOOL; case SUMMARYSTATS: + case VARIANCE: return OpType.SUMMARYSTATS; default: throw new UnsupportedOperationException("Unknown op type passed in: " + type); @@ -799,7 +799,8 @@ public class FlatBuffersMapper { } int[] dims; - if (node.opType() == Op.Type.REDUCE_FLOAT || node.opType() == Op.Type.REDUCE_SAME || node.opType() == Op.Type.REDUCE_BOOL || node.opType() == Op.Type.REDUCE_LONG || node.opType() == Op.Type.INDEXREDUCE || node.opType() == Op.Type.REDUCE3) { + if (node.opType() == Op.Type.REDUCE_FLOAT || node.opType() == Op.Type.REDUCE_SAME || node.opType() == Op.Type.REDUCE_BOOL + || node.opType() == Op.Type.REDUCE_LONG || node.opType() == Op.Type.INDEXREDUCE || node.opType() == Op.Type.REDUCE3) { dims = node.getDimensions(); if (dims == null) dims = new int[0]; diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/validation/GradCheckUtil.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/validation/GradCheckUtil.java index 0f1e0bd52..b35369ad4 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/validation/GradCheckUtil.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/validation/GradCheckUtil.java @@ -47,7 +47,7 @@ public class GradCheckUtil { public enum Subset {EVERY_N, RANDOM} - public static final boolean DEFAULT_PRINT = true; + public static final boolean DEFAULT_PRINT = false; 
public static final boolean DEFAULT_EXIT_FIRST_FAILURE = false; public static final boolean DEFAULT_DEBUG_MODE = false; public static final double DEFAULT_EPS = 1e-5; @@ -330,11 +330,10 @@ public class GradCheckUtil { + "; absolute error = " + absError + " < minAbsoluteError = " + minAbsError); } } else { - if (print) - log.info("Param " + i + " (" + name + strIdx + ") FAILED: grad= " + analyticGrad - + ", numericalGrad= " + numericalGrad + ", relError= " + relError - + ", absError=" + absError - + ", scorePlus=" + scorePlus + ", scoreMinus= " + scoreMinus); + log.info("Param " + i + " (" + name + strIdx + ") FAILED: grad= " + analyticGrad + + ", numericalGrad= " + numericalGrad + ", relError= " + relError + + ", absError=" + absError + + ", scorePlus=" + scorePlus + ", scoreMinus= " + scoreMinus); if (exitOnFirstFailure) return false; totalNFailures++; @@ -347,11 +346,9 @@ public class GradCheckUtil { } } - if (print) { - int nPass = totalCount - totalNFailures; - log.info("GradCheckUtil.checkGradients(): " + totalCount + " params checked, " + nPass + " passed, " - + totalNFailures + " failed. Largest relative error = " + maxError); - } + int nPass = totalCount - totalNFailures; + log.info("GradCheckUtil.checkGradients(): " + totalCount + " params checked, " + nPass + " passed, " + + totalNFailures + " failed. 
Largest relative error = " + maxError); if(debugMode && !debugBefore){ sd.disableDebugging(); diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/validation/TestCase.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/validation/TestCase.java index fad760bb3..2767a22f9 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/validation/TestCase.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/autodiff/validation/TestCase.java @@ -45,7 +45,7 @@ import java.util.*; public class TestCase { public enum TestSerialization {BEFORE_EXEC, AFTER_EXEC, BOTH, NONE}; - public static final boolean GC_DEFAULT_PRINT = true; + public static final boolean GC_DEFAULT_PRINT = false; public static final boolean GC_DEFAULT_EXIT_FIRST_FAILURE = false; public static final boolean GC_DEFAULT_DEBUG_MODE = false; public static final double GC_DEFAULT_EPS = 1e-5; diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/imports/graphmapper/tf/TFGraphMapper.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/imports/graphmapper/tf/TFGraphMapper.java index f54b532e8..b8e80ecf9 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/imports/graphmapper/tf/TFGraphMapper.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/imports/graphmapper/tf/TFGraphMapper.java @@ -293,6 +293,12 @@ public class TFGraphMapper { for (int i = 0; i < nIn; i++) { String origInName = nd.getInput(i); String inName = stripControl(origInName); + + if(inName.endsWith(":0")){ + //Strip ":0" suffix. 
Some ops can depend on placeholders, like "image_tensor:0" but in SameDiff this is a variable called "image_tensor" + inName = inName.substring(0, inName.length()-2); + } + boolean isControlDep = isControlDep(origInName); if (isControlDep) { if (controlDeps == null) @@ -443,6 +449,11 @@ public class TFGraphMapper { String s = nextOpDef.getInput(i); String inName = stripControl(nextOpDef.getInput(i)); + if(inName.endsWith(":0")){ + //Strip ":0" suffix. Some ops can depend on placeholders, like "image_tensor:0" but in SameDiff this is a variable called "image_tensor" + inName = inName.substring(0, inName.length()-2); + } + // log.info("Input: {}, {}", s, inName); if (!sd.hasVariable(inName) && !skipCase) { diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationPReLU.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationPReLU.java index 73321a1c1..4de48cb2d 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationPReLU.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationPReLU.java @@ -1,5 +1,6 @@ /* ***************************************************************************** * Copyright (c) 2015-2018 Skymind, Inc. + * Copyright (c) 2019-2020 Konduit K.K. 
* * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at @@ -61,9 +62,11 @@ public class ActivationPReLU extends BaseActivationFunction { @Override public Pair backprop(INDArray in, INDArray epsilon) { assertShape(in, epsilon); - INDArray dLdalpha = Nd4j.create(alpha.shape()); + INDArray dLdalpha = alpha.ulike(); + INDArray outTemp = in.ulike(); DynamicCustomOp.DynamicCustomOpsBuilder preluBp = DynamicCustomOp.builder("prelu_bp") - .addInputs(in, alpha, epsilon).addOutputs(in, alpha); + .addInputs(in, alpha, epsilon) + .addOutputs(outTemp, dLdalpha); if (sharedAxes != null) { for (long axis: sharedAxes) { @@ -71,6 +74,7 @@ public class ActivationPReLU extends BaseActivationFunction { } } Nd4j.getExecutioner().execAndReturn(preluBp.build()); + in.assign(outTemp); return new Pair<>(in, dLdalpha); } @@ -78,4 +82,4 @@ public class ActivationPReLU extends BaseActivationFunction { public String toString() { return "prelu"; } -} +} \ No newline at end of file diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationReLU.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationReLU.java index 2bafd5472..02fe528c2 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationReLU.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/activations/impl/ActivationReLU.java @@ -18,11 +18,13 @@ package org.nd4j.linalg.activations.impl; import lombok.EqualsAndHashCode; import lombok.Getter; -import org.nd4j.linalg.api.ops.impl.scalar.RectifiedLinearDerivative; +import org.nd4j.linalg.api.buffer.DataType; +import org.nd4j.linalg.api.ops.impl.scalar.*; +import org.nd4j.linalg.api.ops.impl.transforms.gradient.LeakyReLUBp; +import org.nd4j.linalg.api.ops.impl.transforms.gradient.LeakyReLUDerivative; import 
org.nd4j.linalg.primitives.Pair; import org.nd4j.linalg.activations.BaseActivationFunction; import org.nd4j.linalg.api.ndarray.INDArray; -import org.nd4j.linalg.api.ops.impl.scalar.RectifiedLinear; import org.nd4j.linalg.factory.Nd4j; /** @@ -32,16 +34,72 @@ import org.nd4j.linalg.factory.Nd4j; @Getter public class ActivationReLU extends BaseActivationFunction { + private Double max; + private Double threshold; + private Double negativeSlope; + + public ActivationReLU(){ + this(null, null, null); + } + + public ActivationReLU(Double maxValue, Double threshold, Double negativeSlope){ + this.max = maxValue; + this.threshold = threshold; + this.negativeSlope = negativeSlope; + } + @Override public INDArray getActivation(INDArray in, boolean training) { - Nd4j.getExecutioner().execAndReturn(new RectifiedLinear(in)); + if(negativeSlope != null || threshold != null){ + double t = threshold == null ? 0.0 : threshold; + double ns = negativeSlope == null ? 0.0 : negativeSlope; + if(t == 0.0) { + Nd4j.getExecutioner().execAndReturn(new LeakyReLU(in, ns)); + } else { + //Non-zero threshold, and non-zero slope + //TODO optimize this... but, extremely rare case in practice? + INDArray oneGte = in.gte(t).castTo(in.dataType()); + INDArray oneLt = in.lt(t).castTo(in.dataType()); + INDArray lower = oneLt.muli(ns).muli(in.sub(threshold)); + INDArray upper = oneGte.muli(in); + in.assign(lower.addi(upper)); + } + } else { + Nd4j.getExecutioner().exec(new RectifiedLinear(in, in)); + } + if(max != null){ + Nd4j.exec(new ScalarMin(in, null, in, max)); + } return in; } @Override public Pair backprop(INDArray in, INDArray epsilon) { assertShape(in, epsilon); - INDArray dLdz = Nd4j.exec(new RectifiedLinearDerivative(in, epsilon, in.ulike()))[0]; + + INDArray dLdz; + INDArray maxMask = (max == null || max == 0.0 ? null : in.lt(max)); + if(negativeSlope != null || threshold != null){ + double t = threshold == null ? 0.0 : threshold; + double ns = negativeSlope == null ? 
0.0 : negativeSlope; + if(t == 0.0) { + dLdz = Nd4j.getExecutioner().exec(new LeakyReLUBp(in, epsilon, in.ulike(), ns))[0]; + } else { + //Non-zero threshold, and non-zero slope + //TODO optimize this... but, extremely rare case in practice? + INDArray oneGte = in.gte(t).castTo(in.dataType()); + INDArray oneLt = in.lt(t).castTo(in.dataType()); + INDArray lower = oneLt.muli(ns); + INDArray upper = oneGte; + dLdz = in.assign(lower.addi(upper)).muli(epsilon); + } + } else { + dLdz = Nd4j.getExecutioner().exec(new RectifiedLinearDerivative(in, epsilon, in.ulike(), threshold == null ? 0.0 : threshold))[0]; + } + + if(maxMask != null){ + dLdz.muli(maxMask); + } return new Pair<>(dLdz, null); } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BiasAdd.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BiasAdd.java index 3487cc216..c80f9acf1 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BiasAdd.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/broadcast/BiasAdd.java @@ -69,6 +69,8 @@ public class BiasAdd extends DynamicCustomOp { super.initFromTensorFlow(nodeDef, initWith, attributesForNode, graph); if(attributesForNode.containsKey("data_format")){ nchw = "NCHW".equalsIgnoreCase(attributesForNode.get("data_format").getS().toStringUtf8()); + } else { + nchw = false; //TF default is NHWC } bArguments.clear(); bArguments.add(nchw); diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/config/Conv1DConfig.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/config/Conv1DConfig.java index 196876cb2..c62341f28 100644 --- 
a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/config/Conv1DConfig.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/layers/convolution/config/Conv1DConfig.java @@ -86,7 +86,7 @@ public class Conv1DConfig extends BaseConvolutionConfig { ret.put("s", s); ret.put("p", p); ret.put("d", d); - ret.put("isSameMode", paddingMode); + ret.put("paddingMode", paddingMode); ret.put("dataFormat", dataFormat); return ret; } diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/RectifiedLinearDerivative.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/RectifiedLinearDerivative.java index 7e4d0fa09..0ee0c07f7 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/RectifiedLinearDerivative.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/scalar/RectifiedLinearDerivative.java @@ -19,8 +19,13 @@ public class RectifiedLinearDerivative extends DynamicCustomOp { super(sd, new SDVariable[]{input, gradient}); } - public RectifiedLinearDerivative(@NonNull INDArray input, @NonNull INDArray gradient, INDArray output){ + public RectifiedLinearDerivative(@NonNull INDArray input, @NonNull INDArray gradient, INDArray output) { + this(input, gradient, output, 0.0); + } + + public RectifiedLinearDerivative(@NonNull INDArray input, @NonNull INDArray gradient, INDArray output, double scalar){ super(new INDArray[]{input, gradient}, wrapOrNull(output)); + addTArgument(scalar); } @Override diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/summarystats/Variance.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/summarystats/Variance.java index 2b5a49682..504012703 100644 --- 
a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/summarystats/Variance.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/impl/summarystats/Variance.java @@ -88,6 +88,8 @@ public class Variance extends BaseReduceOp { return 0; } + + @Override public String opName() { return "var"; diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/factory/Nd4j.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/factory/Nd4j.java index 2e2efadda..94b17142b 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/factory/Nd4j.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/factory/Nd4j.java @@ -537,6 +537,14 @@ public class Nd4j { return ret; } + /** + * Get the backend Environment instance + * @return The backend Environment instance + */ + public static Environment getEnvironment(){ + return backend.getEnvironment(); + } + /** * Get the operation executioner instance. 
* diff --git a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-native-api/src/main/java/org/nd4j/nativeblas/Nd4jBlas.java b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-native-api/src/main/java/org/nd4j/nativeblas/Nd4jBlas.java index 5de827d1a..fa92f94f5 100644 --- a/nd4j/nd4j-backends/nd4j-api-parent/nd4j-native-api/src/main/java/org/nd4j/nativeblas/Nd4jBlas.java +++ b/nd4j/nd4j-backends/nd4j-api-parent/nd4j-native-api/src/main/java/org/nd4j/nativeblas/Nd4jBlas.java @@ -21,6 +21,7 @@ import lombok.extern.slf4j.Slf4j; import org.bytedeco.javacpp.Loader; import org.bytedeco.javacpp.Pointer; import org.nd4j.config.ND4JEnvironmentVars; +import org.nd4j.config.ND4JSystemProperties; import org.nd4j.linalg.api.blas.Blas; @@ -52,7 +53,10 @@ public abstract class Nd4jBlas implements Blas { setMaxThreads(numThreads); } - log.info("Number of threads used for OpenMP BLAS: {}", getMaxThreads()); + String logInit = System.getProperty(ND4JSystemProperties.LOG_INITIALIZATION); + if(logInit == null || logInit.isEmpty() || Boolean.parseBoolean(logInit)) { + log.info("Number of threads used for OpenMP BLAS: {}", getMaxThreads()); + } } } diff --git a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/JCublasBackend.java b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/JCublasBackend.java index 34970dc19..2f3ad94df 100644 --- a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/JCublasBackend.java +++ b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/JCublasBackend.java @@ -20,10 +20,12 @@ import lombok.extern.slf4j.Slf4j; import org.bytedeco.javacpp.Loader; import org.nd4j.config.ND4JSystemProperties; import org.nd4j.linalg.api.environment.Nd4jEnvironment; +import org.nd4j.linalg.factory.Environment; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.factory.Nd4jBackend; import org.nd4j.linalg.io.ClassPathResource; import 
org.nd4j.linalg.io.Resource; +import org.nd4j.nativeblas.CudaEnvironment; import org.nd4j.nativeblas.Nd4jCuda; import java.util.List; @@ -86,6 +88,11 @@ public class JCublasBackend extends Nd4jBackend { return JCublasNDArray.class; } + @Override + public Environment getEnvironment() { + return CudaEnvironment.getInstance(); + } + @Override public void logBackendInit() { String logInitProperty = System.getProperty(ND4JSystemProperties.LOG_INITIALIZATION, "true"); diff --git a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/buffer/BaseCudaDataBuffer.java b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/buffer/BaseCudaDataBuffer.java index 5e0583d56..9564fb15e 100644 --- a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/buffer/BaseCudaDataBuffer.java +++ b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/linalg/jcublas/buffer/BaseCudaDataBuffer.java @@ -462,6 +462,9 @@ public abstract class BaseCudaDataBuffer extends BaseDataBuffer implements JCuda indexer = LongIndexer.create((LongPointer) pointer); break; case UINT16: + this.pointer = new CudaPointer(allocationPoint.getPointers().getHostPointer(), originalBuffer.length()).asShortPointer(); + indexer = UShortIndexer.create((ShortPointer) pointer); + break; case SHORT: this.pointer = new CudaPointer(allocationPoint.getPointers().getHostPointer(), originalBuffer.length()).asShortPointer(); indexer = ShortIndexer.create((ShortPointer) pointer); diff --git a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/nativeblas/CudaEnvironment.java b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/nativeblas/CudaEnvironment.java new file mode 100644 index 000000000..83e9c156f --- /dev/null +++ b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-cuda/src/main/java/org/nd4j/nativeblas/CudaEnvironment.java @@ -0,0 +1,170 @@ +/* 
****************************************************************************** + * Copyright (c) 2019 Konduit K.K. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0. + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + * + * SPDX-License-Identifier: Apache-2.0 + ******************************************************************************/ +package org.nd4j.nativeblas; + +import org.nd4j.linalg.factory.Environment; +import org.nd4j.nativeblas.Nd4jCuda; + +/** + * CUDA backend implementation of {@link Environment} + * + * @author Alex Black + */ +public class CudaEnvironment implements Environment { + + + private static final CudaEnvironment INSTANCE = new CudaEnvironment(Nd4jCuda.Environment.getInstance()); + + private final Nd4jCuda.Environment e; + + public static CudaEnvironment getInstance(){ + return INSTANCE; + } + + protected CudaEnvironment(Nd4jCuda.Environment environment){ + this.e = environment; + } + + @Override + public int blasMajorVersion() { + return e.blasMajorVersion(); + } + + @Override + public int blasMinorVersion() { + return e.blasMinorVersion(); + } + + @Override + public int blasPatchVersion() { + return e.blasMajorVersion(); + } + + @Override + public boolean isVerbose() { + return e.isVerbose(); + } + + @Override + public void setVerbose(boolean reallyVerbose) { + e.setVerbose(reallyVerbose); + } + + @Override + public boolean isDebug() { + return e.isDebug(); + } + + @Override + public boolean isProfiling() { + return e.isProfiling(); + } + + @Override + public boolean isDetectingLeaks() { + return e.isDetectingLeaks(); 
+ } + + @Override + public boolean isDebugAndVerbose() { + return e.isDebugAndVerbose(); + } + + @Override + public void setDebug(boolean reallyDebug) { + e.setDebug(reallyDebug); + } + + @Override + public void setProfiling(boolean reallyProfile) { + e.setProfiling(reallyProfile); + } + + @Override + public void setLeaksDetector(boolean reallyDetect) { + e.setLeaksDetector(reallyDetect); + } + + @Override + public boolean helpersAllowed() { + return e.helpersAllowed(); + } + + @Override + public void allowHelpers(boolean reallyAllow) { + e.allowHelpers(reallyAllow); + } + + @Override + public int tadThreshold() { + return e.tadThreshold(); + } + + @Override + public void setTadThreshold(int threshold) { + e.setTadThreshold(threshold); + } + + @Override + public int elementwiseThreshold() { + return e.elementwiseThreshold(); + } + + @Override + public void setElementwiseThreshold(int threshold) { + e.setElementwiseThreshold(threshold); + } + + @Override + public int maxThreads() { + return e.maxThreads(); + } + + @Override + public void setMaxThreads(int max) { + e.setMaxThreads(max); + } + + @Override + public int maxMasterThreads() { + return e.maxMasterThreads(); + } + + @Override + public void setMaxMasterThreads(int max) { + e.setMaxMasterThreads(max); + } + + @Override + public void setMaxPrimaryMemory(long maxBytes) { + e.setMaxPrimaryMemory(maxBytes); + } + + @Override + public void setMaxSpecialMemory(long maxBytes) { + e.setMaxSpecialyMemory(maxBytes); + } + + @Override + public void setMaxDeviceMemory(long maxBytes) { + e.setMaxDeviceMemory(maxBytes); + } + + @Override + public boolean isCPU() { + return e.isCPU(); + } +} diff --git a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuBackend.java b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuBackend.java index 627105bda..d3fe308e6 100644 --- 
a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuBackend.java +++ b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuBackend.java @@ -16,6 +16,7 @@ package org.nd4j.linalg.cpu.nativecpu; +import org.nd4j.linalg.factory.Environment; import org.nd4j.linalg.factory.Nd4jBackend; import org.nd4j.linalg.io.ClassPathResource; import org.nd4j.linalg.io.Resource; @@ -61,6 +62,11 @@ public class CpuBackend extends Nd4jBackend { return NDArray.class; } + @Override + public Environment getEnvironment() { + return CpuEnvironment.getInstance(); + } + @Override public void logBackendInit() { //No additional logging for CPU backend diff --git a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuEnvironment.java b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuEnvironment.java new file mode 100644 index 000000000..a97b52d9f --- /dev/null +++ b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuEnvironment.java @@ -0,0 +1,170 @@ +/* ****************************************************************************** + * Copyright (c) 2019 Konduit K.K. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0. + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + ******************************************************************************/ +package org.nd4j.linalg.cpu.nativecpu; + +import org.nd4j.linalg.factory.Environment; +import org.nd4j.nativeblas.Nd4jCpu; + +/** + * CPU backend implementation of {@link Environment} + * + * @author Alex Black + */ +public class CpuEnvironment implements Environment { + + + private static final CpuEnvironment INSTANCE = new CpuEnvironment(Nd4jCpu.Environment.getInstance()); + + private final Nd4jCpu.Environment e; + + public static CpuEnvironment getInstance(){ + return INSTANCE; + } + + protected CpuEnvironment(Nd4jCpu.Environment environment){ + this.e = environment; + } + + @Override + public int blasMajorVersion() { + return e.blasMajorVersion(); + } + + @Override + public int blasMinorVersion() { + return e.blasMinorVersion(); + } + + @Override + public int blasPatchVersion() { + return e.blasMajorVersion(); + } + + @Override + public boolean isVerbose() { + return e.isVerbose(); + } + + @Override + public void setVerbose(boolean reallyVerbose) { + e.setVerbose(reallyVerbose); + } + + @Override + public boolean isDebug() { + return e.isDebug(); + } + + @Override + public boolean isProfiling() { + return e.isProfiling(); + } + + @Override + public boolean isDetectingLeaks() { + return e.isDetectingLeaks(); + } + + @Override + public boolean isDebugAndVerbose() { + return e.isDebugAndVerbose(); + } + + @Override + public void setDebug(boolean reallyDebug) { + e.setDebug(reallyDebug); + } + + @Override + public void setProfiling(boolean reallyProfile) { + e.setProfiling(reallyProfile); + } + + @Override + public void setLeaksDetector(boolean reallyDetect) { + e.setLeaksDetector(reallyDetect); + } + + @Override + public boolean helpersAllowed() { + return e.helpersAllowed(); + } + + @Override + public void allowHelpers(boolean reallyAllow) { + e.allowHelpers(reallyAllow); + } + + @Override + public int tadThreshold() { + return 
e.tadThreshold(); + } + + @Override + public void setTadThreshold(int threshold) { + e.setTadThreshold(threshold); + } + + @Override + public int elementwiseThreshold() { + return e.elementwiseThreshold(); + } + + @Override + public void setElementwiseThreshold(int threshold) { + e.setElementwiseThreshold(threshold); + } + + @Override + public int maxThreads() { + return e.maxThreads(); + } + + @Override + public void setMaxThreads(int max) { + e.setMaxThreads(max); + } + + @Override + public int maxMasterThreads() { + return e.maxMasterThreads(); + } + + @Override + public void setMaxMasterThreads(int max) { + e.setMaxMasterThreads(max); + } + + @Override + public void setMaxPrimaryMemory(long maxBytes) { + e.setMaxPrimaryMemory(maxBytes); + } + + @Override + public void setMaxSpecialMemory(long maxBytes) { + e.setMaxSpecialyMemory(maxBytes); + } + + @Override + public void setMaxDeviceMemory(long maxBytes) { + e.setMaxDeviceMemory(maxBytes); + } + + @Override + public boolean isCPU() { + return e.isCPU(); + } +} diff --git a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuNDArrayFactory.java b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuNDArrayFactory.java index 03904125d..5f48b3c64 100644 --- a/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuNDArrayFactory.java +++ b/nd4j/nd4j-backends/nd4j-backend-impls/nd4j-native/src/main/java/org/nd4j/linalg/cpu/nativecpu/CpuNDArrayFactory.java @@ -25,27 +25,19 @@ import org.nd4j.config.ND4JSystemProperties; import org.nd4j.linalg.api.buffer.*; import org.nd4j.linalg.api.ops.custom.Flatten; import org.nd4j.linalg.api.ops.impl.shape.Concat; -import org.nd4j.linalg.api.ops.performance.PerformanceTracker; import org.nd4j.linalg.api.shape.options.ArrayOptionsHelper; import org.nd4j.linalg.api.shape.options.ArrayType; import org.nd4j.linalg.compression.CompressionUtils; -import 
org.nd4j.linalg.memory.MemcpyDirection; import org.nd4j.linalg.primitives.Pair; import org.bytedeco.javacpp.*; import org.nd4j.linalg.api.memory.MemoryWorkspace; import org.nd4j.linalg.api.ndarray.INDArray; -import org.nd4j.linalg.api.shape.Shape; -import org.nd4j.linalg.api.shape.options.ArrayOptionsHelper; -import org.nd4j.linalg.api.shape.options.ArrayType; -import org.nd4j.linalg.cache.TADManager; import org.nd4j.linalg.compression.CompressedDataBuffer; import org.nd4j.linalg.compression.CompressionDescriptor; import org.nd4j.linalg.compression.CompressionType; -import org.nd4j.linalg.compression.CompressionUtils; import org.nd4j.linalg.cpu.nativecpu.blas.*; import org.nd4j.linalg.exception.ND4JIllegalStateException; import org.nd4j.linalg.factory.Nd4j; -import org.nd4j.linalg.primitives.Pair; import org.nd4j.linalg.util.ArrayUtil; import org.nd4j.nativeblas.BaseNativeNDArrayFactory; import org.nd4j.nativeblas.LongPointerWrapper; @@ -102,7 +94,8 @@ public class CpuNDArrayFactory extends BaseNativeNDArrayFactory { System.exit(1); } - if (!nativeOps.isOptimalRequirementsMet() && !Boolean.parseBoolean(System.getenv(ND4JEnvironmentVars.ND4J_IGNORE_AVX))) { + if (!nativeOps.isOptimalRequirementsMet() && !Boolean.parseBoolean(System.getenv(ND4JEnvironmentVars.ND4J_IGNORE_AVX)) && + !Boolean.parseBoolean(System.getProperty(ND4JSystemProperties.ND4J_IGNORE_AVX))) { val binaryLevel = nativeOps.binaryLevel(); val optimalLevel = nativeOps.optimalLevel(); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/TestSessions.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/TestSessions.java index 09e94acc7..56acfa828 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/TestSessions.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/TestSessions.java @@ -22,6 +22,7 @@ import org.nd4j.autodiff.listeners.Operation; import org.nd4j.autodiff.samediff.SDVariable; import 
org.nd4j.autodiff.samediff.SameDiff; import org.nd4j.autodiff.samediff.internal.AbstractSession; +import org.nd4j.autodiff.samediff.internal.FrameIter; import org.nd4j.autodiff.samediff.internal.InferenceSession; import org.nd4j.autodiff.samediff.internal.memory.NoOpMemoryMgr; import org.nd4j.imports.graphmapper.tf.TFGraphMapper; @@ -112,7 +113,6 @@ public class TestSessions extends BaseNd4jTest { m.put("x", x); m.put("y", y); - System.out.println("----------------------------------"); Map outMap = is.output(Collections.singletonList("d"), m, null, Collections.emptyList(), null, At.defaultAt(Operation.TRAINING)); @@ -144,7 +144,6 @@ public class TestSessions extends BaseNd4jTest { m.put("x", x); m.put("y", y); - System.out.println("----------------------------------"); InferenceSession is = new InferenceSession(sd); // String outName = merge.name(); String outName = outVar.name(); @@ -183,14 +182,14 @@ public class TestSessions extends BaseNd4jTest { InferenceSession is = new InferenceSession(sd); String n = merge.name(); - System.out.println("----------------------------------"); +// System.out.println("----------------------------------"); Map outMap = is.output(Collections.singletonList(n), m, null, Collections.emptyList(), null, At.defaultAt(Operation.TRAINING)); assertEquals(1, outMap.size()); assertEquals(expTrue, outMap.get(n)); - System.out.println("----------------------------------"); +// System.out.println("----------------------------------"); //Check false case: bArr.assign(0); is = new InferenceSession(sd); @@ -217,9 +216,10 @@ public class TestSessions extends BaseNd4jTest { File f = new ClassPathResource("tf_graphs/examples/while1/iter_" + numIter + "/frozen_model.pb").getFile(); SameDiff sd = TFGraphMapper.importGraph(f); - System.out.println(sd.summary()); +// System.out.println(sd.summary()); + sd.summary(); - System.out.println("----------------------------------"); +// System.out.println("----------------------------------"); //This particular 
test/graph doesn't use placeholders InferenceSession is = new InferenceSession(sd); is.setMmgr(new NoOpMemoryMgr()); //So arrays aren't deallocated during execution @@ -239,17 +239,17 @@ public class TestSessions extends BaseNd4jTest { //Some sanity checks on the internal state: //Check 1: "while/Less" should be executed numIter+1 times... i.e., numIter times through the loop, plus once to exit for( int i=0; i NO_BP_YET = new HashSet<>(); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/opvalidation/MiscOpValidation.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/opvalidation/MiscOpValidation.java index 7f6daf78f..e02e4b91d 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/opvalidation/MiscOpValidation.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/opvalidation/MiscOpValidation.java @@ -1648,8 +1648,8 @@ public class MiscOpValidation extends BaseOpValidation { INDArray vArr = gm.get(v.name()); INDArray wArr = gm.get(w.name()); - System.out.println(vArr); - System.out.println(wArr); +// System.out.println(vArr); +// System.out.println(wArr); assertEquals(Nd4j.zeros(DataType.DOUBLE, 3, 4), wArr); } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/opvalidation/ReductionBpOpValidation.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/opvalidation/ReductionBpOpValidation.java index 0d74f07f3..58afb2acb 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/opvalidation/ReductionBpOpValidation.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/opvalidation/ReductionBpOpValidation.java @@ -532,8 +532,8 @@ public class ReductionBpOpValidation extends BaseOpValidation { .subi(mean).divi(stdev * 2) .muli(0.5); //* dL/dOut - System.out.println(dLdInExp.shapeInfoToString()); - System.out.println(Arrays.toString(dLdInExp.data().asFloat())); +// System.out.println(dLdInExp.shapeInfoToString()); 
+// System.out.println(Arrays.toString(dLdInExp.data().asFloat())); INDArray dLdIn = Nd4j.createUninitialized(new long[]{3}); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/opvalidation/ReductionOpValidation.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/opvalidation/ReductionOpValidation.java index 802ed9be9..51e8fd714 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/opvalidation/ReductionOpValidation.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/opvalidation/ReductionOpValidation.java @@ -935,7 +935,7 @@ public class ReductionOpValidation extends BaseOpValidation { INDArray expOut; SDVariable reduced; String name; - System.out.println(i); +// System.out.println(i); switch (i) { case 0: reduced = sd.math().manhattanDistance(in, in2, reduceDims); @@ -970,7 +970,7 @@ public class ReductionOpValidation extends BaseOpValidation { default: throw new RuntimeException(); } - System.out.println(i + " - end"); +// System.out.println(i + " - end"); long[] expShape; @@ -1011,7 +1011,9 @@ public class ReductionOpValidation extends BaseOpValidation { @Test public void testReductionsBackwards() { - for (int i = 0; i < 7; i++) { +// for (int i = 0; i < 7; i++) { + int i=5; + { SameDiff sd = SameDiff.create(); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/opvalidation/RnnOpValidation.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/opvalidation/RnnOpValidation.java index 988b8da69..8a4f8164a 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/opvalidation/RnnOpValidation.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/opvalidation/RnnOpValidation.java @@ -194,9 +194,9 @@ public class RnnOpValidation extends BaseOpValidation { INDArray out5 = Nd4j.create(new float[]{-0.17905743f, 0.19177397f}, new int[]{1,2}); //Cell state INDArray out6 = Nd4j.create(new float[]{-0.04025514f, 
0.10104967f}, new int[]{1,2}); //Output - for(int i=0; i l = op.calculateOutputShape(); - System.out.println(Arrays.toString(l.get(0).getShape())); +// System.out.println(Arrays.toString(l.get(0).getShape())); assertArrayEquals(new long[]{4, 3}, l.get(0).getShape()); op = DynamicCustomOp.builder("permute") @@ -2382,7 +2382,7 @@ public class ShapeOpValidation extends BaseOpValidation { .addIntegerArguments(1, 0) .build(); l = op.calculateOutputShape(); - System.out.println(Arrays.toString(l.get(0).getShape())); +// System.out.println(Arrays.toString(l.get(0).getShape())); assertArrayEquals(new long[]{4, 3}, l.get(0).getShape()); @@ -2391,7 +2391,7 @@ public class ShapeOpValidation extends BaseOpValidation { Nd4j.createFromArray(1, 2, 0)) .build(); l = op.calculateOutputShape(); - System.out.println(Arrays.toString(l.get(0).getShape())); +// System.out.println(Arrays.toString(l.get(0).getShape())); assertArrayEquals(new long[]{4, 5, 3}, l.get(0).getShape()); } @@ -2419,7 +2419,7 @@ public class ShapeOpValidation extends BaseOpValidation { INDArray in = Nd4j.linspace(DataType.FLOAT, 1, 6, 1).reshape(3,2); INDArray permute = Nd4j.createFromArray(1,0); - System.out.println(in); +// System.out.println(in); SameDiff sd = SameDiff.create(); SDVariable v = sd.var(in); @@ -2457,8 +2457,8 @@ public class ShapeOpValidation extends BaseOpValidation { DynamicCustomOp op = b.build(); Nd4j.exec(op); - System.out.println(in); - System.out.println(op.outputArguments()[0]); +// System.out.println(in); +// System.out.println(op.outputArguments()[0]); assertEquals(exp, op.getOutputArgument(0)); } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/ConvConfigTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/ConvConfigTests.java index a6f7b6bea..75f1615d7 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/ConvConfigTests.java +++ 
b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/ConvConfigTests.java @@ -22,10 +22,21 @@ import static org.junit.Assert.fail; import org.junit.Assert; import org.junit.Test; +import org.nd4j.linalg.BaseNd4jTest; import org.nd4j.linalg.api.ops.impl.layers.convolution.DeConv2D; import org.nd4j.linalg.api.ops.impl.layers.convolution.config.*; +import org.nd4j.linalg.factory.Nd4jBackend; -public class ConvConfigTests { +public class ConvConfigTests extends BaseNd4jTest { + + public ConvConfigTests(Nd4jBackend backend) { + super(backend); + } + + @Override + public char ordering() { + return 'c'; + } @Test public void testDeConv2D(){ diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/LogisticPredictions.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/LogisticPredictions.java deleted file mode 100644 index 8eb63d57e..000000000 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/LogisticPredictions.java +++ /dev/null @@ -1,40 +0,0 @@ -/******************************************************************************* - * Copyright (c) 2015-2018 Skymind, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - ******************************************************************************/ - -package org.nd4j.autodiff.samediff; - -import org.nd4j.linalg.api.ndarray.INDArray; - -import java.util.Map; - -public class LogisticPredictions implements SameDiffFunctionDefinition { - /** - * @param sameDiff - * @param inputs - * @param variableInputs - * @return - */ - @Override - public SDVariable[] define(SameDiff sameDiff, Map inputs, SDVariable[] variableInputs) { - SDVariable input = sameDiff.var("x",inputs.get("x")); - SDVariable w = sameDiff.var("w",inputs.get("w")); - SDVariable y = sameDiff.var("y",inputs.get("y")); - SDVariable preOutput = sameDiff.mmul(input,w); - SDVariable sigmoid = sameDiff.nn().sigmoid(preOutput); - - return new SDVariable[]{sigmoid}; - } -} diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/SameDiffSpecifiedLossVarsTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/SameDiffSpecifiedLossVarsTests.java index 303739ea1..a08a390a9 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/SameDiffSpecifiedLossVarsTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/SameDiffSpecifiedLossVarsTests.java @@ -90,7 +90,8 @@ public class SameDiffSpecifiedLossVarsTests extends BaseNd4jTest { SDVariable loss1 = add.std("l1", true); SDVariable loss2 = mmul.mean("l2"); - System.out.println(sd.summary()); +// System.out.println(sd.summary()); + sd.summary(); if(i == 0){ sd.setLossVariables("l1", "l2"); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/SameDiffTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/SameDiffTests.java index db8d7d551..878289beb 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/SameDiffTests.java +++ 
b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/SameDiffTests.java @@ -709,9 +709,9 @@ public class SameDiffTests extends BaseNd4jTest { val s = in2.add(5.0); Map map = sd.outputAll(null); - log.info("Result M: {}", map.get(m.name())); - log.info("Result F: {}", map.get(f.name())); - log.info("Result S: {}", map.get(s.name())); +// log.info("Result M: {}", map.get(m.name())); +// log.info("Result F: {}", map.get(f.name())); +// log.info("Result S: {}", map.get(s.name())); } @Test @@ -1654,7 +1654,6 @@ public class SameDiffTests extends BaseNd4jTest { INDArray expOut = Nd4j.create(DataType.BOOL, ia.shape()); Nd4j.exec(new IsStrictlyIncreasing(ia, expOut)); - System.out.println(expOut); } @Test @@ -1997,8 +1996,6 @@ public class SameDiffTests extends BaseNd4jTest { SDVariable varIndices = sd.constant("indices", indices); SDVariable gather = sd.gather(var, varIndices, 0); - System.out.println(in); - INDArray exp = Nd4j.pullRows(in, 1, new int[]{0, 1, 5}); //Along dimension 1 -> equiv to "indexes for axis 0" INDArray act = gather.eval(); @@ -2020,8 +2017,6 @@ public class SameDiffTests extends BaseNd4jTest { Nd4j.exec(op); - System.out.println(out); - INDArray exp = Nd4j.pullRows(in, 1, new int[]{0, 1, 5}); //Along dimension 1 == indexes for dimension 0 assertEquals(exp, out); @@ -2396,13 +2391,14 @@ public class SameDiffTests extends BaseNd4jTest { Map phMap = new HashMap<>(); phMap.put(fn.getGradPlaceholderName(), grad); - log.info("--------------- out.eval() ---------------"); +// log.info("--------------- out.eval() ---------------"); out.eval(); - log.info("--------------- sd.execBackwards() #1 ---------------"); +// log.info("--------------- sd.execBackwards() #1 ---------------"); sd.calculateGradients(phMap, "in", "W", "b"); - log.info("--------------- sd.execBackwards() #2 ---------------"); - System.out.println(sd.getFunction("grad").summary()); +// log.info("--------------- sd.execBackwards() #2 ---------------"); +// 
System.out.println(sd.getFunction("grad").summary()); + sd.getFunction("grad").summary(); in.setArray(Nd4j.linspace(1, 10, 10).reshape(2, 5)); grad = Nd4j.linspace(1, 8, 8).reshape(2, 4); @@ -3232,7 +3228,8 @@ public class SameDiffTests extends BaseNd4jTest { Map secondBranch = Maps.newHashMap(); secondBranch.put("a", Nd4j.createFromArray(7.0)); - System.out.println(sd.summary()); +// System.out.println(sd.summary()); + sd.summary(); INDArray outArr = sd.output(secondBranch, "out").get("out"); assertEquals(Nd4j.createFromArray(14.0), outArr); @@ -3429,11 +3426,11 @@ public class SameDiffTests extends BaseNd4jTest { SDVariable rand1 = sd1.var("random", new UniformInitScheme('c', 3), DataType.FLOAT, 3, 1); - Nd4j.getRandom().setSeed(0); - System.out.println(rand0.eval()); - - Nd4j.getRandom().setSeed(0); - System.out.println(rand1.eval()); +// Nd4j.getRandom().setSeed(0); +// System.out.println(rand0.eval()); +// +// Nd4j.getRandom().setSeed(0); +// System.out.println(rand1.eval()); INDArray a0 = rand0.eval(); Nd4j.getRandom().setSeed(0); @@ -3520,4 +3517,19 @@ public class SameDiffTests extends BaseNd4jTest { assertEquals(config, fromJson); } } + + @Test + public void testRngSanityCheck(){ + Nd4j.getRandom().setSeed(12345); + for(DataType dt : DataType.values()) { + if (!dt.isNumerical()) + continue; + SameDiff sameDiff = SameDiff.create(); + INDArray indaShape = Nd4j.createFromArray(3, 10); + SDVariable sdShape = sameDiff.constant(indaShape); + SDVariable random = sameDiff.random().uniform("data", 0.0, 10.0, sdShape, dt); + INDArray out = random.eval(); + String s = out.toString(); + } + } } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/listeners/CheckpointListenerTest.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/listeners/CheckpointListenerTest.java index cf99ebbaa..792fce892 100644 --- 
a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/listeners/CheckpointListenerTest.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/autodiff/samediff/listeners/CheckpointListenerTest.java @@ -125,7 +125,7 @@ public class CheckpointListenerTest extends BaseNd4jTest { boolean[] found = new boolean[names.size()]; for(File f : files){ String s = f.getAbsolutePath(); - System.out.println(s); +// System.out.println(s); for( int i=0; i threshold) e = new Evaluation(); @@ -899,10 +904,12 @@ public class EvalTest extends BaseNd4jTest { class0.putScalar(0, 1); e.eval(class0, class0); - System.out.println(e.stats()); +// System.out.println(e.stats()); + e.stats(); - System.out.println("\n\n\n\n"); - System.out.println(e.stats(false, true)); +// System.out.println("\n\n\n\n"); +// System.out.println(e.stats(false, true)); + e.stats(false, true); } @Test @@ -1033,7 +1040,7 @@ public class EvalTest extends BaseNd4jTest { e1.eval(one, one); String s1 = e1.stats(); - System.out.println(s1); +// System.out.println(s1); e1.reset(); e1.eval(zero, zero); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/evaluation/EvaluationBinaryTest.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/evaluation/EvaluationBinaryTest.java index c864f6004..62679ef6a 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/evaluation/EvaluationBinaryTest.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/evaluation/EvaluationBinaryTest.java @@ -240,7 +240,8 @@ public class EvaluationBinaryTest extends BaseNd4jTest { EvaluationBinary eb = new EvaluationBinary(4, 30); eb.eval(l1, p1); - System.out.println(eb.stats()); +// System.out.println(eb.stats()); + eb.stats(); } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/evaluation/RegressionEvalTest.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/evaluation/RegressionEvalTest.java index 1bd6fd22c..b95d5c974 100644 --- 
a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/evaluation/RegressionEvalTest.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/evaluation/RegressionEvalTest.java @@ -72,7 +72,8 @@ public class RegressionEvalTest extends BaseNd4jTest { eval.eval(rand, rand); } - System.out.println(eval.stats()); +// System.out.println(eval.stats()); + eval.stats(); for (int i = 0; i < nCols; i++) { assertEquals(0.0, eval.meanSquaredError(i), 1e-6); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/ExecutionTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/ExecutionTests.java index d320ad6e3..88298b62b 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/ExecutionTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/ExecutionTests.java @@ -76,7 +76,7 @@ public class ExecutionTests extends BaseNd4jTest { Nd4j.create(1); val tg = TFGraphMapper.importGraphTxt(new ClassPathResource("tf_graphs/reduce_dim.pb.txt").getInputStream(), null, null); - System.out.println(tg.summary()); +// System.out.println(tg.summary()); Map result_0 = tg.outputAll(null); val exp_0 = Nd4j.create(DataType.FLOAT, 3).assign(3.0); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/TFGraphs/TFGraphTestAllHelper.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/TFGraphs/TFGraphTestAllHelper.java index afcd8c3e1..718a95d5a 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/TFGraphs/TFGraphTestAllHelper.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/TFGraphs/TFGraphTestAllHelper.java @@ -286,8 +286,7 @@ public class TFGraphTestAllHelper { + " with minAbsError=" + minAbsErrorOverride + "; largest observed relError=" + maxRE, 0, countExceeds); } } - log.info("\n\tTEST {} PASSED with {} arrays compared...", modelName, predictions.keySet().size()); - 
log.info("\n========================================================\n"); + log.info("TEST {} PASSED with {} arrays compared...", modelName, predictions.keySet().size()); } //Serialize and deserialize, check equality: @@ -393,7 +392,7 @@ public class TFGraphTestAllHelper { public static Pair> getGraphAfterExec(String baseDir, String modelFilename, String modelName, Map inputs, ExecuteWith executeWith, BiFunction graphLoaderFunction, List listeners, Set requiredOutputs, boolean printArraysDebugging) throws IOException { - log.info("\n\tRUNNING TEST " + modelName + "..."); + log.info("RUNNING TEST {}...", modelName); SameDiff graph = graphLoaderFunction.apply(new ClassPathResource(baseDir + "/" + modelName + "/" + modelFilename).getFile(), modelName); if(listeners != null){ graph.setListeners(listeners); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/TFGraphs/TFGraphTestAllSameDiff.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/TFGraphs/TFGraphTestAllSameDiff.java index f42b9cecf..2ea9a8142 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/TFGraphs/TFGraphTestAllSameDiff.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/TFGraphs/TFGraphTestAllSameDiff.java @@ -118,7 +118,10 @@ public class TFGraphTestAllSameDiff { //Note: Can't extend BaseNd4jTest here a "matrix_band_part/.*", // 12.20.2019 - https://github.com/eclipse/deeplearning4j/issues/8559 - "fused_batch_norm/.*" + "fused_batch_norm/.*", + + // AB 2020/01/04 - https://github.com/eclipse/deeplearning4j/issues/8592 + "emptyArrayTests/reshape/rank2_shape2-0_2-0--1" }; /* As per TFGraphTestList.printArraysDebugging - this field defines a set of regexes for test cases that should have diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/TFGraphs/TFGraphTestZooModels.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/TFGraphs/TFGraphTestZooModels.java index d08fb5148..d74504cb9 100644 
--- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/TFGraphs/TFGraphTestZooModels.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/TFGraphs/TFGraphTestZooModels.java @@ -60,9 +60,6 @@ public class TFGraphTestZooModels { //Note: Can't extend BaseNd4jTest here as we //2019/07/22 - Result value failure "xlnet_cased_L-24_H-1024_A-16", - // 2019/07/22 - OOM, Passes with sufficient memory (16GB heap, 32GB off-heap tested) - "compression_residual_gru", - // 2019/07/22 - OOM, Passes with sufficient memory (16GB heap, 32GB off-heap tested) "deeplabv3_xception_ade20k_train", @@ -72,15 +69,13 @@ public class TFGraphTestZooModels { //Note: Can't extend BaseNd4jTest here as we // Missing Multinormal op, see https://github.com/eclipse/deeplearning4j/issues/7913 "gpt-2_117M", - //2019/05/15 - "Invalid shape for op shape_of: shape has invalid values <= 0: shape=[0]" - //Also: https://github.com/deeplearning4j/deeplearning4j/issues/7112 + //2019/12/24 - https://github.com/eclipse/deeplearning4j/issues/8572 "ssd_mobilenet_v1_0.75_depth_300x300_coco14_sync_2018_07_03", - //2019/05/15 - CUSTOM CONV2D OP: rank of input array must be equal to 4, but got 0 instead ! 
- //Also: https://github.com/deeplearning4j/deeplearning4j/issues/7112 + //2019/12/24 - https://github.com/eclipse/deeplearning4j/issues/8572 "ssd_mobilenet_v1_coco_2018_01_28", - //2019/06/24 - size op dtypes / libnd4j size op issue: https://github.com/eclipse/deeplearning4j/issues/7938 + //2019/12/24 - https://github.com/eclipse/deeplearning4j/issues/8572 "faster_rcnn_resnet101_coco_2018_01_28", //2019/06/24 - JVM crash on linux-x86_64-cpu-avx2 and -avx512 CI machines only - runs fine elsewhere @@ -256,7 +251,9 @@ public class TFGraphTestZooModels { //Note: Can't extend BaseNd4jTest here as we OpValidationSuite.ignoreFailing(); } -// if(!modelName.startsWith("mobilenet_v2_1.0_224")){ +// if(!modelName.startsWith("ssd_mobilenet_v1_coco_2018_01_28")){ +// if(!modelName.startsWith("ssd_mobilenet_v1_0.75_depth_300x300_coco14_sync_2018_07_03")){ +// if(!modelName.startsWith("faster_rcnn_resnet101_coco_2018_01_28")){ // OpValidationSuite.ignoreFailing(); // } currentTestDir = testDir.newFolder(); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/TensorFlowImportTest.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/TensorFlowImportTest.java index 22b8b4492..d3a76bf28 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/TensorFlowImportTest.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/imports/TensorFlowImportTest.java @@ -520,7 +520,8 @@ public class TensorFlowImportTest extends BaseNd4jTest { assertEquals(2, in1.first()); assertEquals(0, in1.second()); - System.out.println(tg.summary()); +// System.out.println(tg.summary()); + tg.summary(); int dimensionsLength = nodeSum.dimensionsLength(); assertEquals(1, dimensionsLength); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/BaseNd4jTest.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/BaseNd4jTest.java index 3f75a3293..87a0a9aef 100644 --- 
a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/BaseNd4jTest.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/BaseNd4jTest.java @@ -53,7 +53,14 @@ public abstract class BaseNd4jTest { public TestName testName = new TestName(); @Rule - public Timeout timeout = Timeout.seconds(30); + public Timeout timeout = Timeout.seconds(testTimeoutMilliseconds()); + + /** + * Override this method to set the default timeout for methods in the class + */ + public long testTimeoutMilliseconds(){ + return 30000L; + } protected long startTime; protected int threadCountBefore; @@ -76,6 +83,7 @@ public abstract class BaseNd4jTest { //Suppress ND4J initialization - don't need this logged for every test... System.setProperty(ND4JSystemProperties.LOG_INITIALIZATION, "false"); + System.setProperty(ND4JSystemProperties.ND4J_IGNORE_AVX, "true"); System.gc(); } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/LoneTest.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/LoneTest.java index 577e19ecb..89a72e5ac 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/LoneTest.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/LoneTest.java @@ -19,7 +19,6 @@ package org.nd4j.linalg; import lombok.extern.slf4j.Slf4j; import lombok.val; import org.apache.commons.lang3.RandomUtils; -import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -27,7 +26,6 @@ import org.nd4j.linalg.api.buffer.DataType; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.impl.transforms.custom.SoftMax; import org.nd4j.linalg.api.ops.impl.transforms.strict.Tanh; -import org.nd4j.linalg.api.shape.Shape; import org.nd4j.linalg.checkutil.NDArrayCreationUtil; import org.nd4j.linalg.dataset.DataSet; import org.nd4j.linalg.factory.Nd4j; @@ -57,9 +55,9 @@ public class LoneTest extends BaseNd4jTest { @Test public void 
testSoftmaxStability() { INDArray input = Nd4j.create(new double[]{-0.75, 0.58, 0.42, 1.03, -0.61, 0.19, -0.37, -0.40, -1.42, -0.04}).reshape(1, -1).transpose(); - System.out.println("Input transpose " + Shape.shapeToString(input.shapeInfo())); +// System.out.println("Input transpose " + Shape.shapeToString(input.shapeInfo())); INDArray output = Nd4j.create(DataType.DOUBLE, 10, 1); - System.out.println("Element wise stride of output " + output.elementWiseStride()); +// System.out.println("Element wise stride of output " + output.elementWiseStride()); Nd4j.getExecutioner().exec(new SoftMax(input, output)); } @@ -85,11 +83,13 @@ public class LoneTest extends BaseNd4jTest { first = first.get(NDArrayIndex.interval(4, 8), NDArrayIndex.interval(0, 2, 8)); for (int i = 0; i < first.tensorsAlongDimension(0); i++) { - System.out.println(first.tensorAlongDimension(i, 0)); +// System.out.println(first.tensorAlongDimension(i, 0)); + first.tensorAlongDimension(i, 0); } for (int i = 0; i < first.tensorsAlongDimension(1); i++) { - System.out.println(first.tensorAlongDimension(i, 1)); +// System.out.println(first.tensorAlongDimension(i, 1)); + first.tensorAlongDimension(i, 1); } second = second.get(NDArrayIndex.interval(3, 7), NDArrayIndex.all()); third = third.permute(0, 2, 1); @@ -115,7 +115,7 @@ public class LoneTest extends BaseNd4jTest { assertEquals(i + 1,rowVector.getColumn(i).getInt(0)); assertEquals(i + 1,rowVector.get(NDArrayIndex.point(0), NDArrayIndex.interval(i, j)).getInt(0)); assertEquals(i + 1,colVector.get(NDArrayIndex.interval(i, j), NDArrayIndex.point(0)).getInt(0)); - System.out.println("Making sure index interval will not crash with begin/end vals..."); +// System.out.println("Making sure index interval will not crash with begin/end vals..."); jj = colVector.get(NDArrayIndex.interval(i, i + 1)); jj = colVector.get(NDArrayIndex.interval(i, i + 1)); } @@ -164,20 +164,9 @@ public class LoneTest extends BaseNd4jTest { INDArray aD = Nd4j.linspace(-3, 4, 
8).reshape(2, 4); INDArray b = Nd4j.getExecutioner().exec(new Tanh(aA)); //Nd4j.getExecutioner().execAndReturn(new TanhDerivative(aD)); - System.out.println(aA); - System.out.println(aD); - System.out.println(b); - } - - @Test(expected = IllegalStateException.class) - @Ignore // test is outdated - public void opsNotAllowed() { - INDArray A = Nd4j.ones(2, 3, 1); - INDArray B = Nd4j.ones(2, 3); - - System.out.println(A.add(B)); - System.out.println(B.add(A)); - +// System.out.println(aA); +// System.out.println(aD); +// System.out.println(b); } @Test @@ -191,7 +180,7 @@ public class LoneTest extends BaseNd4jTest { max = 64; A = Nd4j.linspace(1, max, max).reshape(1, max); currentArgMax = Nd4j.argMax(A).getInt(0); - System.out.println("Returned argMax is " + currentArgMax); +// System.out.println("Returned argMax is " + currentArgMax); assertEquals(max - 1, currentArgMax); } @@ -215,7 +204,7 @@ public class LoneTest extends BaseNd4jTest { INDArray res = Nd4j.vstack(cArrays); long time2 = System.currentTimeMillis(); - log.info("Time spent: {} ms", time2 - time1); +// log.info("Time spent: {} ms", time2 - time1); for (int e = 0; e < 32; e++) { INDArray tad = res.tensorAlongDimension(e, 1, 2); @@ -248,7 +237,7 @@ public class LoneTest extends BaseNd4jTest { Collections.sort(times); - log.info("p50: {}; avg: {};", times.get(times.size() / 2), time); +// log.info("p50: {}; avg: {};", times.get(times.size() / 2), time); } @Test(expected = Exception.class) @@ -270,25 +259,30 @@ public class LoneTest extends BaseNd4jTest { */ int[] ranksToCheck = new int[]{2, 3, 4, 5}; for (int rank = 0; rank < ranksToCheck.length; rank++) { - log.info("\nRunning through rank " + ranksToCheck[rank]); +// log.info("\nRunning through rank " + ranksToCheck[rank]); List> allF = NDArrayCreationUtil.getTestMatricesWithVaryingShapes(ranksToCheck[rank], 'f', DataType.FLOAT); Iterator> iter = allF.iterator(); while (iter.hasNext()) { Pair currentPair = iter.next(); INDArray origArrayF = 
currentPair.getFirst(); INDArray sameArrayC = origArrayF.dup('c'); - log.info("\nLooping through slices for shape " + currentPair.getSecond()); - log.info("\nOriginal array:\n" + origArrayF); +// log.info("\nLooping through slices for shape " + currentPair.getSecond()); +// log.info("\nOriginal array:\n" + origArrayF); + origArrayF.toString(); INDArray viewF = origArrayF.slice(0); INDArray viewC = sameArrayC.slice(0); - log.info("\nSlice 0, C order:\n" + viewC.toString()); - log.info("\nSlice 0, F order:\n" + viewF.toString()); +// log.info("\nSlice 0, C order:\n" + viewC.toString()); +// log.info("\nSlice 0, F order:\n" + viewF.toString()); + viewC.toString(); + viewF.toString(); for (int i = 0; i < viewF.slices(); i++) { //assertEquals(viewF.slice(i),viewC.slice(i)); for (int j = 0; j < viewF.slice(i).length(); j++) { //if (j>0) break; - log.info("\nC order slice " + i + ", element 0 :" + viewC.slice(i).getDouble(j)); //C order is fine - log.info("\nF order slice " + i + ", element 0 :" + viewF.slice(i).getDouble(j)); //throws index out of bound err on F order +// log.info("\nC order slice " + i + ", element 0 :" + viewC.slice(i).getDouble(j)); //C order is fine +// log.info("\nF order slice " + i + ", element 0 :" + viewF.slice(i).getDouble(j)); //throws index out of bound err on F order + viewC.slice(i).getDouble(j); + viewF.slice(i).getDouble(j); } } } @@ -300,17 +294,21 @@ public class LoneTest extends BaseNd4jTest { INDArray arr = Nd4j.create(1, 3); INDArray reshaped = arr.reshape('f', 3, 1); for (int i=0;i pair : testInputs) { String msg = pair.getSecond(); INDArray in = pair.getFirst(); - System.out.println("Count " + count); +// System.out.println("Count " + count); INDArray dup = in.dup(); INDArray dupc = in.dup('c'); INDArray dupf = in.dup('f'); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/Nd4jTestsC.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/Nd4jTestsC.java index 42144bb97..81540f3c4 100644 --- 
a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/Nd4jTestsC.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/Nd4jTestsC.java @@ -68,6 +68,7 @@ import org.nd4j.linalg.api.ops.impl.scalar.LeakyReLU; import org.nd4j.linalg.api.ops.impl.scalar.ReplaceNans; import org.nd4j.linalg.api.ops.impl.scalar.comparison.ScalarEquals; import org.nd4j.linalg.api.ops.impl.scatter.ScatterUpdate; +import org.nd4j.linalg.api.ops.impl.shape.Reshape; import org.nd4j.linalg.api.ops.impl.transforms.any.IsMax; import org.nd4j.linalg.api.ops.impl.transforms.bool.MatchConditionTransform; import org.nd4j.linalg.api.ops.impl.transforms.comparison.CompareAndSet; @@ -130,6 +131,10 @@ public class Nd4jTestsC extends BaseNd4jTest { l1 = Nd4j.getBlasWrapper().level1(); } + @Override + public long testTimeoutMilliseconds() { + return 90000; + } @Before public void before() throws Exception { @@ -239,8 +244,8 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray inDup = in.dup(); - System.out.println(in); - System.out.println(inDup); +// System.out.println(in); +// System.out.println(inDup); assertEquals(arr, in); //Passes: Original array "in" is OK, but array "inDup" is not!? 
assertEquals(in, inDup); //Fails @@ -577,7 +582,7 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray outAsc = Nd4j.sortRows(in, i, true); INDArray outDesc = Nd4j.sortRows(in, i, false); - System.out.println("outDesc: " + Arrays.toString(outAsc.data().asFloat())); +// System.out.println("outDesc: " + Arrays.toString(outAsc.data().asFloat())); for (int j = 0; j < nRows; j++) { assertEquals(outAsc.getDouble(j, i), j, 1e-1); int origRowIdxAsc = order.indexOf(j); @@ -811,10 +816,10 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray outc = Transforms.isMax(orig.dup('c')); assertEquals(exp, outc); - log.info("Orig: {}", orig.dup('f').data().asFloat()); +// log.info("Orig: {}", orig.dup('f').data().asFloat()); INDArray outf = Transforms.isMax(orig.dup('f'), orig.dup('f').ulike()); - log.info("OutF: {}", outf.data().asFloat()); +// log.info("OutF: {}", outf.data().asFloat()); assertEquals(exp, outf); } @@ -873,7 +878,7 @@ public class Nd4jTestsC extends BaseNd4jTest { //1d: col vector - System.out.println("----------------------------------"); +// System.out.println("----------------------------------"); INDArray col = Nd4j.create(new double[] {1, 2, 3, 1}, new long[] {4, 1}); INDArray alongDim0col = Nd4j.getExecutioner().exec(new IsMax(col.dup(), Nd4j.createUninitialized(DataType.BOOL, col.shape()), 0))[0]; INDArray alongDim1col = Nd4j.getExecutioner().exec(new IsMax(col.dup(), Nd4j.createUninitialized(DataType.BOOL, col.shape()),1))[0]; @@ -909,7 +914,7 @@ public class Nd4jTestsC extends BaseNd4jTest { //Along dim 1: //[0 0 1] //[0 1 0] - System.out.println("---------------------"); +// System.out.println("---------------------"); INDArray orig2d = Nd4j.create(new double[][] {{1, 0, 2}, {2, 3, 1}}); INDArray alongDim0c_2d = Nd4j.getExecutioner().exec(new IsMax(orig2d.dup('c'), Nd4j.createUninitialized(DataType.BOOL, orig2d.shape()), 0))[0]; INDArray alongDim0f_2d = Nd4j.getExecutioner().exec(new IsMax(orig2d.dup('f'), 
Nd4j.createUninitialized(DataType.BOOL, orig2d.shape(), 'f'), 0))[0]; @@ -932,7 +937,7 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray result = Nd4j.argMax(orig2d.dup('c'), 0); - System.out.println("IMAx result: " + result); +// System.out.println("IMAx result: " + result); } @Test @@ -941,10 +946,10 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray alongDim0c_2d = Nd4j.getExecutioner().exec(new IsMax(orig2d.dup('c'), Nd4j.createUninitialized(DataType.BOOL, orig2d.shape()), 0))[0]; INDArray expAlong0_2d = Nd4j.create(new boolean[][] {{false, false, true}, {true, true, false}}); - System.out.println("Original shapeInfo: " + orig2d.dup('c').shapeInfoDataBuffer()); +// System.out.println("Original shapeInfo: " + orig2d.dup('c').shapeInfoDataBuffer()); - System.out.println("Expected: " + Arrays.toString(expAlong0_2d.data().asFloat())); - System.out.println("Actual: " + Arrays.toString(alongDim0c_2d.data().asFloat())); +// System.out.println("Expected: " + Arrays.toString(expAlong0_2d.data().asFloat())); +// System.out.println("Actual: " + Arrays.toString(alongDim0c_2d.data().asFloat())); assertEquals(expAlong0_2d, alongDim0c_2d); } @@ -954,7 +959,7 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray bias = Nd4j.create(1, 3); BroadcastOp op = new BroadcastAddOp(z, bias, z, 3); Nd4j.getExecutioner().exec(op); - System.out.println("First: OK"); +// System.out.println("First: OK"); //OK at this point: executes successfully @@ -962,7 +967,7 @@ public class Nd4jTestsC extends BaseNd4jTest { bias = Nd4j.create(1, 3); op = new BroadcastAddOp(z, bias, z, 3); Nd4j.getExecutioner().exec(op); //Crashing here, when we are doing exactly the same thing as before... 
- System.out.println("Second: OK"); +// System.out.println("Second: OK"); } @@ -971,19 +976,19 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray expected = Nd4j.linspace(1, 9, 9, DataType.DOUBLE).reshape(3, 3); for (char order : new char[] {'c', 'f'}) { - System.out.println(order); +// System.out.println(order); INDArray arr1 = Nd4j.linspace(1, 6, 6, DataType.DOUBLE).reshape( 2, 3).dup('c'); INDArray arr2 = Nd4j.linspace(7, 9, 3, DataType.DOUBLE).reshape(1, 3).dup('c'); Nd4j.factory().setOrder(order); - log.info("arr1: {}", arr1.data()); - log.info("arr2: {}", arr2.data()); +// log.info("arr1: {}", arr1.data()); +// log.info("arr2: {}", arr2.data()); INDArray merged = Nd4j.vstack(arr1, arr2); - System.out.println(merged.data()); - System.out.println(expected); +// System.out.println(merged.data()); +// System.out.println(expected); assertEquals("Failed for [" + order + "] order", expected, merged); } @@ -1006,8 +1011,8 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray expAllZeros = Nd4j.getExecutioner().exec(new Eps(first, second, Nd4j.create(DataType.BOOL, 10))); INDArray expAllOnes = Nd4j.getExecutioner().exec(new Eps(first, first, Nd4j.create(DataType.BOOL, 10))); - System.out.println(expAllZeros); - System.out.println(expAllOnes); +// System.out.println(expAllZeros); +// System.out.println(expAllOnes); val allones = Nd4j.getExecutioner().exec(new All(expAllOnes)).getDouble(0); @@ -1053,7 +1058,7 @@ public class Nd4jTestsC extends BaseNd4jTest { }*/ for (val shape : shapes) { for (int[] dims : sumDims) { - System.out.println("Shape: " + Arrays.toString(shape) + ", sumDims=" + Arrays.toString(dims)); +// System.out.println("Shape: " + Arrays.toString(shape) + ", sumDims=" + Arrays.toString(dims)); int length = ArrayUtil.prod(shape); INDArray inC = Nd4j.linspace(1, length, length, DataType.DOUBLE).reshape('c', shape); INDArray inF = inC.dup('f'); @@ -1086,8 +1091,8 @@ public class Nd4jTestsC extends BaseNd4jTest { for (int alongDimension = 0; 
alongDimension < rank; alongDimension++) { - System.out.println("Testing rank " + rank + " along dimension " + alongDimension + ", (shape=" - + Arrays.toString(shape) + ")"); +// System.out.println("Testing rank " + rank + " along dimension " + alongDimension + ", (shape=" +// + Arrays.toString(shape) + ")"); INDArray arrC = Nd4j.linspace(1, length, length, DataType.DOUBLE).reshape('c', shape); INDArray arrF = arrC.dup('f'); val resC = Nd4j.getExecutioner().exec(new IsMax(arrC, alongDimension))[0]; @@ -1266,14 +1271,14 @@ public class Nd4jTestsC extends BaseNd4jTest { val dims = new int[][] {{0, 1}, {1, 0}, {0, 2}, {2, 0}, {1, 2}, {2, 1}}; double[][] exp = new double[][] {{16, 20}, {16, 20}, {14, 22}, {14, 22}, {10, 26}, {10, 26}}; - System.out.println("dims\texpected\t\tactual"); +// System.out.println("dims\texpected\t\tactual"); for (int i = 0; i < dims.length; i++) { val d = dims[i]; double[] e = exp[i]; INDArray out = in.sum(d); - System.out.println(Arrays.toString(d) + "\t" + Arrays.toString(e) + "\t" + out); +// System.out.println(Arrays.toString(d) + "\t" + Arrays.toString(e) + "\t" + out); assertEquals(Nd4j.create(e, out.shape()), out); } } @@ -1300,7 +1305,7 @@ public class Nd4jTestsC extends BaseNd4jTest { assertEquals(exp, outC); assertEquals(exp, outF); - System.out.println(Arrays.toString(d) + "\t" + outC + "\t" + outF); +// System.out.println(Arrays.toString(d) + "\t" + outC + "\t" + outF); } } @@ -1336,7 +1341,7 @@ public class Nd4jTestsC extends BaseNd4jTest { zC.setData(Nd4j.linspace(1, 24, 24, DataType.DOUBLE).data()); for (int tad = 0; tad < zC.tensorsAlongDimension(dim); tad++) { INDArray javaTad = zC.tensorAlongDimension(tad, dim); - System.out.println("Tad " + tad + " is " + zC.tensorAlongDimension(tad, dim)); +// System.out.println("Tad " + tad + " is " + zC.tensorAlongDimension(tad, dim)); } INDArray zF = Nd4j.create(shape, 'f'); @@ -1348,10 +1353,10 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray exp = 
Nd4j.create(expLinspaced[i], shape, 'c'); INDArray expF = Nd4j.create(shape, 'f'); expF.assign(exp); - for (int tad = 0; tad < zC.tensorsAlongDimension(dim); tad++) { - System.out.println(zC.tensorAlongDimension(tad, dim).offset() + " and f offset is " - + zF.tensorAlongDimension(tad, dim).offset()); - } +// for (int tad = 0; tad < zC.tensorsAlongDimension(dim); tad++) { +// System.out.println(zC.tensorAlongDimension(tad, dim).offset() + " and f offset is " +// + zF.tensorAlongDimension(tad, dim).offset()); +// } Nd4j.getExecutioner().exec(opc); Nd4j.getExecutioner().exec(opf); @@ -2087,7 +2092,8 @@ public class Nd4jTestsC extends BaseNd4jTest { for (int i = 0; i < 6; i++) { //This should fail for i >= 2, but doesn't - System.out.println(arr.size(i)); +// System.out.println(arr.size(i)); + arr.size(i); } } @@ -2101,7 +2107,7 @@ public class Nd4jTestsC extends BaseNd4jTest { allocate.asFloatBuffer().put(new float[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}); DataBuffer buff = Nd4j.createBuffer(allocate, DataType.FLOAT, 10); float sum = Nd4j.create(buff).sumNumber().floatValue(); - System.out.println(sum); +// System.out.println(sum); assertEquals(55f, sum, 0.001f); Nd4j.setDataType(initialType); @@ -2113,7 +2119,7 @@ public class Nd4jTestsC extends BaseNd4jTest { val res = Nd4j.create(DataType.BOOL, 5); Nd4j.getExecutioner().exec(new Eps(ones, ones, res)); - log.info("Result: {}", res); +// log.info("Result: {}", res); assertTrue(res.all()); } @@ -2126,8 +2132,8 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray expAllZeros1 = Nd4j.getExecutioner().exec(new Eps(first, second, Nd4j.create(DataType.BOOL, new long[] {1, 10}, 'f'))); INDArray expAllZeros2 = Nd4j.getExecutioner().exec(new Eps(second, first, Nd4j.create(DataType.BOOL, new long[] {1, 10}, 'f'))); - System.out.println(expAllZeros1); - System.out.println(expAllZeros2); +// System.out.println(expAllZeros1); +// System.out.println(expAllZeros2); assertTrue(expAllZeros1.none()); assertTrue(expAllZeros2.none()); 
@@ -2169,7 +2175,7 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray assertionRepeat = Nd4j.create(new double[][] {{1, 1, 2, 2}, {3, 3, 4, 4}}); assertArrayEquals(new long[] {2, 4}, assertionRepeat.shape()); assertEquals(assertionRepeat, repeatAlongDimension); - System.out.println(repeatAlongDimension); +// System.out.println(repeatAlongDimension); INDArray ret = Nd4j.create(new double[] {0, 1, 2}).reshape(1, 3); INDArray tile = Nd4j.tile(ret, 2, 2); INDArray assertion = Nd4j.create(new double[][] {{0, 1, 2, 0, 1, 2}, {0, 1, 2, 0, 1, 2}}); @@ -2600,7 +2606,7 @@ public class Nd4jTestsC extends BaseNd4jTest { // vec = vec.dup('c'); // vec = vec.dup('f'); - System.out.println("Vec: " + vec); +// System.out.println("Vec: " + vec); INDArray outC = arrC.muliRowVector(vec); INDArray outF = arrF.muliRowVector(vec); @@ -2640,7 +2646,7 @@ public class Nd4jTestsC extends BaseNd4jTest { double[][] ind = {{5.1, 3.5, 1.4}, {4.9, 3.0, 1.4}, {4.7, 3.2, 1.3}}; INDArray in = Nd4j.create(ind); INDArray stdev = in.std(1); - log.info("StdDev: {}", stdev.toDoubleVector()); +// log.info("StdDev: {}", stdev.toDoubleVector()); INDArray exp = Nd4j.create(new double[] {1.8556220879622372, 1.7521415467935233, 1.7039170558842744}); assertEquals(exp, stdev); } @@ -2870,10 +2876,10 @@ public class Nd4jTestsC extends BaseNd4jTest { public void testTemp() { Nd4j.getRandom().setSeed(12345); INDArray in = Nd4j.rand(new long[] {2, 2, 2}); - System.out.println("In:\n" + in); +// System.out.println("In:\n" + in); INDArray permuted = in.permute(0, 2, 1); //Permute, so we get correct order after reshaping INDArray out = permuted.reshape(4, 2); - System.out.println("Out:\n" + out); +// System.out.println("Out:\n" + out); int countZero = 0; for (int i = 0; i < 8; i++) @@ -2924,7 +2930,7 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray columnConcat = Nd4j.linspace(1, 6, 6, DataType.DOUBLE).reshape(2, 3); INDArray concatWith = Nd4j.zeros(2, 3); INDArray columnWiseConcat = Nd4j.concat(0, 
columnConcat, concatWith); - System.out.println(columnConcat); +// System.out.println(columnConcat); } @@ -2956,9 +2962,9 @@ public class Nd4jTestsC extends BaseNd4jTest { @Test public void testSoftmaxStability() { INDArray input = Nd4j.create(new double[] {-0.75, 0.58, 0.42, 1.03, -0.61, 0.19, -0.37, -0.40, -1.42, -0.04}).reshape(1, -1).transpose(); - System.out.println("Input transpose " + Shape.shapeToString(input.shapeInfo())); +// System.out.println("Input transpose " + Shape.shapeToString(input.shapeInfo())); INDArray output = Nd4j.create(10, 1); - System.out.println("Element wise stride of output " + output.elementWiseStride()); +// System.out.println("Element wise stride of output " + output.elementWiseStride()); Nd4j.getExecutioner().exec(new SoftMax(input, output)); } @@ -3175,7 +3181,7 @@ public class Nd4jTestsC extends BaseNd4jTest { for (int i = 0; i < 20; i++) { INDArray arr1 = Nd4j.zeros(1, 100); Nd4j.getExecutioner().execAndReturn(new SoftMax(arr1)); - System.out.println(Arrays.toString(arr1.data().asFloat())); +// System.out.println(Arrays.toString(arr1.data().asFloat())); } } @@ -3190,8 +3196,8 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray out = Nd4j.getExecutioner().exec(new LeakyReLU(arr, 0.01)); - System.out.println("Expected: " + Arrays.toString(expected)); - System.out.println("Actual: " + Arrays.toString(out.data().asDouble())); +// System.out.println("Expected: " + Arrays.toString(expected)); +// System.out.println("Actual: " + Arrays.toString(out.data().asDouble())); INDArray exp = Nd4j.create(expected); assertEquals(exp, out); @@ -3310,19 +3316,19 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray arr2c = Nd4j.create(shape2, 'c'); INDArray arr2f = Nd4j.create(shape2, 'f'); - log.info("2f data: {}", Arrays.toString(arr2f.data().asFloat())); +// log.info("2f data: {}", Arrays.toString(arr2f.data().asFloat())); arr2c.assign(arr); - System.out.println("--------------"); +// System.out.println("--------------"); 
arr2f.assign(arr); INDArray exp = Nd4j.linspace(1, length, length, DataType.DOUBLE).reshape('c', shape2); - log.info("arr data: {}", Arrays.toString(arr.data().asFloat())); - log.info("2c data: {}", Arrays.toString(arr2c.data().asFloat())); - log.info("2f data: {}", Arrays.toString(arr2f.data().asFloat())); - log.info("2c shape: {}", Arrays.toString(arr2c.shapeInfoDataBuffer().asInt())); - log.info("2f shape: {}", Arrays.toString(arr2f.shapeInfoDataBuffer().asInt())); +// log.info("arr data: {}", Arrays.toString(arr.data().asFloat())); +// log.info("2c data: {}", Arrays.toString(arr2c.data().asFloat())); +// log.info("2f data: {}", Arrays.toString(arr2f.data().asFloat())); +// log.info("2c shape: {}", Arrays.toString(arr2c.shapeInfoDataBuffer().asInt())); +// log.info("2f shape: {}", Arrays.toString(arr2f.shapeInfoDataBuffer().asInt())); assertEquals(exp, arr2c); assertEquals(exp, arr2f); } @@ -3336,21 +3342,21 @@ public class Nd4jTestsC extends BaseNd4jTest { 56.0, 68.0, 80.0, 92.0, 9.0, 21.0, 33.0, 45.0, 57.0, 69.0, 81.0, 93.0, 10.0, 22.0, 34.0, 46.0, 58.0, 70.0, 82.0, 94.0, 11.0, 23.0, 35.0, 47.0, 59.0, 71.0, 83.0, 95.0, 12.0, 24.0, 36.0, 48.0, 60.0, 72.0, 84.0, 96.0}, new long[] {12, 8}, 'f'); - log.info("arr2f shape: {}", Arrays.toString(arr2f.shapeInfoDataBuffer().asInt())); - log.info("arr2f data: {}", Arrays.toString(arr2f.data().asFloat())); - log.info("render: {}", arr2f); +// log.info("arr2f shape: {}", Arrays.toString(arr2f.shapeInfoDataBuffer().asInt())); +// log.info("arr2f data: {}", Arrays.toString(arr2f.data().asFloat())); +// log.info("render: {}", arr2f); - log.info("----------------------"); +// log.info("----------------------"); INDArray array = Nd4j.linspace(1, 96, 96, DataType.DOUBLE).reshape('c', 12, 8); - log.info("array render: {}", array); +// log.info("array render: {}", array); - log.info("----------------------"); +// log.info("----------------------"); INDArray arrayf = array.dup('f'); - log.info("arrayf render: {}", arrayf); - 
log.info("arrayf shape: {}", Arrays.toString(arrayf.shapeInfoDataBuffer().asInt())); - log.info("arrayf data: {}", Arrays.toString(arrayf.data().asFloat())); +// log.info("arrayf render: {}", arrayf); +// log.info("arrayf shape: {}", Arrays.toString(arrayf.shapeInfoDataBuffer().asInt())); +// log.info("arrayf data: {}", Arrays.toString(arrayf.data().asFloat())); } @Test @@ -3386,7 +3392,7 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray arr2f = arr.dup('f'); arr2c.addi(arr); - System.out.println("--------------"); +// System.out.println("--------------"); arr2f.addi(arr); INDArray exp = Nd4j.linspace(1, length, length, DataType.DOUBLE).reshape('c', shape2).mul(2.0); @@ -3394,8 +3400,8 @@ public class Nd4jTestsC extends BaseNd4jTest { assertEquals(exp, arr2c); assertEquals(exp, arr2f); - log.info("2c data: {}", Arrays.toString(arr2c.data().asFloat())); - log.info("2f data: {}", Arrays.toString(arr2f.data().asFloat())); +// log.info("2c data: {}", Arrays.toString(arr2c.data().asFloat())); +// log.info("2f data: {}", Arrays.toString(arr2f.data().asFloat())); assertTrue(arrayNotEquals(arr2c.data().asFloat(), arr2f.data().asFloat(), 1e-5f)); } @@ -3411,7 +3417,7 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray arr2f = arr.dup('f'); arr2c.addi(arr); - System.out.println("--------------"); +// System.out.println("--------------"); arr2f.addi(arr); INDArray exp = Nd4j.linspace(1, length, length, DataType.DOUBLE).reshape('c', shape2).dup('f').mul(2.0); @@ -3419,8 +3425,8 @@ public class Nd4jTestsC extends BaseNd4jTest { assertEquals(exp, arr2c); assertEquals(exp, arr2f); - log.info("2c data: {}", Arrays.toString(arr2c.data().asFloat())); - log.info("2f data: {}", Arrays.toString(arr2f.data().asFloat())); +// log.info("2c data: {}", Arrays.toString(arr2c.data().asFloat())); +// log.info("2f data: {}", Arrays.toString(arr2f.data().asFloat())); assertTrue(arrayNotEquals(arr2c.data().asFloat(), arr2f.data().asFloat(), 1e-5f)); } @@ -3436,7 +3442,7 @@ public 
class Nd4jTestsC extends BaseNd4jTest { INDArray arr2f = Nd4j.create(shape2, 'f'); arr2c.assign(arr); - System.out.println("--------------"); +// System.out.println("--------------"); arr2f.assign(arr); INDArray exp = Nd4j.linspace(1, length, length, DataType.DOUBLE).reshape('c', shape2); @@ -3466,8 +3472,8 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray exp = Nd4j.linspace(1, length, length, DataType.DOUBLE).reshape('c', shape2); - System.out.println("Zf data: " + Arrays.toString(z_f.data().asFloat())); - System.out.println("Zc data: " + Arrays.toString(z_c.data().asFloat())); +// System.out.println("Zf data: " + Arrays.toString(z_f.data().asFloat())); +// System.out.println("Zc data: " + Arrays.toString(z_c.data().asFloat())); assertEquals(exp, z_f); assertEquals(exp, z_c); @@ -3528,37 +3534,45 @@ public class Nd4jTestsC extends BaseNd4jTest { @Test public void testVarConst() { INDArray x = Nd4j.linspace(1, 100, 100, DataType.DOUBLE).reshape(10, 10); - System.out.println(x); +// System.out.println(x); assertFalse(Double.isNaN(x.var(0).sumNumber().doubleValue())); - System.out.println(x.var(0)); +// System.out.println(x.var(0)); + x.var(0); assertFalse(Double.isNaN(x.var(1).sumNumber().doubleValue())); - System.out.println(x.var(1)); +// System.out.println(x.var(1)); + x.var(1); - System.out.println("================================="); +// System.out.println("================================="); // 2d array - all elements are the same INDArray a = Nd4j.ones(10, 10).mul(10); - System.out.println(a); +// System.out.println(a); assertFalse(Double.isNaN(a.var(0).sumNumber().doubleValue())); - System.out.println(a.var(0)); +// System.out.println(a.var(0)); + a.var(0); assertFalse(Double.isNaN(a.var(1).sumNumber().doubleValue())); - System.out.println(a.var(1)); +// System.out.println(a.var(1)); + a.var(1); // 2d array - constant in one dimension - System.out.println("================================="); +// 
System.out.println("================================="); INDArray nums = Nd4j.linspace(1, 10, 10, DataType.DOUBLE); INDArray b = Nd4j.ones(10, 10).mulRowVector(nums); - System.out.println(b); +// System.out.println(b); assertFalse(Double.isNaN((Double) b.var(0).sumNumber())); - System.out.println(b.var(0)); +// System.out.println(b.var(0)); + b.var(0); assertFalse(Double.isNaN((Double) b.var(1).sumNumber())); - System.out.println(b.var(1)); +// System.out.println(b.var(1)); + b.var(1); - System.out.println("================================="); - System.out.println(b.transpose()); +// System.out.println("================================="); +// System.out.println(b.transpose()); assertFalse(Double.isNaN((Double) b.transpose().var(0).sumNumber())); - System.out.println(b.transpose().var(0)); +// System.out.println(b.transpose().var(0)); + b.transpose().var(0); assertFalse(Double.isNaN((Double) b.transpose().var(1).sumNumber())); - System.out.println(b.transpose().var(1)); +// System.out.println(b.transpose().var(1)); + b.transpose().var(1); } @Test @@ -3619,8 +3633,8 @@ public class Nd4jTestsC extends BaseNd4jTest { assertEquals(5, result.columns()); assertEquals(assertion, result); - System.out.println(assertion.toString()); - System.out.println(result.toString()); +// System.out.println(assertion.toString()); +// System.out.println(result.toString()); } @@ -3652,7 +3666,7 @@ public class Nd4jTestsC extends BaseNd4jTest { Nd4j.getExecutioner().exec(new ReplaceNans(array, 0.0)); - System.out.println("Array After: " + array); +// System.out.println("Array After: " + array); assertEquals(assertion, array); } @@ -3755,8 +3769,8 @@ public class Nd4jTestsC extends BaseNd4jTest { IAMax iaMax = new IAMax(arr.dup()); val imax = Nd4j.getExecutioner().execAndReturn(iMax).getFinalResult().intValue(); val iamax = Nd4j.getExecutioner().execAndReturn(iaMax).getFinalResult().intValue(); - System.out.println("IMAX: " + imax); - System.out.println("IAMAX: " + iamax); +// 
System.out.println("IMAX: " + imax); +// System.out.println("IAMAX: " + iamax); assertEquals(1, iamax); assertEquals(3, imax); } @@ -3770,8 +3784,8 @@ public class Nd4jTestsC extends BaseNd4jTest { IMin iMin = new IMin(arr.dup()); double imin = Nd4j.getExecutioner().execAndReturn(iMin).getFinalResult().doubleValue(); double iamin = Nd4j.getExecutioner().execAndReturn(iaMin).getFinalResult().doubleValue(); - System.out.println("IMin: " + imin); - System.out.println("IAMin: " + iamin); +// System.out.println("IMin: " + imin); +// System.out.println("IAMin: " + iamin); assertEquals(3, iamin, 1e-12); assertEquals(1, imin, 1e-12); } @@ -3783,7 +3797,7 @@ public class Nd4jTestsC extends BaseNd4jTest { for (char orderArr : orders) { for (char orderbc : orders) { - System.out.println(orderArr + "\t" + orderbc); +// System.out.println(orderArr + "\t" + orderbc); INDArray arrOrig = Nd4j.ones(3, 4, 5).dup(orderArr); //Broadcast on dimensions 0,1 @@ -3831,7 +3845,7 @@ public class Nd4jTestsC extends BaseNd4jTest { for (char orderArr : orders) { for (char orderbc : orders) { - System.out.println(orderArr + "\t" + orderbc); +// System.out.println(orderArr + "\t" + orderbc); INDArray arrOrig = Nd4j.ones(3, 4, 5, 6).dup(orderArr); //Broadcast on dimensions 0,1 @@ -4271,7 +4285,7 @@ public class Nd4jTestsC extends BaseNd4jTest { assertEquals(10, matrix.rows()); assertEquals(6, matrix.columns()); - log.info("Result: {}", matrix); +// log.info("Result: {}", matrix); for (int x = 0; x < 10; x++) { assertEquals((double) x, matrix.getRow(x).meanNumber().doubleValue(), 0.1); @@ -4284,7 +4298,7 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray array = Nd4j.create(10, 3, 96, 96); for (int i = 0; i < 10; i++) { - log.info("Trying i: {}", i); +// log.info("Trying i: {}", i); array.tensorAlongDimension(i, 1, 2, 3).putScalar(1, 2, 3, 1); } } @@ -4368,7 +4382,7 @@ public class Nd4jTestsC extends BaseNd4jTest { // Nd4j.getExecutioner().commit(); val executioner = (GridExecutioner) 
Nd4j.getExecutioner(); - log.info("Starting: -------------------------------"); +// log.info("Starting: -------------------------------"); //log.info("Point A: [{}]", executioner.getQueueLength()); @@ -4476,16 +4490,16 @@ public class Nd4jTestsC extends BaseNd4jTest { } Nd4j.getExecutioner().commit(); - log.info("original: \n{}", initial); +// log.info("original: \n{}", initial); Nd4j.getExecutioner().exec(new BroadcastLessThan(initial, mask, result, 1)); Nd4j.getExecutioner().commit(); - log.info("Comparison ----------------------------------------------"); +// log.info("Comparison ----------------------------------------------"); for (int i = 0; i < initial.rows(); i++) { val row = result.getRow(i); assertEquals("Failed at row " + i, exp, row); - log.info("-------------------"); +// log.info("-------------------"); } } @@ -4570,7 +4584,7 @@ public class Nd4jTestsC extends BaseNd4jTest { val row = haystack.getRow(1); val drow = row.dup(); - log.info("row shape: {}", row.shapeInfoDataBuffer()); +// log.info("row shape: {}", row.shapeInfoDataBuffer()); assertEquals(needle, drow); } @@ -4584,7 +4598,7 @@ public class Nd4jTestsC extends BaseNd4jTest { -1.25485503673}); INDArray reduced = Nd4j.getExecutioner().exec(new CosineDistance(haystack, needle, 1)); - log.info("Reduced: {}", reduced); +// log.info("Reduced: {}", reduced); INDArray exp = Nd4j.create(new double[] {0.577452, 0.0, 1.80182}); @@ -4667,7 +4681,7 @@ public class Nd4jTestsC extends BaseNd4jTest { .doubleValue(); assertEquals("Failed at " + i, reduced.getDouble(i), res, 0.001); - log.info("Euclidean: {} vs {} is {}", x, needle, res); +// log.info("Euclidean: {} vs {} is {}", x, needle, res); } } @@ -4688,7 +4702,7 @@ public class Nd4jTestsC extends BaseNd4jTest { .doubleValue(); assertEquals("Failed at " + i, reduced.getDouble(i), res, 0.001); - log.info("Euclidean: {} vs {} is {}", x, needle, res); +// log.info("Euclidean: {} vs {} is {}", x, needle, res); } } @@ -4710,7 +4724,7 @@ public class 
Nd4jTestsC extends BaseNd4jTest { .doubleValue(); assertEquals("Failed at " + i, reduced.getDouble(i), res, 0.001); - log.info("Cosine: {} vs {} is {}", x, needle, res); +// log.info("Cosine: {} vs {} is {}", x, needle, res); } } @@ -4820,7 +4834,7 @@ public class Nd4jTestsC extends BaseNd4jTest { x.getRow(r).putScalar(p, 1); } - log.info("X: {}", x); +// log.info("X: {}", x); INDArray y = Nd4j.create(new double[] {0, 0, 0, 0, 1, 0}); INDArray res = Nd4j.getExecutioner().exec(new HammingDistance(x, y, 1)); @@ -5217,7 +5231,7 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray array = Nd4j.create(new double[] {10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0}); INDArray exp = Nd4j.create(new double[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10}); - log.info("Array shapeInfo: {}", array.shapeInfoJava()); +// log.info("Array shapeInfo: {}", array.shapeInfoJava()); INDArray rev = Nd4j.reverse(array); @@ -5281,7 +5295,7 @@ public class Nd4jTestsC extends BaseNd4jTest { Nd4j.sort(matrix.getColumn(0), true); - log.info("Matrix: {}", matrix); +// log.info("Matrix: {}", matrix); assertEquals(exp, matrix.getColumn(0)); } @@ -5372,8 +5386,8 @@ public class Nd4jTestsC extends BaseNd4jTest { Transforms.reverse(array, false); - log.info("Reversed shapeInfo: {}", array.shapeInfoJava()); - log.info("Reversed: {}", array); +// log.info("Reversed shapeInfo: {}", array.shapeInfoJava()); +// log.info("Reversed: {}", array); Transforms.reverse(array, false); @@ -5390,7 +5404,7 @@ public class Nd4jTestsC extends BaseNd4jTest { val reversed = Transforms.reverse(array, true); - log.info("Reversed: {}", reversed); +// log.info("Reversed: {}", reversed); val rereversed = Transforms.reverse(reversed, true); @@ -5433,7 +5447,7 @@ public class Nd4jTestsC extends BaseNd4jTest { INDArray array = Nd4j.linspace(1, 2017152, 2017152, DataType.DOUBLE).reshape(1, -1); INDArray exp = array.dup(); Transforms.reverse(array, false); - log.info("Reverse: {}", array); +// log.info("Reverse: {}", array); long time1 = 
System.currentTimeMillis(); @@ -6182,8 +6196,8 @@ public class Nd4jTestsC extends BaseNd4jTest { val vectorN = Nd4j.create(new float[]{1, 2, 3}, new long[]{3}); val matrix = Nd4j.create(new float[]{1, 2, 3, 4, 5, 6, 7, 8, 9}, new long[] {3, 3}); - log.info("vectorN: {}", vectorN); - log.info("vectorL: {}", vectorL); +// log.info("vectorN: {}", vectorN); +// log.info("vectorL: {}", vectorL); val outN = matrix.mmul(vectorN); val outL = matrix.mmul(vectorL); @@ -6601,7 +6615,7 @@ public class Nd4jTestsC extends BaseNd4jTest { @Test public void testSummaryStatsEquality_1() { - log.info("Datatype: {}", Nd4j.dataType()); +// log.info("Datatype: {}", Nd4j.dataType()); for(boolean biasCorrected : new boolean[]{false, true}) { @@ -6610,9 +6624,9 @@ public class Nd4jTestsC extends BaseNd4jTest { val standardDeviation = new org.apache.commons.math3.stat.descriptive.moment.StandardDeviation(biasCorrected); double std2 = standardDeviation.evaluate(indArray1.data().asDouble()); - log.info("Bias corrected = {}", biasCorrected); - log.info("nd4j std: {}", std); - log.info("apache math3 std: {}", std2); +// log.info("Bias corrected = {}", biasCorrected); +// log.info("nd4j std: {}", std); +// log.info("apache math3 std: {}", std2); assertEquals(std, std2, 1e-5); } @@ -6760,18 +6774,11 @@ public class Nd4jTestsC extends BaseNd4jTest { Nd4j.getExecutioner().commit(); - log.info("Result shape: {}", result.shapeInfoDataBuffer().asLong()); +// log.info("Result shape: {}", result.shapeInfoDataBuffer().asLong()); Nd4j.setDataType(dtype); } - @Test - public void testSomething() { - val a = Nd4j.create(10, 20); - - log.info("Shape: {}", a.mean(0).shape()); - } - @Test public void testTranspose_Custom(){ @@ -7047,7 +7054,7 @@ public class Nd4jTestsC extends BaseNd4jTest { val z = Transforms.greaterThanOrEqual(x, y, true); val str = ez.toString(); - log.info("exp: {}", str); +// log.info("exp: {}", str); assertEquals(ex, x); assertEquals(ey, y); @@ -7694,7 +7701,7 @@ public class Nd4jTestsC 
extends BaseNd4jTest { public void testGetColumnRowVector(){ INDArray arr = Nd4j.create(1,4); INDArray col = arr.getColumn(0); - System.out.println(Arrays.toString(col.shape())); +// System.out.println(Arrays.toString(col.shape())); assertArrayEquals(new long[]{1}, col.shape()); } @@ -7815,7 +7822,7 @@ public class Nd4jTestsC extends BaseNd4jTest { double[] data = new double[]{15.0, 16.0}; INDArray vector = Nd4j.createFromArray(data).reshape(1,2); INDArray slice = vector.slice(0); - System.out.println(slice.shapeInfoToString()); +// System.out.println(slice.shapeInfoToString()); assertEquals(vector.reshape(2), slice); slice.assign(-1); assertEquals(Nd4j.createFromArray(-1.0, -1.0).reshape(1,2), vector); @@ -7824,9 +7831,11 @@ public class Nd4jTestsC extends BaseNd4jTest { @Test public void testSliceMatrix(){ INDArray arr = Nd4j.arange(4).reshape(2,2); - System.out.println(arr.slice(0)); - System.out.println(); - System.out.println(arr.slice(1)); +// System.out.println(arr.slice(0)); +// System.out.println(); +// System.out.println(arr.slice(1)); + arr.slice(0); + arr.slice(1); } @Test @@ -8137,7 +8146,7 @@ public class Nd4jTestsC extends BaseNd4jTest { List l = c.calculateOutputShape(); - System.out.println(Arrays.toString(l.get(0).getShape())); +// System.out.println(Arrays.toString(l.get(0).getShape())); //from [4,4,3] to [2,4,6] then crop to [2,4,5] } @@ -8208,6 +8217,20 @@ public class Nd4jTestsC extends BaseNd4jTest { assertEquals(exp, out); } + @Test + public void testEmptyReshapingMinus1(){ + INDArray arr0 = Nd4j.create(DataType.FLOAT, 2, 0); + INDArray arr1 = Nd4j.create(DataType.FLOAT, 0, 1, 2); + + INDArray out0 = Nd4j.exec(new Reshape(arr0, Nd4j.createFromArray(2, 0, -1), Nd4j.create(DataType.FLOAT, 2, 0, 0)))[0]; + INDArray out1 = Nd4j.exec(new Reshape(arr1, Nd4j.createFromArray(-1, 1), Nd4j.create(DataType.FLOAT, 0, 1)))[0]; + INDArray out2 = Nd4j.exec(new Reshape(arr1, Nd4j.createFromArray(10, -1), Nd4j.create(DataType.FLOAT, 10, 0)))[0]; + + 
assertArrayEquals(new long[]{2, 0, 0}, out0.shape()); + assertArrayEquals(new long[]{0, 1}, out1.shape()); + assertArrayEquals(new long[]{10, 0}, out2.shape()); + } + @Override public char ordering() { return 'c'; diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ShufflesTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ShufflesTests.java index f1fdf9c57..bd10bd74d 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ShufflesTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ShufflesTests.java @@ -48,17 +48,15 @@ public class ShufflesTests extends BaseNd4jTest { array.getRow(x).assign(x); } - System.out.println(array); +// System.out.println(array); OrderScanner2D scanner = new OrderScanner2D(array); assertArrayEquals(new float[] {0f, 1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f, 9f}, scanner.getMap(), 0.01f); - System.out.println(); - Nd4j.shuffle(array, 1); - System.out.println(array); +// System.out.println(array); ArrayUtil.argMin(new int[] {}); @@ -71,19 +69,12 @@ public class ShufflesTests extends BaseNd4jTest { for (int x = 0; x < 10; x++) { array.getColumn(x).assign(x); } - - System.out.println(array); +// System.out.println(array); OrderScanner2D scanner = new OrderScanner2D(array); - assertArrayEquals(new float[] {0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f}, scanner.getMap(), 0.01f); - - System.out.println(); - Nd4j.shuffle(array, 0); - - System.out.println(array); - +// System.out.println(array); assertTrue(scanner.compareColumn(array)); } @@ -94,20 +85,12 @@ public class ShufflesTests extends BaseNd4jTest { array.getRow(x).assign(x); } - System.out.println(array); - +// System.out.println(array); OrderScanner2D scanner = new OrderScanner2D(array); assertArrayEquals(new float[] {0f, 1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f, 9f, 10f}, scanner.getMap(), 0.01f); - - System.out.println(); - Nd4j.shuffle(array, 1); - - System.out.println(array); - - ArrayUtil.argMin(new int[] {}); - +// 
System.out.println(array); assertTrue(scanner.compareRow(array)); } @@ -119,26 +102,21 @@ public class ShufflesTests extends BaseNd4jTest { features.getRow(x).assign(x); labels.getRow(x).assign(x); } - - System.out.println(features); +// System.out.println(features); OrderScanner2D scanner = new OrderScanner2D(features); assertArrayEquals(new float[] {0f, 1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f, 9f}, scanner.getMap(), 0.01f); - System.out.println(); - List list = new ArrayList<>(); list.add(features); list.add(labels); Nd4j.shuffle(list, 1); - System.out.println(features); - - System.out.println(); - - System.out.println(labels); +// System.out.println(features); +// System.out.println(); +// System.out.println(labels); ArrayUtil.argMin(new int[] {}); @@ -164,24 +142,20 @@ public class ShufflesTests extends BaseNd4jTest { labels.slice(x).assign(x); } - System.out.println(features); +// System.out.println(features); OrderScanner3D scannerFeatures = new OrderScanner3D(features); OrderScanner3D scannerLabels = new OrderScanner3D(labels); - System.out.println(); - List list = new ArrayList<>(); list.add(features); list.add(labels); Nd4j.shuffle(list, 1, 2); - System.out.println(features); - - System.out.println("------------------"); - - System.out.println(labels); +// System.out.println(features); +// System.out.println("------------------"); +// System.out.println(labels); assertTrue(scannerFeatures.compareSlice(features)); assertTrue(scannerLabels.compareSlice(labels)); @@ -360,7 +334,7 @@ public class ShufflesTests extends BaseNd4jTest { } if (Arrays.equals(map, newMap)) { - System.out.println("Maps are equal"); +// System.out.println("Maps are equal"); return false; } @@ -407,7 +381,7 @@ public class ShufflesTests extends BaseNd4jTest { } if (Arrays.equals(map, newMap)) { - System.out.println("Maps are equal"); +// System.out.println("Maps are equal"); return false; } @@ -433,7 +407,7 @@ public class ShufflesTests extends BaseNd4jTest { } if (Arrays.equals(map, newMap)) { 
- System.out.println("Maps are equal"); +// System.out.println("Maps are equal"); return false; } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/activations/TestActivationJson.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/activations/TestActivation.java similarity index 63% rename from nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/activations/TestActivationJson.java rename to nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/activations/TestActivation.java index 9586a8160..1240c1213 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/activations/TestActivationJson.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/activations/TestActivation.java @@ -22,7 +22,11 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.nd4j.linalg.BaseNd4jTest; import org.nd4j.linalg.activations.impl.*; +import org.nd4j.linalg.api.buffer.DataType; +import org.nd4j.linalg.api.ndarray.INDArray; +import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.factory.Nd4jBackend; +import org.nd4j.linalg.primitives.Pair; import org.nd4j.shade.jackson.databind.*; import java.util.ArrayList; @@ -37,9 +41,9 @@ import static org.junit.Assert.assertEquals; * Created by Alex on 30/12/2016. 
*/ @RunWith(Parameterized.class) -public class TestActivationJson extends BaseNd4jTest { +public class TestActivation extends BaseNd4jTest { - public TestActivationJson(Nd4jBackend backend) { + public TestActivation(Nd4jBackend backend) { super(backend); } @@ -59,6 +63,59 @@ public class TestActivationJson extends BaseNd4jTest { mapper.enable(SerializationFeature.INDENT_OUTPUT); } + @Test + public void testRelu(){ + + Double[] max = {null, 6.0, 2.5, 5.0}; + Double[] threshold = {0.0, 0.0, 0.75, 0.2}; + Double[] negativeSlope = {0.0, 0.0, 0.0, 0.3}; + + INDArray in = Nd4j.linspace(-10, 10, 1000, DataType.DOUBLE); + double[] dIn = in.data().asDouble(); + + for( int i=0; i 5000); } @@ -559,10 +559,10 @@ public class IndexingTestsC extends BaseNd4jTest { INDArray arr = Nd4j.linspace(DataType.FLOAT, 1, prod, prod).reshape('c', inShape).dup(order); INDArray sub = arr.get(indexes); - System.out.println(Arrays.toString(indexes)); - System.out.println(arr); - System.out.println(); - System.out.println(sub); +// System.out.println(Arrays.toString(indexes)); +// System.out.println(arr); +// System.out.println(); +// System.out.println(sub); } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/api/ndarray/TestNdArrReadWriteTxt.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/api/ndarray/TestNdArrReadWriteTxt.java index 8e1d8bd8f..a44df5868 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/api/ndarray/TestNdArrReadWriteTxt.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/api/ndarray/TestNdArrReadWriteTxt.java @@ -57,7 +57,7 @@ public class TestNdArrReadWriteTxt extends BaseNd4jTest { public void compareAfterWrite() throws Exception { int [] ranksToCheck = new int[] {0,1,2,3,4}; for (int i=0; i lsd = op.calculateOutputShape(); assertEquals(1, lsd.size()); assertArrayEquals(new long[]{8, 8, 3}, lsd.get(0).getShape()); @@ -1358,7 +1358,7 @@ public class CustomOpsTests extends BaseNd4jTest { 
INDArray y = Nd4j.linspace(DataType.FLOAT, -5, 9, 1).reshape(3, 3); val c = Conditions.equals(0.0); - System.out.println("Y:\n" + y); +// System.out.println("Y:\n" + y); INDArray z = x.match(y, c); INDArray exp = Nd4j.createFromArray(new boolean[][]{ diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/dataset/DataSetTest.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/dataset/DataSetTest.java index f6098dd3d..a62dc631e 100755 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/dataset/DataSetTest.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/dataset/DataSetTest.java @@ -482,7 +482,7 @@ public class DataSetTest extends BaseNd4jTest { //Tests merging of different CNN masks: [mb,1,h,1], [mb,1,1,w], [mb,1,h,w] for( int t=0; t<3; t++) { - log.info("Starting test: {}", t); +// log.info("Starting test: {}", t); int nOut = 3; int width = 5; int height = 4; @@ -808,7 +808,7 @@ public class DataSetTest extends BaseNd4jTest { ds.shuffle(); INDArray fCol = f.getColumn(0); INDArray lCol = l.getColumn(0); - System.out.println(fCol + "\t" + ds.getExampleMetaData()); +// System.out.println(fCol + "\t" + ds.getExampleMetaData()); for (int j = 0; j < nExamples; j++) { int fVal = (int) fCol.getDouble(j); int lVal = (int) lCol.getDouble(j); @@ -836,7 +836,8 @@ public class DataSetTest extends BaseNd4jTest { public void testToString() { org.nd4j.linalg.dataset.api.DataSet ds = new DataSet(); //this should not throw a null pointer - System.out.println(ds); +// System.out.println(ds); + ds.toString(); //Checking printing of masks int numExamples = 10; @@ -853,7 +854,8 @@ public class DataSetTest extends BaseNd4jTest { } ds = DataSet.merge(list); - System.out.println(ds); +// System.out.println(ds); + ds.toString(); } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/dimensionalityreduction/TestPCA.java 
b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/dimensionalityreduction/TestPCA.java index a28c026cc..981495eac 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/dimensionalityreduction/TestPCA.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/dimensionalityreduction/TestPCA.java @@ -149,16 +149,15 @@ public class TestPCA extends BaseNd4jTest { INDArray reduced100 = myPCA.reducedBasis(1.0); assertTrue("100% variance coverage should include all eigenvectors", reduced100.columns() == m.columns()); NDArrayStrings ns = new NDArrayStrings(5); - System.out.println("Eigenvectors:\n" + ns.format(myPCA.getEigenvectors())); - System.out.println("Eigenvalues:\n" + ns.format(myPCA.getEigenvalues())); +// System.out.println("Eigenvectors:\n" + ns.format(myPCA.getEigenvectors())); +// System.out.println("Eigenvalues:\n" + ns.format(myPCA.getEigenvalues())); double variance = 0.0; // sample 1000 of the randomly generated samples with the reduced basis set for (long i = 0; i < 1000; i++) variance += myPCA.estimateVariance(m.getRow(i), reduced70.columns()); variance /= 1000.0; - System.out.println("Fraction of variance using 70% variance with " + reduced70.columns() + " columns: " - + variance); + System.out.println("Fraction of variance using 70% variance with " + reduced70.columns() + " columns: " + variance); assertTrue("Variance does not cover intended 70% variance", variance > 0.70); // create "dummy" data with the same exact trends INDArray testSample = myPCA.generateGaussianSamples(10000); @@ -171,8 +170,8 @@ public class TestPCA extends BaseNd4jTest { 0.5 * myPCA.getEigenvalues().columns())); assertTrue("Eigenvectors are not close enough", myPCA.getEigenvectors() .equalsWithEps(analyzePCA.getEigenvectors(), 0.1 * analyzePCA.getEigenvectors().length())); - System.out.println("Original cov:\n" + ns.format(myPCA.getCovarianceMatrix()) + "\nDummy cov:\n" - + ns.format(analyzePCA.getCovarianceMatrix())); +// 
System.out.println("Original cov:\n" + ns.format(myPCA.getCovarianceMatrix()) + "\nDummy cov:\n" +// + ns.format(analyzePCA.getCovarianceMatrix())); INDArray testSample2 = analyzePCA.convertBackToFeatures(analyzePCA.convertToComponents(testSample)); assertTrue("Transformation does not work.", testSample.equalsWithEps(testSample2, 1e-5 * testSample.length())); } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/indexing/BooleanIndexingTest.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/indexing/BooleanIndexingTest.java index 324efed0e..49d079529 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/indexing/BooleanIndexingTest.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/indexing/BooleanIndexingTest.java @@ -139,11 +139,8 @@ public class BooleanIndexingTest extends BaseNd4jTest { @Test public void test2dAnd2() { INDArray array = Nd4j.zeros(10, 10); - array.slice(4).putScalar(2, 1e-5f); - - - System.out.println(array); +// System.out.println(array); assertFalse(BooleanIndexing.and(array, Conditions.equals(0f))); @@ -329,7 +326,7 @@ public class BooleanIndexingTest extends BaseNd4jTest { boolean result[] = BooleanIndexing.and(array, Conditions.equals(0.0), 1); boolean comp[] = new boolean[] {false, false, true}; - System.out.println("Result: " + Arrays.toString(result)); +// System.out.println("Result: " + Arrays.toString(result)); assertArrayEquals(comp, result); } @@ -338,12 +335,12 @@ public class BooleanIndexingTest extends BaseNd4jTest { INDArray array = Nd4j.ones(3, 10); array.getRow(2).assign(0.0).putScalar(0, 1.0); - System.out.println("Array: " + array); +// System.out.println("Array: " + array); boolean result[] = BooleanIndexing.or(array, Conditions.lessThan(0.9), 1); boolean comp[] = new boolean[] {false, false, true}; - System.out.println("Result: " + Arrays.toString(result)); +// System.out.println("Result: " + Arrays.toString(result)); assertArrayEquals(comp, 
result); } @@ -355,7 +352,7 @@ public class BooleanIndexingTest extends BaseNd4jTest { boolean result[] = BooleanIndexing.and(array, Conditions.lessThan(0.0), 1); boolean comp[] = new boolean[] {false, false, false}; - System.out.println("Result: " + Arrays.toString(result)); +// System.out.println("Result: " + Arrays.toString(result)); assertArrayEquals(comp, result); } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/mixed/MixedDataTypesTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/mixed/MixedDataTypesTests.java index e92f03c39..15fbac932 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/mixed/MixedDataTypesTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/mixed/MixedDataTypesTests.java @@ -453,8 +453,8 @@ public class MixedDataTypesTests extends BaseNd4jTest { INDArray not = Transforms.not(asBool); // INDArray asFloat = not.castTo(DataType.FLOAT); - System.out.println(not); - System.out.println(asFloat); +// System.out.println(not); +// System.out.println(asFloat); INDArray exp = Nd4j.ones(DataType.FLOAT, 3, 5000); assertEquals(DataType.FLOAT, exp.dataType()); assertEquals(exp.dataType(), asFloat.dataType()); @@ -480,7 +480,7 @@ public class MixedDataTypesTests extends BaseNd4jTest { for(DataType dt : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) { INDArray arr = Nd4j.scalar(dt, 10.0); arr.assign(2.0); - System.out.println(dt + " - value: " + arr + " - " + arr.getDouble(0)); +// System.out.println(dt + " - value: " + arr + " - " + arr.getDouble(0)); } } @@ -488,17 +488,23 @@ public class MixedDataTypesTests extends BaseNd4jTest { public void testSimple(){ Nd4j.create(1); for(DataType dt : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF, DataType.INT, DataType.LONG}) { - System.out.println("----- " + dt + " -----"); +// System.out.println("----- " + dt + " -----"); INDArray arr = Nd4j.ones(dt,1, 5); - System.out.println("Ones: " + 
arr); +// System.out.println("Ones: " + arr); arr.assign(1.0); - System.out.println("assign(1.0): " + arr); - System.out.println("DIV: " + arr.div(8)); - System.out.println("MUL: " + arr.mul(8)); - System.out.println("SUB: " + arr.sub(8)); - System.out.println("ADD: " + arr.add(8)); - System.out.println("RDIV: " + arr.rdiv(8)); - System.out.println("RSUB: " + arr.rsub(8)); +// System.out.println("assign(1.0): " + arr); +// System.out.println("DIV: " + arr.div(8)); +// System.out.println("MUL: " + arr.mul(8)); +// System.out.println("SUB: " + arr.sub(8)); +// System.out.println("ADD: " + arr.add(8)); +// System.out.println("RDIV: " + arr.rdiv(8)); +// System.out.println("RSUB: " + arr.rsub(8)); + arr.div(8); + arr.mul(8); + arr.sub(8); + arr.add(8); + arr.rdiv(8); + arr.rsub(8); } } @@ -519,7 +525,7 @@ public class MixedDataTypesTests extends BaseNd4jTest { val boolAttached = bool.isAttached(); val doubleAttached = dbl.isAttached(); - System.out.println(i + "\tboolAttached=" + boolAttached + ", doubleAttached=" + doubleAttached ); +// System.out.println(i + "\tboolAttached=" + boolAttached + ", doubleAttached=" + doubleAttached ); //System.out.println("bool: " + bool); //java.lang.IllegalStateException: Indexer must never be null //System.out.println("double: " + dbl); } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/nativ/NativeBlasTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/nativ/NativeBlasTests.java index 55d90a6aa..e8c016485 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/nativ/NativeBlasTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/nativ/NativeBlasTests.java @@ -256,7 +256,7 @@ public class NativeBlasTests extends BaseNd4jTest { val exp = A.mmul(B); - log.info("exp: {}", exp); +// log.info("exp: {}", exp); // ? 
assertEquals(exp, res); @@ -284,7 +284,7 @@ public class NativeBlasTests extends BaseNd4jTest { val exp = A.mmul(B); - log.info("exp mean: {}", exp.meanNumber()); +// log.info("exp mean: {}", exp.meanNumber()); // ? assertEquals(exp, res); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/nativ/OpsMappingTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/nativ/OpsMappingTests.java index ca9de0252..efb77abef 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/nativ/OpsMappingTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/nativ/OpsMappingTests.java @@ -59,9 +59,9 @@ public class OpsMappingTests extends BaseNd4jTest { return 'c'; } - @Test - public void testCustomOpsMapping() { - Nd4j.create(1); + @Override + public long testTimeoutMilliseconds() { + return 90000L; } @Test diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/DerivativeTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/DerivativeTests.java index 5a51b847d..1a70fa6c1 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/DerivativeTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/DerivativeTests.java @@ -176,9 +176,9 @@ public class DerivativeTests extends BaseNd4jTest { INDArray z = Transforms.hardSigmoid(xArr, true); INDArray zPrime = Nd4j.getExecutioner().exec(new HardSigmoidDerivative(xArr.dup())); - System.out.println(xArr); - System.out.println(z); - System.out.println(zPrime); +// System.out.println(xArr); +// System.out.println(z); +// System.out.println(zPrime); for (int i = 0; i < expHSOut.length; i++) { double relErrorHS = diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpExecutionerTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpExecutionerTests.java index e04250f69..042abca1f 100644 --- 
a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpExecutionerTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpExecutionerTests.java @@ -111,7 +111,7 @@ public class OpExecutionerTests extends BaseNd4jTest { new EuclideanDistance(distanceInputRow, distanceComp, result, 0)); INDArray euclideanAssertion = Nd4j.ones(4).castTo(DataType.DOUBLE); assertEquals(euclideanAssertion, result); - System.out.println(result); +// System.out.println(result); } @@ -517,7 +517,7 @@ public class OpExecutionerTests extends BaseNd4jTest { 0.27320877, 0.29476917, 0.29449323, 0.29720396, 0.31319344, 0.2803108, 0.28671616, 0.30462897, 0.3049033, 0.29277474, 0.29136384, 0.30316526, 0.2807459}, new int[] {150, 3}, 'f'); - System.out.println("Data:" + input.data().length()); +// System.out.println("Data:" + input.data().length()); val softMax = new SoftMax(input); Nd4j.getExecutioner().exec((CustomOp) softMax); assertEquals(assertion, softMax.outputArguments()[0]); @@ -589,12 +589,12 @@ public class OpExecutionerTests extends BaseNd4jTest { @Test public void testMeanSumSimple() { - System.out.println("3d"); +// System.out.println("3d"); INDArray arr = Nd4j.ones(1, 4, 4); assertEquals(Nd4j.ones(1), arr.mean(1, 2)); assertEquals(Nd4j.ones(1).muli(16), arr.sum(1, 2)); - System.out.println("4d"); +// System.out.println("4d"); INDArray arr4 = Nd4j.ones(1, 1, 4, 4); INDArray arr4m = arr4.mean(2, 3); INDArray arr4s = arr4.sum(2, 3); @@ -603,7 +603,7 @@ public class OpExecutionerTests extends BaseNd4jTest { for (int i = 0; i < arr4s.length(); i++) assertEquals(arr4s.getDouble(i), 16, 1e-1); - System.out.println("5d"); +// System.out.println("5d"); INDArray arr5 = Nd4j.ones(1, 1, 4, 4, 4); INDArray arr5m = arr5.mean(2, 3); INDArray arr5s = arr5.sum(2, 3); @@ -611,7 +611,7 @@ public class OpExecutionerTests extends BaseNd4jTest { assertEquals(arr5m.getDouble(i), 1, 1e-1); for (int i = 0; i < arr5s.length(); i++) assertEquals(arr5s.getDouble(i), 16, 
1e-1); - System.out.println("6d"); +// System.out.println("6d"); INDArray arr6 = Nd4j.ones(1, 1, 4, 4, 4, 4); INDArray arr6Tad = arr6.tensorAlongDimension(0, 2, 3); INDArray arr6s = arr6.sum(2, 3); @@ -629,7 +629,7 @@ public class OpExecutionerTests extends BaseNd4jTest { INDArray arr6 = Nd4j.ones(1, 1, 4, 4, 4, 4); INDArray arr6s = arr6.sum(2, 3); - System.out.println("Arr6s: " + arr6.length()); +// System.out.println("Arr6s: " + arr6.length()); for (int i = 0; i < arr6s.length(); i++) assertEquals(16, arr6s.getDouble(i), 1e-1); } @@ -659,10 +659,10 @@ public class OpExecutionerTests extends BaseNd4jTest { } assertEquals("Failed for [" + order + "] order", exp, arr6s); - System.out.println("ORDER: " + order); - for (int i = 0; i < 6; i++) { - System.out.println(arr6s.getDouble(i)); - } +// System.out.println("ORDER: " + order); +// for (int i = 0; i < 6; i++) { +// System.out.println(arr6s.getDouble(i)); +// } } } finally { Nd4j.factory().setOrder(origOrder); @@ -727,8 +727,8 @@ public class OpExecutionerTests extends BaseNd4jTest { DropOut dropOut = new DropOut(array, result, 0.05); Nd4j.getExecutioner().exec(dropOut); - System.out.println("Src array: " + array); - System.out.println("Res array: " + result); +// System.out.println("Src array: " + array); +// System.out.println("Res array: " + result); assertNotEquals(array, result); } @@ -741,8 +741,8 @@ public class OpExecutionerTests extends BaseNd4jTest { DropOutInverted dropOut = new DropOutInverted(array, result, 0.65); Nd4j.getExecutioner().exec(dropOut); - System.out.println("Src array: " + array); - System.out.println("Res array: " + result); +// System.out.println("Src array: " + array); +// System.out.println("Res array: " + result); assertNotEquals(array, result); } @@ -778,8 +778,8 @@ public class OpExecutionerTests extends BaseNd4jTest { assertEquals(5, result.columns()); assertEquals(assertion, result); - System.out.println(assertion.toString()); - System.out.println(result.toString()); +// 
System.out.println(assertion.toString()); +// System.out.println(result.toString()); } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpExecutionerTestsC.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpExecutionerTestsC.java index 72be040c5..3e7551ae4 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpExecutionerTestsC.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/ops/OpExecutionerTestsC.java @@ -126,7 +126,7 @@ public class OpExecutionerTestsC extends BaseNd4jTest { @Test public void testBroadcastMultiDim() { INDArray data = Nd4j.linspace(1, 30, 30, DataType.DOUBLE).reshape(2, 3, 5); - System.out.println(data); +// System.out.println(data); INDArray mask = Nd4j.create(new double[][] {{1.00, 1.00, 1.00, 1.00, 1.00}, {1.00, 1.00, 1.00, 0.00, 0.00}}); Nd4j.getExecutioner().exec(new BroadcastMulOp(data, mask, data, 0, 2)); INDArray assertion = Nd4j.create(new double[] {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, @@ -342,7 +342,8 @@ public class OpExecutionerTestsC extends BaseNd4jTest { public void testTad() { INDArray arr = Nd4j.linspace(1, 12, 12, DataType.DOUBLE).reshape(2, 3, 2); for (int i = 0; i < arr.tensorsAlongDimension(0); i++) { - System.out.println(arr.tensorAlongDimension(i, 0)); +// System.out.println(arr.tensorAlongDimension(i, 0)); + arr.tensorAlongDimension(i, 0); } } @@ -503,12 +504,12 @@ public class OpExecutionerTestsC extends BaseNd4jTest { @Test public void testMeanSumSimple() { - System.out.println("3d"); +// System.out.println("3d"); INDArray arr = Nd4j.ones(1, 4, 4); assertEquals(Nd4j.ones(1), arr.mean(1, 2)); assertEquals(Nd4j.ones(1).muli(16), arr.sum(1, 2)); - System.out.println("4d"); +// System.out.println("4d"); INDArray arr4 = Nd4j.ones(1, 1, 4, 4); INDArray arr4m = arr4.mean(2, 3); INDArray arr4s = arr4.sum(2, 3); @@ -516,7 +517,7 @@ public class OpExecutionerTestsC extends BaseNd4jTest { 
assertEquals(arr4m.getDouble(i), 1, 1e-1); for (int i = 0; i < arr4s.length(); i++) assertEquals(arr4s.getDouble(i), 16, 1e-1); - System.out.println("5d"); +// System.out.println("5d"); INDArray arr5 = Nd4j.ones(1, 1, 4, 4, 4); INDArray arr5s = arr5.sum(2, 3); for (int i = 0; i < arr5s.length(); i++) @@ -525,7 +526,7 @@ public class OpExecutionerTestsC extends BaseNd4jTest { for (int i = 0; i < arr5m.length(); i++) assertEquals(1, arr5m.getDouble(i), 1e-1); - System.out.println("6d"); +// System.out.println("6d"); INDArray arr6 = Nd4j.ones(1, 1, 4, 4, 4, 4); INDArray arr6m = arr6.mean(2, 3); for (int i = 0; i < arr6m.length(); i++) @@ -590,17 +591,17 @@ public class OpExecutionerTestsC extends BaseNd4jTest { @Test public void testSum5d() throws Exception { - System.out.println("5d"); +// System.out.println("5d"); INDArray arr5 = Nd4j.ones(1, 1, 4, 4, 4); INDArray arr5s = arr5.sum(2, 3); Thread.sleep(1000); - System.out.println("5d length: " + arr5s.length()); +// System.out.println("5d length: " + arr5s.length()); for (int i = 0; i < arr5s.length(); i++) assertEquals(16, arr5s.getDouble(i), 1e-1); INDArray arrF = Nd4j.ones(1, 1, 4, 4, 4); - System.out.println("A: " + arrF); +// System.out.println("A: " + arrF); } @@ -643,9 +644,9 @@ public class OpExecutionerTestsC extends BaseNd4jTest { INDArray cOrder = Nd4j.create(new int[] {2, 2}, 'c').assign(toAssign); INDArray fOrder = Nd4j.create(new int[] {2, 2}, 'f').assign(toAssign); - System.out.println(cOrder); - System.out.println(cOrder.sum(0)); //[2,4] -> correct - System.out.println(fOrder.sum(0)); //[2,3] -> incorrect +// System.out.println(cOrder); +// System.out.println(cOrder.sum(0)); //[2,4] -> correct +// System.out.println(fOrder.sum(0)); //[2,3] -> incorrect assertEquals(cOrder, fOrder); assertEquals(cOrder.sum(0), fOrder.sum(0)); @@ -908,7 +909,7 @@ public class OpExecutionerTestsC extends BaseNd4jTest { assertEquals(xDup, x); - log.info("bins: {}", z); +// log.info("bins: {}", z); assertEquals(zExp, z); } 
@@ -931,8 +932,8 @@ public class OpExecutionerTestsC extends BaseNd4jTest { expManhattanDistance += Math.abs(diff); } double expectedEuclidean = Math.sqrt(sumSquaredDiff); - System.out.println("Expected, Euclidean: " + expectedEuclidean); - System.out.println("Expected, Manhattan: " + expManhattanDistance); +// System.out.println("Expected, Euclidean: " + expectedEuclidean); +// System.out.println("Expected, Manhattan: " + expManhattanDistance); int mb = 2; INDArray firstOrig = Nd4j.create(mb, 2, 2, 2); @@ -959,14 +960,14 @@ public class OpExecutionerTestsC extends BaseNd4jTest { INDArray outManhattan = Nd4j.getExecutioner().exec(new ManhattanDistance(first, second, 1, 2, 3)); - System.out.println("\n\nOrder: " + order); - System.out.println("Euclidean:"); +// System.out.println("\n\nOrder: " + order); +// System.out.println("Euclidean:"); //System.out.println(Arrays.toString(out.getRow(0).dup().data().asDouble())); //System.out.println(Arrays.toString(out.getRow(1).dup().data().asDouble())); assertEquals(out.getDouble(0), out.getDouble(1), 1e-5); - System.out.println("Manhattan:"); +// System.out.println("Manhattan:"); //System.out.println(Arrays.toString(outManhattan.getRow(0).dup().data().asDouble())); //System.out.println(Arrays.toString(outManhattan.getRow(1).dup().data().asDouble())); @@ -1017,7 +1018,7 @@ public class OpExecutionerTestsC extends BaseNd4jTest { for (int i = 0; i < 32; i++) { INDArray tensor = array.tensorAlongDimension(i, 1, 2); - log.info("tad {}: {}", i, array.getDouble(0)); +// log.info("tad {}: {}", i, array.getDouble(0)); assertEquals((float) (100 + i) * (100 * 100), tensor.sumNumber().floatValue(), 0.001f); assertEquals((float) 100 + i, tensor.meanNumber().floatValue(), 0.001f); } @@ -1076,7 +1077,7 @@ public class OpExecutionerTestsC extends BaseNd4jTest { INDArray pile = Nd4j.pile(arrays); - log.info("Pile: {}", pile); +// log.info("Pile: {}", pile); INDArray[] tears = Nd4j.tear(pile, 1, 2); diff --git 
a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/profiling/OperationProfilerTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/profiling/OperationProfilerTests.java index d0c61de9b..95bd8a649 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/profiling/OperationProfilerTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/profiling/OperationProfilerTests.java @@ -125,7 +125,7 @@ public class OperationProfilerTests extends BaseNd4jTest { OpProfiler.PenaltyCause[] causes = OpProfiler.getInstance().processOperands(x, y); - log.info("Causes: {}", Arrays.toString(causes)); +// log.info("Causes: {}", Arrays.toString(causes)); assertEquals(1, causes.length); assertTrue(ArrayUtils.contains(causes, OpProfiler.PenaltyCause.MIXED_ORDER)); //assertTrue(ArrayUtils.contains(causes, OpProfiler.PenaltyCause.NON_EWS_ACCESS)); @@ -139,7 +139,7 @@ public class OperationProfilerTests extends BaseNd4jTest { OpProfiler.PenaltyCause[] causes = OpProfiler.getInstance().processOperands(x, y, z); - log.info("Causes: {}", Arrays.toString(causes)); +// log.info("Causes: {}", Arrays.toString(causes)); assertEquals(1, causes.length); assertTrue(ArrayUtils.contains(causes, OpProfiler.PenaltyCause.MIXED_ORDER)); //assertTrue(ArrayUtils.contains(causes, OpProfiler.PenaltyCause.NON_EWS_ACCESS)); @@ -154,7 +154,7 @@ public class OperationProfilerTests extends BaseNd4jTest { OpProfiler.PenaltyCause[] causes = OpProfiler.getInstance().processOperands(w, x, y, z); - log.info("Causes: {}", Arrays.toString(causes)); +// log.info("Causes: {}", Arrays.toString(causes)); assertEquals(1, causes.length); assertTrue(ArrayUtils.contains(causes, OpProfiler.PenaltyCause.MIXED_ORDER)); } @@ -167,7 +167,7 @@ public class OperationProfilerTests extends BaseNd4jTest { OpProfiler.PenaltyCause[] causes = OpProfiler.getInstance().processOperands(x, y); - log.info("Causes: {}", Arrays.toString(causes)); +// log.info("Causes: {}", 
Arrays.toString(causes)); assertEquals(1, causes.length); assertTrue(ArrayUtils.contains(causes, OpProfiler.PenaltyCause.STRIDED_ACCESS)); } @@ -181,7 +181,7 @@ public class OperationProfilerTests extends BaseNd4jTest { OpProfiler.PenaltyCause[] causes = OpProfiler.getInstance().processTADOperands(pair.getFirst()); - log.info("Causes: {}", Arrays.toString(causes)); +// log.info("Causes: {}", Arrays.toString(causes)); assertEquals(1, causes.length); assertTrue(ArrayUtils.contains(causes, OpProfiler.PenaltyCause.TAD_NON_EWS_ACCESS)); } @@ -195,7 +195,7 @@ public class OperationProfilerTests extends BaseNd4jTest { OpProfiler.PenaltyCause[] causes = OpProfiler.getInstance().processTADOperands(pair.getFirst()); - log.info("Causes: {}", Arrays.toString(causes)); +// log.info("Causes: {}", Arrays.toString(causes)); assertEquals(1, causes.length); assertTrue(ArrayUtils.contains(causes, OpProfiler.PenaltyCause.TAD_NON_EWS_ACCESS)); } @@ -211,7 +211,7 @@ public class OperationProfilerTests extends BaseNd4jTest { OpProfiler.PenaltyCause[] causes = OpProfiler.getInstance().processTADOperands(pair.getFirst()); - log.info("Causes: {}", Arrays.toString(causes)); +// log.info("Causes: {}", Arrays.toString(causes)); assertEquals(1, causes.length); assertTrue(ArrayUtils.contains(causes, OpProfiler.PenaltyCause.TAD_NON_EWS_ACCESS)); } @@ -225,8 +225,8 @@ public class OperationProfilerTests extends BaseNd4jTest { OpProfiler.PenaltyCause[] causes = OpProfiler.getInstance().processTADOperands(pair.getFirst()); - log.info("TAD: {}", Arrays.toString(pair.getFirst().asInt())); - log.info("Causes: {}", Arrays.toString(causes)); +// log.info("TAD: {}", Arrays.toString(pair.getFirst().asInt())); +// log.info("Causes: {}", Arrays.toString(causes)); assertEquals(1, causes.length); assertTrue(ArrayUtils.contains(causes, OpProfiler.PenaltyCause.NONE)); } @@ -239,8 +239,8 @@ public class OperationProfilerTests extends BaseNd4jTest { OpProfiler.PenaltyCause[] causes = 
OpProfiler.getInstance().processTADOperands(pair.getFirst()); - log.info("TAD: {}", Arrays.toString(pair.getFirst().asInt())); - log.info("Causes: {}", Arrays.toString(causes)); +// log.info("TAD: {}", Arrays.toString(pair.getFirst().asInt())); +// log.info("Causes: {}", Arrays.toString(causes)); assertEquals(1, causes.length); assertTrue(ArrayUtils.contains(causes, OpProfiler.PenaltyCause.TAD_STRIDED_ACCESS)); } @@ -412,7 +412,7 @@ public class OperationProfilerTests extends BaseNd4jTest { val avgA = (nanosB - nanosA) / iterations; - log.info("A: {}; B: {}", avgA, avgB); +// log.info("A: {}; B: {}", avgA, avgB); } } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/rng/RandomTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/rng/RandomTests.java index c663f3db8..fc6a034fd 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/rng/RandomTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/rng/RandomTests.java @@ -89,7 +89,7 @@ public class RandomTests extends BaseNd4jTest { Nd4j.createUninitialized(shape, Nd4j.order()), mean, standardDeviation), Nd4j.getRandom()); - log.info("arr: {}", arr.data().asDouble()); +// log.info("arr: {}", arr.data().asDouble()); assertEquals(exp, arr); } @@ -106,8 +106,8 @@ public class RandomTests extends BaseNd4jTest { UniformDistribution distribution2 = new UniformDistribution(z2, 1.0, 2.0); Nd4j.getExecutioner().exec(distribution2, random2); - System.out.println("Data: " + z1); - System.out.println("Data: " + z2); +// System.out.println("Data: " + z1); +// System.out.println("Data: " + z2); for (int e = 0; e < z1.length(); e++) { double val = z1.getDouble(e); assertTrue(val >= 1.0 && val <= 2.0); @@ -136,8 +136,8 @@ public class RandomTests extends BaseNd4jTest { log.info("States cpu: {}/{}", random1.rootState(), random1.nodeState()); - System.out.println("Data: " + z1); - System.out.println("Data: " + z2); +// System.out.println("Data: " + z1); 
+// System.out.println("Data: " + z2); for (int e = 0; e < z1.length(); e++) { double val = z1.getDouble(e); assertTrue(val >= 1.0 && val <= 2.0); @@ -157,8 +157,8 @@ public class RandomTests extends BaseNd4jTest { UniformDistribution distribution2 = new UniformDistribution(z2, 1.0, 2.0); Nd4j.getExecutioner().exec(distribution2, random1); - System.out.println("Data: " + z1); - System.out.println("Data: " + z2); +// System.out.println("Data: " + z1); +// System.out.println("Data: " + z2); assertNotEquals(z1, z2); } @@ -404,7 +404,7 @@ public class RandomTests extends BaseNd4jTest { Distribution nd = new NormalDistribution(random1, 0.0, 1.0); Nd4j.sort(z1, true); - System.out.println("Data for Anderson-Darling: " + z1); +// System.out.println("Data for Anderson-Darling: " + z1); for (int i = 0; i < n; i++) { @@ -434,7 +434,7 @@ public class RandomTests extends BaseNd4jTest { Random random1 = Nd4j.getRandomFactory().getNewRandomInstance(119); - log.info("1: ----------------"); +// log.info("1: ----------------"); INDArray z0 = Nd4j.getExecutioner().exec(new GaussianDistribution(Nd4j.createUninitialized(DataType.DOUBLE, 1000000), 0.0, 1.0)); @@ -443,7 +443,7 @@ public class RandomTests extends BaseNd4jTest { random1.setSeed(119); - log.info("2: ----------------"); +// log.info("2: ----------------"); INDArray z1 = Nd4j.zeros(DataType.DOUBLE, 55000000); INDArray z2 = Nd4j.zeros(DataType.DOUBLE, 55000000); @@ -451,16 +451,16 @@ public class RandomTests extends BaseNd4jTest { GaussianDistribution op1 = new GaussianDistribution(z1, 0.0, 1.0); Nd4j.getExecutioner().exec(op1, random1); - log.info("2: ----------------"); +// log.info("2: ----------------"); //log.info("End: [{}, {}, {}, {}]", z1.getFloat(29000000), z1.getFloat(29000001), z1.getFloat(29000002), z1.getFloat(29000003)); //log.info("Sum: {}", z1.sumNumber().doubleValue()); - log.info("Sum2: {}", z2.sumNumber().doubleValue()); +// log.info("Sum2: {}", z2.sumNumber().doubleValue()); INDArray match = 
Nd4j.getExecutioner().exec(new MatchCondition(z1, Conditions.isNan())); - log.info("NaNs: {}", match); +// log.info("NaNs: {}", match); assertEquals(0.0f, match.getFloat(0), 0.01f); /* @@ -481,7 +481,7 @@ public class RandomTests extends BaseNd4jTest { public void testSum_119() { INDArray z2 = Nd4j.zeros(DataType.DOUBLE, 55000000); val sum = z2.sumNumber().doubleValue(); - log.info("Sum2: {}", sum); +// log.info("Sum2: {}", sum); assertEquals(0.0, sum, 1e-5); } @@ -1371,7 +1371,7 @@ public class RandomTests extends BaseNd4jTest { val array = dist.sample(new int[] {6, 9}); - log.info("Array: {}", array); +// log.info("Array: {}", array); } @Test @@ -1380,7 +1380,7 @@ public class RandomTests extends BaseNd4jTest { val array = dist.sample(new int[] {9, 6}); - log.info("Array: {}", array); +// log.info("Array: {}", array); } @Test @@ -1389,7 +1389,7 @@ public class RandomTests extends BaseNd4jTest { val array = dist.sample(new int[] {9, 9}); - log.info("Array: {}", array); +// log.info("Array: {}", array); } @Test @@ -1398,7 +1398,7 @@ public class RandomTests extends BaseNd4jTest { int numBatches = 1; for( int t=0; t<10; t++ ) { - System.out.println(t); +// System.out.println(t); numBatches = t; List initial = getList(numBatches); @@ -1425,7 +1425,7 @@ public class RandomTests extends BaseNd4jTest { Nd4j.getRandom().setSeed(12345); INDArray arr = Nd4j.create(DataType.DOUBLE, 100); Nd4j.exec(new BernoulliDistribution(arr, 0.5)); - System.out.println(arr); +// System.out.println(arr); double sum = arr.sumNumber().doubleValue(); assertTrue(String.valueOf(sum), sum > 0.0 && sum < 100.0); } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/serde/NumpyFormatTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/serde/NumpyFormatTests.java index 164760dc0..8fa4ed44f 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/serde/NumpyFormatTests.java +++ 
b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/serde/NumpyFormatTests.java @@ -70,7 +70,7 @@ public class NumpyFormatTests extends BaseNd4jTest { int lastDot = path.lastIndexOf('.'); int lastUnderscore = path.lastIndexOf('_'); String dtype = path.substring(lastUnderscore+1, lastDot); - System.out.println(path + " : " + dtype); +// System.out.println(path + " : " + dtype); DataType dt = DataType.fromNumpy(dtype); //System.out.println(dt); @@ -120,7 +120,7 @@ public class NumpyFormatTests extends BaseNd4jTest { int lastDot = path.lastIndexOf('.'); int lastUnderscore = path.lastIndexOf('_'); String dtype = path.substring(lastUnderscore+1, lastDot); - System.out.println(path + " : " + dtype); +// System.out.println(path + " : " + dtype); DataType dt = DataType.fromNumpy(dtype); //System.out.println(dt); @@ -173,7 +173,7 @@ public class NumpyFormatTests extends BaseNd4jTest { int lastDot = path.lastIndexOf('.'); int lastSlash = Math.max(path.lastIndexOf('/'), path.lastIndexOf('\\')); String dtype = path.substring(lastSlash+1, lastDot); - System.out.println(path + " : " + dtype); +// System.out.println(path + " : " + dtype); DataType dt = DataType.fromNumpy(dtype); //System.out.println(dt); @@ -236,7 +236,7 @@ public class NumpyFormatTests extends BaseNd4jTest { int lastDot = path.lastIndexOf('.'); int lastUnderscore = path.lastIndexOf('_'); String dtype = path.substring(lastUnderscore + 1, lastDot); - System.out.println(path + " : " + dtype); +// System.out.println(path + " : " + dtype); DataType dt = DataType.fromNumpy(dtype); //System.out.println(dt); @@ -322,8 +322,8 @@ public class NumpyFormatTests extends BaseNd4jTest { @Test public void testNumpyBoolean() { INDArray out = Nd4j.createFromNpyFile(new File("c:/Users/raver/Downloads/error2.npy")); - System.out.println(ArrayUtil.toList(ArrayUtil.toInts(out.shape()))); - System.out.println(out); +// System.out.println(ArrayUtil.toList(ArrayUtil.toInts(out.shape()))); +// System.out.println(out); } 
@Override diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/NDArrayMathTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/NDArrayMathTests.java index 521515b5f..aa6ce104b 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/NDArrayMathTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/NDArrayMathTests.java @@ -116,10 +116,11 @@ public class NDArrayMathTests extends BaseNd4jTest { INDArray otherTest = Nd4j.linspace(1, 144, 144, DataType.DOUBLE).reshape(6, 3, 2, 2, 2); - System.out.println(otherTest); +// System.out.println(otherTest); INDArray baseArr = Nd4j.linspace(1, 8, 8, DataType.DOUBLE).reshape(2, 2, 2); for (int i = 0; i < baseArr.tensorsAlongDimension(0, 1); i++) { - System.out.println(NDArrayMath.sliceOffsetForTensor(i, baseArr, new int[] {2, 2})); +// System.out.println(NDArrayMath.sliceOffsetForTensor(i, baseArr, new int[] {2, 2})); + NDArrayMath.sliceOffsetForTensor(i, baseArr, new int[] {2, 2}); } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/ShapeTestsC.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/ShapeTestsC.java index f4f3e67f2..373791a2c 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/ShapeTestsC.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/ShapeTestsC.java @@ -126,7 +126,7 @@ public class ShapeTestsC extends BaseNd4jTest { INDArray matrix = Nd4j.create(new double[][] {{1, 2}, {3, 4}}); for (int i = 0; i < matrix.rows(); i++) { INDArray row = matrix.getRow(i); - System.out.println(matrix.getRow(i)); +// System.out.println(matrix.getRow(i)); } matrix.putRow(1, Nd4j.create(new double[] {1, 2})); assertEquals(matrix.getRow(0), matrix.getRow(1)); @@ -187,9 +187,9 @@ public class ShapeTestsC extends BaseNd4jTest { INDArray slice = nd.slice(1, 0); INDArray vector = slice; - for (int i = 0; i < vector.length(); i++) { - 
System.out.println(vector.getDouble(i)); - } +// for (int i = 0; i < vector.length(); i++) { +// System.out.println(vector.getDouble(i)); +// } assertEquals(Nd4j.create(new double[] {4, 5, 6}), vector); } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/TADTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/TADTests.java index 6f47d00da..2953e2677 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/TADTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/TADTests.java @@ -94,13 +94,13 @@ public class TADTests extends BaseNd4jTest { } } - log.info("3D TADs:"); +// log.info("3D TADs:"); for (char o : order) { INDArray array = Nd4j.create(new int[] {9, 7, 5, 3}, o); for (int[] shape : dim_3) { Arrays.sort(shape); - log.info("About to do shape: " + Arrays.toString(shape) + " for array of shape " - + array.shapeInfoToString()); +// log.info("About to do shape: " + Arrays.toString(shape) + " for array of shape " +// + array.shapeInfoToString()); INDArray assertion = array.tensorAlongDimension(0, shape); INDArray test = array.tensorAlongDimension(0, shape); assertEquals(assertion, test); @@ -128,10 +128,10 @@ public class TADTests extends BaseNd4jTest { Pair tadBuffersC = Nd4j.getExecutioner().getTADManager().getTADOnlyShapeInfo(arrayC, 2, 3); - log.info("Got TADShapeF: {}", Arrays.toString(tadBuffersF.getFirst().asInt()) + " with java " - + javaFTad.shapeInfoDataBuffer()); - log.info("Got TADShapeC: {}", Arrays.toString(tadBuffersC.getFirst().asInt()) + " with java " - + javaCTad.shapeInfoDataBuffer()); +// log.info("Got TADShapeF: {}", Arrays.toString(tadBuffersF.getFirst().asInt()) + " with java " +// + javaFTad.shapeInfoDataBuffer()); +// log.info("Got TADShapeC: {}", Arrays.toString(tadBuffersC.getFirst().asInt()) + " with java " +// + javaCTad.shapeInfoDataBuffer()); } @Test diff --git 
a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/concat/ConcatTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/concat/ConcatTests.java index a68883b4f..97f591cff 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/concat/ConcatTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/concat/ConcatTests.java @@ -138,7 +138,7 @@ public class ConcatTests extends BaseNd4jTest { assertEquals(exp, concat0); - System.out.println("1------------------------"); +// System.out.println("1------------------------"); //ConcatV2, dim 1 second = Nd4j.linspace(24, 32, 8, DataType.DOUBLE).reshape('c', 2, 1, 4); @@ -148,7 +148,7 @@ public class ConcatTests extends BaseNd4jTest { exp.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.point(3), NDArrayIndex.all()}, second); exp.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.interval(4, 6), NDArrayIndex.all()}, third); - System.out.println("2------------------------"); +// System.out.println("2------------------------"); INDArray concat1 = Nd4j.concat(1, first, second, third); @@ -192,7 +192,7 @@ public class ConcatTests extends BaseNd4jTest { INDArray s2 = s.getFirst().assign(second); INDArray t2 = t.getFirst().assign(third); - System.out.println("-------------------------------------------"); +// System.out.println("-------------------------------------------"); INDArray concat0 = Nd4j.concat(0, f2, s2, t2); assertEquals(exp, concat0); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/concat/ConcatTestsC.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/concat/ConcatTestsC.java index 806cf4d08..07ef7dcac 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/concat/ConcatTestsC.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/concat/ConcatTestsC.java @@ -108,7 +108,7 @@ public class ConcatTestsC extends BaseNd4jTest 
{ assertEquals(3, result.rows()); assertEquals(10, result.columns()); - System.out.println(result); +// System.out.println(result); for (int x = 0; x < 30; x++) { assertEquals(1f, result.getFloat(x), 0.001f); @@ -124,8 +124,8 @@ public class ConcatTestsC extends BaseNd4jTest { INDArray concat1 = Nd4j.concat(1, a, b); INDArray oneAssertion = Nd4j.create(new double[][] {{1, 2, 1, 2}, {3, 4, 3, 4}}); - System.out.println("Assertion: " + Arrays.toString(oneAssertion.data().asFloat())); - System.out.println("Result: " + Arrays.toString(concat1.data().asFloat())); +// System.out.println("Assertion: " + Arrays.toString(oneAssertion.data().asFloat())); +// System.out.println("Result: " + Arrays.toString(concat1.data().asFloat())); assertEquals(oneAssertion, concat1); @@ -186,7 +186,7 @@ public class ConcatTestsC extends BaseNd4jTest { second = Nd4j.linspace(24, 32, 8, Nd4j.dataType()).reshape('c', 2, 1, 4); for (int i = 0; i < second.tensorsAlongDimension(1); i++) { INDArray secondTad = second.tensorAlongDimension(i, 1); - System.out.println(second.tensorAlongDimension(i, 1)); +// System.out.println(second.tensorAlongDimension(i, 1)); } third = Nd4j.linspace(32, 48, 16).reshape('c', 2, 2, 4); @@ -215,7 +215,7 @@ public class ConcatTestsC extends BaseNd4jTest { @Test(expected = ND4JIllegalStateException.class) public void testConcatVector() { - System.out.println(Nd4j.concat(0, Nd4j.ones(1,1000000), Nd4j.create(1, 1))); + Nd4j.concat(0, Nd4j.ones(1,1000000), Nd4j.create(1, 1)); } @Test diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/concat/padding/PaddingTestsC.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/concat/padding/PaddingTestsC.java index 2483f03e6..448d8c8ec 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/concat/padding/PaddingTestsC.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/concat/padding/PaddingTestsC.java @@ -103,7 +103,7 @@ public class 
PaddingTestsC extends BaseNd4jTest { long outWidth = Convolution.outSize(h, kh, sy, ph, 1, true); long outHeight = Convolution.outSize(w, kw, sx, pw, 1, true); INDArray padded = Nd4j.pad(linspaced, new int[][] {{0, 0}, {0, 0}, {ph, ph + sy - 1}, {pw, pw + sx - 1}}); - System.out.println(padded); +// System.out.println(padded); } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/indexing/IndexingTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/indexing/IndexingTests.java index b67f684c7..eb8af3852 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/indexing/IndexingTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/indexing/IndexingTests.java @@ -52,7 +52,7 @@ public class IndexingTests extends BaseNd4jTest { @Test public void testGet() { - System.out.println("Testing sub-array put and get with a 3D array ..."); +// System.out.println("Testing sub-array put and get with a 3D array ..."); INDArray arr = Nd4j.linspace(0, 124, 125).reshape(5, 5, 5); @@ -99,13 +99,13 @@ public class IndexingTests extends BaseNd4jTest { INDArray whatToPut = arr.get(whereToGet); assertEquals(subArr_A, whatToPut); - System.out.println(whatToPut); +// System.out.println(whatToPut); INDArrayIndex[] whereToPut = new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all()}; subArr_B.put(whereToPut, whatToPut); assertEquals(subArr_A, subArr_B); - System.out.println("... done"); +// System.out.println("... 
done"); } /* @@ -154,7 +154,7 @@ public class IndexingTests extends BaseNd4jTest { INDArrayIndex ndi_Slice = NDArrayIndex.point(s); for (int i = 0; i < rows; i++) { for (int j = 0; j < cols; j++) { - log.info("Running for ( {}, {} - {} , {} - {} )", s, i, rows, j, cols); +// log.info("Running for ( {}, {} - {} , {} - {} )", s, i, rows, j, cols); INDArrayIndex ndi_I = NDArrayIndex.interval(i, rows); INDArrayIndex ndi_J = NDArrayIndex.interval(j, cols); INDArray aView = A.get(ndi_Slice, NDArrayIndex.all(), NDArrayIndex.all()).get(ndi_I, ndi_J); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/indexing/IndexingTestsC.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/indexing/IndexingTestsC.java index 9593b5a3b..b836cfe75 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/indexing/IndexingTestsC.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/indexing/IndexingTestsC.java @@ -193,7 +193,7 @@ public class IndexingTestsC extends BaseNd4jTest { @Test public void testGet() { - System.out.println("Testing sub-array put and get with a 3D array ..."); +// System.out.println("Testing sub-array put and get with a 3D array ..."); INDArray arr = Nd4j.linspace(0, 124, 125).reshape(5, 5, 5); @@ -238,14 +238,14 @@ public class IndexingTestsC extends BaseNd4jTest { INDArrayIndex[] whereToGet = new INDArrayIndex[] {ndi_Slice, ndi_I, ndi_J}; INDArray whatToPut = arr.get(whereToGet); - System.out.println(whatToPut); +// System.out.println(whatToPut); INDArrayIndex[] whereToPut = new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all()}; subArr_B.put(whereToPut, whatToPut); assertEquals(subArr_A, subArr_B); - System.out.println("... done"); +// System.out.println("... 
done"); } @Test @@ -286,7 +286,7 @@ public class IndexingTestsC extends BaseNd4jTest { INDArrayIndex ndi_Slice = NDArrayIndex.point(s); for (int i = 0; i < rows; i++) { for (int j = 0; j < cols; j++) { - log.info("Running for ( {}, {} - {} , {} - {} )", s, i, rows, j, cols); +// log.info("Running for ( {}, {} - {} , {} - {} )", s, i, rows, j, cols); INDArrayIndex ndi_I = NDArrayIndex.interval(i, rows); INDArrayIndex ndi_J = NDArrayIndex.interval(j, cols); INDArray aView = A.get(ndi_Slice, NDArrayIndex.all(), NDArrayIndex.all()).get(ndi_I, ndi_J); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/ones/LeadingAndTrailingOnes.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/ones/LeadingAndTrailingOnes.java index 275e9dcd6..9f4d9ec9b 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/ones/LeadingAndTrailingOnes.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/ones/LeadingAndTrailingOnes.java @@ -65,7 +65,7 @@ public class LeadingAndTrailingOnes extends BaseNd4jTest { INDArray arr = Nd4j.create(1, 10, 1, 1); arr.assign(1); arr.toString(); - System.out.println(arr); +// System.out.println(arr); } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/ones/LeadingAndTrailingOnesC.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/ones/LeadingAndTrailingOnesC.java index d5f5ac361..7c95b9bfe 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/ones/LeadingAndTrailingOnesC.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/shape/ones/LeadingAndTrailingOnesC.java @@ -41,18 +41,18 @@ public class LeadingAndTrailingOnesC extends BaseNd4jTest { public void testCreateLeadingAndTrailingOnes() { INDArray arr = Nd4j.create(1, 10, 1, 1); arr.assign(1); - System.out.println(arr); +// System.out.println(arr); } @Test public void testMatrix() { INDArray arr = Nd4j.linspace(1, 4, 
4).reshape(2, 2); INDArray slice1 = arr.slice(1); - System.out.println(arr.slice(1)); +// System.out.println(arr.slice(1)); INDArray oneInMiddle = Nd4j.linspace(1, 4, 4).reshape(2, 1, 2); INDArray otherSlice = oneInMiddle.slice(1); assertEquals(2, otherSlice.offset()); - System.out.println(otherSlice); +// System.out.println(otherSlice); INDArray twoOnesInMiddle = Nd4j.linspace(1, 4, 4).reshape(2, 1, 1, 2); INDArray sub = twoOnesInMiddle.get(NDArrayIndex.point(1), NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all()); @@ -65,21 +65,7 @@ public class LeadingAndTrailingOnesC extends BaseNd4jTest { INDArray tensor = Nd4j.linspace(1, 144, 144).reshape(2, 2, 1, 1, 6, 6); INDArray tensorSlice1 = tensor.slice(1); INDArray tensorSlice1Slice1 = tensorSlice1.slice(1); - System.out.println(tensor); - } - - @Test - public void testOnesInMiddleTensor() { - INDArray im2colAssertion = Nd4j.create(new double[] {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, - 0.0, 0.0, 1.0, 2.0, 0.0, 0.0, 0.0, 0.0, 3.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, - 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, - 5.0, 6.0, 0.0, 0.0, 0.0, 0.0, 7.0, 8.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, - 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 9.0, 10.0, - 0.0, 0.0, 0.0, 0.0, 11.0, 12.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, - 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 13.0, 14.0, 0.0, 0.0, - 0.0, 0.0, 15.0, 16.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0}, - new int[] {2, 2, 1, 1, 6, 6}); - System.out.println(im2colAssertion); +// System.out.println(tensor); } @Override diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/slicing/SlicingTestsC.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/slicing/SlicingTestsC.java index 0e696c884..4fd06dced 100644 --- 
a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/slicing/SlicingTestsC.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/slicing/SlicingTestsC.java @@ -43,7 +43,8 @@ public class SlicingTestsC extends BaseNd4jTest { @Test public void testSliceRowVector() { INDArray arr = Nd4j.zeros(5); - System.out.println(arr.slice(1)); +// System.out.println(arr.slice(1)); + arr.slice(1); } @@ -51,10 +52,10 @@ public class SlicingTestsC extends BaseNd4jTest { public void testSliceAssertion() { INDArray arr = Nd4j.linspace(1, 30, 30).reshape(3, 5, 2); INDArray firstRow = arr.slice(0).slice(0); - for (int i = 0; i < firstRow.length(); i++) { - System.out.println(firstRow.getDouble(i)); - } - System.out.println(firstRow); +// for (int i = 0; i < firstRow.length(); i++) { +// System.out.println(firstRow.getDouble(i)); +// } +// System.out.println(firstRow); } @Test @@ -64,19 +65,19 @@ public class SlicingTestsC extends BaseNd4jTest { INDArray sliceZero = arr.slice(0); for (int i = 0; i < sliceZero.rows(); i++) { INDArray row = sliceZero.slice(i); - for (int j = 0; j < row.length(); j++) { - System.out.println(row.getDouble(j)); - } - System.out.println(row); +// for (int j = 0; j < row.length(); j++) { +// System.out.println(row.getDouble(j)); +// } +// System.out.println(row); } INDArray assertion = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, new int[] {5, 2}); for (int i = 0; i < assertion.rows(); i++) { INDArray row = assertion.slice(i); - for (int j = 0; j < row.length(); j++) { - System.out.println(row.getDouble(j)); - } - System.out.println(row); +// for (int j = 0; j < row.length(); j++) { +// System.out.println(row.getDouble(j)); +// } +// System.out.println(row); } assertArrayEquals(new long[] {5, 2}, sliceZero.shape()); assertEquals(assertion, sliceZero); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/specials/SortCooTests.java 
b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/specials/SortCooTests.java index 33d24cd68..fb85535f6 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/specials/SortCooTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/specials/SortCooTests.java @@ -84,13 +84,13 @@ public class SortCooTests extends BaseNd4jTest { DataBuffer idx = Nd4j.getDataBufferFactory().createLong(indices); DataBuffer val = Nd4j.createBuffer(values); - log.info("Old indices: {}", Arrays.toString(idx.asInt())); +// log.info("Old indices: {}", Arrays.toString(idx.asInt())); NativeOpsHolder.getInstance().getDeviceNativeOps().sortCooIndices(null, (LongPointer) idx.addressPointer(), val.addressPointer(), 4, 3); - log.info("New indices: {}", Arrays.toString(idx.asInt())); +// log.info("New indices: {}", Arrays.toString(idx.asInt())); assertArrayEquals(expIndices, idx.asInt()); assertArrayEquals(expValues, val.asDouble(), 1e-5); diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/util/ValidationUtilTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/util/ValidationUtilTests.java index 98f2b7aa8..7e6e73289 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/util/ValidationUtilTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/util/ValidationUtilTests.java @@ -44,7 +44,7 @@ public class ValidationUtilTests extends BaseNd4jTest { ValidationResult vr0 = Nd4jCommonValidator.isValidFile(fNonExistent); assertFalse(vr0.isValid()); assertTrue(vr0.getIssues().get(0), vr0.getIssues().get(0).contains("exist")); - System.out.println(vr0.toString()); +// System.out.println(vr0.toString()); //Test empty file: File fEmpty = new File(f, "0.bin"); @@ -52,7 +52,7 @@ public class ValidationUtilTests extends BaseNd4jTest { ValidationResult vr1 = Nd4jCommonValidator.isValidFile(fEmpty); assertFalse(vr1.isValid()); assertTrue(vr1.getIssues().get(0), 
vr1.getIssues().get(0).contains("empty")); - System.out.println(vr1.toString()); +// System.out.println(vr1.toString()); //Test directory File directory = new File(f, "dir"); @@ -61,14 +61,14 @@ public class ValidationUtilTests extends BaseNd4jTest { ValidationResult vr2 = Nd4jCommonValidator.isValidFile(directory); assertFalse(vr2.isValid()); assertTrue(vr2.getIssues().get(0), vr2.getIssues().get(0).contains("directory")); - System.out.println(vr2.toString()); +// System.out.println(vr2.toString()); //Test valid non-empty file - valid File f3 = new File(f, "1.txt"); FileUtils.writeStringToFile(f3, "Test", StandardCharsets.UTF_8); ValidationResult vr3 = Nd4jCommonValidator.isValidFile(f3); assertTrue(vr3.isValid()); - System.out.println(vr3.toString()); +// System.out.println(vr3.toString()); } @Test @@ -80,7 +80,7 @@ public class ValidationUtilTests extends BaseNd4jTest { ValidationResult vr0 = Nd4jCommonValidator.isValidZipFile(fNonExistent, false); assertFalse(vr0.isValid()); assertTrue(vr0.getIssues().get(0), vr0.getIssues().get(0).contains("exist")); - System.out.println(vr0.toString()); +// System.out.println(vr0.toString()); //Test empty zip: File fEmpty = new ClassPathResource("validation/empty_zip.zip").getFile(); @@ -88,7 +88,7 @@ public class ValidationUtilTests extends BaseNd4jTest { ValidationResult vr1 = Nd4jCommonValidator.isValidZipFile(fEmpty, false); assertFalse(vr1.isValid()); assertTrue(vr1.getIssues().get(0), vr1.getIssues().get(0).contains("empty")); - System.out.println(vr1.toString()); +// System.out.println(vr1.toString()); //Test directory (not zip file) File directory = new File(f, "dir"); @@ -97,7 +97,7 @@ public class ValidationUtilTests extends BaseNd4jTest { ValidationResult vr2 = Nd4jCommonValidator.isValidFile(directory); assertFalse(vr2.isValid()); assertTrue(vr2.getIssues().get(0), vr2.getIssues().get(0).contains("directory")); - System.out.println(vr2.toString()); +// System.out.println(vr2.toString()); //Test non-empty zip - 
valid File f3 = new File(f, "1.zip"); @@ -108,7 +108,7 @@ public class ValidationUtilTests extends BaseNd4jTest { } ValidationResult vr3 = Nd4jCommonValidator.isValidZipFile(f3, false); assertTrue(vr3.isValid()); - System.out.println(vr3.toString()); +// System.out.println(vr3.toString()); //Test non-empty zip - but missing required entries ValidationResult vr4 = Nd4jCommonValidator.isValidZipFile(f3, false, "content.txt", "someFile1.bin", "someFile2.bin"); @@ -117,7 +117,7 @@ public class ValidationUtilTests extends BaseNd4jTest { String s = vr4.getIssues().get(0); assertTrue(s, s.contains("someFile1.bin") && s.contains("someFile2.bin")); assertFalse(s, s.contains("content.txt")); - System.out.println(vr4.toString()); +// System.out.println(vr4.toString()); } @@ -131,7 +131,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertFalse(vr0.isValid()); assertEquals("INDArray Text File", vr0.getFormatType()); assertTrue(vr0.getIssues().get(0), vr0.getIssues().get(0).contains("exist")); - System.out.println(vr0.toString()); +// System.out.println(vr0.toString()); //Test empty file: File fEmpty = new File(f, "empty.txt"); @@ -141,7 +141,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertEquals("INDArray Text File", vr1.getFormatType()); assertFalse(vr1.isValid()); assertTrue(vr1.getIssues().get(0), vr1.getIssues().get(0).contains("empty")); - System.out.println(vr1.toString()); +// System.out.println(vr1.toString()); //Test directory (not zip file) File directory = new File(f, "dir"); @@ -151,7 +151,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertEquals("INDArray Text File", vr2.getFormatType()); assertFalse(vr2.isValid()); assertTrue(vr2.getIssues().get(0), vr2.getIssues().get(0).contains("directory")); - System.out.println(vr2.toString()); +// System.out.println(vr2.toString()); //Test non-INDArray format: File fText = new File(f, "text.txt"); @@ -161,7 +161,7 @@ public class ValidationUtilTests extends BaseNd4jTest { 
assertFalse(vr3.isValid()); String s = vr3.getIssues().get(0); assertTrue(s, s.contains("text") && s.contains("INDArray") && s.contains("corrupt")); - System.out.println(vr3.toString()); +// System.out.println(vr3.toString()); //Test corrupted txt format: File fValid = new File(f, "valid.txt"); @@ -179,7 +179,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertFalse(vr4.isValid()); s = vr4.getIssues().get(0); assertTrue(s, s.contains("text") && s.contains("INDArray") && s.contains("corrupt")); - System.out.println(vr4.toString()); +// System.out.println(vr4.toString()); //Test valid npz format: @@ -188,7 +188,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertTrue(vr5.isValid()); assertNull(vr5.getIssues()); assertNull(vr5.getException()); - System.out.println(vr4.toString()); +// System.out.println(vr4.toString()); } @@ -204,7 +204,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertFalse(vr0.isValid()); assertEquals("Numpy .npy File", vr0.getFormatType()); assertTrue(vr0.getIssues().get(0), vr0.getIssues().get(0).contains("exist")); - System.out.println(vr0.toString()); +// System.out.println(vr0.toString()); //Test empty file: File fEmpty = new File(f, "empty.npy"); @@ -214,7 +214,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertEquals("Numpy .npy File", vr1.getFormatType()); assertFalse(vr1.isValid()); assertTrue(vr1.getIssues().get(0), vr1.getIssues().get(0).contains("empty")); - System.out.println(vr1.toString()); +// System.out.println(vr1.toString()); //Test directory (not zip file) File directory = new File(f, "dir"); @@ -224,7 +224,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertEquals("Numpy .npy File", vr2.getFormatType()); assertFalse(vr2.isValid()); assertTrue(vr2.getIssues().get(0), vr2.getIssues().get(0).contains("directory")); - System.out.println(vr2.toString()); +// System.out.println(vr2.toString()); //Test non-numpy format: File fText = new File(f, "text.txt"); 
@@ -234,7 +234,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertFalse(vr3.isValid()); String s = vr3.getIssues().get(0); assertTrue(s, s.contains("npy") && s.toLowerCase().contains("numpy") && s.contains("corrupt")); - System.out.println(vr3.toString()); +// System.out.println(vr3.toString()); //Test corrupted npy format: File fValid = new ClassPathResource("numpy_arrays/arange_3,4_float32.npy").getFile(); @@ -250,7 +250,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertFalse(vr4.isValid()); s = vr4.getIssues().get(0); assertTrue(s, s.contains("npy") && s.toLowerCase().contains("numpy") && s.contains("corrupt")); - System.out.println(vr4.toString()); +// System.out.println(vr4.toString()); //Test valid npy format: @@ -259,7 +259,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertTrue(vr5.isValid()); assertNull(vr5.getIssues()); assertNull(vr5.getException()); - System.out.println(vr4.toString()); +// System.out.println(vr4.toString()); } @Test @@ -273,7 +273,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertFalse(vr0.isValid()); assertEquals("Numpy .npz File", vr0.getFormatType()); assertTrue(vr0.getIssues().get(0), vr0.getIssues().get(0).contains("exist")); - System.out.println(vr0.toString()); +// System.out.println(vr0.toString()); //Test empty file: File fEmpty = new File(f, "empty.npz"); @@ -283,7 +283,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertEquals("Numpy .npz File", vr1.getFormatType()); assertFalse(vr1.isValid()); assertTrue(vr1.getIssues().get(0), vr1.getIssues().get(0).contains("empty")); - System.out.println(vr1.toString()); +// System.out.println(vr1.toString()); //Test directory (not zip file) File directory = new File(f, "dir"); @@ -293,7 +293,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertEquals("Numpy .npz File", vr2.getFormatType()); assertFalse(vr2.isValid()); assertTrue(vr2.getIssues().get(0), 
vr2.getIssues().get(0).contains("directory")); - System.out.println(vr2.toString()); +// System.out.println(vr2.toString()); //Test non-numpy format: File fText = new File(f, "text.txt"); @@ -303,7 +303,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertFalse(vr3.isValid()); String s = vr3.getIssues().get(0); assertTrue(s, s.contains("npz") && s.toLowerCase().contains("numpy") && s.contains("corrupt")); - System.out.println(vr3.toString()); +// System.out.println(vr3.toString()); //Test corrupted npz format: File fValid = new ClassPathResource("numpy_arrays/npz/float32.npz").getFile(); @@ -319,7 +319,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertFalse(vr4.isValid()); s = vr4.getIssues().get(0); assertTrue(s, s.contains("npz") && s.toLowerCase().contains("numpy") && s.contains("corrupt")); - System.out.println(vr4.toString()); +// System.out.println(vr4.toString()); //Test valid npz format: @@ -328,7 +328,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertTrue(vr5.isValid()); assertNull(vr5.getIssues()); assertNull(vr5.getException()); - System.out.println(vr4.toString()); +// System.out.println(vr4.toString()); } @Test @@ -341,7 +341,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertFalse(vr0.isValid()); assertEquals("Numpy text file", vr0.getFormatType()); assertTrue(vr0.getIssues().get(0), vr0.getIssues().get(0).contains("exist")); - System.out.println(vr0.toString()); +// System.out.println(vr0.toString()); //Test empty file: File fEmpty = new File(f, "empty.txt"); @@ -351,7 +351,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertEquals("Numpy text file", vr1.getFormatType()); assertFalse(vr1.isValid()); assertTrue(vr1.getIssues().get(0), vr1.getIssues().get(0).contains("empty")); - System.out.println(vr1.toString()); +// System.out.println(vr1.toString()); //Test directory (not zip file) File directory = new File(f, "dir"); @@ -361,7 +361,7 @@ public class ValidationUtilTests 
extends BaseNd4jTest { assertEquals("Numpy text file", vr2.getFormatType()); assertFalse(vr2.isValid()); assertTrue(vr2.getIssues().get(0), vr2.getIssues().get(0).contains("directory")); - System.out.println(vr2.toString()); +// System.out.println(vr2.toString()); //Test non-numpy format: File fText = new File(f, "text.txt"); @@ -371,7 +371,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertFalse(vr3.isValid()); String s = vr3.getIssues().get(0); assertTrue(s, s.contains("text") && s.toLowerCase().contains("numpy") && s.contains("corrupt")); - System.out.println(vr3.toString()); +// System.out.println(vr3.toString()); //Test corrupted txt format: File fValid = new ClassPathResource("numpy_arrays/txt/arange_3,4_float32.txt").getFile(); @@ -387,7 +387,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertFalse(vr4.isValid()); s = vr4.getIssues().get(0); assertTrue(s, s.contains("text") && s.toLowerCase().contains("numpy") && s.contains("corrupt")); - System.out.println(vr4.toString()); +// System.out.println(vr4.toString()); //Test valid npz format: @@ -396,7 +396,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertTrue(vr5.isValid()); assertNull(vr5.getIssues()); assertNull(vr5.getException()); - System.out.println(vr4.toString()); +// System.out.println(vr4.toString()); } @Test @@ -418,7 +418,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertFalse(vr0.isValid()); assertEquals("SameDiff FlatBuffers file", vr0.getFormatType()); assertTrue(vr0.getIssues().get(0), vr0.getIssues().get(0).contains("exist")); - System.out.println(vr0.toString()); +// System.out.println(vr0.toString()); //Test empty file: File fEmpty = new File(f, "empty.fb"); @@ -428,7 +428,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertEquals("SameDiff FlatBuffers file", vr1.getFormatType()); assertFalse(vr1.isValid()); assertTrue(vr1.getIssues().get(0), vr1.getIssues().get(0).contains("empty")); - 
System.out.println(vr1.toString()); +// System.out.println(vr1.toString()); //Test directory (not zip file) File directory = new File(f, "dir"); @@ -438,7 +438,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertEquals("SameDiff FlatBuffers file", vr2.getFormatType()); assertFalse(vr2.isValid()); assertTrue(vr2.getIssues().get(0), vr2.getIssues().get(0).contains("directory")); - System.out.println(vr2.toString()); +// System.out.println(vr2.toString()); //Test non-flatbuffers File fText = new File(f, "text.fb"); @@ -448,7 +448,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertFalse(vr3.isValid()); String s = vr3.getIssues().get(0); assertTrue(s, s.contains("FlatBuffers") && s.contains("SameDiff") && s.contains("corrupt")); - System.out.println(vr3.toString()); +// System.out.println(vr3.toString()); //Test corrupted flatbuffers format: byte[] fbBytes = FileUtils.readFileToByteArray(fOrig); @@ -463,7 +463,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertFalse(vr4.isValid()); s = vr4.getIssues().get(0); assertTrue(s, s.contains("FlatBuffers") && s.contains("SameDiff") && s.contains("corrupt")); - System.out.println(vr4.toString()); +// System.out.println(vr4.toString()); //Test valid npz format: @@ -472,7 +472,7 @@ public class ValidationUtilTests extends BaseNd4jTest { assertTrue(vr5.isValid()); assertNull(vr5.getIssues()); assertNull(vr5.getException()); - System.out.println(vr4.toString()); +// System.out.println(vr4.toString()); } @Override diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/workspace/CyclicWorkspaceTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/workspace/CyclicWorkspaceTests.java index fc48044ea..60ed58b76 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/workspace/CyclicWorkspaceTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/workspace/CyclicWorkspaceTests.java @@ -53,7 +53,7 @@ public class 
CyclicWorkspaceTests extends BaseNd4jTest { val fArray = Nd4j.create(fShape).assign(e); val lArray = Nd4j.create(lShape).assign(e); - log.info("Current offset: {}; Current size: {};", ws.getCurrentOffset(), ws.getCurrentSize()); +// log.info("Current offset: {}; Current size: {};", ws.getCurrentOffset(), ws.getCurrentSize()); } } } diff --git a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/workspace/SpecialWorkspaceTests.java b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/workspace/SpecialWorkspaceTests.java index ce7a899a5..8f389697d 100644 --- a/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/workspace/SpecialWorkspaceTests.java +++ b/nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/workspace/SpecialWorkspaceTests.java @@ -70,7 +70,7 @@ public class SpecialWorkspaceTests extends BaseNd4jTest { } Nd4jWorkspace workspace = (Nd4jWorkspace) Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread("WS1"); - workspace.enableDebug(true); +// workspace.enableDebug(true); assertEquals(0, workspace.getStepNumber()); @@ -172,7 +172,7 @@ public class SpecialWorkspaceTests extends BaseNd4jTest { Nd4jWorkspace workspace = (Nd4jWorkspace) Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread(configuration, "WS1"); - workspace.enableDebug(true); +// workspace.enableDebug(true); try (MemoryWorkspace ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace(configuration, "WS1")) { Nd4j.create(500); diff --git a/nd4j/nd4j-buffer/src/main/java/org/nd4j/linalg/api/buffer/DataType.java b/nd4j/nd4j-buffer/src/main/java/org/nd4j/linalg/api/buffer/DataType.java index 84715f878..8e82184c4 100644 --- a/nd4j/nd4j-buffer/src/main/java/org/nd4j/linalg/api/buffer/DataType.java +++ b/nd4j/nd4j-buffer/src/main/java/org/nd4j/linalg/api/buffer/DataType.java @@ -94,7 +94,7 @@ public enum DataType { * Note: Boolean values are considered numerical (0/1)
*/ public boolean isNumerical(){ - return this != UTF8 && this != COMPRESSED && this != UNKNOWN; + return this != UTF8 && this != BOOL && this != COMPRESSED && this != UNKNOWN; } /** diff --git a/nd4j/nd4j-common/src/main/java/org/nd4j/config/ND4JSystemProperties.java b/nd4j/nd4j-common/src/main/java/org/nd4j/config/ND4JSystemProperties.java index 912c7f1f1..14401d691 100644 --- a/nd4j/nd4j-common/src/main/java/org/nd4j/config/ND4JSystemProperties.java +++ b/nd4j/nd4j-common/src/main/java/org/nd4j/config/ND4JSystemProperties.java @@ -31,6 +31,13 @@ public class ND4JSystemProperties { * initialization information */ public static final String LOG_INITIALIZATION = "org.nd4j.log.initialization"; + + /** + * Applicability: nd4j-native when running non-AVX binary on an AVX compatible CPU
+ * Description: Set to true to avoid logging AVX warnings (i.e., running generic x86 binaries on an AVX2 system) + */ + public static final String ND4J_IGNORE_AVX = "org.nd4j.avx.ignore"; + /** * Applicability: Always
* Description: This system property defines the maximum amount of off-heap memory that can be used. diff --git a/nd4j/nd4j-common/src/main/java/org/nd4j/linalg/util/ArrayUtil.java b/nd4j/nd4j-common/src/main/java/org/nd4j/linalg/util/ArrayUtil.java index e1408e298..35e5607a2 100644 --- a/nd4j/nd4j-common/src/main/java/org/nd4j/linalg/util/ArrayUtil.java +++ b/nd4j/nd4j-common/src/main/java/org/nd4j/linalg/util/ArrayUtil.java @@ -2071,9 +2071,10 @@ public class ArrayUtil { return new boolean[0]; boolean[] ret = new boolean[arr.length * arr[0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[i].length; j++) - ret[count++] = arr[i][j]; + for (int i = 0; i < arr.length; i++) { + System.arraycopy(arr[i], 0, ret, count, arr[i].length); + count += arr[i].length; + } return ret; } @@ -2083,11 +2084,12 @@ public class ArrayUtil { boolean[] ret = new boolean[arr.length * arr[0].length * arr[0][0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[0].length; j++) - for (int k = 0; k < arr[0][0].length; k++) { - ret[count++] = arr[i][j][k]; - } + for (int i = 0; i < arr.length; i++) { + for (int j = 0; j < arr[0].length; j++) { + System.arraycopy(arr[i][j], 0, ret, count, arr[0][0].length); + count += arr[0][0].length; + } + } return ret; } @@ -2096,24 +2098,27 @@ public class ArrayUtil { return new float[0]; float[] ret = new float[arr.length * arr[0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[i].length; j++) - ret[count++] = arr[i][j]; + for (int i = 0; i < arr.length; i++) { + System.arraycopy(arr[i], 0, ret, count, arr[i].length); + count += arr[i].length; + } return ret; } public static float[] flatten(float[][][] arr) { - if(arr.length == 0 || arr[0].length == 0 || arr[0][0].length == 0) + if (arr.length == 0 || arr[0].length == 0 || arr[0][0].length == 0) return new float[0]; float[] ret = new float[arr.length * arr[0].length * arr[0][0].length]; 
int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[0].length; j++) - for (int k = 0; k < arr[0][0].length; k++) { - ret[count++] = arr[i][j][k]; - } + for (int i = 0; i < arr.length; i++) { + for (int j = 0; j < arr[0].length; j++) { + System.arraycopy(arr[i][j], 0, ret, count, arr[0][0].length); + count += arr[0][0].length; + } + } + return ret; } @@ -2123,11 +2128,12 @@ public class ArrayUtil { double[] ret = new double[arr.length * arr[0].length * arr[0][0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[0].length; j++) - for (int k = 0; k < arr[0][0].length; k++) { - ret[count++] = arr[i][j][k]; - } + for (int i = 0; i < arr.length; i++) { + for (int j = 0; j < arr[0].length; j++) { + System.arraycopy(arr[i][j], 0, ret, count, arr[0][0].length); + count += arr[0][0].length; + } + } return ret; } @@ -2137,11 +2143,12 @@ public class ArrayUtil { int[] ret = new int[arr.length * arr[0].length * arr[0][0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[0].length; j++) - for (int k = 0; k < arr[0][0].length; k++) { - ret[count++] = arr[i][j][k]; - } + for (int i = 0; i < arr.length; i++) { + for (int j = 0; j < arr[0].length; j++) { + System.arraycopy(arr[i][j], 0, ret, count, arr[0][0].length); + count += arr[0][0].length; + } + } return ret; } @@ -2151,11 +2158,12 @@ public class ArrayUtil { val ret = new short[arr.length * arr[0].length * arr[0][0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[0].length; j++) - for (int k = 0; k < arr[0][0].length; k++) { - ret[count++] = arr[i][j][k]; - } + for (int i = 0; i < arr.length; i++) { + for (int j = 0; j < arr[0].length; j++) { + System.arraycopy(arr[i][j], 0, ret, count, arr[0][0].length); + count += arr[0][0].length; + } + } return ret; } @@ -2165,11 +2173,12 @@ public class ArrayUtil { val ret = new byte[arr.length * arr[0].length * arr[0][0].length]; int count = 
0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[0].length; j++) - for (int k = 0; k < arr[0][0].length; k++) { - ret[count++] = arr[i][j][k]; - } + for (int i = 0; i < arr.length; i++) { + for (int j = 0; j < arr[0].length; j++) { + System.arraycopy(arr[i][j], 0, ret, count, arr[0][0].length); + count += arr[0][0].length; + } + } return ret; } @@ -2177,11 +2186,14 @@ public class ArrayUtil { val ret = new long[arr.length * arr[0].length * arr[0][0].length * arr[0][0][0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[0].length; j++) - for (int k = 0; k < arr[0][0].length; k++) - for (int m = 0; m < arr[0][0][0].length; m++) - ret[count++] = arr[i][j][k][m]; + for (int i = 0; i < arr.length; i++) { + for (int j = 0; j < arr[0].length; j++) { + for (int k = 0; k < arr[0][0].length; k++) { + System.arraycopy(arr[i][j][k], 0, ret, count, arr[0][0][0].length); + count += arr[0][0][0].length; + } + } + } return ret; } @@ -2190,11 +2202,14 @@ public class ArrayUtil { val ret = new short[arr.length * arr[0].length * arr[0][0].length * arr[0][0][0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[0].length; j++) - for (int k = 0; k < arr[0][0].length; k++) - for (int m = 0; m < arr[0][0][0].length; m++) - ret[count++] = arr[i][j][k][m]; + for (int i = 0; i < arr.length; i++) { + for (int j = 0; j < arr[0].length; j++) { + for (int k = 0; k < arr[0][0].length; k++) { + System.arraycopy(arr[i][j][k], 0, ret, count, arr[0][0][0].length); + count += arr[0][0][0].length; + } + } + } return ret; } @@ -2203,11 +2218,14 @@ public class ArrayUtil { val ret = new byte[arr.length * arr[0].length * arr[0][0].length * arr[0][0][0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[0].length; j++) - for (int k = 0; k < arr[0][0].length; k++) - for (int m = 0; m < arr[0][0][0].length; m++) - ret[count++] = arr[i][j][k][m]; + for (int i = 0; i < 
arr.length; i++) { + for (int j = 0; j < arr[0].length; j++) { + for (int k = 0; k < arr[0][0].length; k++) { + System.arraycopy(arr[i][j][k], 0, ret, count, arr[0][0][0].length); + count += arr[0][0][0].length; + } + } + } return ret; } @@ -2216,11 +2234,14 @@ public class ArrayUtil { val ret = new boolean[arr.length * arr[0].length * arr[0][0].length * arr[0][0][0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[0].length; j++) - for (int k = 0; k < arr[0][0].length; k++) - for (int m = 0; m < arr[0][0][0].length; m++) - ret[count++] = arr[i][j][k][m]; + for (int i = 0; i < arr.length; i++) { + for (int j = 0; j < arr[0].length; j++) { + for (int k = 0; k < arr[0][0].length; k++) { + System.arraycopy(arr[i][j][k], 0, ret, count, arr[0][0][0].length); + count += arr[0][0][0].length; + } + } + } return ret; } @@ -2229,11 +2250,14 @@ public class ArrayUtil { float[] ret = new float[arr.length * arr[0].length * arr[0][0].length * arr[0][0][0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[0].length; j++) - for (int k = 0; k < arr[0][0].length; k++) - for (int m = 0; m < arr[0][0][0].length; m++) - ret[count++] = arr[i][j][k][m]; + for (int i = 0; i < arr.length; i++) { + for (int j = 0; j < arr[0].length; j++) { + for (int k = 0; k < arr[0][0].length; k++) { + System.arraycopy(arr[i][j][k], 0, ret, count, arr[0][0][0].length); + count += arr[0][0][0].length; + } + } + } return ret; } @@ -2242,11 +2266,14 @@ public class ArrayUtil { double[] ret = new double[arr.length * arr[0].length * arr[0][0].length * arr[0][0][0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[0].length; j++) - for (int k = 0; k < arr[0][0].length; k++) - for (int m = 0; m < arr[0][0][0].length; m++) - ret[count++] = arr[i][j][k][m]; + for (int i = 0; i < arr.length; i++) { + for (int j = 0; j < arr[0].length; j++) { + for (int k = 0; k < arr[0][0].length; k++) { + 
System.arraycopy(arr[i][j][k], 0, ret, count, arr[0][0][0].length); + count += arr[0][0][0].length; + } + } + } return ret; } @@ -2255,11 +2282,14 @@ public class ArrayUtil { int[] ret = new int[arr.length * arr[0].length * arr[0][0].length * arr[0][0][0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[0].length; j++) - for (int k = 0; k < arr[0][0].length; k++) - for (int m = 0; m < arr[0][0][0].length; m++) - ret[count++] = arr[i][j][k][m]; + for (int i = 0; i < arr.length; i++) { + for (int j = 0; j < arr[0].length; j++) { + for (int k = 0; k < arr[0][0].length; k++) { + System.arraycopy(arr[i][j][k], 0, ret, count, arr[0][0][0].length); + count += arr[0][0][0].length; + } + } + } return ret; } @@ -2271,11 +2301,8 @@ public class ArrayUtil { int[] ret = new int[arr.length * arr[0].length]; int count = 0; for (int i = 0; i < arr.length; i++) { - if (arr[i].length != arr[0].length) - throw new IllegalStateException("Length of all rows must be equal"); - - for (int j = 0; j < arr[i].length; j++) - ret[count++] = arr[i][j]; + System.arraycopy(arr[i], 0, ret, count, arr[i].length); + count += arr[i].length; } return ret; } @@ -2285,9 +2312,10 @@ public class ArrayUtil { return new short[0]; val ret = new short[arr.length * arr[0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[i].length; j++) - ret[count++] = arr[i][j]; + for (int i = 0; i < arr.length; i++) { + System.arraycopy(arr[i], 0, ret, count, arr[i].length); + count += arr[i].length; + } return ret; } @@ -2297,34 +2325,21 @@ public class ArrayUtil { val ret = new byte[arr.length * arr[0].length]; int count = 0; for (int i = 0; i < arr.length; i++) { - if (arr[i].length != arr[0].length) - throw new IllegalStateException("Length of all rows must be equal"); - - for (int j = 0; j < arr[i].length; j++) - ret[count++] = arr[i][j]; + System.arraycopy(arr[i], 0, ret, count, arr[i].length); + count += arr[i].length; } return ret; } - 
/* - public static boolean[] flatten(boolean[][] arr) { - boolean[] ret = new boolean[arr.length * arr[0].length]; - int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[i].length; j++) - ret[count++] = arr[i][j]; - return ret; - } - */ - public static long[] flatten(long[][] arr) { if(arr.length == 0 || arr[0].length == 0 ) return new long[0]; long[] ret = new long[arr.length * arr[0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[i].length; j++) - ret[count++] = arr[i][j]; + for (int i = 0; i < arr.length; i++) { + System.arraycopy(arr[i], 0, ret, count, arr[i].length); + count += arr[i].length; + } return ret; } @@ -2334,11 +2349,12 @@ public class ArrayUtil { long[] ret = new long[arr.length * arr[0].length * arr[0][0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[0].length; j++) - for (int k = 0; k < arr[0][0].length; k++) { - ret[count++] = arr[i][j][k]; - } + for (int i = 0; i < arr.length; i++) { + for (int j = 0; j < arr[0].length; j++) { + System.arraycopy(arr[i][j], 0, ret, count, arr[0][0].length); + count += arr[0][0].length; + } + } return ret; } @@ -2354,9 +2370,10 @@ public class ArrayUtil { return new double[0]; double[] ret = new double[arr.length * arr[0].length]; int count = 0; - for (int i = 0; i < arr.length; i++) - for (int j = 0; j < arr[i].length; j++) - ret[count++] = arr[i][j]; + for (int i = 0; i < arr.length; i++) { + System.arraycopy(arr[i], 0, ret, count, arr[i].length); + count += arr[i].length; + } return ret; } diff --git a/nd4j/nd4j-context/src/main/java/org/nd4j/linalg/factory/Environment.java b/nd4j/nd4j-context/src/main/java/org/nd4j/linalg/factory/Environment.java new file mode 100644 index 000000000..ce8e7e9f6 --- /dev/null +++ b/nd4j/nd4j-context/src/main/java/org/nd4j/linalg/factory/Environment.java @@ -0,0 +1,85 @@ +/* ****************************************************************************** + * Copyright 
(c) 2019 Konduit K.K. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0. + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + * + * SPDX-License-Identifier: Apache-2.0 + ******************************************************************************/ +package org.nd4j.linalg.factory; + +/** + * ND4J backend Environment instance + * + * @author Alex Black + */ +public interface Environment { + + /** BLAS major version number (if applicable) */ + int blasMajorVersion(); + /** BLAS minor version number (if applicable) */ + int blasMinorVersion(); + /** BLAS patch version number (if applicable) */ + int blasPatchVersion(); + + /** Returns true if ND4J is set to verbose mode */ + boolean isVerbose(); + /** Set verbose mode */ + void setVerbose(boolean reallyVerbose); + /** Returns true if ND4J is set to debug mode */ + boolean isDebug(); + /** Returns true if ND4J is set to profiling mode */ + boolean isProfiling(); + /** Returns true if ND4J is set to detecting leaks mode */ + boolean isDetectingLeaks(); + /** Returns true if ND4J is set to debug and verbose mode */ + boolean isDebugAndVerbose(); + + /** Set debug mode */ + void setDebug( boolean reallyDebug); + /** Set profiling mode */ + void setProfiling( boolean reallyProfile); + /** Set leaks detection mode */ + void setLeaksDetector( boolean reallyDetect); + /** Returns true if helpers (cuDNN, DNNL/MKLDNN etc) are allowed */ + boolean helpersAllowed(); + /** Set whether helpers (cuDNN, DNNL/MKLDNN etc) are allowed */ + void allowHelpers(boolean reallyAllow); + + /** Returns the TAD (tensor along 
dimension) threshold for ops */ + int tadThreshold(); + /** Set the TAD (tensor along dimension) threshold for ops */ + void setTadThreshold(int threshold); + + /** Returns the elementwise threshold for ops */ + int elementwiseThreshold(); + /** Set the elementwise threshold for ops */ + void setElementwiseThreshold(int threshold); + + /** Returns the maximum number of threads for C++ op execution (if applicable) */ + int maxThreads(); + /** Set the maximum number of threads for C++ op execution (if applicable) */ + void setMaxThreads(int max); + + /** Returns the maximum number of master threads for C++ op execution (if applicable) */ + int maxMasterThreads(); + /** Set the maximum number of master threads for C++ op execution (if applicable) */ + void setMaxMasterThreads(int max); + + /** Set the maximum primary memory */ + void setMaxPrimaryMemory(long maxBytes); + /** Set the maximum special memory */ + void setMaxSpecialMemory(long maxBytes); + /** Set the maximum device memory */ + void setMaxDeviceMemory(long maxBytes); + + /** Return true if the backend is a CPU backend, or false otherwise */ + boolean isCPU(); +} diff --git a/nd4j/nd4j-context/src/main/java/org/nd4j/linalg/factory/Nd4jBackend.java b/nd4j/nd4j-context/src/main/java/org/nd4j/linalg/factory/Nd4jBackend.java index ec4739b86..7575c1238 100644 --- a/nd4j/nd4j-context/src/main/java/org/nd4j/linalg/factory/Nd4jBackend.java +++ b/nd4j/nd4j-context/src/main/java/org/nd4j/linalg/factory/Nd4jBackend.java @@ -1,5 +1,6 @@ -/******************************************************************************* +/* ****************************************************************************** * Copyright (c) 2015-2018 Skymind, Inc. + * Copyright (c) 2019 Konduit K.K. 
* * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at @@ -143,6 +144,8 @@ public abstract class Nd4jBackend { */ public abstract Class getNDArrayClass(); + public abstract Environment getEnvironment(); + /** * Loads the best available backend.