cavis/libnd4j/tests_cpu/layers_tests/BrodcastTests.cpp
raver119 6de00bf75f
[WIP] Weekly update of repo (#8390)
* [WIP] Fix compilation after nd4j changes (#37)

* Fix compilation.

* Some tests fixed

* Disable tests temporarily.

* Restored test

* Tests restored.

* Test restored.

* [WIP] perf tests (#40)

* special maxpool test

Signed-off-by: raver119 <raver119@gmail.com>

* special maxpool test

Signed-off-by: raver119 <raver119@gmail.com>

* Shyrma bnorm bp (#41)

Batchnorm backprop mkldnn

* Add SameDiff memory reuse memory manager (array cache) (#39)

* Attention op comments

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* ArrayCacheMemoryMgr - first pass

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* Tweak array cache for use with SameDiff identity arrays

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* ArrayCacheMemoryMgr javadoc and properly get max memory

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* LRU cache policy + add tests

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* Fixes

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* Resize arrays internally if required for ArrayCacheMemoryMgr

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* Test improvement

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* Small polish

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* SameDiff op runtime benchmarking listener (#42)

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* INLINE_LOOPS for windows

Signed-off-by: raver119 <raver119@gmail.com>

* [WIP] ThreadPool (#8)

This PR removes OpenMP use in 95% of cases
2019-11-13 17:15:18 +03:00

65 lines
2.4 KiB
C++

/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
//
// Created by agibsonccc on 1/19/17.
//
#include "testinclude.h"
#include <broadcasting.h>
// Fixture for a multi-dimensional broadcast test: broadcasts a rank-2 array
// over dimensions {0,2} of a rank-3 [2,3,5] input, writing into `result`.
// NOTE(review): the Nd4jLong shape buffers appear to use the libnd4j shapeInfo
// layout {rank, shape..., strides..., flags, elementWiseStride, order} where
// order 99 == 'c' — confirm against shape.h before relying on the comments below.
class BroadcastMultiDimTest : public testing::Test {
public:
// Dimensions of the input to broadcast along (non-contiguous: first and last).
int dimensions[2] = {0,2};
// Presumably rank 3, shape [2,3,5], strides [15,5,1], ews 1, 'c' order.
Nd4jLong inputShapeBuffer[10] = {3,2,3,5,15,5,1,8192,1,99};
// Input x: values 1..30 in row-major order.
float inputData[30] = {1.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0,11.0,12.0,13.0,14.0,15.0,16.0,17.0,18.0,19.0,20.0,21.0,22.0,23.0,24.0,25.0,26.0,27.0,28.0,29.0,30.0};
// Expected z: positions multiplied by the 0.0 entries of broadcastData become 0.
float dataAssertion[30] = {1.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0,11.0,12.0,13.0,14.0,15.0,16.0,17.0,18.0,0.0,0.0,21.0,22.0,23.0,0.0,0.0,26.0,27.0,28.0,0.0,0.0};
// Output buffer z, zero-initialized.
float result[30] = {0.0};
// Broadcast operand y: mostly ones with two trailing zeros.
float broadcastData[10] = {1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0};
// Presumably rank 2, shape [2,5], strides [5,1], ews 1, 'c' order.
Nd4jLong broadcastShapeInfo[8] = {2,2,5,5,1,8192,1,99};
// Broadcast op number 2 — presumably element-wise multiply; verify against
// the broadcast op enumeration in the ops headers.
int opNum = 2;
// Number of entries in `dimensions`.
int dimensionLength = 2;
};
#ifndef __CUDABLAS__
// Verifies Broadcast::exec over two non-contiguous dimensions {0,2}:
// x [2,3,5] (op 2) y [2,5] -> z [2,3,5], compared element-wise against
// the fixture's dataAssertion.
TEST_F(BroadcastMultiDimTest,MultimDimTest) {
    // Stack-allocate the TAD (RAII). The previous `new shape::TAD()` +
    // trailing `delete` leaked the TAD whenever an ASSERT_EQ failed, because
    // gtest ASSERT_* macros return early from the test body.
    shape::TAD tad;
    tad.init(inputShapeBuffer, dimensions, dimensionLength);
    tad.createTadOnlyShapeInfo();
    tad.createOffsets();
    functions::broadcast::Broadcast<float, float, float>::exec(
            opNum,
            inputData,             // x
            inputShapeBuffer,      // xShapeInfo
            broadcastData,         // y
            broadcastShapeInfo,    // yShapeInfo
            result,                // result (z)
            inputShapeBuffer,      // resultShapeInfo — z shares x's shape
            dimensions,            // dimension
            dimensionLength,       // dimensionLength
            tad.tadOnlyShapeInfo,  // tadShapeInfo
            tad.tadOffsets,        // tadOffset
            tad.tadOnlyShapeInfo,  // tadShapeInfoZ — same TADs since z aliases x's shape
            tad.tadOffsets, 0, tad.numTads); // tadOffsetZ, start, stop
    for (int i = 0; i < 30; i++) {
        ASSERT_EQ(dataAssertion[i], result[i]);
    }
}
#endif