/*******************************************************************************
 * Copyright (c) 2015-2018 Skymind, Inc.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/

//
// Created by raver on 6/6/2018.
//
#include <op_boilerplate.h>
#include <pointercast.h>
#include <ops/declarable/BroadcastableOp.h>
#include <helpers/ShapeUtils.h>
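
// BroadcastableOp is the shared base for binary ops that take two inputs and
// produce a single output (the 2 inputs / 1 output passed to DeclarableCustomOp
// below). calculateOutputShape resolves both the output data type and the
// broadcast output shape for the pair of input shapes.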

namespace nd4j {
    namespace ops {
        BroadcastableOp::BroadcastableOp(const char *name, int numTArgs, int numIArgs) : DeclarableCustomOp::DeclarableCustomOp(2, 1, name, false, numTArgs, numIArgs) {
            //
        }

        ShapeList *BroadcastableOp::calculateOutputShape(ShapeList *inputShape, nd4j::graph::Context &block) {
            auto shapeList = SHAPELIST();
            auto x = inputShape->at(0);
            auto y = inputShape->at(1);
            auto outputs = _descriptor->getOutputTypesForOutput(0);
            nd4j::DataType dtype = block.dataType(0);
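
            // Resolve the output data type. Ops whose descriptor declares a single BOOL
            // output (or whose input is already BOOL) always produce BOOL; otherwise,
            // experimental builds pick the pairwise promotion of the two input types,
            // while non-experimental builds keep the data type of x.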
            if (block.dataType(0) != nd4j::DataType::BOOL && !(outputs.size() == 1 && outputs[0] == nd4j::DataType::BOOL)) {
                if (Environment::getInstance()->isExperimentalBuild()) {
                    if (shape::length(y) > shape::length(x)) {
                        dtype = DataTypeUtils::pickPairwiseResultType(y, x);
                    } else {
                        dtype = DataTypeUtils::pickPairwiseResultType(x, y);
                    }
                } else {
                    dtype = ArrayOptions::dataType(x);
                }
            } else {
                dtype = nd4j::DataType::BOOL;
            }
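
            // Resolve the output shape: empty inputs, scalars, equal shapes and general
            // broadcasting are handled as separate cases below, e.g. [3, 4] + [4] -> [3, 4].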
            if (shape::isEmpty(x) || shape::isEmpty(y)) {
                // edge case: broadcasting against a rank-0 empty array yields an empty array, i.e. [3, 4] + [] = []
                if ((shape::isEmpty(x) && shape::rank(x) == 0) || (shape::isEmpty(y) && shape::rank(y) == 0)) {
                    shapeList->push_back(ConstantShapeHelper::getInstance()->createShapeInfo(ShapeDescriptor::emptyDescriptor(dtype)));
                    return shapeList;
                }

                Nd4jLong *newshape = nullptr;
                ShapeUtils::evalBroadcastShapeInfo(x, y, true, newshape, block.workspace());
                shapeList->push_back(ConstantShapeHelper::getInstance()->createShapeInfo(ShapeDescriptor(newshape, dtype)));
            } else if (shape::isScalar(x) && shape::isScalar(y)) {
                if (shape::rank(x) >= shape::rank(y)) {
                    shapeList->push_back(ConstantShapeHelper::getInstance()->createShapeInfo(ShapeDescriptor(x, dtype)));
                } else {
                    shapeList->push_back(ConstantShapeHelper::getInstance()->createShapeInfo(ShapeDescriptor(y, dtype)));
                }
            } else if (shape::equalsSoft(x, y)) {
                shapeList->push_back(ConstantShapeHelper::getInstance()->createShapeInfo(ShapeDescriptor(x, dtype)));
            } else if (shape::isScalar(x) && !shape::isScalar(y)) {
                shapeList->push_back(ConstantShapeHelper::getInstance()->createShapeInfo(ShapeDescriptor(y, dtype)));
            } else if (!shape::isScalar(x) && shape::isScalar(y)) {
                shapeList->push_back(ConstantShapeHelper::getInstance()->createShapeInfo(ShapeDescriptor(x, dtype)));
            } else if (ShapeUtils::areShapesBroadcastable(x, y)) {
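                // general case: shapes differ but are broadcast-compatible, so evaluate
                // the pairwise broadcast shape (e.g. [3, 1] + [1, 4] -> [3, 4])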
                Nd4jLong *newshape = nullptr;
                ShapeUtils::evalBroadcastShapeInfo(x, y, true, newshape, block.workspace());
                shapeList->push_back(ConstantShapeHelper::getInstance()->createShapeInfo(ShapeDescriptor(newshape, dtype)));
            } else {
                // shapes are not broadcastable; an exception will be thrown later during
                // validation, so return x's shape as a placeholder here
                shapeList->push_back(ConstantShapeHelper::getInstance()->createShapeInfo(ShapeDescriptor(x, dtype)));
            }

            return shapeList;
        }
    }
}