[WIP] Fix compilation after nd4j changes (#37)
* Fix compilation. * Some tests fixed * Disable tests temporarily. * Restored test * Tests restored. * Test restored.master
parent
0107fb10ab
commit
6958f2ba24
|
@ -61,7 +61,7 @@ lazy val commonSettings = Seq(
|
|||
|
||||
lazy val publishNexus = Seq(
|
||||
publishTo := {
|
||||
val nexus = "https://nexus.ci.skymind.io/"
|
||||
val nexus = "https://packages.konduit.ai/"
|
||||
if (isSnapshot.value)
|
||||
Some("snapshots" at nexus + "content/repositories/maven-snapshots")
|
||||
else
|
||||
|
|
|
@ -80,7 +80,7 @@ object Implicits {
|
|||
class IntArray2INDArray(val underlying: Array[Int]) extends AnyVal {
|
||||
def mkNDArray(shape: Array[Int], ord: NDOrdering = NDOrdering(Nd4j.order()), offset: Int = 0): INDArray = {
|
||||
val strides = Nd4j.getStrides(shape, ord.value)
|
||||
Nd4j.create(underlying, shape.map(_.toLong), strides.map(_.toLong), ord.value, DataType.INT)
|
||||
Nd4j.create(underlying.map(_.toInt), shape.map(_.toLong), strides.map(_.toLong), ord.value, DataType.INT)
|
||||
}
|
||||
|
||||
def toNDArray: INDArray = Nd4j.createFromArray(underlying: _*)
|
||||
|
|
|
@ -170,9 +170,9 @@ class ConstructionTest extends FlatSpec with Matchers {
|
|||
sd.setTrainingConfig(conf)
|
||||
sd.fit(new SingletonMultiDataSetIterator(mds), 1)
|
||||
|
||||
w.eval.toDoubleVector.head shouldBe (0.0629 +- 0.0001)
|
||||
w.eval.toDoubleVector.tail.head shouldBe (0.3128 +- 0.0001)
|
||||
w.eval.toDoubleVector.tail.tail.head shouldBe (0.2503 +- 0.0001)
|
||||
w.getArr.get(0) shouldBe (0.0629 +- 0.0001)
|
||||
w.getArr.get(1) shouldBe (0.3128 +- 0.0001)
|
||||
w.getArr.get(2) shouldBe (0.2503 +- 0.0001)
|
||||
//Console.println(w.eval)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -209,7 +209,7 @@ class MathTest extends FlatSpec with Matchers {
|
|||
val x = sd.bind(arr)
|
||||
val y = new SDVariableWrapper(x)
|
||||
|
||||
x.get(SDIndex.point(0)).getArr shouldBe y(0).getArr
|
||||
x.get(SDIndex.point(0)).eval shouldBe y(0).eval
|
||||
}
|
||||
|
||||
"SDVariable " should "be indexable in 2d" in {
|
||||
|
@ -221,7 +221,7 @@ class MathTest extends FlatSpec with Matchers {
|
|||
|
||||
x(0, ---).eval shouldBe x(SDIndex.point(0), SDIndex.all()).eval
|
||||
|
||||
val slice1 = x.get(SDIndex.interval(0, 2), SDIndex.all()).eval
|
||||
val slice1 = x.get(SDIndex.interval(0L, 2L), SDIndex.all()).eval
|
||||
val slice2 = x(0 :: 2, ---).eval
|
||||
slice1 shouldBe slice2
|
||||
}
|
||||
|
@ -237,10 +237,10 @@ class MathTest extends FlatSpec with Matchers {
|
|||
x.get(SDIndex.point(0), SDIndex.point(0), SDIndex.all()).eval shouldBe x(0, 0, ---).eval
|
||||
x.get(SDIndex.point(0), SDIndex.point(0), SDIndex.point(0)).eval shouldBe x(0, 0, 0).eval
|
||||
|
||||
x.get(SDIndex.interval(0, 2), SDIndex.point(0), SDIndex.point(0)).eval shouldBe x(0 :: 2, 0, 0).eval
|
||||
x.get(SDIndex.interval(0, 2), SDIndex.interval(0, 1), SDIndex.interval(0, 2)).eval shouldBe x(0 :: 2,
|
||||
x.get(SDIndex.interval(0L, 2L), SDIndex.point(0), SDIndex.point(0)).eval shouldBe x(0 :: 2, 0, 0).eval
|
||||
x.get(SDIndex.interval(0L, 2L), SDIndex.interval(0L, 1L), SDIndex.interval(0L, 2L)).eval shouldBe x(0 :: 2,
|
||||
0 :: 1,
|
||||
0 :: 2).eval
|
||||
x.get(SDIndex.interval(0, 2), SDIndex.interval(0, 1), SDIndex.all()).eval shouldBe x(0 :: 2, 0 :: 1, ---).eval
|
||||
x.get(SDIndex.interval(0L, 2L), SDIndex.interval(0L, 1L), SDIndex.all()).eval shouldBe x(0 :: 2, 0 :: 1, ---).eval
|
||||
}
|
||||
}
|
||||
|
|
|
@ -60,11 +60,11 @@ class SameDiffTest extends FlatSpec with Matchers {
|
|||
sd.associateArrayWithVariable(inputArr, input)
|
||||
sd.associateArrayWithVariable(labelArr, label)
|
||||
|
||||
val result: INDArray = sd.execAndEndResult
|
||||
assertEquals(1, result.length)
|
||||
val result = sd.output(null: java.util.Map[String, org.nd4j.linalg.api.ndarray.INDArray], "loss")
|
||||
assertEquals(1, result.values().size())
|
||||
|
||||
val emptyMap = new HashMap[String, INDArray]()
|
||||
sd.execBackwards(emptyMap)
|
||||
sd.output(emptyMap, "loss")
|
||||
}
|
||||
|
||||
"SameDiff" should "run test dense layer forward pass" in {
|
||||
|
@ -84,7 +84,7 @@ class SameDiffTest extends FlatSpec with Matchers {
|
|||
val expMmul = iInput.mmul(iWeights)
|
||||
val expZ = expMmul.addRowVector(iBias)
|
||||
val expOut = Transforms.sigmoid(expZ, true)
|
||||
sd.exec(new HashMap[String, INDArray](), sd.outputs)
|
||||
sd.output(new HashMap[String, INDArray](), "mmul", "out", "bias", "add")
|
||||
assertEquals(expMmul, mmul.getArr)
|
||||
assertEquals(expZ, z.getArr)
|
||||
assertEquals(expOut, out.getArr)
|
||||
|
@ -109,15 +109,18 @@ class SameDiffTest extends FlatSpec with Matchers {
|
|||
.dataSetFeatureMapping("in", "in2")
|
||||
.skipBuilderValidation(true)
|
||||
.build
|
||||
sd.setTrainingConfig(c)
|
||||
sd.fit(new SingletonMultiDataSetIterator(new MultiDataSet(Array[INDArray](inArr, inArr2), null)), 1)
|
||||
val out = tanh.eval
|
||||
|
||||
val data = new HashMap[String, INDArray]()
|
||||
data.put("in", Nd4j.randn(1, 3))
|
||||
data.put("in2", Nd4j.randn(3, 4))
|
||||
in.convertToConstant
|
||||
val out2 = tanh.eval
|
||||
val out = sd.output(data, "tanh")
|
||||
val out2 = sd.output(data, "tanh")
|
||||
assertEquals(out, out2)
|
||||
assertEquals(VariableType.CONSTANT, in.getVariableType)
|
||||
assertEquals(inArr, in.getArr)
|
||||
//Sanity check on fitting:
|
||||
sd.fit(new SingletonMultiDataSetIterator(new MultiDataSet(Array[INDArray](inArr2), null)), 1)
|
||||
sd.setTrainingConfig(c)
|
||||
sd.fit(new SingletonMultiDataSetIterator(new MultiDataSet(Array[INDArray](inArr, inArr2), null)), 1)
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue