DL4J trace logging (#79)

* MultiLayerNetwork/ComputationGraph (MLN/CG) trace logging for debugging

Signed-off-by: AlexDBlack <blacka101@gmail.com>

* Tiny tweak

Signed-off-by: AlexDBlack <blacka101@gmail.com>
Branch: master
Author: Alex Black, 2019-07-24 22:04:08 +10:00 (committed by AlexDBlack)
Parent: 2cd95dc517
Commit: ac1fc1a27a
2 changed files with 63 additions and 0 deletions
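
The added messages are all logged at TRACE level, so they stay invisible under a default INFO configuration. Below is a minimal sketch for surfacing them at runtime, assuming Logback is the SLF4J backend bound on the classpath (the casts fail otherwise); the class name EnableDl4jTrace is illustrative. The same effect can be had declaratively with a logger entry for "org.deeplearning4j.nn" at level TRACE in logback.xml.

    import ch.qos.logback.classic.Level;
    import ch.qos.logback.classic.Logger;
    import org.deeplearning4j.nn.graph.ComputationGraph;
    import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
    import org.slf4j.LoggerFactory;

    public class EnableDl4jTrace {
        public static void main(String[] args) {
            // Raise only the two network classes to TRACE, leaving the rest of
            // the logging configuration untouched. The cast is Logback-specific.
            ((Logger) LoggerFactory.getLogger(MultiLayerNetwork.class)).setLevel(Level.TRACE);
            ((Logger) LoggerFactory.getLogger(ComputationGraph.class)).setLevel(Level.TRACE);
        }
    }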

ComputationGraph.java

@@ -1945,6 +1945,8 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork {
             activations.put(configuration.getNetworkInputs().get(i), features[i]);
         }
 
+        boolean traceLog = log.isTraceEnabled();
+
         //Do forward pass according to the topological ordering of the network
         for (int i = 0; i <= layerIndex; i++) {
             GraphVertex current = vertices[topologicalOrder[i]];
@@ -1955,6 +1957,10 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork {
                 continue;
             }
 
+            if(traceLog){
+                log.trace("About forward pass: {} (\"{}\") - {}", i, vName, current.getClass().getSimpleName());
+            }
+
             try(MemoryWorkspace wsFFWorking = workspaceMgr.notifyScopeEntered(ArrayType.FF_WORKING_MEM)){
 
                 VertexIndices[] inputsTo = current.getOutputVertices();
@@ -2026,6 +2032,10 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork {
                     current.clear();
                 }
             }
+
+            if(traceLog){
+                log.trace("Completed forward pass: {} (\"{}\") - {}", i, vName, current.getClass().getSimpleName());
+            }
         }
 
         return activations;
@@ -2089,6 +2099,8 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork {
         }
         workspaceMgr.setHelperWorkspacePointers(helperWorkspaces);
 
+        boolean traceLog = log.isTraceEnabled();
+
         Map<String, INDArray> activations = new HashMap<>();
         //Do forward pass according to the topological ordering of the network
         int stopIndex;
@@ -2102,6 +2114,10 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork {
             String vName = current.getVertexName();
             int vIdx = current.getVertexIndex();
 
+            if(traceLog){
+                log.trace("About forward pass: {} (\"{}\") - {}", i, vName, current.getClass().getSimpleName());
+            }
+
             if(excludeIdxs != null && ArrayUtils.contains(excludeIdxs, vIdx)){
                 continue;
             }
@@ -2159,6 +2175,10 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork {
                     current.clear();
                 }
             }
+
+            if(traceLog){
+                log.trace("Completed forward pass: {} (\"{}\") - {}", i, vName, current.getClass().getSimpleName());
+            }
         }
         return activations;
     }
@@ -2557,6 +2577,9 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork {
         LinkedList<Triple<String, INDArray, Character>> gradients = new LinkedList<>();
         boolean[] setVertexEpsilon = new boolean[topologicalOrder.length]; //If true: already set epsilon for this vertex; later epsilons should be *added* to the existing one, not set
         MemoryWorkspace initialWorkspace = Nd4j.getMemoryManager().getCurrentWorkspace();
+
+        boolean traceLog = log.isTraceEnabled();
+
         try{
             for(int i=topologicalOrder.length-1; i>= 0; i--){
                 boolean hitFrozen = false;
@@ -2564,6 +2587,10 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork {
                 int vIdx = current.getVertexIndex();
                 String vertexName = current.getVertexName();
 
+                if(traceLog){
+                    log.trace("About backprop: {} (\"{}\") - {}", i, vertexName, current.getClass().getSimpleName());
+                }
+
                 //FIXME: make the frozen vertex feature extraction more flexible
                 if (current.hasLayer() && current.getLayer() instanceof FrozenLayer || current instanceof FrozenVertex){
                     hitFrozen = true;
@@ -2719,6 +2746,10 @@ public class ComputationGraph implements Serializable, Model, NeuralNetwork {
                     }
                     closeAtEndIteraton[i] = null;
                 }
+
+                if(traceLog){
+                    log.trace("Completed backprop: {} (\"{}\") - {}", i, vertexName, current.getClass().getSimpleName());
+                }
             }
 
         } finally {

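Every hunk in ComputationGraph.java above follows the same pattern: the level is checked once per forward/backward pass and cached in a local traceLog flag, then each vertex logs an "About ..." line before its work and a "Completed ..." line after, so a hang or crash mid-pass pinpoints the offending vertex. An illustrative sketch of that pattern with the DL4J specifics stripped out (class and method names here are hypothetical):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class TraceGuardSketch {
        private static final Logger log = LoggerFactory.getLogger(TraceGuardSketch.class);

        void forwardAll(Object[] layers) {
            boolean traceLog = log.isTraceEnabled();    // checked once per pass, not once per layer
            for (int i = 0; i < layers.length; i++) {
                if (traceLog) {
                    log.trace("About to forward pass: {} - {}", i, layers[i].getClass().getSimpleName());
                }
                // ... per-layer work goes here ...
                if (traceLog) {
                    log.trace("Completed forward pass: {} - {}", i, layers[i].getClass().getSimpleName());
                }
            }
        }
    }

With parameterized messages, a disabled log.trace call is already cheap; caching the flag keeps the guard explicit and holds it constant for the duration of a pass. The changes to MultiLayerNetwork.java below apply the identical pattern per layer rather than per graph vertex.
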
MultiLayerNetwork.java

@@ -1107,6 +1107,8 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, NeuralNetwork {
         List<INDArray> out = new ArrayList<>();
         out.add(workspaceMgr.leverageTo(ArrayType.INPUT, input)); //Probably unnecessary usually
 
+        boolean traceLog = log.isTraceEnabled();
+
         for( int i=0; i<=layerIndex; i++ ){
             try(MemoryWorkspace wsFFWorking = workspaceMgr.notifyScopeEntered(ArrayType.FF_WORKING_MEM)){
                 if (getLayerWiseConfigurations().getInputPreProcess(i) != null) {
@@ -1115,6 +1117,10 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, NeuralNetwork {
                     validateArrayWorkspaces(workspaceMgr, input, ArrayType.ACTIVATIONS, i, true, "Feed forward to layer (training)");
                 }
 
+                if(traceLog){
+                    log.trace("About to forward pass: {} - {}", i, layers[i].getClass().getSimpleName());
+                }
+
                 if(fwdPassType == FwdPassType.STANDARD){
                     input = layers[i].activate(input, true, workspaceMgr);
                 } else if(fwdPassType == FwdPassType.RNN_ACTIVATE_WITH_STORED_STATE){
@@ -1142,6 +1148,10 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, NeuralNetwork {
                 validateArrayWorkspaces(workspaceMgr, layers[i].input(), ArrayType.INPUT, i, false, "Feed forward to layer (training)");
 
                 out.add(input);
+
+                if(traceLog){
+                    log.trace("Completed forward pass: {} - {}", i, layers[i].getClass().getSimpleName());
+                }
             }
         }
 
@@ -1228,10 +1238,17 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, NeuralNetwork {
         MemoryWorkspace wsActCloseNext = null;
         MemoryWorkspace temp = null;
         MemoryWorkspace initialWorkspace = Nd4j.getMemoryManager().getCurrentWorkspace();
+
+        boolean traceLog = log.isTraceEnabled();
+
         try {
             for (int i = 0; i <= layerIndex; i++) {
                 LayerWorkspaceMgr mgr = (i % 2 == 0 ? mgrEven : mgrOdd);
 
+                if(traceLog){
+                    log.trace("About to forward pass: {} - {}", i, layers[i].getClass().getSimpleName());
+                }
+
                 //Edge case: for first layer with dropout, inputs can't be in previous workspace (as it hasn't been opened yet)
                 //Hence: put inputs in working memory
                 if(i == 0 && wsm != WorkspaceMode.NONE){
@@ -1300,6 +1317,10 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, NeuralNetwork {
                     temp = null;
                 }
 
+                if(traceLog){
+                    log.trace("Completed forward pass: {} - {}", i, layers[i].getClass().getSimpleName());
+                }
+
                 //Edge case: for first layer with dropout, inputs can't be in previous workspace (as it hasn't been opened yet)
                 //Hence: put inputs in working memory -> set back to default for next use of workspace mgr
                 if(i == 0 && wsm != WorkspaceMode.NONE){
@@ -1846,12 +1867,19 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, NeuralNetwork {
         MemoryWorkspace wsActGradCloseNext = null;
         MemoryWorkspace wsActGradTemp = null;
         MemoryWorkspace initialWorkspace = Nd4j.getMemoryManager().getCurrentWorkspace();
+
+        boolean traceLog = log.isTraceEnabled();
+
         try {
             for (int i = layers.length - 1; i >= 0; i--) {
                 if (layers[i] instanceof FrozenLayer) {
                     break;
                 }
+
+                if(traceLog){
+                    log.trace("About to backprop: {} - {}", i, layers[i].getClass().getSimpleName());
+                }
 
                 LayerWorkspaceMgr workspaceMgr = (i % 2 == 0 ? mgrEven : mgrOdd);
 
                 if (withOutputLayer && i == layers.length - 1) {
@@ -1927,6 +1955,10 @@ public class MultiLayerNetwork implements Serializable, Classifier, Layer, NeuralNetwork {
                     wsActGradCloseNext = wsActGradTemp;
                     wsActGradTemp = null;
                 }
+
+                if(traceLog){
+                    log.trace("Completed backprop: {} - {}", i, layers[i].getClass().getSimpleName());
+                }
             }
         } finally {
             if(wsActGradCloseNext != null){
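
For reference, a self-contained sketch that exercises the new MultiLayerNetwork messages once TRACE is enabled as shown earlier. The class name, layer sizes, and activations are illustrative, using the 2019-era DL4J configuration API:

    import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.layers.DenseLayer;
    import org.deeplearning4j.nn.conf.layers.OutputLayer;
    import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
    import org.nd4j.linalg.activations.Activation;
    import org.nd4j.linalg.factory.Nd4j;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    public class TraceLoggingDemo {
        public static void main(String[] args) {
            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .list()
                    .layer(new DenseLayer.Builder().nIn(4).nOut(8).activation(Activation.RELU).build())
                    .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                            .nIn(8).nOut(2).activation(Activation.IDENTITY).build())
                    .build();

            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();

            // With TRACE enabled for MultiLayerNetwork, this forward pass emits an
            // "About to forward pass: i - <LayerClass>" / "Completed forward pass: ..."
            // pair for each of the two layers.
            net.output(Nd4j.rand(1, 4));
        }
    }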