From d945d7a596198b2c318fa0e6a0a3d62877fdb64b Mon Sep 17 00:00:00 2001
From: opfromthestart
Date: Tue, 31 Jan 2023 11:06:23 -0500
Subject: [PATCH 1/4] Some better error messages

---
 juice/src/layer.rs                       | 4 ++--
 juice/src/layers/common/linear.rs        | 3 +++
 juice/src/layers/container/sequential.rs | 3 +++
 3 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/juice/src/layer.rs b/juice/src/layer.rs
index fe62ff1c3..e7b4919b6 100644
--- a/juice/src/layer.rs
+++ b/juice/src/layer.rs
@@ -538,8 +538,8 @@ impl<B: IBackend> Layer<B> {
         let old_shape = self.input_blobs_data[input_i].read().unwrap().desc().clone();
         if old_shape.size() != reshaped_shape.size() {
             panic!(
-                "Input Shape Mismatch\nExpected {:?}\nActual {:?}",
-                reshaped_shape, old_shape
+                "Input Shape Mismatch at layer {}\nLayer has input shape {:?}\nGiven input has shape {:?}",
+                self.name, reshaped_shape, old_shape
             );
         }
         self.input_blobs_data[input_i]
diff --git a/juice/src/layers/common/linear.rs b/juice/src/layers/common/linear.rs
index 38d90b993..45cf18b8a 100644
--- a/juice/src/layers/common/linear.rs
+++ b/juice/src/layers/common/linear.rs
@@ -86,6 +86,9 @@ impl<B: IBackend + LayerOps<f32>> ILayer<B> for Linear {
         output_data: &mut Vec<ArcLock<SharedTensor<f32>>>,
         output_gradient: &mut Vec<ArcLock<SharedTensor<f32>>>,
     ) {
+        if input_data.is_empty() {
+            panic!("Linear layer expected input, but none was given.");
+        }
         let input = input_data[0].read().unwrap();
         let batch_size = input.desc()[0];
         // reshape top
diff --git a/juice/src/layers/container/sequential.rs b/juice/src/layers/container/sequential.rs
index 1808f8b83..c835ef564 100644
--- a/juice/src/layers/container/sequential.rs
+++ b/juice/src/layers/container/sequential.rs
@@ -293,6 +293,9 @@ impl<B: IBackend + LayerOps<f32> + 'static> ILayer<B> for Sequential<B> {
         output_data: &mut [ArcLock<SharedTensor<f32>>],
     ) {
         for layer in &self.layers {
+            if layer.borrow().input_blob_names.len() < input_data.len() {
+                panic!("Layer {} expected {} inputs but got {}.", layer.borrow().name, layer.borrow().input_blob_names.len(), input_data.len());
+            }
             for (i, (input, input_name)) in input_data.iter().zip(self.input_tensor_names.iter()).enumerate() {
                 if &layer.borrow().input_blob_names[i] == input_name {
                     layer.borrow_mut().input_blobs_data[i] = input.clone();

From 264f38c5ecfe6c9f47a817721c0eced908628253 Mon Sep 17 00:00:00 2001
From: opfromthestart
Date: Tue, 31 Jan 2023 11:13:21 -0500
Subject: [PATCH 2/4] Slightly more clear

---
 juice/src/layer.rs                       | 2 +-
 juice/src/layers/container/sequential.rs | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/juice/src/layer.rs b/juice/src/layer.rs
index e7b4919b6..2e0c73821 100644
--- a/juice/src/layer.rs
+++ b/juice/src/layer.rs
@@ -538,7 +538,7 @@ impl<B: IBackend> Layer<B> {
         let old_shape = self.input_blobs_data[input_i].read().unwrap().desc().clone();
         if old_shape.size() != reshaped_shape.size() {
             panic!(
-                "Input Shape Mismatch at layer {}\nLayer has input shape {:?}\nGiven input has shape {:?}",
+                "Input Shape Mismatch at layer {}\nLayer has input shape {:?}\nInput given has shape {:?}",
                 self.name, reshaped_shape, old_shape
             );
         }
diff --git a/juice/src/layers/container/sequential.rs b/juice/src/layers/container/sequential.rs
index c835ef564..36238b116 100644
--- a/juice/src/layers/container/sequential.rs
+++ b/juice/src/layers/container/sequential.rs
@@ -294,7 +294,7 @@ impl<B: IBackend + LayerOps<f32> + 'static> ILayer<B> for Sequential<B> {
     ) {
         for layer in &self.layers {
             if layer.borrow().input_blob_names.len() < input_data.len() {
-                panic!("Layer {} expected {} inputs but got {}.", layer.borrow().name, layer.borrow().input_blob_names.len(), input_data.len());
+                panic!("Layer {} expected {} input(s) but got {}.", layer.borrow().name, layer.borrow().input_blob_names.len(), input_data.len());
             }
             for (i, (input, input_name)) in input_data.iter().zip(self.input_tensor_names.iter()).enumerate() {
                 if &layer.borrow().input_blob_names[i] == input_name {

From 996d5c695cbd6358f3733555faf3d4f8da2f4241 Mon Sep 17 00:00:00 2001
From: opfromthestart
Date: Tue, 31 Jan 2023 11:25:23 -0500
Subject: [PATCH 3/4] Errors to clarify current batch size limitations.

---
 juice/src/layer.rs | 20 ++++++++++++++++----
 1 file changed, 16 insertions(+), 4 deletions(-)

diff --git a/juice/src/layer.rs b/juice/src/layer.rs
index 2e0c73821..cabe5c802 100644
--- a/juice/src/layer.rs
+++ b/juice/src/layer.rs
@@ -537,10 +537,22 @@ impl<B: IBackend> Layer<B> {
         // reshape input tensor to the reshaped shape
         let old_shape = self.input_blobs_data[input_i].read().unwrap().desc().clone();
         if old_shape.size() != reshaped_shape.size() {
-            panic!(
-                "Input Shape Mismatch at layer {}\nLayer has input shape {:?}\nInput given has shape {:?}",
-                self.name, reshaped_shape, old_shape
-            );
+            if reshaped_shape[1..] == old_shape[1..] {
+                eprintln!("Expected batch size {} but got batch size {}.", reshaped_shape[0], old_shape[0]);
+            }
+            else if reshaped_shape[1..] == old_shape {
+                eprintln!("Expected batch size {} but got batch size {}.", reshaped_shape[0], 1);
+            }
+            else {
+                eprintln!(
+                    "Input Shape Mismatch at layer {}\nLayer has input shape {:?}\nInput given has shape {:?}",
+                    self.name, reshaped_shape, old_shape
+                );
+            }
+            if reshaped_shape == old_shape[1..] {
+                eprintln!("You may have forgotten to specify a batch size in your model input.");
+            }
+            panic!();
         }
         self.input_blobs_data[input_i]
             .write()

From 71da871287b3f872dc27975e3aaf1abd06663576 Mon Sep 17 00:00:00 2001
From: opfromthestart
Date: Sat, 11 Feb 2023 17:14:02 -0500
Subject: [PATCH 4/4] More info on input shape mismatch

---
 juice/src/layer.rs | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/juice/src/layer.rs b/juice/src/layer.rs
index cabe5c802..1a8f77f28 100644
--- a/juice/src/layer.rs
+++ b/juice/src/layer.rs
@@ -537,18 +537,18 @@ impl<B: IBackend> Layer<B> {
         // reshape input tensor to the reshaped shape
         let old_shape = self.input_blobs_data[input_i].read().unwrap().desc().clone();
         if old_shape.size() != reshaped_shape.size() {
+            eprintln!(
+                "Input Shape Mismatch at layer {}\nLayer has input shape {:?}\nInput given has shape {:?}",
+                self.name, reshaped_shape, old_shape
+            );
             if reshaped_shape[1..] == old_shape[1..] {
+                eprintln!("This may be a batch size error.");
                 eprintln!("Expected batch size {} but got batch size {}.", reshaped_shape[0], old_shape[0]);
             }
             else if reshaped_shape[1..] == old_shape {
+                eprintln!("This may be a batch size error.");
                 eprintln!("Expected batch size {} but got batch size {}.", reshaped_shape[0], 1);
             }
-            else {
-                eprintln!(
-                    "Input Shape Mismatch at layer {}\nLayer has input shape {:?}\nInput given has shape {:?}",
-                    self.name, reshaped_shape, old_shape
-                );
-            }
             if reshaped_shape == old_shape[1..] {
                 eprintln!("You may have forgotten to specify a batch size in your model input.");
             }