@@ -24,6 +24,15 @@ limitations under the License.
 #include "tensorflow/lite/micro/system_setup.h"
 #include "tensorflow/lite/schema/schema_generated.h"
 
+namespace {
+using HelloWorldOpResolver = tflite::MicroMutableOpResolver<1>;
+
+TfLiteStatus RegisterOps(HelloWorldOpResolver& op_resolver) {
+  TF_LITE_ENSURE_STATUS(op_resolver.AddFullyConnected());
+  return kTfLiteOk;
+}
+}  // namespace
+
 TfLiteStatus LoadFloatModelAndPerformInference() {
   // Map the model into a usable data structure. This doesn't involve any
   // copying or parsing, it's a very lightweight operation.
@@ -36,14 +45,16 @@ TfLiteStatus LoadFloatModelAndPerformInference() {
         model->version(), TFLITE_SCHEMA_VERSION);
   }
 
-  // This pulls in all the operation implementations we need
-  tflite::AllOpsResolver resolver;
+  HelloWorldOpResolver op_resolver;
+  TF_LITE_ENSURE_STATUS(RegisterOps(op_resolver));
 
-  constexpr int kTensorArenaSize = 2056;
+  // Arena size just a round number. The exact arena usage can be determined
+  // using the RecordingMicroInterpreter.
+  constexpr int kTensorArenaSize = 3000;
   uint8_t tensor_arena[kTensorArenaSize];
 
   // Build an interpreter to run the model with
-  tflite::MicroInterpreter interpreter(model, resolver, tensor_arena,
+  tflite::MicroInterpreter interpreter(model, op_resolver, tensor_arena,
                                        kTensorArenaSize);
 
   // Allocate memory from the tensor_arena for the model's tensors
@@ -97,14 +108,16 @@ TfLiteStatus LoadQuantModelAndPerformInference() {
         model->version(), TFLITE_SCHEMA_VERSION);
   }
 
-  // This pulls in all the operation implementations we need
-  tflite::AllOpsResolver resolver;
+  HelloWorldOpResolver op_resolver;
+  TF_LITE_ENSURE_STATUS(RegisterOps(op_resolver));
 
+  // Arena size just a round number. The exact arena usage can be determined
+  // using the RecordingMicroInterpreter.
   constexpr int kTensorArenaSize = 2056;
   uint8_t tensor_arena[kTensorArenaSize];
 
   // Build an interpreter to run the model with
-  tflite::MicroInterpreter interpreter(model, resolver, tensor_arena,
+  tflite::MicroInterpreter interpreter(model, op_resolver, tensor_arena,
                                        kTensorArenaSize);
 
   // Allocate memory from the tensor_arena for the model's tensors
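For reference, a minimal sketch (not part of this commit) of how the RecordingMicroInterpreter mentioned in the new arena-size comments could be used to measure the model's actual arena requirement. The MeasureArenaUsage helper below is hypothetical; the header path, GetMicroAllocator().PrintAllocations(), and the constructor signature are assumed to match the standard tflite-micro APIs.

// Sketch only: measure how much of the tensor arena this model really needs.
#include "tensorflow/lite/micro/recording_micro_interpreter.h"

TfLiteStatus MeasureArenaUsage(const tflite::Model* model,
                               HelloWorldOpResolver& op_resolver) {
  // Start from a deliberately generous arena, then read off the real usage.
  constexpr int kTensorArenaSize = 3000;
  uint8_t tensor_arena[kTensorArenaSize];

  // RecordingMicroInterpreter mirrors MicroInterpreter but records every
  // arena allocation it makes.
  tflite::RecordingMicroInterpreter interpreter(model, op_resolver,
                                                tensor_arena,
                                                kTensorArenaSize);
  TF_LITE_ENSURE_STATUS(interpreter.AllocateTensors());

  // Logs a per-allocation breakdown (head/tail usage), which shows how small
  // kTensorArenaSize can safely be made for this model.
  interpreter.GetMicroAllocator().PrintAllocations();
  return kTfLiteOk;
}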