Daniel Konegen / MNIST_example

Dependencies:   mbed-os

Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers memory_planner.h Source File

memory_planner.h

00001 /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
00002 
00003 Licensed under the Apache License, Version 2.0 (the "License");
00004 you may not use this file except in compliance with the License.
00005 You may obtain a copy of the License at
00006 
00007     http://www.apache.org/licenses/LICENSE-2.0
00008 
00009 Unless required by applicable law or agreed to in writing, software
00010 distributed under the License is distributed on an "AS IS" BASIS,
00011 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00012 See the License for the specific language governing permissions and
00013 limitations under the License.
00014 ==============================================================================*/
00015 
00016 #ifndef TENSORFLOW_LITE_EXPERIMENTAL_MICRO_MEMORY_PLANNER_MEMORY_PLANNER_H_
00017 #define TENSORFLOW_LITE_EXPERIMENTAL_MICRO_MEMORY_PLANNER_MEMORY_PLANNER_H_
00018 
00019 #include "tensorflow/lite/c/c_api_internal.h"
00020 #include "tensorflow/lite/core/api/error_reporter.h"
00021 
00022 namespace tflite {
00023 
// Interface class for planning the layout of memory buffers during the
// execution of a graph.
// It's designed to be used by a client that iterates in any order through the
// buffers it wants to lay out, and then calls the getter functions for
// information about the calculated layout. For example:
//
// SomeMemoryPlanner planner;
// planner.AddBuffer(reporter, 100, 0, 1);  // Buffer 0
// planner.AddBuffer(reporter, 50, 2, 3);   // Buffer 1
// planner.AddBuffer(reporter, 50, 2, 3);   // Buffer 2
//
// int offset0;
// TF_EXPECT_OK(planner.GetOffsetForBuffer(reporter, 0, &offset0));
// int offset1;
// TF_EXPECT_OK(planner.GetOffsetForBuffer(reporter, 1, &offset1));
// int offset2;
// TF_EXPECT_OK(planner.GetOffsetForBuffer(reporter, 2, &offset2));
// const int arena_size_needed = planner.GetMaximumMemorySize();
//
// The goal is for applications to be able to experiment with different layout
// strategies without changing their client code, by swapping out classes that
// implement this interface.
class MemoryPlanner {
 public:
  MemoryPlanner() {}
  // Virtual destructor so concrete planners can be destroyed safely through
  // a MemoryPlanner pointer.
  virtual ~MemoryPlanner() {}

  // Pass information about a buffer's size and lifetime to the layout
  // algorithm. The order this is called implicitly assigns an index to the
  // result, so the buffer information that's passed into the N-th call of
  // this method will be used as the buffer_index argument to
  // GetOffsetForBuffer().
  //
  // `size` is the buffer size in bytes; `first_time_used` and
  // `last_time_used` bound the span of execution steps during which the
  // buffer must remain live (presumably inclusive, per the example above
  // where buffers 1 and 2 both span [2, 3] — confirm against an
  // implementation). Errors are reported through `error_reporter` and
  // signalled via the returned TfLiteStatus.
  virtual TfLiteStatus AddBuffer(tflite::ErrorReporter* error_reporter,
                                 int size, int first_time_used,
                                 int last_time_used) = 0;

  // The largest contiguous block of memory that's needed to hold the layout.
  virtual int GetMaximumMemorySize() = 0;
  // How many buffers have been added to the planner.
  virtual int GetBufferCount() = 0;
  // Calculated layout offset for the N-th buffer added to the planner.
  // On success, writes the buffer's byte offset (within an arena of at
  // least GetMaximumMemorySize() bytes, per the usage example above) to
  // `*offset`; errors are reported through `error_reporter` and signalled
  // via the returned TfLiteStatus.
  virtual TfLiteStatus GetOffsetForBuffer(tflite::ErrorReporter* error_reporter,
                                          int buffer_index, int* offset) = 0;
};
00068 
00069 }  // namespace tflite
00070 
00071 #endif  // TENSORFLOW_LITE_EXPERIMENTAL_MICRO_MEMORY_PLANNER_MEMORY_PLANNER_H_