tflite_beam_interpreter (tflite_beam v0.3.8)

An interpreter for a graph of nodes that input and output from tensors.

Summary

Functions

allocate_tensors(Self)
Allocate memory for tensors in the graph

execution_plan(Self)
Return the execution plan of the model.

get_input_name(Self, Index)
Get the name of the input tensor

get_output_name(Self, Index)
Get the name of the output tensor

get_signature_defs(Self)
Get SignatureDef map from the Metadata of a TfLite FlatBuffer buffer.

input_tensor(Self, Index, Data)
Fill the specified input tensor with data

inputs(Self)
Get the list of input tensors.

invoke(Self)
Run the forward pass (inference).

new()
New interpreter

new(ModelPath)
New interpreter with model filepath

new_from_buffer(Buffer)
New interpreter with model buffer

nodes_size(Self)
Return the number of ops in the model.

output_tensor(Self, Index)
Get the data of the output tensor

outputs(Self)
Get the list of output tensors.

predict(Self, Input)
Fill the input data into the corresponding input tensor(s) of the interpreter, call tflite_beam_interpreter:invoke/1, and return the output tensor(s).

set_inputs(Self, Inputs)
Provide a list of tensor indexes that are inputs to the model. Each index is bounds-checked, and this modifies the consistent_ flag of the interpreter.

set_num_threads(Self, NumThreads)
Set the number of threads available to the interpreter.

set_outputs(Self, Outputs)
Provide a list of tensor indexes that are outputs of the model. Each index is bounds-checked, and this modifies the consistent_ flag of the interpreter.

set_variables(Self, Variables)
Provide a list of tensor indexes that are variable tensors. Each index is bounds-checked, and this modifies the consistent_ flag of the interpreter.

signature_keys(Self)
Returns a list of all keys of the different method signatures defined in the model.

tensor(Self, TensorIndex)
Get any tensor in the graph by its id

tensors_size(Self)
Return the number of tensors in the model.

variables(Self)
Get the list of variable tensors.

Types

tflite_beam_tensor_type/0

-type tflite_beam_tensor_type() ::
          no_type |
          {f, 32} |
          {s, 32} |
          {u, 8} |
          {s, 64} |
          string | bool |
          {s, 16} |
          {c, 64} |
          {s, 8} |
          {f, 16} |
          {f, 64} |
          {c, 128} |
          {u, 64} |
          resource | variant |
          {u, 32}.

Functions

allocate_tensors(Self)

-spec allocate_tensors(reference()) -> ok | {error, binary()}.

Allocate memory for tensors in the graph

execution_plan(Self)

-spec execution_plan(reference()) -> [non_neg_integer()] | {error, binary()}.

Return the execution plan of the model.

Experimental interface, subject to change.

get_input_name(Self, Index)

-spec get_input_name(reference(), non_neg_integer()) -> {ok, binary()} | {error, binary()}.

Get the name of the input tensor

Note that the index here means the index in the result list of inputs/1. For example, if inputs/1 returns [42, 314], then 0 should be passed here to get the name of tensor 42
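A minimal sketch of the index semantics described above, assuming Interpreter is an interpreter reference created elsewhere (hypothetical):

  %% inputs/1 returns tensor ids; get_input_name/2 takes a position in that list.
  {ok, InputIds} = tflite_beam_interpreter:inputs(Interpreter),
  %% Index 0 refers to the first element of InputIds, not to tensor 0 itself.
  {ok, Name} = tflite_beam_interpreter:get_input_name(Interpreter, 0).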

get_output_name(Self, Index)

-spec get_output_name(reference(), non_neg_integer()) -> {ok, binary()} | {error, binary()}.

Get the name of the output tensor

Note that the index here means the index in the result list of outputs/1. For example, if outputs/1 returns [42, 314], then 0 should be passed here to get the name of tensor 42

get_signature_defs(Self)

-spec get_signature_defs(reference()) -> {ok, map()} | nil | {error, binary()}.

Get SignatureDef map from the Metadata of a TfLite FlatBuffer buffer.
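A minimal sketch of handling the three possible return values, assuming Interpreter is a hypothetical interpreter reference; the nil branch presumably indicates that the model carries no SignatureDef metadata:

  case tflite_beam_interpreter:get_signature_defs(Interpreter) of
      {ok, SignatureDefs} ->
          %% A map describing the model's signatures.
          SignatureDefs;
      nil ->
          %% No SignatureDef metadata found in the FlatBuffer.
          nil;
      {error, Reason} ->
          {error, Reason}
  end.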

input_tensor(Self, Index, Data)

-spec input_tensor(reference(), non_neg_integer(), binary()) -> ok | {error, binary()}.

Fill the specified input tensor with data

Note: although typed_input_tensor is available in the C++ API, what is actually passed to the NIF is binary data; therefore, no type information is assumed here.
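A minimal sketch of filling an input tensor, assuming a hypothetical model whose first input is a {f, 32} tensor with four elements; the floats are packed as little-endian, which matches the native byte order on most platforms:

  %% Pack four 32-bit floats into a binary and write them to input index 0.
  Input = << <<X:32/little-float>> || X <- [0.1, 0.2, 0.3, 0.4] >>,
  ok = tflite_beam_interpreter:input_tensor(Interpreter, 0, Input).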

inputs(Self)

-spec inputs(reference()) -> {ok, [non_neg_integer()]} | {error, binary()}.

Get the list of input tensors.

Returns a list of input tensor IDs.

invoke(Self)

-spec invoke(reference()) -> ok | {error, binary()}.

Run the forward pass (inference).
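An end-to-end sketch of a manual inference pass; "path/to/model.tflite" and InputBinary are placeholders that must match an actual model and its first input tensor:

  {ok, Interpreter} = tflite_beam_interpreter:new("path/to/model.tflite"),
  ok = tflite_beam_interpreter:allocate_tensors(Interpreter),
  ok = tflite_beam_interpreter:input_tensor(Interpreter, 0, InputBinary),
  ok = tflite_beam_interpreter:invoke(Interpreter),
  {ok, OutputBinary} = tflite_beam_interpreter:output_tensor(Interpreter, 0).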

new()

-spec new() -> {ok, reference()} | {error, binary()}.

New interpreter

new(ModelPath)

-spec new(list() | binary()) -> {ok, reference()} | {error, binary()}.

New interpreter with model filepath
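For example (the model path is a placeholder):

  {ok, Interpreter} = tflite_beam_interpreter:new("path/to/model.tflite").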

new_from_buffer(Buffer)

-spec new_from_buffer(binary()) -> {ok, reference()} | {error, binary()}.

New interpreter with model buffer
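A minimal sketch of loading the model bytes yourself and handing them to the interpreter; the path is a placeholder:

  {ok, Buffer} = file:read_file("path/to/model.tflite"),
  {ok, Interpreter} = tflite_beam_interpreter:new_from_buffer(Buffer).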

nodes_size(Self)

-spec nodes_size(reference()) -> non_neg_integer() | {error, binary()}.

Return the number of ops in the model.

output_tensor(Self, Index)

-spec output_tensor(reference(), non_neg_integer()) -> {ok, binary()} | {error, binary()}.

Get the data of the output tensor

Note that the index here means the index in the result list of outputs/1. For example, if outputs/1 returns [42, 314], then 0 should be passed here to get the data of tensor 42.

outputs(Self)

-spec outputs(reference()) -> {ok, [non_neg_integer()]} | {error, binary()}.

Get the list of output tensors.

Returns a list of output tensor IDs.

predict(Self, Input)

-spec predict(reference(), [binary()] | binary() | map()) ->
                 [#tflite_beam_tensor{name :: binary(),
                                      index :: non_neg_integer(),
                                      shape :: tuple(),
                                      shape_signature :: list(),
                                      type :: tflite_beam_tensor_type(),
                                      quantization_params ::
                                          #tflite_beam_quantization_params{scale :: term(),
                                                                           zero_point :: term(),
                                                                           quantized_dimension :: term()},
                                      sparsity_params :: term(),
                                      ref :: reference()} |
                  {error, binary()}] |
                 #tflite_beam_tensor{name :: binary(),
                                     index :: non_neg_integer(),
                                     shape :: tuple(),
                                     shape_signature :: list(),
                                     type :: tflite_beam_tensor_type(),
                                     quantization_params ::
                                         #tflite_beam_quantization_params{scale :: term(),
                                                                          zero_point :: term(),
                                                                          quantized_dimension :: term()},
                                     sparsity_params :: term(),
                                     ref :: reference()} |
                 {error, binary()}.

Fill the input data into the corresponding input tensor(s) of the interpreter, call tflite_beam_interpreter:invoke/1, and return the output tensor(s).
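A minimal sketch, assuming Interpreter was created from a single-input model and InputBinary is a placeholder binary matching that input's size and type; per the spec, the result is one #tflite_beam_tensor{} record (or a list of them for multi-output models):

  %% predict/2 fills the input, invokes the graph, and returns the output tensor(s).
  OutputTensor = tflite_beam_interpreter:predict(Interpreter, InputBinary).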

set_inputs(Self, Inputs)

-spec set_inputs(reference(), [integer()]) -> ok | {error, binary()}.

Provide a list of tensor indexes that are inputs to the model. Each index is bounds-checked, and this modifies the consistent_ flag of the interpreter.

set_num_threads(Self, NumThreads)

-spec set_num_threads(reference(), integer()) -> ok | {error, binary()}.

Set the number of threads available to the interpreter.

As the TfLite interpreter may internally apply a TfLite delegate by default (e.g. XNNPACK), the number of threads available to the default delegate should be set via the InterpreterBuilder APIs as follows:

  {ok, Interpreter} = tflite_beam_interpreter:new(),
  {ok, Builder} = tflite_beam_interpreter_builder:new(Model, Resolver),
  tflite_beam_interpreter_builder:set_num_threads(Builder, NumThreads),
  tflite_beam_interpreter_builder:build(Builder, Interpreter)

set_outputs(Self, Outputs)

-spec set_outputs(reference(), [integer()]) -> ok | {error, binary()}.

Provide a list of tensor indexes that are outputs of the model. Each index is bounds-checked, and this modifies the consistent_ flag of the interpreter.

set_variables(Self, Variables)

-spec set_variables(reference(), [integer()]) -> ok | {error, binary()}.

Provide a list of tensor indexes that are variable tensors. Each index is bounds-checked, and this modifies the consistent_ flag of the interpreter.

signature_keys(Self)

-spec signature_keys(reference()) -> [binary()] | {error, binary()}.

Returns a list of all keys of the different method signatures defined in the model.

WARNING: Experimental interface, subject to change

tensor(Self, TensorIndex)

-spec tensor(reference(), non_neg_integer()) ->
                #tflite_beam_tensor{name :: binary(),
                                    index :: non_neg_integer(),
                                    shape :: tuple(),
                                    shape_signature :: list(),
                                    type :: tflite_beam_tensor_type(),
                                    quantization_params ::
                                        #tflite_beam_quantization_params{scale :: term(),
                                                                         zero_point :: term(),
                                                                         quantized_dimension :: term()},
                                    sparsity_params :: term(),
                                    ref :: reference()} |
                {error, binary()}.

Get any tensor in the graph by its id

Note that the tensor_index here means the id of a tensor. For example, if inputs/1 returns [42, 314], then 42 should be passed here to get tensor 42.
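A minimal sketch, assuming Interpreter is a hypothetical interpreter whose inputs/1 result includes tensor id 42:

  %% Returns a #tflite_beam_tensor{} record carrying the tensor's name, index,
  %% shape, type, and quantization parameters, or {error, Reason}.
  Tensor = tflite_beam_interpreter:tensor(Interpreter, 42).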

tensors_size(Self)

-spec tensors_size(reference()) -> non_neg_integer() | {error, binary()}.

Return the number of tensors in the model.

variables(Self)

-spec variables(reference()) -> {ok, [non_neg_integer()]} | {error, binary()}.

Get the list of variable tensors.