author     Yifei Feng <yifeif@google.com>  2018-08-21 14:48:01 -0700
committer  TensorFlower Gardener <gardener@tensorflow.org>  2018-08-21 15:00:13 -0700
commit  0f02f05913e03889bbcb85e71a6d005a8519bfb9 (patch)
tree    c5b2bacb1b96d260b67cb56c208ce8f1b1025dae /tensorflow/contrib/lite/experimental
parent  3f24f93c2a32b2eae8951e5b272c3b647c5b9611 (diff)
Merged commit includes the following changes:
209663919 by yifeif <yifeif@google.com>:
    Internal change.
--
209663914 by amitpatankar <amitpatankar@google.com>:
    Fix the topk_op_test for numpy>1.15.
--
209660476 by jdduke <jdduke@google.com>:
    Fix model lifetime for TensorFlow Lite C# bindings: ensure the model
    exists for the duration of the interpreter, as the API requires
    (sketched below).
--
209655960 by scottzhu <scottzhu@google.com>:
    Unify the RNN Cell interface between TF and Keras.
--
209655731 by A. Unique TensorFlower <gardener@tensorflow.org>:
    Added tests for PredictionOps and PartitionExamplesOps.
--
209655291 by nolivia <nolivia@google.com>:
    Add a Rate class so that global_step/sec can be saved using
    tf.contrib.summary. It computes the rate between any pair of tensors,
    provided the numerator and denominator are broadcastable and have
    dtypes that can be cast to float64.
--
209654655 by kramerb <kramerb@google.com>:
    [XLA] Switch from tensorflow::gtl::InlinedVector to absl::InlinedVector.
    This one comes with extra goodies like a move constructor.
--
209653851 by A. Unique TensorFlower <gardener@tensorflow.org>:
    Internal build specification change.
--
PiperOrigin-RevId: 209663919
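For the TensorFlow Lite C# change (209660476), the native-handle ordering the fix enforces looks roughly like the following. This is a minimal sketch, not the plugin's code; it assumes the TFL_* P/Invoke declarations from Interpreter.cs are in scope and that TFL_Model and TFL_Interpreter are aliases for System.IntPtr. The actual change to the managed wrapper is in the diff below.

    // Sketch of the required lifetime ordering, assuming the TFL_* bindings
    // declared in Interpreter.cs are in scope. modelData is a byte[] holding
    // the flatbuffer model.
    GCHandle modelDataHandle = GCHandle.Alloc(modelData, GCHandleType.Pinned);
    IntPtr modelDataPtr = modelDataHandle.AddrOfPinnedObject();

    TFL_Model model = TFL_NewModel(modelDataPtr, modelData.Length);
    if (model == IntPtr.Zero) throw new Exception("Failed to create model");

    // The interpreter is built from the model and needs it for its whole
    // lifetime, so the model can no longer be deleted right after creation.
    TFL_Interpreter interpreter = TFL_NewInterpreter(model, /*options=*/IntPtr.Zero);
    if (interpreter == IntPtr.Zero) throw new Exception("Failed to create interpreter");

    // ... allocate tensors, invoke, read outputs ...

    // Tear down in reverse order: the interpreter first, then the model.
    TFL_DeleteInterpreter(interpreter);
    TFL_DeleteModel(model);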
Diffstat (limited to 'tensorflow/contrib/lite/experimental')
-rw-r--r--  tensorflow/contrib/lite/experimental/examples/unity/TensorFlowLitePlugin/Assets/TensorFlowLite/SDK/Scripts/Interpreter.cs  |  31
1 file changed, 17 insertions(+), 14 deletions(-)
diff --git a/tensorflow/contrib/lite/experimental/examples/unity/TensorFlowLitePlugin/Assets/TensorFlowLite/SDK/Scripts/Interpreter.cs b/tensorflow/contrib/lite/experimental/examples/unity/TensorFlowLitePlugin/Assets/TensorFlowLite/SDK/Scripts/Interpreter.cs
index b6905b5fbf..676783063d 100644
--- a/tensorflow/contrib/lite/experimental/examples/unity/TensorFlowLitePlugin/Assets/TensorFlowLite/SDK/Scripts/Interpreter.cs
+++ b/tensorflow/contrib/lite/experimental/examples/unity/TensorFlowLitePlugin/Assets/TensorFlowLite/SDK/Scripts/Interpreter.cs
@@ -29,15 +29,16 @@ namespace TensorFlowLite
   {
     private const string TensorFlowLibrary = "tensorflowlite_c";

-    private TFL_Interpreter handle;
+    private TFL_Model model;
+    private TFL_Interpreter interpreter;

     public Interpreter(byte[] modelData) {
       GCHandle modelDataHandle = GCHandle.Alloc(modelData, GCHandleType.Pinned);
       IntPtr modelDataPtr = modelDataHandle.AddrOfPinnedObject();
-      TFL_Model model = TFL_NewModel(modelDataPtr, modelData.Length);
-      handle = TFL_NewInterpreter(model, /*options=*/IntPtr.Zero);
-      TFL_DeleteModel(model);
-      if (handle == IntPtr.Zero) throw new Exception("Failed to create TensorFlowLite Interpreter");
+      model = TFL_NewModel(modelDataPtr, modelData.Length);
+      if (model == IntPtr.Zero) throw new Exception("Failed to create TensorFlowLite Model");
+      interpreter = TFL_NewInterpreter(model, /*options=*/IntPtr.Zero);
+      if (interpreter == IntPtr.Zero) throw new Exception("Failed to create TensorFlowLite Interpreter");
     }

     ~Interpreter() {
@@ -45,43 +46,45 @@ namespace TensorFlowLite
     }

     public void Dispose() {
-      if (handle != IntPtr.Zero) TFL_DeleteInterpreter(handle);
-      handle = IntPtr.Zero;
+      if (interpreter != IntPtr.Zero) TFL_DeleteInterpreter(interpreter);
+      interpreter = IntPtr.Zero;
+      if (model != IntPtr.Zero) TFL_DeleteModel(model);
+      model = IntPtr.Zero;
     }

     public void Invoke() {
-      ThrowIfError(TFL_InterpreterInvoke(handle));
+      ThrowIfError(TFL_InterpreterInvoke(interpreter));
     }

     public int GetInputTensorCount() {
-      return TFL_InterpreterGetInputTensorCount(handle);
+      return TFL_InterpreterGetInputTensorCount(interpreter);
     }

     public void SetInputTensorData(int inputTensorIndex, Array inputTensorData) {
       GCHandle tensorDataHandle = GCHandle.Alloc(inputTensorData, GCHandleType.Pinned);
       IntPtr tensorDataPtr = tensorDataHandle.AddrOfPinnedObject();
-      TFL_Tensor tensor = TFL_InterpreterGetInputTensor(handle, inputTensorIndex);
+      TFL_Tensor tensor = TFL_InterpreterGetInputTensor(interpreter, inputTensorIndex);
       ThrowIfError(TFL_TensorCopyFromBuffer(
           tensor, tensorDataPtr, Buffer.ByteLength(inputTensorData)));
     }

     public void ResizeInputTensor(int inputTensorIndex, int[] inputTensorShape) {
       ThrowIfError(TFL_InterpreterResizeInputTensor(
-          handle, inputTensorIndex, inputTensorShape, inputTensorShape.Length));
+          interpreter, inputTensorIndex, inputTensorShape, inputTensorShape.Length));
     }

     public void AllocateTensors() {
-      ThrowIfError(TFL_InterpreterAllocateTensors(handle));
+      ThrowIfError(TFL_InterpreterAllocateTensors(interpreter));
     }

     public int GetOutputTensorCount() {
-      return TFL_InterpreterGetOutputTensorCount(handle);
+      return TFL_InterpreterGetOutputTensorCount(interpreter);
     }

     public void GetOutputTensorData(int outputTensorIndex, Array outputTensorData) {
       GCHandle tensorDataHandle = GCHandle.Alloc(outputTensorData, GCHandleType.Pinned);
       IntPtr tensorDataPtr = tensorDataHandle.AddrOfPinnedObject();
-      TFL_Tensor tensor = TFL_InterpreterGetOutputTensor(handle, outputTensorIndex);
+      TFL_Tensor tensor = TFL_InterpreterGetOutputTensor(interpreter, outputTensorIndex);
       ThrowIfError(TFL_TensorCopyToBuffer(
           tensor, tensorDataPtr, Buffer.ByteLength(outputTensorData)));
     }
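For context, typical use of the managed wrapper after this change looks roughly like the sketch below. The class and method names (Interpreter, ResizeInputTensor, AllocateTensors, SetInputTensorData, Invoke, GetOutputTensorData, Dispose) come from Interpreter.cs above; the model path, tensor shapes, and surrounding class are illustrative assumptions, not part of this commit.

    // Hypothetical usage sketch of the Unity plugin's Interpreter wrapper.
    using System;
    using System.IO;
    using TensorFlowLite;

    public static class InferenceExample {
      public static void Run() {
        // Assumed model path; any TensorFlow Lite flatbuffer model works here.
        byte[] modelData = File.ReadAllBytes("model.tflite");
        Interpreter interpreter = new Interpreter(modelData);

        // The wrapper now keeps the native model handle alive until Dispose(),
        // so the interpreter is always backed by a valid model.
        interpreter.ResizeInputTensor(0, new int[] { 1, 4 });  // assumed shape
        interpreter.AllocateTensors();

        float[] input = { 1f, 2f, 3f, 4f };
        float[] output = new float[2];                         // assumed size

        interpreter.SetInputTensorData(0, input);
        interpreter.Invoke();
        interpreter.GetOutputTensorData(0, output);

        // Dispose deletes the interpreter first, then the model it referenced.
        interpreter.Dispose();
      }
    }

The class also has a finalizer (~Interpreter above), but calling Dispose() explicitly releases both native handles deterministically rather than waiting for garbage collection.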