From 1ce54d5c70ad6f9350f7350c592839654d48782e Mon Sep 17 00:00:00 2001 From: Jovan Serbedzija Date: Fri, 20 Dec 2024 11:17:58 +0100 Subject: [PATCH] Fix flatbuffer serialization for embedding backward op (#1642) As described in https://github.com/tenstorrent/tt-mlir/pull/1633, I checked if any other operations have the same problem, and the embedding backward operation was the only one. --- lib/Target/TTNN/TTNNToFlatbuffer.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/Target/TTNN/TTNNToFlatbuffer.cpp b/lib/Target/TTNN/TTNNToFlatbuffer.cpp index b7ff1d7d5..db35c13b1 100644 --- a/lib/Target/TTNN/TTNNToFlatbuffer.cpp +++ b/lib/Target/TTNN/TTNNToFlatbuffer.cpp @@ -780,8 +780,8 @@ createEmbeddingBackwardOp(FlatbufferObjectCache &cache, std::optional<::mlir::tt::ttnn::MemoryConfigAttr> memoryConfig = op.getMemoryConfig(); - auto output = cache.getOrCreate(op.getResult(), tensorValueToFlatbuffer, - kHostAllocatedAddress, kHostAllocatedSize); + auto out = cache.at<::tt::target::TensorRef>( + getOperandThroughDPSOps(op.getResult())); return ::tt::target::ttnn::CreateEmbeddingBackwardOp( *cache.fbb, in0, in1, in2, dtype.has_value() ? ::tt::mlir::ttnn::utils::toTargetDataType( dtype.value()) : ::tt::target::DataType::None, memoryConfig.has_value() ? cache.getOrCreate(memoryConfig.value(), memoryConfigToFlatbuffer) : 0, - output); + out); } template