Skip to content

Commit

Permalink
Minor fix of TTNNToFlatbuffer createOps (#1398)
Browse files Browse the repository at this point in the history
Concrete op types shouldn't be shadowed by template arguments when they are only ever going to be instantiated with concrete types of the same names anyway; the shadowing is confusing for no reason.
  • Loading branch information
azecevicTT authored Nov 26, 2024
1 parent 3d029b6 commit 4083e98
Showing 1 changed file with 0 additions and 8 deletions.
8 changes: 0 additions & 8 deletions lib/Target/TTNN/TTNNToFlatbuffer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -595,7 +595,6 @@ createReductionOp(FlatbufferObjectCache &cache, ReductionOp op) {
dim_arg, op.getKeepDim());
}

template <typename TransposeOp>
::flatbuffers::Offset<::tt::target::ttnn::TransposeOp>
createTransposeOp(FlatbufferObjectCache &cache, TransposeOp op) {
auto in =
Expand All @@ -608,7 +607,6 @@ createTransposeOp(FlatbufferObjectCache &cache, TransposeOp op) {
return ::tt::target::ttnn::CreateTransposeOp(*cache.fbb, in, out, dim0, dim1);
}

template <typename ConcatOp>
::flatbuffers::Offset<::tt::target::ttnn::ConcatOp>
createConcatOp(FlatbufferObjectCache &cache, ConcatOp op) {
std::vector<::flatbuffers::Offset<::tt::target::TensorRef>> ins;
Expand All @@ -623,7 +621,6 @@ createConcatOp(FlatbufferObjectCache &cache, ConcatOp op) {
return ::tt::target::ttnn::CreateConcatOpDirect(*cache.fbb, &ins, out, dim);
}

template <typename EmbeddingOp>
::flatbuffers::Offset<::tt::target::ttnn::EmbeddingOp>
createEmbeddingOp(FlatbufferObjectCache &cache, EmbeddingOp op) {
auto in0 =
Expand All @@ -635,7 +632,6 @@ createEmbeddingOp(FlatbufferObjectCache &cache, EmbeddingOp op) {
return ::tt::target::ttnn::CreateEmbeddingOp(*cache.fbb, in0, in1, output);
}

template <typename ReshapeOp>
::flatbuffers::Offset<::tt::target::ttnn::ReshapeOp>
createReshapeOp(FlatbufferObjectCache &cache, ReshapeOp op) {
auto in =
Expand All @@ -648,7 +644,6 @@ createReshapeOp(FlatbufferObjectCache &cache, ReshapeOp op) {
return ::tt::target::ttnn::CreateReshapeOp(*cache.fbb, in, out, shape);
}

template <typename SliceOp>
::flatbuffers::Offset<::tt::target::ttnn::SliceOp>
createSliceOp(FlatbufferObjectCache &cache, SliceOp op) {
auto in =
Expand All @@ -666,7 +661,6 @@ createSliceOp(FlatbufferObjectCache &cache, SliceOp op) {
step);
}

template <typename MaxPool2dOp>
::flatbuffers::Offset<::tt::target::ttnn::MaxPool2dOp>
createMaxPool2dOp(FlatbufferObjectCache &cache, MaxPool2dOp op) {
auto in =
Expand All @@ -684,7 +678,6 @@ createMaxPool2dOp(FlatbufferObjectCache &cache, MaxPool2dOp op) {
op.getPaddingWidth());
}

template <typename SoftmaxOp>
::flatbuffers::Offset<::tt::target::ttnn::SoftmaxOp>
createSoftmaxOp(FlatbufferObjectCache &cache, SoftmaxOp op) {
auto in =
Expand All @@ -696,7 +689,6 @@ createSoftmaxOp(FlatbufferObjectCache &cache, SoftmaxOp op) {
return ::tt::target::ttnn::CreateSoftmaxOp(*cache.fbb, in, out, dimension);
}

template <typename DeallocateOp>
::flatbuffers::Offset<::tt::target::ttnn::DeallocateOp>
createDeallocateOp(FlatbufferObjectCache &cache, DeallocateOp op) {
auto in =
Expand Down

0 comments on commit 4083e98

Please sign in to comment.