From b703099debad988ba4183b77dd1f957dea62499f Mon Sep 17 00:00:00 2001 From: Nitika Shanker Date: Wed, 21 Feb 2024 19:41:52 +0000 Subject: [PATCH] #0: squash this --- ttnn/cpp/ttnn/operations/binary.hpp | 2 +- ttnn/ttnn/operations/core.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/ttnn/cpp/ttnn/operations/binary.hpp b/ttnn/cpp/ttnn/operations/binary.hpp index 8c22db4bbc4e..85da18142dae 100644 --- a/ttnn/cpp/ttnn/operations/binary.hpp +++ b/ttnn/cpp/ttnn/operations/binary.hpp @@ -47,7 +47,7 @@ inline ttnn::Tensor add( if (height_b == 1 or width_b == 1) { if (dtype.has_value()) { - TT_THROW("Bcast: Unsupported dtype argument."); + TT_THROW("ttnn.add: cannot change dtype when broadcasting"); } tt::tt_metal::BcastOpDim bcast_op_dim; if (height_b == 1 and width_b == 1) { diff --git a/ttnn/ttnn/operations/core.py b/ttnn/ttnn/operations/core.py index befd945f8e36..25f2039da9d1 100644 --- a/ttnn/ttnn/operations/core.py +++ b/ttnn/ttnn/operations/core.py @@ -814,7 +814,8 @@ def clone(tensor, memory_config: ttnn.MemoryConfig, dtype: ttnn.DataType): In the case where the layout is the same, the operation simply pad or unpad the last two dimensions depending on layout requested. Args: * :attr:`tensor`: the ttnn.Tensor - * :attr:`layout`: the layout of either ttnn.ROW_MAJOR_LAYOUT or ttnn.TILE_LAYOUT. + * :attr:`memory_config`: the `ttnn` memory config, e.g. ttnn.DRAM_MEMORY_CONFIG or ttnn.L1_MEMORY_CONFIG. + * :attr:`dtype`: the `ttnn` data type. Example:: >>> tensor = ttnn.to_device(ttnn.from_torch(torch.zeros((1, 1, 64, 32), dtype=torch.bfloat16, layout=ttnn.TILE_LAYOUT)), device, memory_config=ttnn.DRAM_MEMORY_CONFIG) >>> output = ttnn.clone(tensor, tnn.DRAM_MEMORY_CONFIG, tnn.bfloat8_b)