From 63288e4502a82a8df740291d2b1d9d909cd430a0 Mon Sep 17 00:00:00 2001
From: Eyon
Date: Wed, 31 Jan 2024 05:10:50 +0000
Subject: [PATCH] #4003: Look further back in time per op file to see if we had a prior failure
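
The gist of the change: the new tests/ttnn/sweep_tests/build_rst_sweep_results.py (which replaces build_html_sweep_results.py, see the diff below) no longer reads only the newest sweep run. For each op file it walks backward through earlier runs until it finds a recorded result, so an op that is missing from the latest artifacts still reports its most recent, possibly failing, outcome. A minimal Python sketch of that look-back idea, assuming a hypothetical `runs` list sorted newest-first; the helper names here are illustrative, not the script's actual API:

    # Minimal sketch of the look-back idea only; names are hypothetical,
    # not the real API of build_rst_sweep_results.py.
    from typing import Optional

    def latest_result_for_op(op_name: str, runs: list[dict]) -> Optional[dict]:
        """Walk runs from newest to oldest and return the first recorded
        result for this op, so an op missing from the newest run still
        reports its most recent (possibly failing) outcome."""
        for run in runs:  # runs are assumed sorted newest-first
            result = run.get(op_name)  # per-op result for this run, if any
            if result is not None:
                return result
        return None  # never swept: nothing to render
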
---
 docs/Makefile                                 |   4 +-
 docs/requirements-docs.txt                    |   1 +
 docs/source/index.rst                         |   1 +
 docs/source/ttnn/onboarding.rst               |   2 +-
 docs/source/ttnn/sweeps/acos.rst              |  12 +
 docs/source/ttnn/sweeps/add.rst               |  72 +++
 docs/source/ttnn/sweeps/asin.rst              |  12 +
 docs/source/ttnn/sweeps/atan.rst              |  12 +
 docs/source/ttnn/sweeps/average_pool2d.rst    |  10 +
 docs/source/ttnn/sweeps/concat.rst            | 488 ++++++++++++++++++
 docs/source/ttnn/sweeps/cos.rst               |  12 +
 docs/source/ttnn/sweeps/eq.rst                |  12 +
 docs/source/ttnn/sweeps/eqz.rst               |  12 +
 docs/source/ttnn/sweeps/exp.rst               |  12 +
 docs/source/ttnn/sweeps/gelu.rst              |  12 +
 docs/source/ttnn/sweeps/gez.rst               |  12 +
 docs/source/ttnn/sweeps/gt.rst                |  12 +
 docs/source/ttnn/sweeps/gte.rst               |  12 +
 docs/source/ttnn/sweeps/gtz.rst               |  12 +
 docs/source/ttnn/sweeps/index.rst             |  66 +++
 docs/source/ttnn/sweeps/layer_norm.rst        |  24 +
 docs/source/ttnn/sweeps/lez.rst               |  12 +
 docs/source/ttnn/sweeps/linear.rst            |  24 +
 docs/source/ttnn/sweeps/lt.rst                |  12 +
 docs/source/ttnn/sweeps/lte.rst               |  12 +
 docs/source/ttnn/sweeps/ltz.rst               |  12 +
 docs/source/ttnn/sweeps/matmul.rst            |  24 +
 docs/source/ttnn/sweeps/max_pool.rst          |  16 +
 docs/source/ttnn/sweeps/mul.rst               |  72 +++
 docs/source/ttnn/sweeps/ne.rst                |  12 +
 docs/source/ttnn/sweeps/nez.rst               |  12 +
 docs/source/ttnn/sweeps/relu.rst              |  12 +
 docs/source/ttnn/sweeps/repeat_interleave.rst | 200 +++++++
 docs/source/ttnn/sweeps/rsqrt.rst             |  12 +
 docs/source/ttnn/sweeps/sin.rst               |  12 +
 docs/source/ttnn/sweeps/softmax.rst           |  20 +
 docs/source/ttnn/sweeps/sub.rst               |  72 +++
 docs/source/ttnn/sweeps/tan.rst               |  12 +
 docs/source/ttnn/sweeps/tanh.rst              |  12 +
 .../sweeps/transformer_attention_softmax.rst  |  12 +
 .../sweeps/transformer_concatenate_heads.rst  |  16 +
 ..._split_query_key_value_and_split_heads.rst |  16 +
 docs/source/ttnn/sweeps/upsample.rst          | 104 ++++
 .../sweep_tests/build_html_sweep_results.py   | 197 -------
 .../sweep_tests/build_rst_sweep_results.py    | 299 +++++++++++
 45 files changed, 1816 insertions(+), 200 deletions(-)
 create mode 100644 docs/source/ttnn/sweeps/acos.rst
 create mode 100644 docs/source/ttnn/sweeps/add.rst
 create mode 100644 docs/source/ttnn/sweeps/asin.rst
 create mode 100644 docs/source/ttnn/sweeps/atan.rst
 create mode 100644 docs/source/ttnn/sweeps/average_pool2d.rst
 create mode 100644 docs/source/ttnn/sweeps/concat.rst
 create mode 100644 docs/source/ttnn/sweeps/cos.rst
 create mode 100644 docs/source/ttnn/sweeps/eq.rst
 create mode 100644 docs/source/ttnn/sweeps/eqz.rst
 create mode 100644 docs/source/ttnn/sweeps/exp.rst
 create mode 100644 docs/source/ttnn/sweeps/gelu.rst
 create mode 100644 docs/source/ttnn/sweeps/gez.rst
 create mode 100644 docs/source/ttnn/sweeps/gt.rst
 create mode 100644 docs/source/ttnn/sweeps/gte.rst
 create mode 100644 docs/source/ttnn/sweeps/gtz.rst
 create mode 100644 docs/source/ttnn/sweeps/index.rst
 create mode 100644 docs/source/ttnn/sweeps/layer_norm.rst
 create mode 100644 docs/source/ttnn/sweeps/lez.rst
 create mode 100644 docs/source/ttnn/sweeps/linear.rst
 create mode 100644 docs/source/ttnn/sweeps/lt.rst
 create mode 100644 docs/source/ttnn/sweeps/lte.rst
 create mode 100644 docs/source/ttnn/sweeps/ltz.rst
 create mode 100644 docs/source/ttnn/sweeps/matmul.rst
 create mode 100644 docs/source/ttnn/sweeps/max_pool.rst
 create mode 100644 docs/source/ttnn/sweeps/mul.rst
 create mode 100644 docs/source/ttnn/sweeps/ne.rst
 create mode 100644 docs/source/ttnn/sweeps/nez.rst
 create mode 100644 docs/source/ttnn/sweeps/relu.rst
 create mode 100644 docs/source/ttnn/sweeps/repeat_interleave.rst
 create mode 100644 docs/source/ttnn/sweeps/rsqrt.rst
 create mode 100644 docs/source/ttnn/sweeps/sin.rst
 create mode 100644 docs/source/ttnn/sweeps/softmax.rst
 create mode 100644 docs/source/ttnn/sweeps/sub.rst
 create mode 100644 docs/source/ttnn/sweeps/tan.rst
 create mode 100644 docs/source/ttnn/sweeps/tanh.rst
 create mode 100644 docs/source/ttnn/sweeps/transformer_attention_softmax.rst
 create mode 100644 docs/source/ttnn/sweeps/transformer_concatenate_heads.rst
 create mode 100644 docs/source/ttnn/sweeps/transformer_split_query_key_value_and_split_heads.rst
 create mode 100644 docs/source/ttnn/sweeps/upsample.rst
 delete mode 100644 tests/ttnn/sweep_tests/build_html_sweep_results.py
 create mode 100644 tests/ttnn/sweep_tests/build_rst_sweep_results.py

diff --git a/docs/Makefile b/docs/Makefile
index 1cc8e3951190..19eb1458f024 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -16,7 +16,7 @@ DOCS_VERSION ?= latest
 BRANCH = $(shell git rev-parse --abbrev-ref HEAD)
 GITHUB_TOKEN ?= INSERT_TOKEN_HERE

-TTNN_SWEEPS_DIR = $(HTMLDIR)/ttnn_sweeps
+TTNN_SWEEPS_DIR = $(SOURCEDIR)/ttnn/sweeps

 # Put it first so that "make" without argument is like "make help".
 help:
@@ -46,7 +46,7 @@ ttnn_sweeps/check_directory:

 ttnn_sweeps:
 	@echo "Note that GITHUB_TOKEN must be set before calling this"
-	@cd .. && python tests/ttnn/sweep_tests/build_html_sweep_results.py --dir docs/$(TTNN_SWEEPS_DIR) --token $(GITHUB_TOKEN)
+	@cd .. && python tests/ttnn/sweep_tests/build_rst_sweep_results.py --dir docs/$(TTNN_SWEEPS_DIR) --token $(GITHUB_TOKEN)

 server:
 	@echo "Navigate to: \033[4;33mlocalhost:$(PORT)/index.html\033[0m"
diff --git a/docs/requirements-docs.txt b/docs/requirements-docs.txt
index 1459d6b63e8d..fb3b9f3cf710 100644
--- a/docs/requirements-docs.txt
+++ b/docs/requirements-docs.txt
@@ -8,3 +8,4 @@ nbsphinx==0.9.3
 sphinxcontrib-jquery==4.1
 ipython==8.12.3
 pandoc==2.3
+tabulate==0.9.0
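
The tabulate dependency added above is presumably what build_rst_sweep_results.py uses to render the result tables in the generated .rst files further down; tabulate's "rst" table format emits exactly this kind of ==== simple-table markup. A small sketch (the row fields mirror the generated files; how the real script assembles them is an assumption):

    from tabulate import tabulate  # tabulate==0.9.0, added above

    # Hypothetical rows in the shape of the generated sweep tables below.
    rows = [
        {"status": "crashed", "exception": "Exception: ...", "height": 384, "width": 1024},
        {"status": "passed", "exception": None, "height": 384, "width": 1024},
    ]

    # tablefmt="rst" emits the ==== simple-table markup seen in the .rst files.
    print(tabulate(rows, headers="keys", tablefmt="rst"))
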
diff --git a/docs/source/index.rst b/docs/source/index.rst
index e8f47fa51fca..ca09e9d96619 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -29,6 +29,7 @@ Welcome to TT-Metalium documentation!
    ttnn/onboarding
    ttnn/converting_torch_model_to_ttnn
    ttnn/dependencies/index.rst
+   ttnn/sweeps/index.rst

 .. toctree::
    :caption: Models
diff --git a/docs/source/ttnn/onboarding.rst b/docs/source/ttnn/onboarding.rst
index d95219026e2d..a2cb652ce876 100644
--- a/docs/source/ttnn/onboarding.rst
+++ b/docs/source/ttnn/onboarding.rst
@@ -13,7 +13,7 @@ is that the long term benefits will help us maintain our objectives. Please foll
    * When creating the branch, please follow the pattern of 'TTNN-<issue number>-<short description>'. For example, if the issue is 4730, the branch name would be `TTNN-4730-concat-operation`
    * Use the `fallback` reference implementation for the operation and implement the functionality.
    * Add the documentation in the rst format for the operation under `ttnn documentation `_
-   * Add sweep tests to the branch using the fallback implementation under `ttnn sweep tests `_
+   * Add :ref:`sweep tests` to the branch using the fallback implementation under `ttnn sweep tests `_
 3. Update the issue referencing the pull requests after verifying that all the sweep tests run as expected. A TTNN CODEOWNER will review the PR and verify that the API is acceptable and that the sweep tests reflect the intended functionality.
 4. If the pull request (PR) is accepted, it will be merged into the main branch, and a new branch should be created that adds the implementation.
    * The fallback implementation for the Operation should be left in place and will continue to be used for op-by-op PCC comparisons when debugging models (see `--ttnn-enable-debug-decorator`).
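
Step 2 above asks contributors to add sweep tests against the fallback implementation. For orientation, here is a sketch of the shape a sweep-test file conventionally takes under tests/ttnn/sweep_tests: a `parameters` dict that the runner expands into combinations, plus a `run` function that reports pass/fail. The exact runner contract may differ, so mirror an existing sweep file rather than this sketch:

    # Illustrative sweep-test skeleton; the runner's exact signature is an
    # assumption here, so copy an existing file under tests/ttnn/sweep_tests.
    import torch
    import ttnn

    # The runner sweeps the Cartesian product of these parameter lists.
    parameters = {
        "height": [384, 1024],
        "width": [1024, 4096],
    }

    def run(height, width, *, device):
        torch_input = torch.rand((height, width), dtype=torch.bfloat16)
        torch_output = torch.exp(torch_input)  # reference result

        input_tensor = ttnn.from_torch(torch_input, layout=ttnn.TILE_LAYOUT, device=device)
        output_tensor = ttnn.to_torch(ttnn.exp(input_tensor))

        # Conventionally returns (passed, message) for the results table.
        return torch.allclose(torch_output, output_tensor, atol=1e-1), None
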
diff --git a/docs/source/ttnn/sweeps/acos.rst b/docs/source/ttnn/sweeps/acos.rst
new file mode 100644
index 000000000000..938362ae850c
--- /dev/null
+++ b/docs/source/ttnn/sweeps/acos.rst
@@ -0,0 +1,12 @@
+.. _ttnn.sweep_test_acos:
+
+acos
+====================================================================
+==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ==============================================================================================================================
+  ..  status  exception  batch_sizes  height  width  input_dtype  input_memory_config  output_memory_config
+==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ==============================================================================================================================
+   0  crashed  Exception: ttnn.acos: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   1  crashed  Exception: ttnn.acos: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   2  crashed  Exception: ttnn.acos: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   3  crashed  Exception: ttnn.acos: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ==============================================================================================================================
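
An editorial note on the generated tables in this patch: almost every "crashed" row is the same failure, the op rejecting a ROW_MAJOR input. A generic ttnn usage sketch (not part of this patch) showing the layout conversion that avoids it:

    # Generic ttnn usage sketch (not from this patch): the recurring
    # "Tensor must be of layout {}, but got Layout.ROW_MAJOR" rows go away
    # once the input is converted to tile layout before calling the op.
    import torch
    import ttnn

    device = ttnn.open_device(device_id=0)

    torch_input = torch.rand((1, 384, 1024), dtype=torch.bfloat16)
    row_major = ttnn.from_torch(torch_input, device=device)  # ROW_MAJOR by default
    tiled = ttnn.to_layout(row_major, ttnn.TILE_LAYOUT)  # convert before acos/add/...
    result = ttnn.acos(tiled)

    ttnn.close_device(device)
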
diff --git a/docs/source/ttnn/sweeps/add.rst b/docs/source/ttnn/sweeps/add.rst
new file mode 100644
index 000000000000..77a355b773dd
--- /dev/null
+++ b/docs/source/ttnn/sweeps/add.rst
@@ -0,0 +1,72 @@
+.. _ttnn.sweep_test_add:
+
+add
+====================================================================
+==== ======== ========================================================================================== ============= ======== ======= =========== ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
+  ..  status  exception  batch_sizes  height  width  broadcast  input_a_dtype  input_b_dtype  input_a_layout  input_b_layout  input_b_memory_config  input_a_memory_config  output_memory_config
+==== ======== ========================================================================================== ============= ======== ======= =========== ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
+   0  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   1  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   2  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   3  passed  nan  (1,)  384  1024  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   4  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   5  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   6  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   7  passed  nan  (1,)  384  1024  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   8  skipped  Broadcasting along width is not supported for row major layout  (1,)  384  1024  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   9  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  10  skipped  Broadcasting along width is not supported for row major layout  (1,)  384  1024  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  11  passed  nan  (1,)  384  1024  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  12  skipped  Broadcasting along width is not supported for row major layout  (1,)  384  1024  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  13  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  14  skipped  Broadcasting along width is not supported for row major layout  (1,)  384  1024  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  15  passed  nan  (1,)  384  1024  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  16  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  17  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  18  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  19  passed  nan  (1,)  384  4096  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  20  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  21  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  22  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  23  passed  nan  (1,)  384  4096  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  24  skipped  Broadcasting along width is not supported for row major layout  (1,)  384  4096  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  25  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  26  skipped  Broadcasting along width is not supported for row major layout  (1,)  384  4096  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  27  passed  nan  (1,)  384  4096  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  28  skipped  Broadcasting along width is not supported for row major layout  (1,)  384  4096  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  29  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  30  skipped  Broadcasting along width is not supported for row major layout  (1,)  384  4096  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  31  passed  nan  (1,)  384  4096  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  32  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  33  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  34  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  35  passed  nan  (1,)  1024  1024  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  36  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  37  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  38  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  39  passed  nan  (1,)  1024  1024  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  40  skipped  Broadcasting along width is not supported for row major layout  (1,)  1024  1024  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  41  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  42  skipped  Broadcasting along width is not supported for row major layout  (1,)  1024  1024  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  43  passed  nan  (1,)  1024  1024  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  44  skipped  Broadcasting along width is not supported for row major layout  (1,)  1024  1024  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  45  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  46  skipped  Broadcasting along width is not supported for row major layout  (1,)  1024  1024  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  47  passed  nan  (1,)  1024  1024  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  48  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  49  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  50  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  51  passed  nan  (1,)  1024  4096  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  52  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  53  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  54  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  55  passed  nan  (1,)  1024  4096  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  56  skipped  Broadcasting along width is not supported for row major layout  (1,)  1024  4096  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  57  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  58  skipped  Broadcasting along width is not supported for row major layout  (1,)  1024  4096  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  59  passed  nan  (1,)  1024  4096  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  60  skipped  Broadcasting along width is not supported for row major layout  (1,)  1024  4096  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  61  crashed  Exception: ttnn.add: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  62  skipped  Broadcasting along width is not supported for row major layout  (1,)  1024  4096  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  63  passed  nan  (1,)  1024  4096  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+==== ======== ========================================================================================== ============= ======== ======= =========== ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
diff --git a/docs/source/ttnn/sweeps/asin.rst b/docs/source/ttnn/sweeps/asin.rst
new file mode 100644
index 000000000000..9762a609df5b
--- /dev/null
+++ b/docs/source/ttnn/sweeps/asin.rst
@@ -0,0 +1,12 @@
+.. _ttnn.sweep_test_asin:
+
+asin
+====================================================================
+==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ==============================================================================================================================
+  ..  status  exception  batch_sizes  height  width  input_dtype  input_memory_config  output_memory_config
+==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ==============================================================================================================================
+   0  crashed  Exception: ttnn.asin: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   1  crashed  Exception: ttnn.asin: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   2  crashed  Exception: ttnn.asin: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   3  crashed  Exception: ttnn.asin: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ==============================================================================================================================
diff --git a/docs/source/ttnn/sweeps/atan.rst b/docs/source/ttnn/sweeps/atan.rst
new file mode 100644
index 000000000000..f6f2863e4908
--- /dev/null
+++ b/docs/source/ttnn/sweeps/atan.rst
@@ -0,0 +1,12 @@
+.. _ttnn.sweep_test_atan:
+
+atan
+====================================================================
+==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ==============================================================================================================================
+  ..  status  exception  batch_sizes  height  width  input_dtype  input_memory_config  output_memory_config
+==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ==============================================================================================================================
+   0  crashed  Exception: ttnn.atan: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   1  crashed  Exception: ttnn.atan: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   2  crashed  Exception: ttnn.atan: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   3  crashed  Exception: ttnn.atan: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ==============================================================================================================================
diff --git a/docs/source/ttnn/sweeps/average_pool2d.rst b/docs/source/ttnn/sweeps/average_pool2d.rst
new file mode 100644
index 000000000000..7ec67db539f9
--- /dev/null
+++ b/docs/source/ttnn/sweeps/average_pool2d.rst
@@ -0,0 +1,10 @@
+.. _ttnn.sweep_test_average_pool2d:
+
+average_pool2d
+====================================================================
+==== ======== =========== =============== =================
+  ..  status  exception  input_shape  dtype
+==== ======== =========== =============== =================
+   0  passed  nan  [1, 2048, 7, 7]  DataType.BFLOAT16
+   1  passed  nan  [1, 64, 1, 32]  DataType.BFLOAT16
+==== ======== =========== =============== =================
diff --git a/docs/source/ttnn/sweeps/concat.rst b/docs/source/ttnn/sweeps/concat.rst
new file mode 100644
index 000000000000..11920289a754
--- /dev/null
+++ b/docs/source/ttnn/sweeps/concat.rst
@@ -0,0 +1,488 @@
+.. _ttnn.sweep_test_concat:
+
+concat
+====================================================================
+==== =================== ============================================================================================= =================== ================= ============================= ============================= ================ ================= ==============================================================================================================================
+  ..  status  exception  number_of_tensors  rank_of_tensors  max_random_size_of_each_dim  dimension_to_concatenate_on  layout  dtype  memory_config
+==== =================== ============================================================================================= =================== ================= ============================= ============================= ================ ================= ==============================================================================================================================
+   0  crashed  Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1  1  1  32  0  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   1  crashed  Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1  1  1  32  0  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+   2  skipped  Tile layout is only supported for tensors with rank >= 2  1  1  32  0  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   3  skipped  Tile layout is only supported for tensors with rank >= 2  1  1  32  0  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+   4  crashed  Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1  1  1  32  1  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   5  crashed  Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1  1  1  32  1  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+   6  skipped  Tile layout is only supported for tensors with rank >= 2  1  1  32  1  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   7  skipped  Tile layout is only supported for tensors with rank >= 2  1  1  32  1  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+   8  crashed  Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1  1  1  32  2  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   9  crashed  Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1  1  1  32  2  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+  10  skipped  Tile layout is only supported for tensors with rank >= 2  1  1  32  2  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  11  skipped  Tile layout is only supported for tensors with rank >= 2  1  1  32  2  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+  12  crashed  Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1  1  1  32  3  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  13  crashed  Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1  1  1  32  3  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+  14  skipped  Tile layout is only supported for tensors with rank >= 2  1  1  32  3  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  15  skipped  Tile layout is only supported for tensors with rank >= 2  1  1  32  3  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+  16  crashed  Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1  1  1  32  4  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  17  crashed  Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1  1  1  32  4  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+  18  skipped  Tile layout is only supported for tensors with rank >= 2  1  1  32  4  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  19  skipped  Tile layout is only supported for tensors with rank >= 2  1  1  32  4  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+  20  crashed  Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1  1  1  32  5  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  21  crashed  Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1  1  1  32  5  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+  22  skipped  Tile layout is only supported for tensors with rank >= 2  1  1  32  5  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  23  skipped  Tile layout is only supported for tensors with rank >= 2  1  1  32  5  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+  24  crashed  Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR  1  2  32  0  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  25  crashed  Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR  1  2  32  0  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+  26  is_expected_to_fail  You must have at least two tensors to concat!  1  2  32  0  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  27  is_expected_to_fail  You must have at least two tensors to concat!  1  2  32  0  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+  28  crashed  Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR  1  2  32  1  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  29  crashed  Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR  1  2  32  1  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+  30  is_expected_to_fail  You must have at least two tensors to concat!  1  2  32  1  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  31  is_expected_to_fail  You must have at least two tensors to concat!  1  2  32  1  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+  32  crashed  Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR  1  2  32  2  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  33  crashed  Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR  1  2  32  2  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+  34  is_expected_to_fail  You must have at least two tensors to concat!  1  2  32  2  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  35  is_expected_to_fail  You must have at least two tensors to concat!  1  2  32  2  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+  36  crashed  Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR  1  2  32  3  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  37  crashed  Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR  1  2  32  3  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+  38  is_expected_to_fail  You must have at least two tensors to concat!  1  2  32  3  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  39  is_expected_to_fail  You must have at least two tensors to concat!
1 2 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 40 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 2 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 41 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 2 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 42 is_expected_to_fail You must have at least two tensors to concat! 1 2 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 43 is_expected_to_fail You must have at least two tensors to concat! 1 2 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 44 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 2 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 45 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 2 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 46 is_expected_to_fail You must have at least two tensors to concat! 1 2 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 47 is_expected_to_fail You must have at least two tensors to concat! 1 2 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 48 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 3 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 49 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 3 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 50 is_expected_to_fail You must have at least two tensors to concat! 1 3 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 51 is_expected_to_fail You must have at least two tensors to concat! 
1 3 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 52 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 3 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 53 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 3 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 54 is_expected_to_fail You must have at least two tensors to concat! 1 3 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 55 is_expected_to_fail You must have at least two tensors to concat! 1 3 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 56 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 3 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 57 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 3 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 58 is_expected_to_fail You must have at least two tensors to concat! 1 3 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 59 is_expected_to_fail You must have at least two tensors to concat! 1 3 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 60 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 3 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 61 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 3 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 62 is_expected_to_fail You must have at least two tensors to concat! 1 3 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 63 is_expected_to_fail You must have at least two tensors to concat! 
1 3 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 64 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 3 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 65 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 3 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 66 is_expected_to_fail You must have at least two tensors to concat! 1 3 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 67 is_expected_to_fail You must have at least two tensors to concat! 1 3 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 68 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 3 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 69 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 3 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 70 is_expected_to_fail You must have at least two tensors to concat! 1 3 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 71 is_expected_to_fail You must have at least two tensors to concat! 1 3 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 72 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 4 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 73 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 4 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 74 is_expected_to_fail You must have at least two tensors to concat! 1 4 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 75 is_expected_to_fail You must have at least two tensors to concat! 
1 4 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 76 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 4 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 77 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 4 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 78 is_expected_to_fail You must have at least two tensors to concat! 1 4 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 79 is_expected_to_fail You must have at least two tensors to concat! 1 4 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 80 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 4 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 81 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 4 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 82 is_expected_to_fail You must have at least two tensors to concat! 1 4 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 83 is_expected_to_fail You must have at least two tensors to concat! 1 4 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 84 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 4 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 85 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 4 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 86 is_expected_to_fail You must have at least two tensors to concat! 1 4 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 87 is_expected_to_fail You must have at least two tensors to concat! 
1 4 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 88 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 4 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 89 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 4 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 90 is_expected_to_fail You must have at least two tensors to concat! 1 4 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 91 is_expected_to_fail You must have at least two tensors to concat! 1 4 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 92 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 4 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 93 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 1 4 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 94 is_expected_to_fail You must have at least two tensors to concat! 1 4 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 95 is_expected_to_fail You must have at least two tensors to concat! 
1 4 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 96 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 2 1 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 97 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 2 1 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 98 skipped Tile layout is only supported for tensors with rank >= 2 2 1 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 99 skipped Tile layout is only supported for tensors with rank >= 2 2 1 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 100 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 2 1 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 101 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 2 1 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 102 skipped Tile layout is only supported for tensors with rank >= 2 2 1 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 103 skipped Tile layout is only supported for tensors with rank >= 2 2 1 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 104 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 2 1 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 105 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 2 1 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 106 skipped Tile layout is only supported for tensors with rank >= 2 2 1 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 107 skipped Tile layout is only supported for tensors with rank >= 2 2 1 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 108 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 2 1 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 109 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 2 1 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 110 skipped Tile layout is only supported for tensors with rank >= 2 2 1 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 111 skipped Tile layout is only supported for tensors with rank >= 2 2 1 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 112 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 2 1 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 113 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 2 1 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 114 skipped Tile layout is only supported for tensors with rank >= 2 2 1 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 115 skipped Tile layout is only supported for tensors with rank >= 2 2 1 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 116 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 2 1 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 117 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 2 1 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 118 skipped Tile layout is only supported for tensors with rank >= 2 2 1 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 119 skipped Tile layout is only supported for tensors with rank >= 2 2 1 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 120 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 2 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 121 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 2 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 122 passed nan 2 2 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 123 passed nan 2 2 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 124 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 2 32 1 Layout.ROW_MAJOR 
DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 125 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 2 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 126 passed nan 2 2 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 127 passed nan 2 2 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 128 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 2 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 129 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 2 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 130 is_expected_to_fail ttnn: Dimension out of range: dim 2 cannot be used for tensors of rank 2 2 2 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 131 is_expected_to_fail ttnn: Dimension out of range: dim 2 cannot be used for tensors of rank 2 2 2 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 132 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 2 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 133 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 2 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 134 is_expected_to_fail ttnn: Dimension out of range: dim 3 cannot be used for tensors of rank 2 2 2 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 135 is_expected_to_fail ttnn: Dimension out of range: dim 3 cannot be used for tensors of rank 2 2 2 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 136 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 2 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 137 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 2 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 138 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 2 2 2 32 4 Layout.TILE DataType.BFLOAT16 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 139 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 2 2 2 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 140 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 2 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 141 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 2 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 142 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 2 2 2 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 143 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 2 2 2 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 144 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 3 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 145 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 3 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 146 passed nan 2 3 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 147 passed nan 2 3 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 148 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 3 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 149 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 3 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 150 passed nan 2 3 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 151 passed nan 2 3 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 152 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 3 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 153 crashed Exception: ttnn.concat: Tensor must be of layout {}, but 
got Layout.ROW_MAJOR 2 3 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 154 passed nan 2 3 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 155 passed nan 2 3 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 156 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 3 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 157 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 3 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 158 is_expected_to_fail ttnn: Dimension out of range: dim 3 cannot be used for tensors of rank 3 2 3 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 159 is_expected_to_fail ttnn: Dimension out of range: dim 3 cannot be used for tensors of rank 3 2 3 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 160 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 3 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 161 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 3 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 162 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 3 2 3 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 163 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 3 2 3 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 164 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 3 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 165 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 3 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 166 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 3 2 3 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 167 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 3 2 3 32 5 
Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 168 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 4 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 169 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 4 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 170 passed nan 2 4 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 171 passed nan 2 4 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 172 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 4 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 173 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 4 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 174 passed nan 2 4 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 175 passed nan 2 4 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 176 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 4 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 177 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 4 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 178 passed nan 2 4 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 179 passed nan 2 4 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 180 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 4 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 181 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 4 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 182 passed nan 2 4 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 183 passed nan 2 4 32 3 
Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 184 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 4 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 185 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 4 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 186 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 4 2 4 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 187 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 4 2 4 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 188 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 4 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 189 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 2 4 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 190 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 4 2 4 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 191 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 4 2 4 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 192 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 3 1 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 193 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 3 1 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 194 skipped Tile layout is only supported for tensors with rank >= 2 3 1 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 195 skipped Tile layout is only supported for tensors with rank >= 2 3 1 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 196 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 3 1 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 197 crashed Exception: ttnn.concat: Tensor must be of rank {2, 
3, 4}, but got 1 3 1 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 198 skipped Tile layout is only supported for tensors with rank >= 2 3 1 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 199 skipped Tile layout is only supported for tensors with rank >= 2 3 1 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 200 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 3 1 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 201 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 3 1 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 202 skipped Tile layout is only supported for tensors with rank >= 2 3 1 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 203 skipped Tile layout is only supported for tensors with rank >= 2 3 1 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 204 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 3 1 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 205 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 3 1 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 206 skipped Tile layout is only supported for tensors with rank >= 2 3 1 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 207 skipped Tile layout is only supported for tensors with rank >= 2 3 1 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 208 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 3 1 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 209 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 3 1 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 210 skipped Tile layout is only supported for tensors with rank >= 2 3 1 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 211 skipped Tile layout is only supported for tensors with rank >= 2 3 1 32 4 Layout.TILE DataType.BFLOAT16 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 212 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 3 1 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 213 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 3 1 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 214 skipped Tile layout is only supported for tensors with rank >= 2 3 1 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 215 skipped Tile layout is only supported for tensors with rank >= 2 3 1 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 216 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 2 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 217 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 2 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 218 passed nan 3 2 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 219 passed nan 3 2 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 220 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 2 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 221 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 2 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 222 passed nan 3 2 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 223 passed nan 3 2 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 224 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 2 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 225 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 2 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 226 is_expected_to_fail ttnn: Dimension out of range: dim 2 cannot be used for tensors of rank 2 3 2 32 2 Layout.TILE DataType.BFLOAT16 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 227 is_expected_to_fail ttnn: Dimension out of range: dim 2 cannot be used for tensors of rank 2 3 2 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 228 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 2 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 229 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 2 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 230 is_expected_to_fail ttnn: Dimension out of range: dim 3 cannot be used for tensors of rank 2 3 2 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 231 is_expected_to_fail ttnn: Dimension out of range: dim 3 cannot be used for tensors of rank 2 3 2 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 232 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 2 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 233 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 2 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 234 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 2 3 2 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 235 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 2 3 2 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 236 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 2 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 237 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 2 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 238 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 2 3 2 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 239 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 2 3 2 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 240 
crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 3 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 241 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 3 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 242 passed nan 3 3 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 243 passed nan 3 3 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 244 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 3 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 245 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 3 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 246 passed nan 3 3 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 247 passed nan 3 3 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 248 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 3 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 249 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 3 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 250 passed nan 3 3 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 251 passed nan 3 3 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 252 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 3 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 253 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 3 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 254 is_expected_to_fail ttnn: Dimension out of range: dim 3 cannot be used for tensors of rank 3 3 3 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 255 is_expected_to_fail ttnn: Dimension out of range: dim 3 cannot be used for tensors of rank 3 3 3 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 256 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 3 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 257 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 3 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 258 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 3 3 3 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 259 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 3 3 3 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 260 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 3 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 261 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 3 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 262 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 3 3 3 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 263 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 3 3 3 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 264 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 4 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 265 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 4 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 266 passed nan 3 4 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 267 passed nan 3 4 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 268 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 4 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 269 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 4 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 270 passed nan 3 4 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 271 passed nan 3 4 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 272 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 4 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 273 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 4 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 274 passed nan 3 4 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 275 passed nan 3 4 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 276 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 4 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 277 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 4 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 278 passed nan 3 4 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 279 passed nan 3 4 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 280 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 4 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 281 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 4 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 282 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 4 3 4 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 283 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 4 3 4 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 284 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 4 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 285 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 3 4 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 286 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 4 3 4 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 287 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 4 3 4 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 288 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 4 1 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 289 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 4 1 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 290 skipped Tile layout is only supported for tensors with rank >= 2 4 1 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 291 skipped Tile layout is only supported for tensors with rank >= 2 4 1 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 292 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 4 1 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 293 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 4 1 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 294 skipped Tile layout is only supported for tensors with rank >= 2 4 1 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 295 skipped Tile layout is only supported for tensors with rank >= 2 4 1 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 296 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 4 1 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 297 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 4 1 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 298 skipped Tile layout is only supported for tensors with rank >= 2 4 1 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 299 skipped Tile layout is only supported for tensors with rank >= 2 4 1 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 300 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 4 1 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 301 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 4 1 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 302 skipped Tile layout is only supported for tensors with rank >= 2 4 1 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 303 skipped Tile layout is only supported for tensors with rank >= 2 4 1 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 304 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 4 1 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 305 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 4 1 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 306 skipped Tile layout is only supported for tensors with rank >= 2 4 1 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 307 skipped Tile layout is only supported for tensors with rank >= 2 4 1 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 308 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 4 1 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 309 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 4 1 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 310 skipped Tile layout is only supported for tensors with rank >= 2 4 1 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 311 skipped Tile layout is only supported for tensors with rank >= 2 4 1 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 312 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 2 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 313 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 2 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 314 passed nan 4 2 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 315 passed nan 4 2 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 316 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 2 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 317 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 2 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 318 passed nan 4 2 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 319 passed nan 4 2 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 320 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 2 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 321 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 2 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 322 is_expected_to_fail ttnn: Dimension out of range: dim 2 cannot be used for tensors of rank 2 4 2 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 323 is_expected_to_fail ttnn: Dimension out of range: dim 2 cannot be used for tensors of rank 2 4 2 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 324 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 2 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 325 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 2 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 326 is_expected_to_fail ttnn: Dimension out of range: dim 3 cannot be used for tensors of rank 2 4 2 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 327 is_expected_to_fail ttnn: Dimension out of range: dim 3 cannot be used for tensors of rank 2 4 2 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 328 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 2 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 329 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 2 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 330 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 2 4 2 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 331 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 2 4 2 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 332 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 2 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 333 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 2 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 334 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 2 4 2 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 335 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 2 4 2 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 336 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 3 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 337 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 3 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 338 passed nan 4 3 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 339 passed nan 4 3 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 340 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 3 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 341 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 3 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 342 passed nan 4 3 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 343 passed nan 4 3 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 344 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 3 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 345 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 3 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 346 passed nan 4 3 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 347 passed nan 4 3 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 348 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 3 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 349 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 3 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 350 is_expected_to_fail ttnn: Dimension out of range: dim 3 cannot be used for tensors of rank 3 4 3 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 351 is_expected_to_fail ttnn: Dimension out of range: dim 3 cannot be used for tensors of rank 3 4 3 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 352 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 3 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 353 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 3 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 354 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 3 4 3 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 355 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 3 4 3 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 356 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 3 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 357 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 3 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 358 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 3 4 3 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 359 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 3 4 3 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 360 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 4 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 361 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 4 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 362 passed nan 4 4 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 363 passed nan 4 4 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 364 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 4 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 365 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 4 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 366 passed nan 4 4 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 367 passed nan 4 4 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 368 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 4 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 369 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 4 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 370 passed nan 4 4 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 371 passed nan 4 4 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 372 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 4 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 373 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 4 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 374 passed nan 4 4 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 375 passed nan 4 4 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 376 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 4 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 377 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 4 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 378 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 4 4 4 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 379 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 4 4 4 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 380 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 4 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 381 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 4 4 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 382 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 4 4 4 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 383 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 4 4 4 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 384 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 5 1 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 385 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 5 1 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 386 skipped Tile layout is only supported for tensors with rank >= 2 5 1 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 387 skipped Tile layout is only supported for tensors with rank >= 2 5 1 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 388 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 5 1 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 389 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 5 1 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 390 skipped Tile layout is only supported for tensors with rank >= 2 5 1 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 391 skipped Tile layout is only supported for tensors with rank >= 2 5 1 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 392 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 5 1 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 393 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 5 1 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 394 skipped Tile layout is only supported for tensors with rank >= 2 5 1 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 395 skipped Tile layout is only supported for tensors with rank >= 2 5 1 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 396 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 5 1 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 397 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 5 1 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 398 skipped Tile layout is only supported for tensors with rank >= 2 5 1 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 399 skipped Tile layout is only supported for tensors with rank >= 2 5 1 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 400 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 5 1 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 401 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 5 1 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 402 skipped Tile layout is only supported for tensors with rank >= 2 5 1 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 403 skipped Tile layout is only supported for tensors with rank >= 2 5 1 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 404 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 5 1 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 405 crashed Exception: ttnn.concat: Tensor must be of rank {2, 3, 4}, but got 1 5 1 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 406 skipped Tile layout is only supported for tensors with rank >= 2 5 1 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 407 skipped Tile layout is only supported for tensors with rank >= 2 5 1 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 408 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 2 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 409 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 2 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 410 passed nan 5 2 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 411 passed nan 5 2 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 412 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 2 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 413 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 2 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 414 passed nan 5 2 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 415 passed nan 5 2 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 416 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 2 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 417 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 2 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 418 is_expected_to_fail ttnn: Dimension out of range: dim 2 cannot be used for tensors of rank 2 5 2 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 419 is_expected_to_fail ttnn: Dimension out of range: dim 2 cannot be used for tensors of rank 2 5 2 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 420 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 2 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 421 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 2 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 422 is_expected_to_fail ttnn: Dimension out of range: dim 3 cannot be used for tensors of rank 2 5 2 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 423 is_expected_to_fail ttnn: Dimension out of range: dim 3 cannot be used for tensors of rank 2 5 2 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 424 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 2 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 425 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 2 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 426 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 2 5 2 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 427 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 2 5 2 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 428 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 2 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 429 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 2 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 430 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 2 5 2 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 431 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 2 5 2 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 432 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 3 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 433 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 3 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 434 passed nan 5 3 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 435 passed nan 5 3 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 436 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 3 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 437 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 3 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 438 passed nan 5 3 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 439 passed nan 5 3 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 440 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 3 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 441 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 3 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 442 passed nan 5 3 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 443 passed nan 5 3 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 444 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 3 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 445 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 3 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 446 is_expected_to_fail ttnn: Dimension out of range: dim 3 cannot be used for tensors of rank 3 5 3 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 447 is_expected_to_fail ttnn: Dimension out of range: dim 3 cannot be used for tensors of rank 3 5 3 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 448 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 3 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 449 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 3 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 450 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 3 5 3 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 451 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 3 5 3 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 452 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 3 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 453 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 3 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 454 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 3 5 3 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 455 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 3 5 3 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 456 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 4 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 457 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 4 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 458 passed nan 5 4 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 459 passed nan 5 4 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 460 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 4 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 461 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 4 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 462 passed nan 5 4 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 463 passed nan 5 4 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 464 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 4 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 465 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 4 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 466 passed nan 5 4 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 467 passed nan 5 4 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 468 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 4 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 469 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 4 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 470 passed nan 5 4 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 471 passed nan 5 4 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 472 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 4 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 473 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 4 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 474 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 4 5 4 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 475 is_expected_to_fail ttnn: Dimension out of range: dim 4 cannot be used for tensors of rank 4 5 4 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 476 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 4 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 477 crashed Exception: ttnn.concat: Tensor must be of layout {}, but got Layout.ROW_MAJOR 5 4 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+ 478 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 4 5 4 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 479 is_expected_to_fail ttnn: Dimension out of range: dim 5 cannot be used for tensors of rank 4 5 4 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+==== =================== ============================================================================================= =================== ================= ============================= ============================= ================ ================= ==============================================================================================================================
diff --git a/docs/source/ttnn/sweeps/cos.rst b/docs/source/ttnn/sweeps/cos.rst
new file mode 100644
index 000000000000..5f0f734b6012
--- /dev/null
+++ b/docs/source/ttnn/sweeps/cos.rst
@@ -0,0 +1,12 @@
+.. _ttnn.sweep_test_cos:
+
+cos
+====================================================================
+==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== ===========
+ .. status exception batch_sizes height width input_dtype input_memory_config output_memory_config layout
+==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== ===========
+ 0 passed nan (1,) 384 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE
+ 1 passed nan (1,) 384 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE
+ 2 passed nan (1,) 1024 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE
+ 3 passed nan (1,) 1024 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE
+==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== ===========
diff --git a/docs/source/ttnn/sweeps/eq.rst b/docs/source/ttnn/sweeps/eq.rst
new file mode 100644
index 000000000000..a1f1dd20054b
--- /dev/null
+++ b/docs/source/ttnn/sweeps/eq.rst
@@ -0,0 +1,12 @@
+.. _ttnn.sweep_test_eq:
+
+eq
+====================================================================
+==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
+ .. status exception batch_sizes height width input_a_dtype input_b_dtype input_a_layout input_b_layout input_b_memory_config input_a_memory_config output_memory_config
+==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
+ 0 passed nan (1,) 384 1024 DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 1 passed nan (1,) 384 4096 DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 2 passed nan (1,) 1024 1024 DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+ 3 passed nan (1,) 1024 4096 DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
diff --git a/docs/source/ttnn/sweeps/eqz.rst b/docs/source/ttnn/sweeps/eqz.rst
new file mode 100644
index 000000000000..3e5b8047a103
--- /dev/null
+++ b/docs/source/ttnn/sweeps/eqz.rst
@@ -0,0 +1,12 @@
+.. _ttnn.sweep_test_eqz:
+
+eqz
+====================================================================
+==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== ===========
+ .. status exception batch_sizes height width input_dtype input_memory_config output_memory_config layout
+==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== ===========
+ 0 passed nan (1,) 384 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE
+ 1 passed nan (1,) 384 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE
+ 2 passed nan (1,) 1024 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE
+ 3 passed nan (1,) 1024 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE
+==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== ===========
diff --git a/docs/source/ttnn/sweeps/exp.rst b/docs/source/ttnn/sweeps/exp.rst
new file mode 100644
index 000000000000..29a295aadbee
--- /dev/null
+++ b/docs/source/ttnn/sweeps/exp.rst
@@ -0,0 +1,12 @@
+.. _ttnn.sweep_test_exp:
+
+exp
+====================================================================
+==== ======== ========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ==============================================================================================================================
+ ..
status exception batch_sizes height width input_dtype input_memory_config output_memory_config +==== ======== ========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== + 0 crashed Exception: ttnn.exp: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 1 crashed Exception: ttnn.exp: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 2 crashed Exception: ttnn.exp: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 3 crashed Exception: ttnn.exp: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) +==== ======== ========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== diff --git a/docs/source/ttnn/sweeps/gelu.rst b/docs/source/ttnn/sweeps/gelu.rst new file mode 100644 index 000000000000..0437783209e6 --- /dev/null +++ b/docs/source/ttnn/sweeps/gelu.rst @@ -0,0 +1,12 @@ +.. _ttnn.sweep_test_gelu: + +gelu +==================================================================== +==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== + .. 
status exception batch_sizes height width input_dtype input_memory_config output_memory_config +==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== + 0 crashed Exception: ttnn.gelu: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 1 crashed Exception: ttnn.gelu: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 2 crashed Exception: ttnn.gelu: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 3 crashed Exception: ttnn.gelu: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) +==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== diff --git a/docs/source/ttnn/sweeps/gez.rst b/docs/source/ttnn/sweeps/gez.rst new file mode 100644 index 000000000000..8e0affbed1f9 --- /dev/null +++ b/docs/source/ttnn/sweeps/gez.rst @@ -0,0 +1,12 @@ +.. _ttnn.sweep_test_gez: + +gez +==================================================================== +==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== =========== + .. 
status exception batch_sizes height width input_dtype input_memory_config output_memory_config layout +==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== =========== + 0 passed nan (1,) 384 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE + 1 passed nan (1,) 384 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE + 2 passed nan (1,) 1024 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE + 3 passed nan (1,) 1024 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE +==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== =========== diff --git a/docs/source/ttnn/sweeps/gt.rst b/docs/source/ttnn/sweeps/gt.rst new file mode 100644 index 000000000000..462f4e98415f --- /dev/null +++ b/docs/source/ttnn/sweeps/gt.rst @@ -0,0 +1,12 @@ +.. _ttnn.sweep_test_gt: + +gt +==================================================================== +==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ============================================================================================================================== + .. 
status exception batch_sizes height width input_a_dtype input_b_dtype input_a_layout input_b_layout input_b_memory_config input_a_memory_config output_memory_config +==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ============================================================================================================================== + 0 passed nan (1,) 384 1024 DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 1 passed nan (1,) 384 4096 DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 2 passed nan (1,) 1024 1024 DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 3 passed nan (1,) 1024 4096 DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) +==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ============================================================================================================================== diff --git a/docs/source/ttnn/sweeps/gte.rst b/docs/source/ttnn/sweeps/gte.rst new file mode 100644 index 000000000000..d02293219a47 --- /dev/null +++ b/docs/source/ttnn/sweeps/gte.rst @@ -0,0 +1,12 @@ +.. 
_ttnn.sweep_test_gte: + +gte +==================================================================== +==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ============================================================================================================================== + .. status exception batch_sizes height width input_a_dtype input_b_dtype input_a_layout input_b_layout input_b_memory_config input_a_memory_config output_memory_config +==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ============================================================================================================================== + 0 passed nan (1,) 384 1024 DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 1 passed nan (1,) 384 4096 DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 2 passed nan (1,) 1024 1024 DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 3 passed nan (1,) 1024 4096 DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) +==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== 
============================================================================================================================== diff --git a/docs/source/ttnn/sweeps/gtz.rst b/docs/source/ttnn/sweeps/gtz.rst new file mode 100644 index 000000000000..e9b9678c44f9 --- /dev/null +++ b/docs/source/ttnn/sweeps/gtz.rst @@ -0,0 +1,12 @@ +.. _ttnn.sweep_test_gtz: + +gtz +==================================================================== +==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== =========== + .. status exception batch_sizes height width input_dtype input_memory_config output_memory_config layout +==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== =========== + 0 passed nan (1,) 384 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE + 1 passed nan (1,) 384 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE + 2 passed nan (1,) 1024 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE + 3 passed nan (1,) 1024 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE +==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== =========== diff --git a/docs/source/ttnn/sweeps/index.rst b/docs/source/ttnn/sweeps/index.rst new file mode 100644 index 000000000000..c069418dcd3c --- /dev/null +++ b/docs/source/ttnn/sweeps/index.rst @@ -0,0 +1,66 @@ + +.. _ttnn.sweep_tests: + +Sweep Test Results +================== + +Recent New Failures +------------------- + +We have had no new failures since the prior run. + +.. toctree:: + :maxdepth: 2 + :hidden: + + + + + + +All Sweep Tests +-------------------- + +These are the sweep test results for commit hash 5d391c6035dbbe59db024c9860bdf9fe9684af2f + +.. 
toctree:: + :maxdepth: 2 + + acos + add + asin + atan + average_pool2d + concat + cos + eq + eqz + exp + gelu + gez + gt + gte + gtz + layer_norm + lez + linear + lt + lte + ltz + matmul + max_pool + mul + ne + nez + relu + repeat_interleave + rsqrt + sin + softmax + sub + tan + tanh + transformer_attention_softmax + transformer_concatenate_heads + transformer_split_query_key_value_and_split_heads + upsample diff --git a/docs/source/ttnn/sweeps/layer_norm.rst b/docs/source/ttnn/sweeps/layer_norm.rst new file mode 100644 index 000000000000..939c8e263d16 --- /dev/null +++ b/docs/source/ttnn/sweeps/layer_norm.rst @@ -0,0 +1,24 @@ +.. _ttnn.sweep_test_layer_norm: + +layer_norm +==================================================================== +==== ======== ================================================================================================= ============= ======== ======= ===================== ========= ================= ============================================================================================================================== ============================================================================================================================== + .. status exception batch_sizes height width use_weight_and_bias epsilon input_dtype input_memory_config output_memory_config +==== ======== ================================================================================================= ============= ======== ======= ===================== ========= ================= ============================================================================================================================== ============================================================================================================================== + 0 crashed Exception: ttnn.layer_norm: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 768 False 1e-06 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 1 crashed Exception: ttnn.layer_norm: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 768 False 1e-12 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 2 crashed Exception: ttnn.layer_norm: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 768 True 1e-06 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 3 crashed Exception: ttnn.layer_norm: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 768 True 1e-12 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 4 crashed Exception: ttnn.layer_norm: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 1024 False 1e-06 DataType.BFLOAT16 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 5 crashed Exception: ttnn.layer_norm: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 1024 False 1e-12 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 6 crashed Exception: ttnn.layer_norm: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 1024 True 1e-06 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 7 crashed Exception: ttnn.layer_norm: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 1024 True 1e-12 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 8 crashed Exception: ttnn.layer_norm: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 768 False 1e-06 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 9 crashed Exception: ttnn.layer_norm: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 768 False 1e-12 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 10 crashed Exception: ttnn.layer_norm: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 768 True 1e-06 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 11 crashed Exception: ttnn.layer_norm: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 768 True 1e-12 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 12 crashed Exception: ttnn.layer_norm: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 1024 False 1e-06 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 13 crashed Exception: ttnn.layer_norm: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 1024 False 1e-12 DataType.BFLOAT16 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 14 crashed Exception: ttnn.layer_norm: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 1024 True 1e-06 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 15 crashed Exception: ttnn.layer_norm: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 1024 True 1e-12 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) +==== ======== ================================================================================================= ============= ======== ======= ===================== ========= ================= ============================================================================================================================== ============================================================================================================================== diff --git a/docs/source/ttnn/sweeps/lez.rst b/docs/source/ttnn/sweeps/lez.rst new file mode 100644 index 000000000000..64ead125da94 --- /dev/null +++ b/docs/source/ttnn/sweeps/lez.rst @@ -0,0 +1,12 @@ +.. _ttnn.sweep_test_lez: + +lez +==================================================================== +==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== =========== + .. 
status exception batch_sizes height width input_dtype input_memory_config output_memory_config layout +==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== =========== + 0 passed nan (1,) 384 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE + 1 passed nan (1,) 384 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE + 2 passed nan (1,) 1024 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE + 3 passed nan (1,) 1024 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE +==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== =========== diff --git a/docs/source/ttnn/sweeps/linear.rst b/docs/source/ttnn/sweeps/linear.rst new file mode 100644 index 000000000000..8c2779b1b7b5 --- /dev/null +++ b/docs/source/ttnn/sweeps/linear.rst @@ -0,0 +1,24 @@ +.. 
_ttnn.sweep_test_linear: + +linear +==================================================================== +==== ======== ======================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================== ============= ======== ======== ======== ========== ================= ================= ================= ============================================================================================================================== ============================================================================================================================== ============================================================================================================================== =========== + .. status exception batch_sizes m_size k_size n_size use_bias input_a_dtype input_b_dtype output_dtype input_b_memory_config input_a_memory_config output_memory_config core_grid +==== ======== ======================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================== ============= ======== ======== ======== ========== ================= ================= ================= ============================================================================================================================== ============================================================================================================================== ============================================================================================================================== =========== + 0 passed nan (1,) 384 1024 1024 False DataType.BFLOAT16 DataType.BFLOAT16 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) nan + 1 failed PCC: 0.8803494354946775 Expected tensor([[[-0.0211, 0.1425, -0.0015, ..., 0.0457, 0.0549, -0.1128], [-0.0582, 0.0032, 0.0958, ..., -0.0397, 0.0902, 0.1195], [-0.0323, 0.0287, -0.0272, ..., -0.0778, -0.0046, 0.0567], ..., [ 0.0430, -0.0738, 0.0307, ..., -0.2631, 0.0335, -0.0332], [-0.0353, 0.1715, 0.1305, ..., -0.0762, 0.0670, 0.0197], [ 0.1220, 0.0975, -0.0509, ..., 0.0447, 0.0446, -0.0875]]]) Actual tensor([[[ 0.0245, 0.1592, 0.0028, ..., 0.1157, -0.0366, -0.1357], [-0.0095, 0.0179, 0.0933, ..., 0.0287, 0.0023, 0.0918], [ 0.0179, 0.0469, -0.0266, ..., -0.0089, -0.0889, 0.0271], ..., [ 0.0938, -0.0583, 0.0291, ..., -0.1914, -0.0571, -0.0542], [ 0.0113, 0.1826, 0.1211, ..., -0.0046, -0.0254, -0.0082], [ 0.1689, 0.1104, -0.0508, ..., 0.1113, -0.0417, -0.1060]]], dtype=torch.bfloat16) (1,) 384 1024 1024 True DataType.BFLOAT16 DataType.BFLOAT16 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) nan + 2 passed nan (1,) 384 1024 4096 False DataType.BFLOAT16 DataType.BFLOAT16 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) nan + 3 failed PCC: 0.8791013361785466 Expected tensor([[[-0.0355, -0.0530, -0.1055, ..., -0.1886, -0.0507, 0.2829], [-0.0684, -0.1729, -0.0735, ..., -0.0836, 0.1141, 0.2472], [-0.1276, -0.1643, -0.0992, ..., -0.2011, -0.0514, -0.0516], ..., [ 0.0447, -0.1706, -0.0068, ..., -0.1234, 0.2169, 0.0443], [ 0.0585, -0.1142, 0.1161, ..., -0.1416, -0.0302, 0.0506], [-0.1156, -0.1593, -0.0169, ..., -0.2715, 0.1379, 0.2467]]]) Actual tensor([[[-0.0320, 0.0466, -0.0618, ..., -0.0942, -0.0310, 0.1885], [-0.0640, -0.0747, -0.0359, ..., 0.0117, 0.1299, 0.1504], [-0.1216, -0.0630, -0.0593, ..., -0.0986, -0.0317, -0.1377], ..., [ 0.0466, -0.0737, 0.0312, ..., -0.0251, 0.2354, -0.0459], [ 0.0601, -0.0189, 0.1504, ..., -0.0461, -0.0055, -0.0439], [-0.1099, -0.0603, 0.0197, ..., -0.1680, 0.1523, 0.1523]]], dtype=torch.bfloat16) (1,) 384 1024 4096 True DataType.BFLOAT16 DataType.BFLOAT16 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) nan + 4 passed nan (1,) 384 4096 1024 False DataType.BFLOAT16 DataType.BFLOAT16 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) nan + 5 failed PCC: 0.9648397254439961 Expected tensor([[[ 0.4453, 0.0872, -0.1819, ..., 0.0624, -0.1920, -0.6219], [-0.0203, 0.0683, 0.0427, ..., 0.4018, -0.1924, 0.0069], [ 0.1411, 0.0283, 0.1317, ..., 0.0590, 0.0237, 0.3793], ..., [-0.2242, 0.1184, 0.1177, ..., -0.2257, -0.3221, -0.1804], [ 0.1554, 0.0030, 0.4789, ..., -0.4707, 0.1527, -0.0453], [-0.0422, -0.0391, 0.0631, ..., 0.0455, -0.5543, 0.2994]]]) Actual tensor([[[ 0.4160, 0.0684, -0.2393, ..., 0.1089, -0.1416, -0.5938], [-0.0562, 0.0542, -0.0226, ..., 0.4648, -0.1367, 0.0417], [ 0.1025, 0.0103, 0.0659, ..., 0.1338, 0.0811, 0.4219], ..., [-0.2812, 0.0986, 0.0576, ..., -0.1699, -0.2832, -0.1406], [ 0.1445, -0.0142, 0.4141, ..., -0.4102, 0.2021, -0.0107], [-0.0820, -0.0527, -0.0014, ..., 0.1045, -0.4883, 0.3301]]], dtype=torch.bfloat16) (1,) 384 4096 1024 True DataType.BFLOAT16 DataType.BFLOAT16 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) nan + 6 passed nan (1,) 384 4096 4096 False DataType.BFLOAT16 DataType.BFLOAT16 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) nan + 7 failed PCC: 0.9643694648094275 Expected tensor([[[-0.0227, 0.1662, -0.1177, ..., 0.2275, -0.1542, 0.2209], [-0.2330, 0.0135, 0.1743, ..., -0.0687, -0.0320, -0.5262], [-0.1137, -0.0379, 0.0058, ..., 0.3123, 0.0232, 0.1886], ..., [ 0.0553, -0.0704, 0.1193, ..., -0.1157, 0.3217, 0.0584], [-0.4309, -0.2307, -0.2260, ..., -0.0475, 0.0302, 0.1271], [-0.0021, -0.1733, 0.2423, ..., -0.1886, -0.1380, 0.1986]]]) Actual tensor([[[ 0.0610, 0.2676, -0.2158, ..., 0.3359, -0.0830, 0.2031], [-0.1504, 0.1079, 0.0879, ..., 0.0354, 0.0518, -0.5312], [-0.0225, 0.0532, -0.0913, ..., 0.4238, 0.1025, 0.1738], ..., [ 0.1455, 0.0197, 0.0267, ..., -0.0120, 0.3809, 0.0361], [-0.3320, -0.1338, -0.3242, ..., 0.0737, 0.1021, 0.1123], [ 0.0723, -0.0752, 0.1523, ..., -0.0771, -0.0591, 0.1787]]], dtype=torch.bfloat16) (1,) 384 4096 4096 True DataType.BFLOAT16 DataType.BFLOAT16 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) nan + 8 passed nan (1,) 1024 1024 1024 False DataType.BFLOAT16 DataType.BFLOAT16 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) nan + 9 passed nan (1,) 1024 1024 1024 True DataType.BFLOAT16 DataType.BFLOAT16 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) nan + 10 passed nan (1,) 1024 1024 4096 False DataType.BFLOAT16 DataType.BFLOAT16 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) nan + 11 passed nan (1,) 1024 1024 4096 True DataType.BFLOAT16 DataType.BFLOAT16 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) nan + 12 passed nan (1,) 1024 4096 1024 False DataType.BFLOAT16 DataType.BFLOAT16 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) nan + 13 passed nan (1,) 1024 4096 1024 True DataType.BFLOAT16 DataType.BFLOAT16 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) nan + 14 passed nan (1,) 1024 4096 4096 False DataType.BFLOAT16 DataType.BFLOAT16 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) nan + 15 passed nan (1,) 1024 4096 4096 True DataType.BFLOAT16 DataType.BFLOAT16 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) nan +==== ======== ======================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================== ============= ======== ======== ======== ========== ================= ================= ================= ============================================================================================================================== ============================================================================================================================== ============================================================================================================================== =========== diff --git a/docs/source/ttnn/sweeps/lt.rst b/docs/source/ttnn/sweeps/lt.rst new file mode 100644 index 000000000000..aedbb7097e6b --- /dev/null +++ b/docs/source/ttnn/sweeps/lt.rst @@ -0,0 +1,12 @@ +.. _ttnn.sweep_test_lt: + +lt +==================================================================== +==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ============================================================================================================================== + .. 
status  exception  batch_sizes  height  width  input_a_dtype  input_b_dtype  input_a_layout  input_b_layout  input_b_memory_config  input_a_memory_config  output_memory_config
+==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
+   0  passed  nan  (1,)  384  1024  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   1  passed  nan  (1,)  384  4096  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   2  passed  nan  (1,)  1024  1024  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   3  passed  nan  (1,)  1024  4096  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
diff --git a/docs/source/ttnn/sweeps/lte.rst b/docs/source/ttnn/sweeps/lte.rst
new file mode 100644
index 000000000000..42f0ddcc269d
--- /dev/null
+++ b/docs/source/ttnn/sweeps/lte.rst
@@ -0,0 +1,12 @@
+.. _ttnn.sweep_test_lte:
+
+lte
+====================================================================
+==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
+ ..  status  exception  batch_sizes  height  width  input_a_dtype  input_b_dtype  input_a_layout  input_b_layout  input_b_memory_config  input_a_memory_config  output_memory_config
+==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
+   0  passed  nan  (1,)  384  1024  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   1  passed  nan  (1,)  384  4096  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   2  passed  nan  (1,)  1024  1024  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   3  passed  nan  (1,)  1024  4096  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
diff --git a/docs/source/ttnn/sweeps/ltz.rst b/docs/source/ttnn/sweeps/ltz.rst
new file mode 100644
index 000000000000..2aa98b81f4d4
--- /dev/null
+++ b/docs/source/ttnn/sweeps/ltz.rst
@@ -0,0 +1,12 @@
+.. _ttnn.sweep_test_ltz:
+
+ltz
+====================================================================
+==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== ===========
+ ..  status  exception  batch_sizes  height  width  input_dtype  input_memory_config  output_memory_config  layout
+==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== ===========
+   0  passed  nan  (1,)  384  1024  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  Layout.TILE
+   1  passed  nan  (1,)  384  4096  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  Layout.TILE
+   2  passed  nan  (1,)  1024  1024  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  Layout.TILE
+   3  passed  nan  (1,)  1024  4096  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  Layout.TILE
+==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== ===========
diff --git a/docs/source/ttnn/sweeps/matmul.rst b/docs/source/ttnn/sweeps/matmul.rst
new file mode 100644
index 000000000000..153747e17b29
--- /dev/null
+++ b/docs/source/ttnn/sweeps/matmul.rst
@@ -0,0 +1,24 @@
+.. _ttnn.sweep_test_matmul:
+
+matmul
+====================================================================
+==== ======== ============================================================================================= ============= ======== ======== ======== ======================= ================= ================= ================= ============================================================================================================================== ============================================================================================================================== ============================================================================================================================== ===========
+ ..  status  exception  batch_sizes  m_size  k_size  n_size  batch_matrix_multiply  input_a_dtype  input_b_dtype  output_dtype  input_b_memory_config  input_a_memory_config  output_memory_config  core_grid
+==== ======== ============================================================================================= ============= ======== ======== ======== ======================= ================= ================= ================= ============================================================================================================================== ============================================================================================================================== ============================================================================================================================== ===========
+   0  crashed  Exception: ttnn.matmul: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  1024  True  DataType.BFLOAT16  DataType.BFLOAT16  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  nan
+   1  crashed  Exception: ttnn.matmul: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  1024  False  DataType.BFLOAT16  DataType.BFLOAT16  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  nan
+   2  crashed  Exception: ttnn.matmul: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  4096  True  DataType.BFLOAT16  DataType.BFLOAT16  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  nan
+   3  crashed  Exception: ttnn.matmul: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  4096  False  DataType.BFLOAT16  DataType.BFLOAT16  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  nan
+   4  crashed  Exception: ttnn.matmul: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  1024  True  DataType.BFLOAT16  DataType.BFLOAT16  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  nan
+   5  crashed  Exception: ttnn.matmul: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  1024  False  DataType.BFLOAT16  DataType.BFLOAT16  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  nan
+   6  crashed  Exception: ttnn.matmul: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  4096  True  DataType.BFLOAT16  DataType.BFLOAT16  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  nan
+   7  crashed  Exception: ttnn.matmul: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  4096  False  DataType.BFLOAT16  DataType.BFLOAT16  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  nan
+   8  crashed  Exception: ttnn.matmul: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  1024  True  DataType.BFLOAT16  DataType.BFLOAT16  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  nan
+   9  crashed  Exception: ttnn.matmul: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  1024  False  DataType.BFLOAT16  DataType.BFLOAT16  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  nan
+  10  crashed  Exception: ttnn.matmul: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  4096  True  DataType.BFLOAT16  DataType.BFLOAT16  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  nan
+  11  crashed  Exception: ttnn.matmul: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  4096  False  DataType.BFLOAT16  DataType.BFLOAT16  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  nan
+  12  crashed  Exception: ttnn.matmul: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  1024  True  DataType.BFLOAT16  DataType.BFLOAT16  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  nan
+  13  crashed  Exception: ttnn.matmul: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  1024  False  DataType.BFLOAT16  DataType.BFLOAT16  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  nan
+  14  crashed  Exception: ttnn.matmul: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  4096  True  DataType.BFLOAT16  DataType.BFLOAT16  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  nan
+  15  crashed  Exception: ttnn.matmul: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  4096  False  DataType.BFLOAT16  DataType.BFLOAT16  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  nan
+==== ======== ============================================================================================= ============= ======== ======== ======== ======================= ================= ================= ================= ============================================================================================================================== ============================================================================================================================== ============================================================================================================================== ===========
diff --git a/docs/source/ttnn/sweeps/max_pool.rst b/docs/source/ttnn/sweeps/max_pool.rst
new file mode 100644
index 000000000000..56fe9cf00313
--- /dev/null
+++ b/docs/source/ttnn/sweeps/max_pool.rst
@@ -0,0 +1,16 @@
+.. _ttnn.sweep_test_max_pool:
+
+max_pool
+====================================================================
+==== ======== ================================ ================== ============= ========= ======== ========== ========= ==================
+ ..  status  exception  act_shape  kernel_size  padding  stride  dilation  nblocks  dtype
+==== ======== ================================ ================== ============= ========= ======== ========== ========= ==================
+   0  passed  nan  [1, 64, 112, 112]  (3, 3)  (1, 1)  (2, 2)  (1, 1)  1  DataType.BFLOAT16
+   1  passed  nan  [1, 64, 112, 112]  (3, 3)  (1, 1)  (2, 2)  (1, 1)  1  DataType.BFLOAT8_B
+   2  passed  nan  [4, 64, 112, 112]  (3, 3)  (1, 1)  (2, 2)  (1, 1)  1  DataType.BFLOAT16
+   3  passed  nan  [4, 64, 112, 112]  (3, 3)  (1, 1)  (2, 2)  (1, 1)  1  DataType.BFLOAT8_B
+   4  passed  nan  [8, 64, 112, 112]  (3, 3)  (1, 1)  (2, 2)  (1, 1)  1  DataType.BFLOAT16
+   5  passed  nan  [8, 64, 112, 112]  (3, 3)  (1, 1)  (2, 2)  (1, 1)  1  DataType.BFLOAT8_B
+   6  skipped  Configuration does not fit in L1  [16, 64, 112, 112]  (3, 3)  (1, 1)  (2, 2)  (1, 1)  1  DataType.BFLOAT16
+   7  passed  nan  [16, 64, 112, 112]  (3, 3)  (1, 1)  (2, 2)  (1, 1)  1  DataType.BFLOAT8_B
+==== ======== ================================ ================== ============= ========= ======== ========== ========= ==================
diff --git a/docs/source/ttnn/sweeps/mul.rst b/docs/source/ttnn/sweeps/mul.rst
new file mode 100644
index 000000000000..481b0948dfcc
--- /dev/null
+++ b/docs/source/ttnn/sweeps/mul.rst
@@ -0,0 +1,72 @@
+.. _ttnn.sweep_test_mul:
+
+mul
+====================================================================
+==== ======== ========================================================================================== ============= ======== ======= =========== ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
+ ..  status  exception  batch_sizes  height  width  broadcast  input_a_dtype  input_b_dtype  input_a_layout  input_b_layout  input_b_memory_config  input_a_memory_config  output_memory_config
+==== ======== ========================================================================================== ============= ======== ======= =========== ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
+   0  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   1  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   2  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   3  passed  nan  (1,)  384  1024  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   4  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   5  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   6  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   7  passed  nan  (1,)  384  1024  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   8  skipped  Broadcasting along width is not supported for row major layout  (1,)  384  1024  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   9  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  10  skipped  Broadcasting along width is not supported for row major layout  (1,)  384  1024  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  11  passed  nan  (1,)  384  1024  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  12  skipped  Broadcasting along width is not supported for row major layout  (1,)  384  1024  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  13  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  14  skipped  Broadcasting along width is not supported for row major layout  (1,)  384  1024  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  15  passed  nan  (1,)  384  1024  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  16  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  17  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  18  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  19  passed  nan  (1,)  384  4096  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  20  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  21  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  22  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  23  passed  nan  (1,)  384  4096  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  24  skipped  Broadcasting along width is not supported for row major layout  (1,)  384  4096  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  25  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  26  skipped  Broadcasting along width is not supported for row major layout  (1,)  384  4096  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  27  passed  nan  (1,)  384  4096  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  28  skipped  Broadcasting along width is not supported for row major layout  (1,)  384  4096  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  29  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  30  skipped  Broadcasting along width is not supported for row major layout  (1,)  384  4096  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  31  passed  nan  (1,)  384  4096  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  32  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  33  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  34  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  35  passed  nan  (1,)  1024  1024  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  36  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  37  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  38  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  39  passed  nan  (1,)  1024  1024  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  40  skipped  Broadcasting along width is not supported for row major layout  (1,)  1024  1024  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  41  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
  42  skipped  Broadcasting along width is not supported for row major layout  (1,)  1024  1024  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  43  passed  nan  (1,)  1024  1024  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  44  skipped  Broadcasting along width is not supported for row major layout  (1,)  1024  1024  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  45  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  46  skipped  Broadcasting along width is not supported for row major layout  (1,)  1024  1024  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  47  passed  nan  (1,)  1024  1024  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  48  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  49  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  50  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  51  passed  nan  (1,)  1024  4096  nan  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  52  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  53  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  54  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  55  passed  nan  (1,)  1024  4096  h  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  56  skipped  Broadcasting along width is not supported for row major layout  (1,)  1024  4096  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  57  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  58  skipped  Broadcasting along width is not supported for row major layout  (1,)  1024  4096  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  59  passed  nan  (1,)  1024  4096  w  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  60  skipped  Broadcasting along width is not supported for row major layout  (1,)  1024  4096  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  61  crashed  Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.ROW_MAJOR  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  62  skipped  Broadcasting along width is not supported for row major layout  (1,)  1024  4096  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.ROW_MAJOR  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+  63  passed  nan  (1,)  1024  4096  hw  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+==== ======== ========================================================================================== ============= ======== ======= =========== ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
diff --git a/docs/source/ttnn/sweeps/ne.rst b/docs/source/ttnn/sweeps/ne.rst
new file mode 100644
index 000000000000..14a5c96226e4
--- /dev/null
+++ b/docs/source/ttnn/sweeps/ne.rst
@@ -0,0 +1,12 @@
+.. _ttnn.sweep_test_ne:
+
+ne
+====================================================================
+==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
+ ..  status  exception  batch_sizes  height  width  input_a_dtype  input_b_dtype  input_a_layout  input_b_layout  input_b_memory_config  input_a_memory_config  output_memory_config
+==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
+   0  passed  nan  (1,)  384  1024  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   1  passed  nan  (1,)  384  4096  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   2  passed  nan  (1,)  1024  1024  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   3  passed  nan  (1,)  1024  4096  DataType.BFLOAT16  DataType.BFLOAT16  Layout.TILE  Layout.TILE  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+==== ======== =========== ============= ======== ======= ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ==============================================================================================================================
diff --git a/docs/source/ttnn/sweeps/nez.rst b/docs/source/ttnn/sweeps/nez.rst
new file mode 100644
index 000000000000..3cae60b49674
--- /dev/null
+++ b/docs/source/ttnn/sweeps/nez.rst
@@ -0,0 +1,12 @@
+.. _ttnn.sweep_test_nez:
+
+nez
+====================================================================
+==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== ===========
+ ..  status  exception  batch_sizes  height  width  input_dtype  input_memory_config  output_memory_config  layout
+==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== ===========
+   0  passed  nan  (1,)  384  1024  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  Layout.TILE
+   1  passed  nan  (1,)  384  4096  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  Layout.TILE
+   2  passed  nan  (1,)  1024  1024  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  Layout.TILE
+   3  passed  nan  (1,)  1024  4096  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  Layout.TILE
+==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== ===========
diff --git a/docs/source/ttnn/sweeps/relu.rst b/docs/source/ttnn/sweeps/relu.rst
new file mode 100644
index 000000000000..8813001fda08
--- /dev/null
+++ b/docs/source/ttnn/sweeps/relu.rst
@@ -0,0 +1,12 @@
+.. _ttnn.sweep_test_relu:
+
+relu
+====================================================================
+==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ==============================================================================================================================
+ ..  status  exception  batch_sizes  height  width  input_dtype  input_memory_config  output_memory_config
+==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ==============================================================================================================================
+   0  crashed  Exception: ttnn.relu: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  1024  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   1  crashed  Exception: ttnn.relu: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  384  4096  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   2  crashed  Exception: ttnn.relu: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  1024  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   3  crashed  Exception: ttnn.relu: Tensor must be of layout {}, but got Layout.ROW_MAJOR  (1,)  1024  4096  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ==============================================================================================================================
diff --git a/docs/source/ttnn/sweeps/repeat_interleave.rst b/docs/source/ttnn/sweeps/repeat_interleave.rst
new file mode 100644
index 000000000000..ad51c9512eff
--- /dev/null
+++ b/docs/source/ttnn/sweeps/repeat_interleave.rst
@@ -0,0 +1,200 @@
+.. _ttnn.sweep_test_repeat_interleave:
+
+repeat_interleave
+====================================================================
+==== =================== ======================================================================================================== ====================== ================ ============================= ======================== ================ ================= ==============================================================================================================================
+ ..  status  exception  make_repeat_a_tensor  rank_of_tensor  max_random_size_of_each_dim  dimension_to_repeat_on  layout  dtype  memory_config
+==== =================== ======================================================================================================== ====================== ================ ============================= ======================== ================ ================= ==============================================================================================================================
+   0  crashed  Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1  False  1  32  0  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   1  crashed  Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1  False  1  32  0  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+   2  skipped  Tile layout is only supported for tensors with rank >= 2  False  1  32  0  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   3  skipped  Tile layout is only supported for tensors with rank >= 2  False  1  32  0  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+   4  crashed  Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1  False  1  32  1  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   5  crashed  Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1  False  1  32  1  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+   6  skipped  Tile layout is only supported for tensors with rank >= 2  False  1  32  1  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   7  skipped  Tile layout is only supported for tensors with rank >= 2  False  1  32  1  Layout.TILE  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt)
+   8  crashed  Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1  False  1  32  2  Layout.ROW_MAJOR  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   9  crashed  Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1  False  1  32  2  Layout.ROW_MAJOR  DataType.BFLOAT16
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 10 skipped Tile layout is only supported for tensors with rank >= 2 False 1 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 11 skipped Tile layout is only supported for tensors with rank >= 2 False 1 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 12 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 False 1 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 13 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 False 1 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 14 skipped Tile layout is only supported for tensors with rank >= 2 False 1 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 15 skipped Tile layout is only supported for tensors with rank >= 2 False 1 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 16 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 False 1 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 17 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 False 1 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 18 skipped Tile layout is only supported for tensors with rank >= 2 False 1 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 19 skipped Tile layout is only supported for tensors with rank >= 2 False 1 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 20 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 False 1 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 21 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 False 1 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 22 skipped Tile layout is only supported for tensors with rank >= 2 False 1 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 23 skipped Tile layout is only supported for tensors with rank >= 2 False 1 32 5 Layout.TILE DataType.BFLOAT16 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 24 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 2 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 25 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 2 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 26 passed nan False 2 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 27 passed nan False 2 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 28 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 2 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 29 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 2 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 30 passed nan False 2 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 31 passed nan False 2 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 32 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 2 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 33 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 2 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 34 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-2, 1], but got 2) False 2 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 35 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-2, 1], but got 2) False 2 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 36 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 2 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 37 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 2 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 38 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-2, 1], but got 3) False 2 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 39 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-2, 1], but got 3) False 2 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 40 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 2 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 41 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 2 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 42 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-2, 1], but got 4) False 2 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 43 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-2, 1], but got 4) False 2 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 44 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 2 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 45 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 2 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 46 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-2, 1], but got 5) False 2 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 47 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-2, 1], but got 5) False 2 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 48 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 3 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 49 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 3 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 50 passed nan False 3 32 0 Layout.TILE DataType.BFLOAT16 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 51 passed nan False 3 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 52 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 3 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 53 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 3 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 54 passed nan False 3 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 55 passed nan False 3 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 56 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 3 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 57 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 3 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 58 passed nan False 3 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 59 passed nan False 3 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 60 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 3 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 61 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 3 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 62 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-3, 2], but got 3) False 3 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 63 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-3, 2], but got 3) False 3 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 64 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 3 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 65 crashed Exception: 
ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 3 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 66 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-3, 2], but got 4) False 3 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 67 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-3, 2], but got 4) False 3 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 68 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 3 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 69 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 3 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 70 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-3, 2], but got 5) False 3 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 71 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-3, 2], but got 5) False 3 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 72 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 4 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 73 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 4 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 74 passed nan False 4 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 75 passed nan False 4 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 76 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 4 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 77 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 4 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 78 passed nan False 4 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 79 
passed nan False 4 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 80 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 4 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 81 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 4 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 82 passed nan False 4 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 83 passed nan False 4 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 84 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 4 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 85 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 4 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 86 passed nan False 4 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 87 passed nan False 4 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 88 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 4 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 89 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 4 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 90 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-4, 3], but got 4) False 4 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 91 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-4, 3], but got 4) False 4 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 92 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 4 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 93 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR False 4 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 94 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-4, 3], but got 5) False 4 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 95 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-4, 3], but got 5) False 4 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 96 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 True 1 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 97 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 True 1 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 98 skipped Tile layout is only supported for tensors with rank >= 2 True 1 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 99 skipped Tile layout is only supported for tensors with rank >= 2 True 1 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 100 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 True 1 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 101 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 True 1 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 102 skipped Tile layout is only supported for tensors with rank >= 2 True 1 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 103 skipped Tile layout is only supported for tensors with rank >= 2 True 1 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 104 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 True 1 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 105 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 True 1 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 106 skipped Tile layout is only supported for tensors with rank >= 2 True 1 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 107 skipped Tile layout is only supported for tensors with rank >= 2 True 1 32 2 Layout.TILE 
DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 108 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 True 1 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 109 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 True 1 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 110 skipped Tile layout is only supported for tensors with rank >= 2 True 1 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 111 skipped Tile layout is only supported for tensors with rank >= 2 True 1 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 112 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 True 1 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 113 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 True 1 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 114 skipped Tile layout is only supported for tensors with rank >= 2 True 1 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 115 skipped Tile layout is only supported for tensors with rank >= 2 True 1 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 116 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 True 1 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 117 crashed Exception: ttnn.repeat_interleave: Tensor must be of rank {2, 3, 4}, but got 1 True 1 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 118 skipped Tile layout is only supported for tensors with rank >= 2 True 1 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 119 skipped Tile layout is only supported for tensors with rank >= 2 True 1 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 120 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 2 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 121 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR 
True 2 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 122 is_expected_to_fail ttnn: repeats must have the same size as input along dim True 2 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 123 is_expected_to_fail ttnn: repeats must have the same size as input along dim True 2 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 124 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 2 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 125 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 2 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 126 is_expected_to_fail ttnn: repeats must have the same size as input along dim True 2 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 127 is_expected_to_fail ttnn: repeats must have the same size as input along dim True 2 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 128 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 2 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 129 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 2 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 130 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-2, 1], but got 2) True 2 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 131 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-2, 1], but got 2) True 2 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 132 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 2 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 133 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 2 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 134 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-2, 1], but got 3) True 2 32 3 Layout.TILE DataType.BFLOAT16 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 135 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-2, 1], but got 3) True 2 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 136 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 2 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 137 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 2 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 138 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-2, 1], but got 4) True 2 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 139 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-2, 1], but got 4) True 2 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 140 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 2 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 141 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 2 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 142 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-2, 1], but got 5) True 2 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 143 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-2, 1], but got 5) True 2 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 144 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 3 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 145 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 3 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 146 is_expected_to_fail ttnn: repeats must have the same size as input along dim True 3 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 147 is_expected_to_fail ttnn: repeats must have the same size as input along dim True 3 32 0 Layout.TILE DataType.BFLOAT16 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 148 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 3 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 149 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 3 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 150 is_expected_to_fail ttnn: repeats must have the same size as input along dim True 3 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 151 is_expected_to_fail ttnn: repeats must have the same size as input along dim True 3 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 152 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 3 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 153 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 3 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 154 is_expected_to_fail ttnn: repeats must be 0-dim or 1-dim tensor True 3 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 155 is_expected_to_fail ttnn: repeats must be 0-dim or 1-dim tensor True 3 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 156 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 3 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 157 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 3 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 158 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-3, 2], but got 3) True 3 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 159 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-3, 2], but got 3) True 3 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 160 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 3 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 161 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 3 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 162 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-3, 2], but got 4) True 3 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 163 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-3, 2], but got 4) True 3 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 164 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 3 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 165 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 3 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 166 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-3, 2], but got 5) True 3 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 167 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-3, 2], but got 5) True 3 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 168 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 4 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 169 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 4 32 0 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 170 is_expected_to_fail ttnn: repeats must have the same size as input along dim True 4 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 171 is_expected_to_fail ttnn: repeats must have the same size as input along dim True 4 32 0 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 172 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 4 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 173 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 4 32 1 Layout.ROW_MAJOR DataType.BFLOAT16 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 174 is_expected_to_fail ttnn: repeats must have the same size as input along dim True 4 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 175 is_expected_to_fail ttnn: repeats must have the same size as input along dim True 4 32 1 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 176 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 4 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 177 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 4 32 2 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 178 is_expected_to_fail ttnn: repeats must have the same size as input along dim True 4 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 179 is_expected_to_fail ttnn: repeats must have the same size as input along dim True 4 32 2 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 180 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 4 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 181 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 4 32 3 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 182 is_expected_to_fail ttnn: repeats must have the same size as input along dim True 4 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 183 is_expected_to_fail ttnn: repeats must have the same size as input along dim True 4 32 3 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 184 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 4 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 185 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 4 32 4 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 186 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-4, 3], but got 4) True 4 32 4 Layout.TILE DataType.BFLOAT16 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 187 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-4, 3], but got 4) True 4 32 4 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 188 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 4 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 189 crashed Exception: ttnn.repeat_interleave: Tensor must be of layout {}, but got Layout.ROW_MAJOR True 4 32 5 Layout.ROW_MAJOR DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) + 190 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-4, 3], but got 5) True 4 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 191 is_expected_to_fail ttnn: Dimension out of range (expected to be in range of [-4, 3], but got 5) True 4 32 5 Layout.TILE DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::L1,shard_spec=std::nullopt) +==== =================== ======================================================================================================== ====================== ================ ============================= ======================== ================ ================= ============================================================================================================================== diff --git a/docs/source/ttnn/sweeps/rsqrt.rst b/docs/source/ttnn/sweeps/rsqrt.rst new file mode 100644 index 000000000000..f6262043a7b7 --- /dev/null +++ b/docs/source/ttnn/sweeps/rsqrt.rst @@ -0,0 +1,12 @@ +.. _ttnn.sweep_test_rsqrt: + +rsqrt +==================================================================== +==== ======== ============================================================================================ ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== + .. 
status    exception    batch_sizes    height   width  input_dtype        input_memory_config                                                                                                            output_memory_config
+==== ======== ============================================================================================ ============= ======== ======= ================= ============================================================================================================================== ==============================================================================================================================
+   0  crashed   Exception: ttnn.rsqrt: Tensor must be of layout {}, but got Layout.ROW_MAJOR                 (1,)              384    1024  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   1  crashed   Exception: ttnn.rsqrt: Tensor must be of layout {}, but got Layout.ROW_MAJOR                 (1,)              384    4096  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   2  crashed   Exception: ttnn.rsqrt: Tensor must be of layout {}, but got Layout.ROW_MAJOR                 (1,)             1024    1024  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   3  crashed   Exception: ttnn.rsqrt: Tensor must be of layout {}, but got Layout.ROW_MAJOR                 (1,)             1024    4096  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+==== ======== ============================================================================================ ============= ======== ======= ================= ============================================================================================================================== ==============================================================================================================================
diff --git a/docs/source/ttnn/sweeps/sin.rst b/docs/source/ttnn/sweeps/sin.rst
new file mode 100644
index 000000000000..fb790725e2e2
--- /dev/null
+++ b/docs/source/ttnn/sweeps/sin.rst
@@ -0,0 +1,12 @@
+.. _ttnn.sweep_test_sin:
+
+sin
+====================================================================
+==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== ===========
+  ..  status    exception    batch_sizes    height   width  input_dtype        input_memory_config                                                                                                            output_memory_config                                                                                                           layout
+==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== ===========
+   0  passed    nan          (1,)              384    1024  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE
+   1  passed    nan          (1,)              384    4096  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE
+   2  passed    nan          (1,)             1024    1024  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE
+   3  passed    nan          (1,)             1024    4096  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE
+==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== ===========
diff --git a/docs/source/ttnn/sweeps/softmax.rst b/docs/source/ttnn/sweeps/softmax.rst
new file mode 100644
index 000000000000..a56624ac8574
--- /dev/null
+++ b/docs/source/ttnn/sweeps/softmax.rst
@@ -0,0 +1,20 @@
+.. _ttnn.sweep_test_softmax:
+
+softmax
+====================================================================
+==== ======== ============================================================================================== ============= ======== ======= ===== ================= ============================================================================================================================== ==============================================================================================================================
+  ..  status    exception                                                                                      batch_sizes    height   width    dim  input_dtype        input_memory_config                                                                                                            output_memory_config
+==== ======== ============================================================================================== ============= ======== ======= ===== ================= ============================================================================================================================== ==============================================================================================================================
+   0  crashed   Exception: ttnn.softmax: Tensor must be of layout {}, but got Layout.ROW_MAJOR                 (1,)              384    1024    -1  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   1  crashed   Exception: ttnn.softmax: Tensor must be of layout {}, but got Layout.ROW_MAJOR                 (1,)              384    1024    -2  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   2  crashed   Exception: ttnn.softmax: Tensor must be of layout {}, but got Layout.ROW_MAJOR                 (1,)              384    1024    -3  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   3  crashed   Exception: ttnn.softmax: Tensor must be of layout {}, but got Layout.ROW_MAJOR                 (1,)              384    4096    -1  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   4  crashed   Exception: ttnn.softmax: Tensor must be of layout {}, but got Layout.ROW_MAJOR                 (1,)              384    4096    -2  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   5  crashed   Exception: ttnn.softmax: Tensor must be of layout {}, but got Layout.ROW_MAJOR                 (1,)              384    4096    -3  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   6  crashed   Exception: ttnn.softmax: Tensor must be of layout {}, but got Layout.ROW_MAJOR                 (1,)             1024    1024    -1  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   7  crashed   Exception: ttnn.softmax: Tensor must be of layout {}, but got Layout.ROW_MAJOR                 (1,)             1024    1024    -2  DataType.BFLOAT16  tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 8 crashed Exception: ttnn.softmax: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 1024 -3 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 9 crashed Exception: ttnn.softmax: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 4096 -1 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 10 crashed Exception: ttnn.softmax: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 4096 -2 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 11 crashed Exception: ttnn.softmax: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 4096 -3 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) +==== ======== ============================================================================================== ============= ======== ======= ===== ================= ============================================================================================================================== ============================================================================================================================== diff --git a/docs/source/ttnn/sweeps/sub.rst b/docs/source/ttnn/sweeps/sub.rst new file mode 100644 index 000000000000..ae346577ecf4 --- /dev/null +++ b/docs/source/ttnn/sweeps/sub.rst @@ -0,0 +1,72 @@ +.. _ttnn.sweep_test_sub: + +sub +==================================================================== +==== ======== ========================================================================================== ============= ======== ======= =========== ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ============================================================================================================================== + .. 
status exception batch_sizes height width broadcast input_a_dtype input_b_dtype input_a_layout input_b_layout input_b_memory_config input_a_memory_config output_memory_config +==== ======== ========================================================================================== ============= ======== ======= =========== ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== ============================================================================================================================== + 0 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 1024 nan DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 1 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 1024 nan DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 2 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 1024 nan DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 3 passed nan (1,) 384 1024 nan DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 4 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 1024 h DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 5 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 
384 1024 h DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 6 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 1024 h DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 7 passed nan (1,) 384 1024 h DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 8 skipped Broadcasting along width is not supported for row major layout (1,) 384 1024 w DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 9 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 1024 w DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 10 skipped Broadcasting along width is not supported for row major layout (1,) 384 1024 w DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 11 passed nan (1,) 384 1024 w DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 12 skipped Broadcasting along width is not supported for row major layout (1,) 384 1024 hw DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 13 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 1024 hw DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 14 skipped Broadcasting along width is not supported for row major layout (1,) 384 1024 hw DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 15 passed nan (1,) 384 1024 hw DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 16 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 4096 nan DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 17 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 4096 nan DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 18 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 4096 nan DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.ROW_MAJOR 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 19 passed nan (1,) 384 4096 nan DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 20 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 4096 h DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 21 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 4096 h DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 22 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 4096 h DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 23 passed nan (1,) 384 4096 h DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 24 skipped Broadcasting along width is not supported for row major layout (1,) 384 4096 w DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 25 crashed Exception: 
ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 4096 w DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 26 skipped Broadcasting along width is not supported for row major layout (1,) 384 4096 w DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 27 passed nan (1,) 384 4096 w DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 28 skipped Broadcasting along width is not supported for row major layout (1,) 384 4096 hw DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 29 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 4096 hw DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 30 skipped Broadcasting along width is not supported for row major layout (1,) 384 4096 hw DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 31 passed nan (1,) 384 4096 hw DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 32 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 1024 nan DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 33 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 1024 nan DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 34 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 1024 nan DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 35 passed nan (1,) 1024 1024 nan DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 36 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 1024 h DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 37 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 1024 h DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 38 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 1024 h DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.ROW_MAJOR 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 39 passed nan (1,) 1024 1024 h DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 40 skipped Broadcasting along width is not supported for row major layout (1,) 1024 1024 w DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 41 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 1024 w DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 42 skipped Broadcasting along width is not supported for row major layout (1,) 1024 1024 w DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 43 passed nan (1,) 1024 1024 w DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 44 skipped Broadcasting along width is not supported for row major layout (1,) 1024 1024 hw DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 45 crashed Exception: ttnn.sub: Tensor must 
be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 1024 hw DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 46 skipped Broadcasting along width is not supported for row major layout (1,) 1024 1024 hw DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 47 passed nan (1,) 1024 1024 hw DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 48 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 4096 nan DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 49 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 4096 nan DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 50 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 4096 nan DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 51 passed nan (1,) 1024 4096 nan DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 52 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 4096 h DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 53 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 4096 h DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 54 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 4096 h DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 55 passed nan (1,) 1024 4096 h DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 56 skipped Broadcasting along width is not supported for row major layout (1,) 1024 4096 w DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 57 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 4096 w DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 58 skipped Broadcasting along width is not supported for row major layout (1,) 1024 4096 w DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.ROW_MAJOR 
tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 59 passed nan (1,) 1024 4096 w DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 60 skipped Broadcasting along width is not supported for row major layout (1,) 1024 4096 hw DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 61 crashed Exception: ttnn.sub: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 4096 hw DataType.BFLOAT16 DataType.BFLOAT16 Layout.ROW_MAJOR Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 62 skipped Broadcasting along width is not supported for row major layout (1,) 1024 4096 hw DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.ROW_MAJOR tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 63 passed nan (1,) 1024 4096 hw DataType.BFLOAT16 DataType.BFLOAT16 Layout.TILE Layout.TILE tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) +==== ======== ========================================================================================== ============= ======== ======= =========== ================= ================= ================ ================ ============================================================================================================================== ============================================================================================================================== 
============================================================================================================================== diff --git a/docs/source/ttnn/sweeps/tan.rst b/docs/source/ttnn/sweeps/tan.rst new file mode 100644 index 000000000000..1c7d5f7f5566 --- /dev/null +++ b/docs/source/ttnn/sweeps/tan.rst @@ -0,0 +1,12 @@ +.. _ttnn.sweep_test_tan: + +tan +==================================================================== +==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== =========== + .. status exception batch_sizes height width input_dtype input_memory_config output_memory_config layout +==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== =========== + 0 passed nan (1,) 384 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE + 1 passed nan (1,) 384 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE + 2 passed nan (1,) 1024 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE + 3 passed nan (1,) 1024 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) Layout.TILE +==== ======== =========== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== =========== diff --git a/docs/source/ttnn/sweeps/tanh.rst b/docs/source/ttnn/sweeps/tanh.rst new file mode 100644 index 000000000000..bb405dfb24c6 --- /dev/null +++ b/docs/source/ttnn/sweeps/tanh.rst @@ -0,0 +1,12 @@ +.. _ttnn.sweep_test_tanh: + +tanh +==================================================================== +==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== + .. 
status exception batch_sizes height width input_dtype input_memory_config output_memory_config +==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== + 0 crashed Exception: ttnn.tanh: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 1 crashed Exception: ttnn.tanh: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 384 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 2 crashed Exception: ttnn.tanh: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 1024 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 3 crashed Exception: ttnn.tanh: Tensor must be of layout {}, but got Layout.ROW_MAJOR (1,) 1024 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) +==== ======== =========================================================================================== ============= ======== ======= ================= ============================================================================================================================== ============================================================================================================================== diff --git a/docs/source/ttnn/sweeps/transformer_attention_softmax.rst b/docs/source/ttnn/sweeps/transformer_attention_softmax.rst new file mode 100644 index 000000000000..46df9ad75cdd --- /dev/null +++ b/docs/source/ttnn/sweeps/transformer_attention_softmax.rst @@ -0,0 +1,12 @@ +.. 
_ttnn.sweep_test_transformer_attention_softmax: + +transformer_attention_softmax +====================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
======================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================= ============ =========== =============== ====================== ================= ============================================================================================================================== ============================================================================================================================== + .. 
status exception batch_size num_heads sequence_size target_sequence_size input_dtype input_memory_config output_memory_config +==== ======== ======================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
+======== ========== =========================
+   0      failed     PCC: 0.8487984729849205
+======== ========== =========================

(Abridged: in the generated file this row also carries the full tensor dump,
e.g. ``Expected tensor([[[[0.003, 0.003, 0.002, ...]]]])``, in table cells
thousands of characters wide; the dump is truncated mid-value in the source,
so only the run index, status, and PCC message are reproduced here.)
0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003], ..., [0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, ..., 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003], [0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003], [0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003], [0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002], [0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, ..., 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003], [0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, ..., 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003], [0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, ..., 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002], [0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 
0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, ..., 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002], [0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, ..., 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003], [0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, ..., 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002], [0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003], [0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, ..., 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002], [0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003], [0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003], [0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, ..., 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002], [0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 
0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, ..., 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003], [0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003], [0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003], [0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002], [0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, ..., 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002], [0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002], [0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, ..., 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003], [0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 
0.002, 0.002, 0.003], [0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, ..., 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003], [0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, ..., 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003], [0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, ..., 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002], [0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, ..., 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003], [0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003], [0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, ..., 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003], [0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, ..., 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002], [0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, ..., 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 
0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003], [0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003]]]]) Actual tensor([[[[0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003], [0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, ..., 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003], [0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003], [0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, ..., 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002], [0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, ..., 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002], [0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, ..., 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003], [0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, ..., 0.002, 
0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003], [0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003], [0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002], [0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, ..., 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003], [0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, ..., 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003], [0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, ..., 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003], [0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, ..., 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002], [0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, ..., 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003], [0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 
0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003], [0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, ..., 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002], [0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003], [0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, ..., 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002], [0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, ..., 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002], [0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, ..., 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003], [0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003], [0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, ..., 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003], [0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 
0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003], [0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, ..., 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003], [0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, ..., 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003], [0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, ..., 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002], [0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003], [0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, ..., 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003], [0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, ..., 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002], [0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, ..., 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 
0.003, 0.003, 0.002, 0.003], [0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, ..., 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003], [0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, ..., 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003], ..., [0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, ..., 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003], [0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, ..., 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002], [0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003], [0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, ..., 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002], [0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, ..., 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002], [0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, ..., 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 
0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003], [0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, ..., 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002], [0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, ..., 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002], [0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, ..., 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003], [0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, ..., 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002], [0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, ..., 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003], [0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, ..., 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002], [0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, ..., 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003], [0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, ..., 0.002, 0.003, 
0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002], [0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, ..., 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002], [0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, ..., 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003], [0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, ..., 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003], [0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, ..., 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003], [0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002], [0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, ..., 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002], [0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, ..., 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002], [0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 
0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003], [0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003], [0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, ..., 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003], [0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003], [0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, ..., 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002], [0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, ..., 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003], [0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003], [0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, ..., 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003], [0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 
0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, ..., 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002], [0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, ..., 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003], [0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002]]]], dtype=torch.bfloat16) 1 1 384 384 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 1 failed PCC: 0.8524679670988284 Expected tensor([[[[0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, ..., 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000], [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, ..., 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000], [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, ..., 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000], [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, ..., 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000], [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 
0.000, 0.000, 0.000, ... [expected-tensor dump truncated: every displayed entry is 0.000] ...]]]]) Actual tensor([[[[0.000, 0.000, 0.000, ... [actual-tensor dump truncated: every displayed entry is 0.000] ...]]]], dtype=torch.bfloat16) 1 1 384 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 2 failed PCC: 0.8482701837062309 Expected tensor([[[[0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003,
0.003, 0.002, 0.003, ... [expected-tensor dump truncated: displayed entries are all 0.002 or 0.003] ... 0.003, 0.003, 0.003,
0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, ..., 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002], [0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, ..., 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003], [0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, ..., 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003], [0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, ..., 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003], [0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003], [0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, ..., 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002], [0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, ..., 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002], [0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 
0.002], [0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, ..., 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003], [0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, ..., 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002], [0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, ..., 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003], [0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, ..., 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003], [0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002], [0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003], [0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003], [0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 
0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003], [0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, ..., 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003], [0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, ..., 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002], [0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, ..., 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003], [0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003], [0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002], [0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002], [0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, ..., 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002], [0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.003, 0.002, 0.003, 0.003, 
0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003], [0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, ..., 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002]]]]) Actual tensor([[[[0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, ..., 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003], [0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, ..., 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003], [0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, ..., 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003], [0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, ..., 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003], [0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, ..., 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003], [0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, ..., 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002], [0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 
0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, ..., 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002], [0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, ..., 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002], [0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, ..., 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002], [0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, ..., 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003], [0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, ..., 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003], [0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, ..., 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002], [0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, ..., 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002], [0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003], [0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 
0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, ..., 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002], [0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, ..., 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003], [0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, ..., 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002], [0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, ..., 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003], [0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, ..., 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002], [0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, ..., 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003], [0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003], [0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 
0.002, 0.002, 0.003], [0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, ..., 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003], [0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, ..., 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002], [0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, ..., 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003], [0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, ..., 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002], [0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, ..., 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002], [0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003], [0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, ..., 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003], [0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, ..., 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 
0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002], [0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, ..., 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002], [0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, ..., 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002], ..., [0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, ..., 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002], [0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, ..., 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002], [0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, ..., 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002], [0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, ..., 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003], [0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, ..., 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003], [0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, ..., 0.002, 0.002, 0.003, 
0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002], [0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, ..., 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003], [0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, ..., 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002], [0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, ..., 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002], [0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, ..., 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003], [0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, ..., 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002], [0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003], [0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, ..., 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002], [0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 
0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, ..., 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002], [0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, ..., 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002], [0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, ..., 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003], [0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, ..., 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002], [0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, ..., 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003], [0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, ..., 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003], [0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, ..., 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002], [0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, ..., 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003], [0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 
0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, ..., 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002], [0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, ..., 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003], [0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003], [0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003], [0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, ..., 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.002], [0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, ..., 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002], [0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002], [0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, ..., 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 0.003, 
0.002, 0.003], [0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, ..., 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002], [0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, ..., 0.002, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.003], [0.002, 0.003, 0.002, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, ..., 0.003, 0.003, 0.003, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.003, 0.002, 0.003, 0.003, 0.002, 0.002, 0.002, 0.002, 0.003, 0.003, 0.002, 0.003, 0.002, 0.002]]]], dtype=torch.bfloat16) 1 1 1024 384 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) + 3 failed PCC: 0.851730800398322 Expected tensor([[[[0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, ..., 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000], [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, ..., 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000], [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, ..., 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000], [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, ..., 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 
0.000], [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, ..., 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000], [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, ..., 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000], [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, ..., 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000], [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, ..., 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000], [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, ..., 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000], [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, ..., 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000], [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, ..., 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000], [0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, ..., 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 
..., 0.000]]]]) Actual tensor([[[[0.000, ..., 0.000]]]], dtype=torch.bfloat16) [all-zero expected/actual tensor dumps of shape 1x1x1024x4096 truncated] 1 1 1024 4096 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+==== ======== ====================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
===================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
diff --git a/docs/source/ttnn/sweeps/transformer_concatenate_heads.rst b/docs/source/ttnn/sweeps/transformer_concatenate_heads.rst
new file mode 100644
index 000000000000..8899d18b1493
--- /dev/null
+++ b/docs/source/ttnn/sweeps/transformer_concatenate_heads.rst
@@ -0,0 +1,16 @@
+.. _ttnn.sweep_test_transformer_concatenate_heads:
+
+transformer_concatenate_heads
+====================================================================
+==== ======== =========== ============ =========== =============== =========== ================= ============================================================================================================================== ==============================================================================================================================
+  .. status   exception     batch_size   num_heads   sequence_size   head_size input_dtype       input_memory_config                                                                                                            output_memory_config
+==== ======== =========== ============ =========== =============== =========== ================= ============================================================================================================================== ==============================================================================================================================
+   0 passed   nan                    1           4             384          64 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   1 passed   nan                    1           4             384         128 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   2 passed   nan                    1           4            1024          64 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   3 passed   nan                    1           4            1024         128 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   4 passed   nan                    1          16             384          64 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   5 passed   nan                    1          16             384         128 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   6 passed   nan                    1          16            1024          64 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   7 passed   nan                    1          16            1024         128 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt) tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+==== ======== =========== ============ =========== =============== =========== ================= ============================================================================================================================== ==============================================================================================================================
diff --git a/docs/source/ttnn/sweeps/transformer_split_query_key_value_and_split_heads.rst b/docs/source/ttnn/sweeps/transformer_split_query_key_value_and_split_heads.rst
new file mode 100644
index 000000000000..d1af522ed5c1
--- /dev/null
+++ b/docs/source/ttnn/sweeps/transformer_split_query_key_value_and_split_heads.rst
@@ -0,0 +1,16 @@
+.. _ttnn.sweep_test_transformer_split_query_key_value_and_split_heads:
+
+transformer_split_query_key_value_and_split_heads
+====================================================================
+==== ======== =========== ============ =============== =========== =========== ================= ==============================================================================================================================
+  .. status   exception     batch_size   sequence_size   num_heads   head_size input_dtype       input_memory_config
+==== ======== =========== ============ =============== =========== =========== ================= ==============================================================================================================================
+   0 passed   nan                    1             384           4          64 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   1 passed   nan                    1             384           4         128 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   2 passed   nan                    1             384          16          64 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   3 passed   nan                    1             384          16         128 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   4 passed   nan                    1            1024           4          64 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   5 passed   nan                    1            1024           4         128 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   6 passed   nan                    1            1024          16          64 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+   7 passed   nan                    1            1024          16         128 DataType.BFLOAT16 tt::tt_metal::MemoryConfig(memory_layout=TensorMemoryLayout::INTERLEAVED,buffer_type=BufferType::DRAM,shard_spec=std::nullopt)
+==== ======== =========== ============ =============== =========== =========== ================= ==============================================================================================================================
diff --git a/docs/source/ttnn/sweeps/upsample.rst b/docs/source/ttnn/sweeps/upsample.rst
new file mode 100644
index 000000000000..204065b3832a
--- /dev/null
+++ b/docs/source/ttnn/sweeps/upsample.rst
@@ -0,0 +1,104 @@
+.. _ttnn.sweep_test_upsample:
+
+upsample
+====================================================================
+==== ======== =========== ============ === === === ========= =========
+  .. status   exception     batch_size   c   h   w   scale_h   scale_w
+==== ======== =========== ============ === === === ========= =========
+   0 passed   nan                    1 320  64  64         2         2
+   1 passed   nan                    1 320  64  64         2         4
+   2 passed   nan                    1 320  64  64         4         2
+   3 passed   nan                    1 320  64  64         4         4
+   4 passed   nan                    1 320  64  16         2         2
+   5 passed   nan                    1 320  64  16         2         4
+   6 passed   nan                    1 320  64  16         4         2
+   7 passed   nan                    1 320  64  16         4         4
+   8 passed   nan                    1 320  32  64         2         2
+   9 passed   nan                    1 320  32  64         2         4
+  10 passed   nan                    1 320  32  64         4         2
+  11 passed   nan                    1 320  32  64         4         4
+  12 passed   nan                    1 320  32  16         2         2
+  13 passed   nan                    1 320  32  16         2         4
+  14 passed   nan                    1 320  32  16         4         2
+  15 passed   nan                    1 320  32  16         4         4
+  16 passed   nan                    1  64  64  64         2         2
+  17 passed   nan                    1  64  64  64         2         4
+  18 passed   nan                    1  64  64  64         4         2
+  19 passed   nan                    1  64  64  64         4         4
+  20 passed   nan                    1  64  64  16         2         2
+  21 passed   nan                    1  64  64  16         2         4
+  22 passed   nan                    1  64  64  16         4         2
+  23 passed   nan                    1  64  64  16         4         4
+  24 passed   nan                    1  64  32  64         2         2
+  25 passed   nan                    1  64  32  64         2         4
+  26 passed   nan                    1  64  32  64         4         2
+  27 passed   nan                    1  64  32  64         4         4
+  28 passed   nan                    1  64  32  16         2         2
+  29 passed   nan                    1  64  32  16         2         4
+  30 passed   nan                    1  64  32  16         4         2
+  31 passed   nan                    1  64  32  16         4         4
+  32 passed   nan                    2 320  64  64         2         2
+  33 passed   nan                    2 320  64  64         2         4
+  34 passed   nan                    2 320  64  64         4         2
+  35 passed   nan                    2 320  64  64         4         4
+  36 passed   nan                    2 320  64  16         2         2
+  37 passed   nan                    2 320  64  16         2         4
+  38 passed   nan                    2 320  64  16         4         2
+  39 passed   nan                    2 320  64  16         4         4
+  40 passed   nan                    2 320  32  64         2         2
+  41 passed   nan                    2 320  32  64         2         4
+  42 passed   nan                    2 320  32  64         4         2
+  43 passed   nan                    2 320  32  64         4         4
+  44 passed   nan                    2 320  32  16         2         2
+  45 passed   nan                    2 320  32  16         2         4
+  46 passed   nan                    2 320  32  16         4         2
+  47 passed   nan                    2 320  32  16         4         4
+  48 passed   nan                    2  64  64  64         2         2
+  49 passed   nan                    2  64  64  64         2         4
+  50 passed   nan                    2  64  64  64         4         2
+  51 passed   nan                    2  64  64  64         4         4
+  52 passed   nan                    2  64  64  16         2         2
+  53 passed   nan                    2  64  64  16         2         4
+  54 passed   nan                    2  64  64  16         4         2
+  55 passed   nan                    2  64  64  16         4         4
+  56 passed   nan                    2  64  32  64         2         2
+  57 passed   nan                    2  64  32  64         2         4
+  58 passed   nan                    2  64  32  64         4         2
+  59 passed   nan                    2  64  32  64         4         4
+  60 passed   nan                    2  64  32  16         2         2
+  61 passed   nan                    2  64  32  16         2         4
+  62 passed   nan                    2  64  32  16         4         2
+  63 passed   nan                    2  64  32  16         4         4
+  64 passed   nan                    4 320  64  64         2         2
+  65 passed   nan                    4 320  64  64         2         4
+  66 passed   nan                    4 320  64  64         4         2
+  67 passed   nan                    4 320  64  64         4         4
+  68 passed   nan                    4 320  64  16         2         2
+  69 passed   nan                    4 320  64  16         2         4
+  70 passed   nan                    4 320  64  16         4         2
+  71 passed   nan                    4 320  64  16         4         4
+  72 passed   nan                    4 320  32  64         2         2
+  73 passed   nan                    4 320  32  64         2         4
+  74 passed   nan                    4 320  32  64         4         2
+  75 passed   nan                    4 320  32  64         4         4
+  76 passed   nan                    4 320  32  16         2         2
+  77 passed   nan                    4 320  32  16         2         4
+  78 passed   nan                    4 320  32  16         4         2
+  79 passed   nan                    4 320  32  16         4         4
+  80 passed   nan                    4  64  64  64         2         2
+  81 passed   nan                    4  64  64  64         2         4
+  82 passed   nan                    4  64  64  64         4         2
+  83 passed   nan                    4  64  64  64         4         4
+  84 passed   nan                    4  64  64  16         2         2
+  85 passed   nan                    4  64  64  16         2         4
+  86 passed   nan                    4  64  64  16         4         2
+  87 passed   nan                    4  64  64  16         4         4
+  88 passed   nan                    4  64  32  64         2         2
+  89 passed   nan                    4  64  32  64         2         4
+  90 passed   nan                    4  64  32  64         4         2
+  91 passed   nan                    4  64  32  64         4         4
+  92 passed   nan                    4  64  32  16         2         2
+  93 passed   nan                    4  64  32  16         2         4
+  94 passed   nan                    4  64  32  16         4         2
+  95 passed   nan                    4  64  32  16         4         4
+==== ======== =========== ============ === === === ========= =========
diff --git a/tests/ttnn/sweep_tests/build_html_sweep_results.py b/tests/ttnn/sweep_tests/build_html_sweep_results.py
deleted file mode 100644
index 6df4a4aa23b3..000000000000
--- a/tests/ttnn/sweep_tests/build_html_sweep_results.py
+++ /dev/null
@@ -1,197 +0,0 @@
-# SPDX-FileCopyrightText: © 2023 Tenstorrent Inc.
-
-# SPDX-License-Identifier: Apache-2.0
-
-
-import argparse
-import requests
-import tempfile
-import pathlib
-import zipfile
-import pandas as pd
-from loguru import logger
-
-
-def get_list_of_runs():
-    params = {"per_page": 3}
-    url = "https://api.github.com/repos/tenstorrent-metal/tt-metal/actions/workflows/ttnn-run-sweeps.yaml/runs"
-    headers = {"Accept": "application/vnd.github.v3+json"}
-    response = requests.get(url, headers=headers, params=params)
-    if response.status_code == 200:
-        runs = response.json()
-    else:
-        raise RuntimeError(f"Error fetching workflow runs: {response.status_code}")
-
-    return runs
-
-
-def download_artifacts(token, artifacts_url, output_path):
-    response = requests.get(artifacts_url)
-    headers = {"Authorization": f"token {token}", "Accept": "application/vnd.github.v3+json"}
-    if response.status_code == 200:
-        artifacts_data = response.json()
-        if artifacts_data["artifacts"]:
-            artifact = artifacts_data["artifacts"][0]
-            artifact_download_url = artifact["archive_download_url"]
-            artifact_response = requests.get(artifact_download_url, headers=headers)
-            if artifact_response.status_code == 200:
-                with open(output_path, "wb") as file:
-                    file.write(artifact_response.content)
-                logger.info(f"{artifacts_url} downloaded successfully.")
-            else:
-                raise RuntimeError("Failed to download the artifact.")
-        else:
-            raise RuntimeError("No artifacts found. Is there a run in progress?")
-    else:
-        raise RuntimeError("Failed to fetch artifacts list.")
-
-
-def read_csv_from_zip(zip_file, file_name):
-    with zip_file.open(file_name) as f:
-        return pd.read_csv(f)
-
-
-def trim_column(texte, longueur):
-    if len(texte) > longueur:
-        return texte[-longueur + 3 :]
-    return texte
-
-
-def get_subset_for_status(recent_df, prior_df, status):
-    failed_recent = recent_df[recent_df["status"] == status]
-    failed_prior = recent_df[prior_df["status"] == status]
-    return failed_recent, failed_prior
-
-
-def extract_only_recent_changes(failed_recent, failed_prior):
-    run_id_column_name = failed_recent.columns[0]
-    newly_failed = failed_recent[~failed_recent[run_id_column_name].isin(failed_prior[run_id_column_name])]
-    for column in newly_failed.columns:
-        newly_failed[column] = newly_failed[column].apply(lambda x: trim_column(str(x), 10))
-    return newly_failed
-
-
-def build_new_failures(recent_df, prior_df):
-    failed_recent, failed_prior = get_subset_for_status(recent_df, prior_df, "failed")
-    return extract_only_recent_changes(failed_recent, failed_prior)
-
-
-def build_new_crashes(recent_df, prior_df):
-    failed_recent, failed_prior = get_subset_for_status(recent_df, prior_df, "crashed")
-    return extract_only_recent_changes(failed_recent, failed_prior)
-
-
-def diff_results(recent_zip, prior_zip, directory_for_html_pages, commit_hash):
-    directory_for_html_pages = pathlib.Path(directory_for_html_pages)
-    html_files = []
-    html_failure_files = []
-    failures_since_last_run = 0
-    with zipfile.ZipFile(recent_zip, "r") as zip1, zipfile.ZipFile(prior_zip, "r") as zip2:
-        zip1_files = set(zip1.namelist())
-        zip2_files = set(zip2.namelist())
-        common_files = zip1_files.intersection(zip2_files)
-        # pd.set_option("display.max_rows", None)
-        # pd.set_option("display.max_columns", None)
-        # pd.set_option("display.width", None)
-        # pd.set_option("display.max_colwidth", 10)
-        for file_name in common_files:
-            test_name = pathlib.Path(file_name).stem
-            if file_name.endswith(".csv"):
-                recent_df = read_csv_from_zip(zip1, file_name)
-                html_table = recent_df.to_html()
-                html_page_name = directory_for_html_pages / f"{test_name}.html"
directory_for_html_pages / f"{test_name}.html" - with open(html_page_name, "w") as f: - f.write(html_table) - html_files.append(f"{test_name}.html") - prior_df = read_csv_from_zip(zip2, file_name) - failures_df = build_new_failures(recent_df, prior_df) - crashes_df = build_new_crashes(recent_df, prior_df) - combined_test_resutls_df = pd.concat([failures_df, crashes_df]) - if combined_test_resutls_df.size > 0: - failures_since_last_run = failures_since_last_run + combined_test_resutls_df.size - html_table = combined_test_resutls_df.to_html() - html_page_name = directory_for_html_pages / f"{test_name}_failure.html" - with open(html_page_name, "w") as f: - f.write(html_table) - html_failure_files.append(f"{test_name}_failure.html") - - html_template = """ - - - - Sweep Test Results - - - -

Sweep Tests

-

We have had {failures_since_last_run} failures since the prior run.

-

Commit Hash: {commit_hash}

-
- {iframes} - - - """ - - iframe_tags = "".join( - [f'

{file.split(".")[0]}

' for file in html_failure_files] - ) - complete_html = html_template.format( - commit_hash=commit_hash, failures_since_last_run=failures_since_last_run, iframes=iframe_tags - ) - html_page_name = directory_for_html_pages / f"index.html" - with open(html_page_name, "w") as file: - file.write(complete_html) - - logger.info(f"Built {html_page_name}") - - -def download_from_pipeline(token, directory_for_html_pages): - """ - Download the results of the sweeps from the GitHub pipeline. - - :param token: Provide your GitHub token. - """ - - runs = get_list_of_runs() - if len(runs["workflow_runs"]) < 3: - # Note that if the run is in progress, there will not be any artifacts avaiable yet on the most recent run. - raise RuntimeError("We need at least three runs to compare the changes in the sweep tests") - - if runs["workflow_runs"][0]["status"] == "completed": - most_recent_run = runs["workflow_runs"][0] - prior_run = runs["workflow_runs"][1] - else: - most_recent_run = runs["workflow_runs"][1] - prior_run = runs["workflow_runs"][2] - - most_recent_artifact_url = most_recent_run["artifacts_url"] - commit_hash = most_recent_run["head_sha"] - prior_artifact_url = prior_run["artifacts_url"] - - with tempfile.TemporaryDirectory() as temp_dir: - temp_dir_path = pathlib.Path(temp_dir) - recent_zip = temp_dir_path / "recent.zip" - prior_zip = temp_dir_path / "prior.zip" - download_artifacts(token, most_recent_artifact_url, output_path=recent_zip) - download_artifacts(token, prior_artifact_url, output_path=prior_zip) - diff_results(recent_zip, prior_zip, directory_for_html_pages, commit_hash) - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument("--token") - parser.add_argument("--dir") - token = parser.parse_args().token - directory_for_html_pages = parser.parse_args().dir - download_from_pipeline(token, directory_for_html_pages) - - -if __name__ == "__main__": - main() diff --git a/tests/ttnn/sweep_tests/build_rst_sweep_results.py b/tests/ttnn/sweep_tests/build_rst_sweep_results.py new file mode 100644 index 000000000000..7ec1450e023f --- /dev/null +++ b/tests/ttnn/sweep_tests/build_rst_sweep_results.py @@ -0,0 +1,299 @@ +# SPDX-FileCopyrightText: © 2023 Tenstorrent Inc. 
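The deleted script above compared only the most recent completed run against the single prior run, so a failure introduced several runs earlier could go unreported. The replacement below instead walks backwards through the downloaded artifacts, per operation file, until it finds the first run whose results differ from the most recent one. A minimal sketch of that look-back idea, with hypothetical helper names standing in for the real CSV diffing:

    # `runs` is ordered most-recent-first; `has_new_failures(recent, prior)`
    # stands in for the CSV comparison done by the real script.
    def find_regression_window(runs, has_new_failures):
        recent = runs[0]
        for prior in runs[1:]:
            if has_new_failures(recent, prior):
                # The regression appeared somewhere in the range (prior .. recent].
                return prior, recent
        return None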
diff --git a/tests/ttnn/sweep_tests/build_rst_sweep_results.py b/tests/ttnn/sweep_tests/build_rst_sweep_results.py
new file mode 100644
index 000000000000..7ec1450e023f
--- /dev/null
+++ b/tests/ttnn/sweep_tests/build_rst_sweep_results.py
@@ -0,0 +1,299 @@
+# SPDX-FileCopyrightText: © 2023 Tenstorrent Inc.
+
+# SPDX-License-Identifier: Apache-2.0
+
+
+import argparse
+import requests
+import tempfile
+import pathlib
+import zipfile
+import pandas as pd
+from loguru import logger
+from dataclasses import dataclass
+from tabulate import tabulate
+import os
+import shutil
+
+
+def get_list_of_runs():
+    params = {"per_page": 15}
+    url = "https://api.github.com/repos/tenstorrent-metal/tt-metal/actions/workflows/ttnn-run-sweeps.yaml/runs"
+    headers = {"Accept": "application/vnd.github.v3+json"}
+    response = requests.get(url, headers=headers, params=params)
+    if response.status_code == 200:
+        runs = response.json()
+    else:
+        raise RuntimeError(f"Error fetching workflow runs: {response.status_code}:{response.text}")
+
+    return runs
+
+
+def download_artifacts(token, artifacts_url, temp_dir_path, directory_index):
+    response = requests.get(artifacts_url)
+    headers = {"Authorization": f"token {token}", "Accept": "application/vnd.github.v3+json"}
+    if response.status_code == 200:
+        artifacts_data = response.json()
+        if artifacts_data["artifacts"]:
+            artifact = artifacts_data["artifacts"][0]
+            artifact_download_url = artifact["archive_download_url"]
+            artifact_response = requests.get(artifact_download_url, headers=headers)
+            if artifact_response.status_code == 200:
+                (temp_dir_path / str(directory_index)).mkdir(parents=True, exist_ok=True)
+                artifact_zip = temp_dir_path / str(directory_index) / "artifact.zip"
+                with open(artifact_zip, "wb") as file:
+                    file.write(artifact_response.content)
+                logger.info(f"{artifacts_url} downloaded successfully.")
+                return True
+            else:
+                raise RuntimeError("Failed to download the artifact.")
+        else:
+            logger.info(f"No artifacts found. Is there a run in progress for {artifacts_url} ?")
+    else:
+        raise RuntimeError(f"Failed to fetch artifacts list. {response.status_code}:{response.text}")
+    return False
+
+
+def read_csv_from_zip(zip_file, file_name):
+    with zip_file.open(file_name) as f:
+        df = pd.read_csv(f)
+    if not df.empty and len(df.columns) > 1:
+        # Remove the first, unnamed column, which is just the index;
+        # tabulate will render its own index column instead.
+        df = df.iloc[:, 1:]
+    return df
+
+
+def trim_column(texte, longueur):
+    if len(texte) > longueur:
+        return texte[-longueur + 3 :]
+    return texte
+
+
+def get_subset_for_status(recent_df, prior_df, status):
+    failed_recent = recent_df[recent_df["status"] == status]
+    failed_prior = prior_df[prior_df["status"] == status]
+    return failed_recent, failed_prior
+
+
+def extract_only_recent_changes(failed_recent, failed_prior):
+    run_id_column_name = failed_recent.columns[0]
+    newly_failed = failed_recent[~failed_recent[run_id_column_name].isin(failed_prior[run_id_column_name])]
+    for column in newly_failed.columns:
+        newly_failed[column] = newly_failed[column].apply(lambda x: trim_column(str(x), 10))
+    return newly_failed
+
+
+def build_new_failures(recent_df, prior_df):
+    failed_recent, failed_prior = get_subset_for_status(recent_df, prior_df, "failed")
+    return extract_only_recent_changes(failed_recent, failed_prior)
+
+
+def build_new_crashes(recent_df, prior_df):
+    failed_recent, failed_prior = get_subset_for_status(recent_df, prior_df, "crashed")
+    return extract_only_recent_changes(failed_recent, failed_prior)
+
+
+def delete_directory_contents(dir_path):
+    for item in os.listdir(dir_path):
+        item_path = os.path.join(dir_path, item)
+        if os.path.isfile(item_path) or os.path.islink(item_path):
+            os.unlink(item_path)
+        elif os.path.isdir(item_path):
+            shutil.rmtree(item_path)
+
+
+@dataclass
+class OperationFailure:
+    file_name: str
+    failure_file_name: str
+    commit_hash_with_failure: str
+    commit_hash_prior_to_failure: str
+    failures: int
+
+
+def diff_results(temp_dir_path, most_recent_run_index, total_runs, directory_for_rst_pages):
+    directory_for_rst_pages = pathlib.Path(directory_for_rst_pages)
+    rst_failure_files = []
+    rst_files = []
+    failures_since_last_run = 0
+
+    recent_zip = temp_dir_path / str(most_recent_run_index) / "artifact.zip"
+    commit_hash_file = temp_dir_path / str(most_recent_run_index) / "commit_hash.txt"
+    with open(commit_hash_file, "r") as file:
+        most_recent_commit_hash = file.read()
+
+    new_failures = {}
+
+    with zipfile.ZipFile(recent_zip, "r") as zip1:
+        # First, turn the latest csv results from the most recent run into rst pages.
+        zip1_files = set(zip1.namelist())
+        for file_name in zip1_files:
+            test_name = pathlib.Path(file_name).stem
+            if file_name.endswith(".csv"):
+                recent_df = read_csv_from_zip(zip1, file_name)
+                for col in recent_df.columns:
+                    recent_df[col] = recent_df[col].apply(lambda x: str(x).replace("\t", " ").replace("\n", " "))
+                rst_table = tabulate(recent_df, headers="keys", tablefmt="rst")
+                rst_page_name = directory_for_rst_pages / f"{test_name}.rst"
+                with open(rst_page_name, "w") as f:
+                    f.write(f".. _ttnn.sweep_test_{test_name}:\n")
+                    f.write("\n")
+                    f.write(f"{test_name}\n")
+                    f.write("====================================================================\n")
+                    f.write(rst_table)
+                new_failures[test_name] = OperationFailure(
+                    f"{test_name}.rst", f"{test_name}_failure.rst", most_recent_commit_hash, "", 0
+                )
+                rst_files.append(test_name)
+
+        # Now look further back in time, one run at a time per operation file, to find
+        # where the differences relative to the most recent run first appeared.
+        for test_name in new_failures:
+            commit_hash = most_recent_commit_hash
+            prior_run_index = most_recent_run_index + 1
+            while new_failures[test_name].failures == 0 and prior_run_index < total_runs - 1:
+                prior_zip = temp_dir_path / str(prior_run_index) / "artifact.zip"
+                with zipfile.ZipFile(prior_zip, "r") as zip2:
+                    for file_name in zip2.namelist():
+                        if file_name.endswith(f"{test_name}.csv"):
+                            recent_df = read_csv_from_zip(zip1, file_name)
+                            prior_df = read_csv_from_zip(zip2, file_name)
+                            failures_df = build_new_failures(recent_df, prior_df)
+                            crashes_df = build_new_crashes(recent_df, prior_df)
+                            combined_test_results_df = pd.concat([failures_df, crashes_df])
+                            if len(combined_test_results_df) > 0:
+                                failures_since_last_run = failures_since_last_run + len(combined_test_results_df)
+                                new_failures[test_name].failures = len(combined_test_results_df)
+                                new_failures[test_name].failure_file_name = f"{test_name}_failure.rst"
+                                new_failures[test_name].commit_hash_with_failure = commit_hash
+
+                                rst_table = tabulate(combined_test_results_df, headers="keys", tablefmt="rst")
+                                rst_page_name = directory_for_rst_pages / f"{test_name}_failure.rst"
+                                with open(rst_page_name, "w") as f:
+                                    f.write(f".. _ttnn.sweep_test_failure_{test_name}:\n")
+                                    f.write("\n")
+                                    f.write(f"{test_name}\n")
+                                    f.write("====================================================================\n")
+                                    f.write(rst_table)
+                                rst_failure_files.append(new_failures[test_name])
+
+                commit_hash_file = temp_dir_path / str(prior_run_index) / "commit_hash.txt"
+                with open(commit_hash_file, "r") as file:
+                    commit_hash = file.read()
+                new_failures[test_name].commit_hash_prior_to_failure = commit_hash
+
+                prior_run_index = prior_run_index + 1
+
+    rst_template = """
+.. _ttnn.sweep_tests:
+
+Sweep Test Results
+==================
+
+Recent New Failures
+-------------------
+
+We have had {failures_since_last_run} new failures since the prior run.
+
+.. toctree::
+   :maxdepth: 2
+   :hidden:
+
+   {toctree_failure_filenames}
+
+{sweep_test_failure_entries}
+
+
+All Sweep Tests
+---------------
+
+These are the sweep tests for commit hash {most_recent_commit_hash}.
+
+.. toctree::
+   :maxdepth: 2
+
+   {toctree_entries}
+"""
+
+    sweep_test_failure_entries = "\n".join(
+        [
+            f"* :ref:`{op_failure.file_name.split('.')[0]} <ttnn.sweep_test_failure_{op_failure.file_name.split('.')[0]}>` "
+            f"-> ( {op_failure.commit_hash_prior_to_failure} .. {op_failure.commit_hash_with_failure} ]"
+            for op_failure in rst_failure_files
+        ]
+    )
+    sweep_test_failure_entries = sweep_test_failure_entries.lstrip()
+
+    toctree_failure_filenames = "\n   ".join(
+        [op_failure.failure_file_name.replace(".rst", "") for op_failure in rst_failure_files]
+    )
+
+    toctree_entries = "\n   ".join(sorted(rst_files))
+
+    complete_rst = rst_template.format(
+        most_recent_commit_hash=most_recent_commit_hash,
+        failures_since_last_run=failures_since_last_run,
+        toctree_failure_filenames=toctree_failure_filenames,
+        sweep_test_failure_entries=sweep_test_failure_entries,
+        toctree_entries=toctree_entries,
+    )
+
+    rst_page_name = directory_for_rst_pages / "index.rst"
+    with open(rst_page_name, "w") as file:
+        file.write(complete_rst)
+
+    logger.info(f"Built {rst_page_name}")
+
+
+def download_from_pipeline(token, directory_for_rst_pages):
+    """
+    Download the results of the sweeps from the GitHub pipeline.
+
+    :param token: Provide your GitHub token.
+    """
+
+    runs = get_list_of_runs()
+    if len(runs["workflow_runs"]) < 3:
+        # Note that if a run is in progress, there will not be any artifacts available yet on the most recent run.
+        raise RuntimeError("We need at least three runs to compare the changes in the sweep tests")
+
+    total_runs = len(runs["workflow_runs"])
+    if runs["workflow_runs"][0]["status"] == "completed":
+        most_recent_run_index = 0
+    else:  # A run is in progress, so we use the prior runs for the first comparison.
+        most_recent_run_index = 1
+
+    directory_index = 0
+    with tempfile.TemporaryDirectory() as temp_dir:
+        temp_dir_path = pathlib.Path(temp_dir)
+        for i in range(most_recent_run_index, total_runs):
+            run = runs["workflow_runs"][i]
+            artifact_url = run["artifacts_url"]
+            commit_hash = run["head_sha"]
+            if download_artifacts(token, artifact_url, temp_dir_path, directory_index):
+                commit_hash_file = temp_dir_path / str(directory_index) / "commit_hash.txt"
+                with open(commit_hash_file, "w") as file:
+                    file.write(commit_hash)
+                directory_index = directory_index + 1
+
+        total_runs = directory_index
+        delete_directory_contents(directory_for_rst_pages)
+        diff_results(temp_dir_path, 0, total_runs, directory_for_rst_pages)
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--token")
+    parser.add_argument("--dir")
+    args = parser.parse_args()
+
+    download_from_pipeline(args.token, args.dir)
+
+
+if __name__ == "__main__":
+    main()
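One subtlety in download_artifacts above: the artifact listing is requested before the Authorization header is built, while the archive download itself is sent authenticated, and GitHub only serves artifact archives to authenticated requests. A standalone sketch of just that download step, with a placeholder token, URL, and output path:

    import requests

    def fetch_artifact_zip(token, archive_download_url, out_path):
        # GitHub requires an authenticated request to stream artifact archives.
        headers = {"Authorization": f"token {token}", "Accept": "application/vnd.github.v3+json"}
        response = requests.get(archive_download_url, headers=headers)
        response.raise_for_status()
        with open(out_path, "wb") as f:
            f.write(response.content)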