Commit: fix

mbezuljTT committed Dec 23, 2024
1 parent 2f30383 commit ab2af49
Showing 2 changed files with 12 additions and 5 deletions.
7 changes: 7 additions & 0 deletions test/lit.cfg.py
@@ -98,3 +98,10 @@ def set_system_desc_features(system_desc):
     ],
     append_path=True,
 )
+
+if "TT_METAL_HOME" in os.environ:
+    print(f"{os.environ['TT_METAL_HOME']}")
+    llvm_config.with_environment("TT_METAL_HOME", os.environ["TT_METAL_HOME"])
+else:
+
+    llvm_config.with_environment("TT_METAL_HOME", r"/__w/tt-mlir/tt-mlir/third_party/tt-metal/src/tt-metal")
10 changes: 5 additions & 5 deletions test/ttmlir/Silicon/TTNN/optimizer/mnist_sharding.mlir
@@ -1,11 +1,11 @@
-// RUN: ttmlir-opt --ttir-to-ttnn-backend-pipeline="system-desc-path=%system_desc_path% enable-optimizer=true memory-layout-analysis-enabled=true" %s > %t.mlir
-// RUN: FileCheck %s --input-file=%t.mlir
-// RUN: ttmlir-translate --ttnn-to-flatbuffer %t.mlir > %t.ttnn
+// RUN: ttmlir-opt --ttir-to-ttnn-backend-pipeline="system-desc-path=%system_desc_path% enable-optimizer=true memory-layout-analysis-enabled=true" -o output_file.mlir %s
+// RUN: FileCheck %s --input-file=output_file.mlir
+// RUN: ttmlir-translate --ttnn-to-flatbuffer output_file.mlir > %t.ttnn
 #loc = loc("MNISTLinear":4294967295:0)
 module @"tt-forge-graph" attributes {} {
   func.func @main(%arg0: tensor<1x784xf32> loc("MNISTLinear":4294967295:0), %arg1: tensor<1x10xf32> loc("MNISTLinear":4294967295:0), %arg2: tensor<256x10xf32> loc("MNISTLinear":4294967295:0), %arg3: tensor<1x256xf32> loc("MNISTLinear":4294967295:0), %arg4: tensor<784x256xf32> loc("MNISTLinear":4294967295:0)) -> tensor<1x10xf32> {
-    // CHECK-DAG: #[[LAYOUT_10:.*]] = #ttnn.ttnn_layout<(d0, d1) -> (d0, d1), <1x8>, memref<1x1x!tt.tile<32x32, f32>, #l1_>, <width_sharded>>
-    // CHECK-DAG: #[[LAYOUT_11:.*]] = #ttnn.ttnn_layout<(d0, d1) -> (d0, d1), <1x1>, memref<1x1x!tt.tile<32x32, f32>, #l1_>, <width_sharded>>
+    // CHECK-DAG: #[[LAYOUT_10:.*]] = #ttnn.ttnn_layout<(d0, d1) -> (d0, d1), <1x8, (d0, d1) -> (0, d1 floordiv 8, d1 mod 8)>, memref<1x1x!tt.tile<32x32, f32>, #l1_>, <width_sharded>>
+    // CHECK-DAG: #[[LAYOUT_11:.*]] = #ttnn.ttnn_layout<(d0, d1) -> (d0, d1), <1x1, (d0, d1) -> (0, d1 floordiv 8, d1 mod 8)>, memref<1x1x!tt.tile<32x32, f32>, #l1_>, <width_sharded>>
     %0 = tensor.empty() : tensor<1x256xf32> loc(#loc8)
     // CHECK: %{{.*}} = "ttnn.matmul"{{.*}} -> tensor<1x256xf32, #[[LAYOUT_10]]>
     %1 = "ttir.matmul"(%arg0, %arg4, %0) : (tensor<1x784xf32>, tensor<784x256xf32>, tensor<1x256xf32>) -> tensor<1x256xf32> loc(#loc8)
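In the RUN lines, the shell redirection into %t (lit's substitution for a temporary path unique to each test) is replaced by the tool's -o flag writing to the fixed name output_file.mlir. Purely as an illustration of that difference, here is a hypothetical Python sketch of the substitution step; the function name and temp-dir choice are invented and do not reflect lit's real internals:

    # Mimic lit expanding %t to a unique per-test path, which is what the
    # old RUN lines relied on to avoid collisions between parallel tests.
    import os
    import tempfile

    def expand_substitutions(run_line: str, test_name: str) -> str:
        tmp_base = os.path.join(tempfile.gettempdir(), test_name + ".tmp")
        return run_line.replace("%t", tmp_base)

    print(expand_substitutions("ttmlir-opt ... %s > %t.mlir", "mnist_sharding"))
    # A fixed name such as output_file.mlir is instead resolved relative to
    # whatever directory lit runs the command in, and is shared by every RUN
    # line in the test that mentions it.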
