
Commit 533523d: part 6

Pavle Josipovic committed Dec 8, 2024
1 parent 3144212 commit 533523d
Showing 2 changed files with 2 additions and 4 deletions.
2 changes: 0 additions & 2 deletions models/demos/falcon7b_common/tt/model_config.py
@@ -120,7 +120,6 @@ def get_ln_block_sharded_config(height_dim, hidden_dim):
                 ln_shard_width_hidden_dim,
             ],
             ttnn.ShardOrientation.ROW_MAJOR,
-            False,
         ),
     )

@@ -255,7 +254,6 @@ def get_model_config(model_config_str, prefill_seq_len=0, decode_batch_size=32):
                 shard_width,
             ],
             ttnn.ShardOrientation.ROW_MAJOR,
-            False,
         ),
     )

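Both hunks above make the same mechanical change: the trailing False is dropped from a ttnn.ShardSpec construction inside a sharded memory config. The following is a minimal sketch of the updated call shape, assuming the current ttnn Python API; the core grid and shard dimensions are illustrative placeholders, and reading the removed False as ShardSpec's former trailing flag (rather than some other argument) is an inference from the surrounding context, not something the diff itself states.

# Sketch only: grid and shard dimensions are illustrative, not taken from the repo.
import ttnn

shard_grid = ttnn.CoreRangeSet(
    {ttnn.CoreRange(ttnn.CoreCoord(0, 0), ttnn.CoreCoord(7, 0))}
)

# After this commit the trailing boolean is no longer passed:
shard_spec = ttnn.ShardSpec(
    shard_grid,
    [32, 64],  # [shard height, shard width], placeholder values
    ttnn.ShardOrientation.ROW_MAJOR,
)

Every call site in model_config.py changes the same way: stop passing the removed final argument and leave everything else untouched.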
4 changes: 2 additions & 2 deletions ttnn/ttnn/core.py
@@ -103,7 +103,7 @@ def create_sharded_memory_config(
     Currently sharding only supports L1 tensors.
     Example:
-        >>> tensor = ttnn.create_sharded_memory_config((5, 8), (320,64), ttnn.ShardStrategy.BLOCK, ttnn.ShardOrientation.ROW_MAJOR, False)
+        >>> tensor = ttnn.create_sharded_memory_config((5, 8), (320,64), ttnn.ShardStrategy.BLOCK, ttnn.ShardOrientation.ROW_MAJOR)
     """

@@ -231,7 +231,7 @@ def create_sharded_memory_config_(
     Example::
-        >>> tensor = ttnn.create_sharded_memory_config((5, 8), (320,64), ttnn.ShardStrategy.BLOCK, ttnn.ShardOrientation.ROW_MAJOR, False)
+        >>> tensor = ttnn.create_sharded_memory_config((5, 8), (320,64), ttnn.ShardStrategy.BLOCK, ttnn.ShardOrientation.ROW_MAJOR)
     """

     if not isinstance(shape, (list, tuple, ttnn.Shape)):
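The two core.py hunks only update the docstring examples to match the shorter call. Reproduced below as a runnable sketch, with argument values copied verbatim from the example; the roles noted in the comments follow the parameter order assumed from the docstring. Note that the original example binds the result to a name called tensor, even though create_sharded_memory_config returns a memory config rather than a tensor.

import ttnn

# The updated docstring example, reformatted one argument per line.
memory_config = ttnn.create_sharded_memory_config(
    (5, 8),                           # first positional argument, as in the example
    (320, 64),                        # second positional argument, as in the example
    ttnn.ShardStrategy.BLOCK,
    ttnn.ShardOrientation.ROW_MAJOR,  # the trailing False is gone
)

Whether the removed flag also disappeared from the function's signature is not visible in these hunks; only the examples change here.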
