switching test hymba order
bursteratom authored and winglian committed Dec 17, 2024
1 parent: f828f16; commit: 48e893b
Showing 1 changed file with 8 additions and 8 deletions.
tests/e2e/test_packing_loss.py: 16 changes (8 additions, 8 deletions)
--- a/tests/e2e/test_packing_loss.py
+++ b/tests/e2e/test_packing_loss.py
@@ -70,14 +70,14 @@ def test_loss_packed(self, temp_dir):
         )


-class TestPackedHymba(unittest.TestCase):
+class TestUnpackedHymba(unittest.TestCase):
     """
-    Test case for Packed training of hymba models
+    Test case for Unpacked training of hymba models
     """

     @require_torch_2_5_1
     @with_temp_dir
-    def test_loss_packed(self, temp_dir):
+    def test_loss_unpacked(self, temp_dir):
         # pylint: disable=duplicate-code
         cfg = DictDefault(
             {
@@ -98,7 +98,7 @@ def test_loss_packed(self, temp_dir):
                     "o_proj",
                 ],
                 "sequence_len": 1024,
-                "sample_packing": True,
+                "sample_packing": False,
                 "flash_attention": True,
                 "val_set_size": 0.0,
                 "datasets": [
@@ -133,14 +133,14 @@ def test_loss_packed(self, temp_dir):
         )


-class TestUnpackedHymba(unittest.TestCase):
+class TestPackedHymba(unittest.TestCase):
     """
-    Test case for Unpacked training of hymba models
+    Test case for Packed training of hymba models
     """

     @require_torch_2_5_1
     @with_temp_dir
-    def test_loss_unpacked(self, temp_dir):
+    def test_loss_packed(self, temp_dir):
         # pylint: disable=duplicate-code
         cfg = DictDefault(
             {
@@ -161,7 +161,7 @@ def test_loss_unpacked(self, temp_dir):
                     "o_proj",
                 ],
                 "sequence_len": 1024,
-                "sample_packing": False,
+                "sample_packing": True,
                 "flash_attention": True,
                 "val_set_size": 0.0,
                 "datasets": [
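Not part of the commit itself, but as a usage note: a minimal sketch of running only the two Hymba test cases touched here, assuming pytest is installed and the command is run from the repository root (the -k expression simply matches the two class names).

# Minimal sketch: run just the Hymba packed/unpacked e2e tests from this file.
# Assumes pytest is available and the working directory is the repo root.
import sys

import pytest

if __name__ == "__main__":
    sys.exit(pytest.main(["tests/e2e/test_packing_loss.py", "-k", "Hymba"]))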
