Fixup opt to reduce the amount of odd if statements. (#2833)
* Fixup opt to reduce the amount of odd if statements.

* Fixing cargo lock
Narsil authored Dec 12, 2024
1 parent bf59118 commit 3bb3fd1
Showing 2 changed files with 15 additions and 10 deletions.
14 changes: 7 additions & 7 deletions Cargo.lock

Some generated files are not rendered by default, so the Cargo.lock diff is not shown.

11 changes: 8 additions & 3 deletions (PyTorch OPT modeling file)
@@ -12,7 +12,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-""" PyTorch OPT model."""
+"""PyTorch OPT model."""
+
 import random
 from typing import List, Optional, Tuple, Union
@@ -317,7 +318,6 @@ def __init__(self, layer_id: int, prefix: str, config: OPTConfig, weights):
         super().__init__()
         self.process_group = weights.process_group
         self.hidden_size = config.hidden_size
-        prefix = f"{prefix if prefix else ''}decoder.layers.{layer_id}"
         self.self_attn = OPTAttention(
             config,
             prefix=f"{prefix}.self_attn",
@@ -478,7 +478,12 @@ def __init__(self, prefix: str, config: OPTConfig, weights):
 
         self.layers = nn.ModuleList(
             [
-                OPTDecoderLayer(layer_id, prefix, config, weights)
+                OPTDecoderLayer(
+                    layer_id,
+                    prefix=f"{prefix}decoder.layers.{layer_id}",
+                    config=config,
+                    weights=weights,
+                )
                 for layer_id in range(config.num_hidden_layers)
             ]
         )
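In effect, the commit moves the weight-name prefix construction out of OPTDecoderLayer.__init__ (where it required a `prefix if prefix else ''` conditional) into OPTDecoder's list comprehension, which now hands each layer its fully qualified prefix. A minimal runnable sketch of the pattern, using hypothetical stand-in classes (DecoderLayer, Decoder) rather than the repository's actual modules:

# Sketch only: hypothetical names, not the repository's code.
class DecoderLayer:
    def __init__(self, layer_id: int, prefix: str):
        # Before the commit, this constructor rebuilt the prefix itself:
        #   prefix = f"{prefix if prefix else ''}decoder.layers.{layer_id}"
        # which forced it to special-case an empty prefix. Now the caller
        # passes the fully qualified prefix in.
        self.layer_id = layer_id
        self.prefix = prefix


class Decoder:
    def __init__(self, prefix: str, num_hidden_layers: int):
        # After the commit, the parent owns the naming scheme, so no
        # `if prefix else ''` branch is needed inside each layer.
        self.layers = [
            DecoderLayer(
                layer_id,
                prefix=f"{prefix}decoder.layers.{layer_id}",
            )
            for layer_id in range(num_hidden_layers)
        ]


print([layer.prefix for layer in Decoder("", 2).layers])
# -> ['decoder.layers.0', 'decoder.layers.1']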
