Skip to content

Commit

Permalink
fix lint
Browse files Browse the repository at this point in the history
  • Loading branch information
yingtongxiong committed Sep 19, 2023
1 parent 43342ec commit 5fe27c7
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 3 deletions.
5 changes: 2 additions & 3 deletions internlm/model/linear.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,13 @@
from typing import Optional

import torch
import torch.nn.functional as F
from flash_attn.ops.fused_dense import ColumnParallelLinear, RowParallelLinear
from flash_attn.utils.distributed import all_reduce, reduce_scatter
from torch import nn

from internlm.core.context import ParallelMode
from internlm.core.context import global_context as gpc
from internlm.model.utils import fused_dense_func_torch, Silu
from internlm.model.utils import Silu, fused_dense_func_torch


class ScaleColumnParallelLinear(nn.Linear):
Expand Down Expand Up @@ -195,7 +194,7 @@ def __init__(
device=device,
dtype=dtype,
)

def forward(self, x):
w1_o = self.w1(x)
w2_o = self.w2(x)
Expand Down
1 change: 1 addition & 0 deletions internlm/model/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,4 +212,5 @@ def try_import_RMSNorm():
def Silu(w1_o, w2_o):
    """Gated SiLU activation.

    Computes ``silu(w1_o) * w2_o`` elementwise: ``w1_o`` acts as the gate
    projection and ``w2_o`` as the value projection.

    Args:
        w1_o: gate-branch tensor.
        w2_o: value-branch tensor (same broadcastable shape as ``w1_o``).

    Returns:
        Tensor of the elementwise product ``silu(w1_o) * w2_o``.
    """
    gate = F.silu(w1_o)
    return gate * w2_o


# NOTE(review): compiled with TorchScript — presumably so the elementwise
# activation and multiply can be fused into a single kernel; confirm intent.
Silu = torch.jit.script(Silu)

0 comments on commit 5fe27c7

Please sign in to comment.