-
Notifications
You must be signed in to change notification settings - Fork 348
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
accf70f
commit 544ae05
Showing
17 changed files
with
501 additions
and
26 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,17 @@ | ||
Whisper | ||
----------------------------------------------------------------------------------------------------------------------- | ||
|
||
The Whisper model was presented in `Robust Speech Recognition via Large-Scale Weak Supervision
<https://arxiv.org/abs/2212.04356>`_ by Alec Radford, Jong Wook Kim, Tao Xu, Greg Brockman, Christine
McLeavey, and Ilya Sutskever.
|
||
According to the abstract, Whisper is trained on 680,000 hours of multilingual and multitask data — a
scale previously unseen — which enables it to approach the accuracy and robustness of human speech recognition.
|
||
|
||
WhisperAdapterModel | ||
~~~~~~~~~~~~~~~~~~~~ | ||
|
||
.. autoclass:: transformers.adapters.WhisperAdapterModel | ||
:members: | ||
:inherited-members: WhisperPreTrainedModel |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,51 @@ | ||
from typing import Iterable, Tuple | ||
|
||
import torch.nn as nn | ||
|
||
from ..layer import AdapterLayer | ||
from ..model_mixin import ( | ||
EmbeddingAdaptersMixin, | ||
EmbeddingAdaptersWrapperMixin, | ||
InvertibleAdaptersWrapperMixin, | ||
ModelAdaptersMixin, | ||
ModelWithHeadsAdaptersMixin, | ||
) | ||
|
||
|
||
class WhisperEncoderLayerAdaptersMixin:
    """Adds adapters to the WhisperEncoderLayer module of WHISPER.

    Installs one adapter layer behind the self-attention sub-layer
    (``mh_adapter``) and one behind the feed-forward output
    (``output_adapter``).
    """

    def _init_adapter_modules(self):
        # Build and initialize each adapter layer at its location key.
        for attr_name, location_key in (
            ("attention_adapters", "mh_adapter"),
            ("output_adapters", "output_adapter"),
        ):
            adapter_layer = AdapterLayer(location_key, self.config)
            setattr(self, attr_name, adapter_layer)
            adapter_layer._init_adapter_modules()
|
||
|
||
class WhisperDecoderLayerAdaptersMixin(WhisperEncoderLayerAdaptersMixin):
    """Adds adapters to the WhisperDecoderLayer module of WHISPER.

    Extends the encoder-layer setup with an additional adapter layer
    behind the cross-attention sub-layer (``cross_adapter``).
    """

    def _init_adapter_modules(self):
        # Reuse the self-attention / output adapters from the encoder mixin.
        super()._init_adapter_modules()
        cross_adapters = AdapterLayer("cross_adapter", self.config)
        self.cross_attention_adapters = cross_adapters
        cross_adapters._init_adapter_modules()
|
||
|
||
class WhisperModelAdaptersMixin(EmbeddingAdaptersMixin, InvertibleAdaptersWrapperMixin, ModelAdaptersMixin):
    """Adds adapters to the WhisperModel class."""

    # Invertible adapters live on the encoder sub-module.
    invertible_adapters_base_name = "encoder"

    def iter_layers(self) -> Iterable[Tuple[int, nn.Module]]:
        """Yield ``(index, layer)`` pairs over all transformer layers.

        Encoder layers (if present) come first; decoder layer indices
        continue the count so every layer gets a unique index. A model
        without an ``encoder`` attribute yields decoder layers from 0.
        """
        encoder_layers = self.encoder.layers if hasattr(self, "encoder") else []
        yield from enumerate(encoder_layers)
        yield from enumerate(self.decoder.layers, start=len(encoder_layers))
|
||
|
||
class WhisperModelWithHeadsAdaptersMixin(EmbeddingAdaptersWrapperMixin, ModelWithHeadsAdaptersMixin):
    """Combines embedding-adapter wrapping with heads support for WhisperModel; no extra behavior."""
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,24 @@ | ||
# Lazy-loading package init for the Whisper adapter model.
# The real submodule is only imported when one of its attributes is accessed.

from typing import TYPE_CHECKING

from ....utils import _LazyModule


# Maps submodule name -> public symbols it provides; consumed by _LazyModule
# to resolve attribute access on this package lazily.
_import_structure = {
    "adapter_model": [
        "WhisperAdapterModel",
        "WhisperModelWithHeads",
    ],
}


if TYPE_CHECKING:
    # Static type checkers see the concrete imports directly.
    from .adapter_model import WhisperAdapterModel, WhisperModelWithHeads

else:
    import sys

    # At runtime, replace this module object with a lazy proxy that defers
    # importing `adapter_model` until first attribute access.
    sys.modules[__name__] = _LazyModule(
        __name__,
        globals()["__file__"],
        _import_structure,
    )
Oops, something went wrong.