improving quantization and linen modules
erfanzar committed Apr 10, 2024
1 parent 7ec77d7 commit 0897204
Showing 6 changed files with 1,851 additions and 887 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -7,7 +7,7 @@ requires-python = ">=3.8"
 
 readme = "README.md"
 
-version = "0.0.42"
+version = "0.0.45"
 
 dependencies = [
     "jax>=0.4.20",
2 changes: 1 addition & 1 deletion src/fjformer/__init__.py
@@ -68,7 +68,7 @@
 from . import optimizers as optimizers
 from . import linen as linen
 
-__version__ = "0.0.42"
+__version__ = "0.0.45"
 
 __all__ = (
     # Loss and extra function
31 changes: 28 additions & 3 deletions src/fjformer/linen/__init__.py
@@ -1,4 +1,4 @@
-from .linear import (
+from .linen import (
     Linear as Linear,
     LinearBitKernel as LinearBitKernel,
     quantize as quantize,
@@ -7,7 +7,20 @@
     de_quantize_params as de_quantize_params,
     Conv as Conv,
     Embed as Embed,
-    promote_dtype as promote_dtype
+    promote_dtype as promote_dtype,
+    ConvTranspose as ConvTranspose,
+    GroupNorm as GroupNorm,
+    BatchNorm as BatchNorm,
+    LayerNorm as LayerNorm,
+    RMSNorm as RMSNorm,
+    WeightNorm as WeightNorm,
+    InstanceNorm as InstanceNorm,
+    SpectralNorm as SpectralNorm,
+    Module as Module,
+    ConvLocal as ConvLocal,
+    compact as compact,
+    initializers as initializers,
+
 )
 
 __all__ = (
@@ -19,5 +32,17 @@
     "de_quantize_params",
     "Conv",
     "Embed",
-    "promote_dtype"
+    "promote_dtype",
+    "ConvTranspose",
+    "GroupNorm",
+    "BatchNorm",
+    "LayerNorm",
+    "RMSNorm",
+    "WeightNorm",
+    "InstanceNorm",
+    "SpectralNorm",
+    "Module",
+    "ConvLocal",
+    "compact",
+    "initializers"
 )
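
With the widened re-export surface, downstream code can import both the quantization-aware layers and the standard normalization/module utilities from a single fjformer.linen namespace. Below is a minimal usage sketch, assuming these re-exports mirror the flax.linen API (Linear taking a features field, RMSNorm with default arguments, Module/compact behaving as in Flax); the TinyBlock module, its field names, and the shapes are illustrative only and not part of this commit.

import jax
import jax.numpy as jnp
from fjformer import linen as nn  # Linear, RMSNorm, Module, compact re-exported above

class TinyBlock(nn.Module):
    # Hypothetical module built only from names this __init__ re-exports.
    features: int

    @nn.compact
    def __call__(self, x):
        x = nn.Linear(features=self.features)(x)  # quantization-aware Linear layer
        return nn.RMSNorm()(x)                    # newly re-exported normalization layer

# Initialized like any Flax module; jax is already a declared dependency.
params = TinyBlock(features=8).init(jax.random.PRNGKey(0), jnp.ones((1, 4)))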