Remove theano backend
CyberZHG committed Jun 2, 2020
1 parent 1f4b22b commit 46ae8c6
Showing 11 changed files with 5 additions and 43 deletions.
4 changes: 0 additions & 4 deletions README.md
@@ -229,7 +229,3 @@ with codecs.open('xxx.txt', 'r', 'utf8') as reader:
### Use `tensorflow.python.keras`

Add `TF_KERAS=1` to environment variables to use `tensorflow.python.keras`.
-
-### Use `theano` Backend
-
-Add `KERAS_BACKEND=theano` to environment variables to enable `theano` backend.
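The `TF_KERAS=1` switch that remains in the README has to be in the environment before the package is imported; a minimal sketch of setting it from Python (exporting it in the shell works just as well):

```python
import os

# Must be set before keras_bert (and Keras) are imported.
os.environ['TF_KERAS'] = '1'

from keras_bert import get_model  # the backend choice is made at import time
```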
6 changes: 1 addition & 5 deletions README.zh-CN.md
@@ -285,8 +285,4 @@ model = load_trained_model_from_checkpoint(

### Use `tensorflow.python.keras`

-Add `TF_KERAS=1` to environment variables to enable `tensorflow.python.keras`. Add `TF_EAGER=1` to enable eager execution. Until Keras itself adds support, `TF_KERAS=1` is required in order to use tensorflow 2.0.
-
-### Use the `theano` Backend
-
-Add `KERAS_BACKEND=theano` to environment variables to enable the `theano` backend.
+Add `TF_KERAS=1` to environment variables to enable `tensorflow.python.keras`.
2 changes: 1 addition & 1 deletion keras_bert/__init__.py
@@ -5,4 +5,4 @@
from .util import *
from .datasets import *

-__version__ = '0.82.0'
+__version__ = '0.83.0'
1 change: 0 additions & 1 deletion keras_bert/activations/__init__.py

This file was deleted.

8 changes: 0 additions & 8 deletions keras_bert/activations/gelu_fallback.py

This file was deleted.

8 changes: 0 additions & 8 deletions keras_bert/activations/gelu_selection.py

This file was deleted.

7 changes: 0 additions & 7 deletions keras_bert/activations/gelu_tensorflow.py

This file was deleted.

2 changes: 1 addition & 1 deletion keras_bert/backend.py
@@ -2,7 +2,7 @@
from distutils.util import strtobool

__all__ = [
-    'keras', 'utils', 'activations', 'applications', 'backend', 'datasets', 'engine',
+    'keras', 'utils', 'activations', 'applications', 'backend', 'datasets',
    'layers', 'preprocessing', 'wrappers', 'callbacks', 'constraints', 'initializers',
    'metrics', 'models', 'losses', 'optimizers', 'regularizers', 'TF_KERAS',
]
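`TF_KERAS` is exported from this module and read from the environment (hence the `strtobool` import above). The parsing and import logic sits outside the shown hunk; a minimal sketch of how such a switch is typically wired up (an assumption, not the file's exact contents):

```python
import os
from distutils.util import strtobool

# '1', 'true', 'yes' (etc.) in the environment variable all count as enabled.
TF_KERAS = strtobool(os.environ.get('TF_KERAS', '0'))

if TF_KERAS:
    from tensorflow.python import keras  # Keras bundled with TensorFlow
else:
    import keras                         # standalone Keras
```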
6 changes: 1 addition & 5 deletions keras_bert/bert.py
@@ -1,10 +1,9 @@
import numpy as np
from keras_pos_embd import PositionEmbedding
from keras_layer_normalization import LayerNormalization
-from keras_transformer import get_encoders
+from keras_transformer import get_encoders, gelu
from keras_transformer import get_custom_objects as get_encoder_custom_objects
from .backend import keras
-from .activations import gelu
from .layers import get_inputs, get_embedding, TokenEmbedding, EmbeddingSimilarity, Masked, Extract, TaskEmbedding
from .optimizers import AdamWarmup
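With the local `keras_bert.activations` package removed in this commit, `gelu` is now imported from `keras_transformer` instead. For reference, a standalone sketch of the widely used tanh approximation of GELU (an illustration, not necessarily the exact implementation `keras_transformer` ships):

```python
import math

import numpy as np


def gelu(x):
    """Tanh approximation of the Gaussian Error Linear Unit."""
    return 0.5 * x * (1.0 + np.tanh(math.sqrt(2.0 / math.pi) * (x + 0.044715 * np.power(x, 3))))


print(gelu(np.array([-1.0, 0.0, 1.0])))  # approximately [-0.159, 0.0, 0.841]
```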

@@ -200,9 +199,6 @@ def get_custom_objects():
    custom_objects['TaskEmbedding'] = TaskEmbedding
    custom_objects['Masked'] = Masked
    custom_objects['Extract'] = Extract
-    custom_objects['gelu'] = gelu
-    custom_objects['gelu_tensorflow'] = gelu
-    custom_objects['gelu_fallback'] = gelu
    custom_objects['AdamWarmup'] = AdamWarmup
    return custom_objects

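The dictionary returned by `get_custom_objects` above is what callers pass to Keras when deserializing a saved model; the `gelu*` entries are dropped here along with the local activations package. A minimal usage sketch (the file name `bert_model.h5` is hypothetical):

```python
from keras_bert.backend import keras
from keras_bert.bert import get_custom_objects

# Custom layers and AdamWarmup must be registered so load_model can rebuild them.
model = keras.models.load_model('bert_model.h5', custom_objects=get_custom_objects())
```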
2 changes: 0 additions & 2 deletions keras_bert/layers/task_embed.py
@@ -68,8 +68,6 @@ def call(self, inputs, **kwargs):
        task_embed = K.gather(self.embeddings, tasks)
        if self.mask_zero:
            task_embed = task_embed * K.expand_dims(K.cast(K.not_equal(tasks, 0), K.floatx()), axis=-1)
-        if K.backend() == 'theano':
-            task_embed = K.tile(task_embed, (1, K.shape(inputs)[1], 1))
        return inputs + task_embed

    def get_config(self):
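In the `call` above, the `mask_zero` branch zeroes the task embedding wherever the task id is 0 before it is broadcast-added to the inputs; the theano-specific tiling is removed along with theano support. A hypothetical NumPy illustration of the masking step (not code from the repository):

```python
import numpy as np

tasks = np.array([[2], [0]])              # task ids, shape (batch, 1); 0 means "no task"
task_embed = np.random.rand(2, 1, 4)      # gathered embeddings, shape (batch, 1, units)

# Equivalent of K.expand_dims(K.cast(K.not_equal(tasks, 0), K.floatx()), axis=-1)
mask = np.expand_dims((tasks != 0).astype(np.float32), axis=-1)  # shape (batch, 1, 1)

task_embed = task_embed * mask            # the row with task id 0 becomes all zeros
```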
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,3 +1,3 @@
numpy
Keras
-keras-transformer>=0.34.0
+keras-transformer>=0.35.0
