From 46ae8c60df5fb1f3395cb78ef83a747462abc1c5 Mon Sep 17 00:00:00 2001
From: CyberZHG <853842+CyberZHG@users.noreply.github.com>
Date: Tue, 2 Jun 2020 23:36:49 +0800
Subject: [PATCH] Remove theano backend

---
 README.md                                 | 4 ----
 README.zh-CN.md                           | 6 +-----
 keras_bert/__init__.py                    | 2 +-
 keras_bert/activations/__init__.py        | 1 -
 keras_bert/activations/gelu_fallback.py   | 8 --------
 keras_bert/activations/gelu_selection.py  | 8 --------
 keras_bert/activations/gelu_tensorflow.py | 7 -------
 keras_bert/backend.py                     | 2 +-
 keras_bert/bert.py                        | 6 +-----
 keras_bert/layers/task_embed.py           | 2 --
 requirements.txt                          | 2 +-
 11 files changed, 5 insertions(+), 43 deletions(-)
 delete mode 100644 keras_bert/activations/__init__.py
 delete mode 100644 keras_bert/activations/gelu_fallback.py
 delete mode 100644 keras_bert/activations/gelu_selection.py
 delete mode 100644 keras_bert/activations/gelu_tensorflow.py

diff --git a/README.md b/README.md
index ffce654..46ace26 100644
--- a/README.md
+++ b/README.md
@@ -229,7 +229,3 @@ with codecs.open('xxx.txt', 'r', 'utf8') as reader:
 ### Use `tensorflow.python.keras`
 
 Add `TF_KERAS=1` to environment variables to use `tensorflow.python.keras`.
-
-### Use `theano` Backend
-
-Add `KERAS_BACKEND=theano` to environment variables to enable `theano` backend.
diff --git a/README.zh-CN.md b/README.zh-CN.md
index 0b44cb6..9a2f449 100644
--- a/README.zh-CN.md
+++ b/README.zh-CN.md
@@ -285,8 +285,4 @@ model = load_trained_model_from_checkpoint(
 
 ### 使用`tensorflow.python.keras`
 
-在环境变量里加入`TF_KERAS=1`可以启用`tensorflow.python.keras`。加入`TF_EAGER=1`可以启用eager execution。在Keras本身没去支持之前，如果想使用tensorflow 2.0则必须使用`TF_KERAS=1`。
-
-### 使用`theano`后端
-
-在环境变量中加入`KERAS_BACKEND=theano`来启用`theano`后端。
+在环境变量里加入`TF_KERAS=1`可以启用`tensorflow.python.keras`。
diff --git a/keras_bert/__init__.py b/keras_bert/__init__.py
index 2f82394..61ff910 100644
--- a/keras_bert/__init__.py
+++ b/keras_bert/__init__.py
@@ -5,4 +5,4 @@
 from .util import *
 from .datasets import *
 
-__version__ = '0.82.0'
+__version__ = '0.83.0'
diff --git a/keras_bert/activations/__init__.py b/keras_bert/activations/__init__.py
deleted file mode 100644
index 4f77d8a..0000000
--- a/keras_bert/activations/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .gelu_selection import gelu
diff --git a/keras_bert/activations/gelu_fallback.py b/keras_bert/activations/gelu_fallback.py
deleted file mode 100644
index 67a52a8..0000000
--- a/keras_bert/activations/gelu_fallback.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import math
-from keras_bert.backend import backend as K
-
-__all__ = ['gelu']
-
-
-def gelu(x):
-    return 0.5 * x * (1.0 + K.tanh(math.sqrt(2.0 / math.pi) * (x + 0.044715 * x * x * x)))
diff --git a/keras_bert/activations/gelu_selection.py b/keras_bert/activations/gelu_selection.py
deleted file mode 100644
index a687e69..0000000
--- a/keras_bert/activations/gelu_selection.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from keras_bert.backend import backend as K
-
-__all__ = ['gelu']
-
-if K.backend() == 'tensorflow':
-    from .gelu_tensorflow import gelu
-else:
-    from .gelu_fallback import gelu
diff --git a/keras_bert/activations/gelu_tensorflow.py b/keras_bert/activations/gelu_tensorflow.py
deleted file mode 100644
index 6227bc1..0000000
--- a/keras_bert/activations/gelu_tensorflow.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from tensorflow.python.ops.math_ops import erf, sqrt
-
-__all__ = ['gelu']
-
-
-def gelu(x):
-    return 0.5 * x * (1.0 + erf(x / sqrt(2.0)))
diff --git a/keras_bert/backend.py b/keras_bert/backend.py
index b2c3633..90dd5a9 100644
--- a/keras_bert/backend.py
+++ b/keras_bert/backend.py
@@ -2,7 +2,7 @@
 from distutils.util import strtobool
 
 __all__ = [
-    'keras', 'utils', 'activations', 'applications', 'backend', 'datasets', 'engine',
+    'keras', 'utils', 'activations', 'applications', 'backend', 'datasets',
     'layers', 'preprocessing', 'wrappers', 'callbacks', 'constraints', 'initializers',
     'metrics', 'models', 'losses', 'optimizers', 'regularizers', 'TF_KERAS',
 ]
diff --git a/keras_bert/bert.py b/keras_bert/bert.py
index 477527a..efa2786 100644
--- a/keras_bert/bert.py
+++ b/keras_bert/bert.py
@@ -1,10 +1,9 @@
 import numpy as np
 from keras_pos_embd import PositionEmbedding
 from keras_layer_normalization import LayerNormalization
-from keras_transformer import get_encoders
+from keras_transformer import get_encoders, gelu
 from keras_transformer import get_custom_objects as get_encoder_custom_objects
 from .backend import keras
-from .activations import gelu
 from .layers import get_inputs, get_embedding, TokenEmbedding, EmbeddingSimilarity, Masked, Extract, TaskEmbedding
 from .optimizers import AdamWarmup
 
@@ -200,9 +199,6 @@ def get_custom_objects():
     custom_objects['TaskEmbedding'] = TaskEmbedding
     custom_objects['Masked'] = Masked
     custom_objects['Extract'] = Extract
-    custom_objects['gelu'] = gelu
-    custom_objects['gelu_tensorflow'] = gelu
-    custom_objects['gelu_fallback'] = gelu
     custom_objects['AdamWarmup'] = AdamWarmup
     return custom_objects
 
diff --git a/keras_bert/layers/task_embed.py b/keras_bert/layers/task_embed.py
index 18a9bca..b3a9c39 100644
--- a/keras_bert/layers/task_embed.py
+++ b/keras_bert/layers/task_embed.py
@@ -68,8 +68,6 @@ def call(self, inputs, **kwargs):
         task_embed = K.gather(self.embeddings, tasks)
         if self.mask_zero:
             task_embed = task_embed * K.expand_dims(K.cast(K.not_equal(tasks, 0), K.floatx()), axis=-1)
-        if K.backend() == 'theano':
-            task_embed = K.tile(task_embed, (1, K.shape(inputs)[1], 1))
         return inputs + task_embed
 
     def get_config(self):
diff --git a/requirements.txt b/requirements.txt
index 7b38e7c..247345f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,3 @@
 numpy
 Keras
-keras-transformer>=0.34.0
+keras-transformer>=0.35.0