From 6ba333a04e9e6c219203d1ec437a3c740488a1cc Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Sun, 10 Feb 2019 21:36:16 +0530 Subject: [PATCH 01/59] Create pseu.py --- keras_contrib/layers/advanced_activations/pseu.py | 1 + 1 file changed, 1 insertion(+) create mode 100644 keras_contrib/layers/advanced_activations/pseu.py diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -0,0 +1 @@ + From 82ae700e831fe5b630685eab6d8a0ec053054592 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Sun, 10 Feb 2019 21:37:10 +0530 Subject: [PATCH 02/59] Add PSEU activation layer --- .../layers/advanced_activations/pseu.py | 59 +++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index 8b1378917..d30469f0c 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -1 +1,60 @@ +class PSEU(Layer): + """Parametric Soft Exponential Unit with trainable alpha + See: https://arxiv.org/pdf/1602.01321.pdf by Godfrey and Gashler + Reference: https://github.com/keras-team/keras/issues/3842 (@hobson) + # Input shape + Arbitrary. Use the keyword argument `input_shape` + (tuple of integers, does not include the samples axis) + when using this layer as the first layer in a model. + # Output shape + Same shape as the input. + # Arguments + alpha_init: float. Initial value of the alpha weights. + weights: initial alpha weights, as a list of 1 numpy array. + if both weights & alpha_init are provided, weights + overrides alpha_init + # Example + model = Sequential() + model.add(Dense(10)) + model.add(PSEU()) + Soft Exponential f(α, x): + α == 0: x + α > 0: (exp(αx)-1) / α + α + α < 0: -ln(1-α(x + α)) / α + """ + def __init__(self, alpha_init=0.1, + weights=None, **kwargs): + self.supports_masking = True + self.alpha_init = K.cast_to_floatx(alpha_init) + self.initial_weights = weights + super(PSEU, self).__init__(**kwargs) + def build(self, input_shape): + input_shape = input_shape[1:] + self.alphas = K.variable(self.alpha_init * np.ones(input_shape), + name='{}_alphas'.format(self.name)) + self.trainable_weights = [self.alphas] + + if self.initial_weights is not None: + self.set_weights(self.initial_weights) + del self.initial_weights + + self.build = True + + def call_alpha_gt0(self, x, alpha): + return alpha + (K.exp(alpha * x) - 1.) 
/ alpha + + def call_alpha_lt0(self, x, alpha): + return - K.log(1 - alpha * (x + alpha)) / alpha + + def call(self, x, mask=None): + return K.switch(self.alphas > 0, self.call_alpha_gt0, + K.switch(self.alphas < 0, self.call_alpha_lt0, x)) + + def compute_output_shape(self, input_shape): + return input_shape + + def get_config(self): + config = {'alpha_init': float(self.alpha_init)} + base_config = super(ParametricSoftExp, self).get_config() + return dict(list(base_config.items()) + list(config.items())) From 3012cde2da6ef35a064506b04ebd3714e74c0f73 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Sun, 10 Feb 2019 21:37:58 +0530 Subject: [PATCH 03/59] Create test_pseu.py --- tests/keras_contrib/layers/advanced_activations/test_pseu.py | 1 + 1 file changed, 1 insertion(+) create mode 100644 tests/keras_contrib/layers/advanced_activations/test_pseu.py diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py @@ -0,0 +1 @@ + From d6cf865db69da7b9fab9de4c5e5fd3b63f0f2af0 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Sun, 10 Feb 2019 21:39:10 +0530 Subject: [PATCH 04/59] Add PSEU test --- .../layers/advanced_activations/test_pseu.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py index 8b1378917..61cf50725 100644 --- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py +++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py @@ -1 +1,10 @@ +import pytest +from keras_contrib.utils.test_utils import layer_test +from keras_contrib.layers import advanced_activations + +@pytest.mark.parametrize('kwargs', [{}, {'shared_axes': 1}]) +def test_pseu(kwargs): + layer_test(advanced_activations.PSEU, + kwargs={'alpha_init': 0.1}, + input_shape=(2, 3, 4)) From c9ac0e4abc86b9698f3a9a490977a7f205d4f8d8 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Sun, 10 Feb 2019 22:29:14 +0530 Subject: [PATCH 05/59] Update PSEU layer --- .../layers/advanced_activations/pseu.py | 87 +++++++++++++------ 1 file changed, 62 insertions(+), 25 deletions(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index d30469f0c..1654871c1 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -1,3 +1,10 @@ +from keras import initializers +from keras import regularizers +from keras import constraints +from keras.layers import Layer +from keras import backend as K + + class PSEU(Layer): """Parametric Soft Exponential Unit with trainable alpha See: https://arxiv.org/pdf/1602.01321.pdf by Godfrey and Gashler @@ -9,52 +16,82 @@ class PSEU(Layer): # Output shape Same shape as the input. # Arguments - alpha_init: float. Initial value of the alpha weights. - weights: initial alpha weights, as a list of 1 numpy array. - if both weights & alpha_init are provided, weights - overrides alpha_init + initializer: Initializer for alpha weights. + alpha_init: Initial value of the alpha weights (float) + This value overrides any specified initializer + by default, but, one can use their initializer + of choice by specifying alpha_init=None. + regularizer: Regularizer for alpha weights. + constraint: Constraint for alpha weights. 
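For reference, the soft exponential from the Godfrey and Gashler paper can be checked against a plain-NumPy implementation. This is an illustrative sketch only; soft_exp is a hypothetical helper, not code from any patch in this series:

    import numpy as np

    def soft_exp(alpha, x):
        # Soft exponential f(alpha, x):
        #   alpha == 0: identity
        #   alpha  > 0: (exp(alpha * x) - 1) / alpha + alpha
        #   alpha  < 0: -log(1 - alpha * (x + alpha)) / alpha
        if alpha == 0:
            return x
        if alpha > 0:
            return (np.exp(alpha * x) - 1.0) / alpha + alpha
        return -np.log(1.0 - alpha * (x + alpha)) / alpha

    # The function approaches the identity as alpha -> 0 from either side:
    x = np.linspace(-1.0, 1.0, 5)
    assert np.allclose(soft_exp(1e-7, x), x, atol=1e-5)
    assert np.allclose(soft_exp(-1e-7, x), x, atol=1e-5)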
+ trainable: Whether the alpha weights are trainable or not + # Example model = Sequential() model.add(Dense(10)) model.add(PSEU()) + + Note : Specify alpha_init=None to use other intializers + Soft Exponential f(α, x): α == 0: x α > 0: (exp(αx)-1) / α + α α < 0: -ln(1-α(x + α)) / α """ - def __init__(self, alpha_init=0.1, - weights=None, **kwargs): + def __init__(self, + alpha_init=0.1, + initializer='glorot_uniform', + regularizer=None, + constraint=None, + trainable=True, + **kwargs): + self.supports_masking = True - self.alpha_init = K.cast_to_floatx(alpha_init) - self.initial_weights = weights + self.alpha_init = alpha_init + self.initializer = initializers.get(initializer) + self.regularizer = regularizers.get(regularizer) + self.constraint = constraints.get(constraint) + self.trainable = trainable + super(PSEU, self).__init__(**kwargs) def build(self, input_shape): - input_shape = input_shape[1:] - self.alphas = K.variable(self.alpha_init * np.ones(input_shape), - name='{}_alphas'.format(self.name)) - self.trainable_weights = [self.alphas] - - if self.initial_weights is not None: - self.set_weights(self.initial_weights) - del self.initial_weights + new_input_shape = input_shape[1:] + self.alphas = self.add_weight(shape=new_input_shape, + name='{}_alphas'.format(self.name), + initializer=self.initializer, + regularizer=self.regularizer, + constraint=self.constraint) - self.build = True + if self.trainable: + self.trainable_weights = [self.alphas] - def call_alpha_gt0(self, x, alpha): - return alpha + (K.exp(alpha * x) - 1.) / alpha + if self.alpha_init is not None: + self.set_weights([self.alpha_init*np.ones(new_input_shape)]) - def call_alpha_lt0(self, x, alpha): - return - K.log(1 - alpha * (x + alpha)) / alpha + self.build = True def call(self, x, mask=None): - return K.switch(self.alphas > 0, self.call_alpha_gt0, - K.switch(self.alphas < 0, self.call_alpha_lt0, x)) + if K.eval(self.alphas) < 0: + return - K.log(1 - self.alphas * (x + self.alphas)) / self.alphas + elif K.eval(self.alphas) > 0: + return self.alphas + (K.exp(self.alphas * x) - 1.) 
/ self.alphas + else: + return x def compute_output_shape(self, input_shape): return input_shape def get_config(self): - config = {'alpha_init': float(self.alpha_init)} - base_config = super(ParametricSoftExp, self).get_config() + if self.alpha_init is None: + config = {'alpha_initializer': initializers.serialize(self.initializer), + 'regularizer': regularizers.serialize(self.regularizer), + 'constraint': constraints.serialize(self.constraint), + 'trainable': self.trainable} + else: + config = {'alpha_initial': float(self.alpha_init), + 'regularizer': regularizers.serialize(self.regularizer), + 'constraint': constraints.serialize(self.constraint), + 'trainable': self.trainable} + + base_config = super(PSEU, self).get_config() return dict(list(base_config.items()) + list(config.items())) From abd58ae61b51e529d21e07ae4db1d689bcc5a700 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Sun, 10 Feb 2019 22:32:36 +0530 Subject: [PATCH 06/59] Update test --- .../keras_contrib/layers/advanced_activations/test_pseu.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py index 61cf50725..1c10b6824 100644 --- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py +++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py @@ -3,8 +3,10 @@ from keras_contrib.layers import advanced_activations -@pytest.mark.parametrize('kwargs', [{}, {'shared_axes': 1}]) +@pytest.mark.parametrize('trainable', [True, False]) +@pytest.mark.parametrize('alpha_init', [-0.1, 0, 0.1]) def test_pseu(kwargs): layer_test(advanced_activations.PSEU, - kwargs={'alpha_init': 0.1}, + kwargs={'alpha_init': alpha_init, + 'trainable': trainable}, input_shape=(2, 3, 4)) From 1891e17ae3b210f727aee17b4f5871aeb2a8b99c Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Sun, 10 Feb 2019 22:35:53 +0530 Subject: [PATCH 07/59] Add import --- keras_contrib/layers/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/keras_contrib/layers/__init__.py b/keras_contrib/layers/__init__.py index 5f4e8383e..cada2d8f2 100644 --- a/keras_contrib/layers/__init__.py +++ b/keras_contrib/layers/__init__.py @@ -4,6 +4,7 @@ from .advanced_activations.srelu import SReLU from .advanced_activations.swish import Swish from .advanced_activations.sinerelu import SineReLU +from .advanced_activations.pseu import PSEU from .convolutional import CosineConv2D from .convolutional import SubPixelUpscaling From 542db44a73e5b3f3168f9ad6aafa9a0522b3b905 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Sun, 10 Feb 2019 22:56:37 +0530 Subject: [PATCH 08/59] Fix silly errors in test --- .../keras_contrib/layers/advanced_activations/test_pseu.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py index 1c10b6824..ecd0ea9cb 100644 --- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py +++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py @@ -5,8 +5,12 @@ @pytest.mark.parametrize('trainable', [True, False]) @pytest.mark.parametrize('alpha_init', [-0.1, 0, 0.1]) -def test_pseu(kwargs): +def test_pseu(trainable, + alpha_init): layer_test(advanced_activations.PSEU, kwargs={'alpha_init': alpha_init, 'trainable': trainable}, input_shape=(2, 3, 4)) + +if __name__ == '__main__': + pytest.main([__file__]) From 6888fd59d8cbd65a2c7187dcf7a686db6e378653 
Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Sun, 10 Feb 2019 23:01:09 +0530 Subject: [PATCH 09/59] Fix pep8 violation --- keras_contrib/layers/advanced_activations/pseu.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index 1654871c1..2757d2a21 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -66,7 +66,7 @@ def build(self, input_shape): self.trainable_weights = [self.alphas] if self.alpha_init is not None: - self.set_weights([self.alpha_init*np.ones(new_input_shape)]) + self.set_weights([self.alpha_init * np.ones(new_input_shape)]) self.build = True From 8497f72e6cb9a2f80947fea733859025db74968e Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Sun, 10 Feb 2019 23:19:00 +0530 Subject: [PATCH 10/59] Fix import error --- tests/keras_contrib/layers/advanced_activations/test_pseu.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py index ecd0ea9cb..11c5e9583 100644 --- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py +++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py @@ -1,13 +1,13 @@ import pytest from keras_contrib.utils.test_utils import layer_test -from keras_contrib.layers import advanced_activations +from keras_contrib.layers import PSEU @pytest.mark.parametrize('trainable', [True, False]) @pytest.mark.parametrize('alpha_init', [-0.1, 0, 0.1]) def test_pseu(trainable, alpha_init): - layer_test(advanced_activations.PSEU, + layer_test(PSEU, kwargs={'alpha_init': alpha_init, 'trainable': trainable}, input_shape=(2, 3, 4)) From 94c5ac1cbb4b59bf5e2c4a463121abbc939be62e Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Mon, 11 Feb 2019 00:02:32 +0530 Subject: [PATCH 11/59] Fix pep8 --- tests/keras_contrib/layers/advanced_activations/test_pseu.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py index 11c5e9583..2f34abc81 100644 --- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py +++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py @@ -12,5 +12,6 @@ def test_pseu(trainable, 'trainable': trainable}, input_shape=(2, 3, 4)) + if __name__ == '__main__': pytest.main([__file__]) From 1015e10ecf78856e0800c75e71e4eb087e1307a5 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Mon, 11 Feb 2019 00:17:37 +0530 Subject: [PATCH 12/59] import numpy --- keras_contrib/layers/advanced_activations/pseu.py | 1 + 1 file changed, 1 insertion(+) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index 2757d2a21..c31627cde 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -3,6 +3,7 @@ from keras import constraints from keras.layers import Layer from keras import backend as K +import numpy as np class PSEU(Layer): From dc3588a888174712b1cd5a41afbe47dcdaf378ea Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Mon, 11 Feb 2019 00:40:32 +0530 Subject: [PATCH 13/59] utf-8 --- keras_contrib/layers/advanced_activations/pseu.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py 
b/keras_contrib/layers/advanced_activations/pseu.py index c31627cde..f98526ce8 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from keras import initializers from keras import regularizers from keras import constraints @@ -72,9 +73,9 @@ def build(self, input_shape): self.build = True def call(self, x, mask=None): - if K.eval(self.alphas) < 0: + if K.eval(self.alphas).all() < 0: return - K.log(1 - self.alphas * (x + self.alphas)) / self.alphas - elif K.eval(self.alphas) > 0: + elif K.eval(self.alphas).all() > 0: return self.alphas + (K.exp(self.alphas * x) - 1.) / self.alphas else: return x From f214d537b088a199f178dae6433ddffae0e18c0d Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Mon, 11 Feb 2019 00:41:33 +0530 Subject: [PATCH 14/59] Fix test --- tests/keras_contrib/layers/advanced_activations/test_pseu.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py index 2f34abc81..81189524c 100644 --- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py +++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import pytest from keras_contrib.utils.test_utils import layer_test from keras_contrib.layers import PSEU From 60d1d1e7705ff2263f9206bf94f00032e7b79929 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Mon, 11 Feb 2019 00:50:24 +0530 Subject: [PATCH 15/59] Update test_pseu.py --- tests/keras_contrib/layers/advanced_activations/test_pseu.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py index 81189524c..9d193d0e7 100644 --- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py +++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py @@ -5,12 +5,10 @@ @pytest.mark.parametrize('trainable', [True, False]) -@pytest.mark.parametrize('alpha_init', [-0.1, 0, 0.1]) def test_pseu(trainable, alpha_init): layer_test(PSEU, - kwargs={'alpha_init': alpha_init, - 'trainable': trainable}, + kwargs={'trainable': trainable}, input_shape=(2, 3, 4)) From e1cefb0a77d4331ce661d5ada0f5b9d889cb819e Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Mon, 11 Feb 2019 00:51:00 +0530 Subject: [PATCH 16/59] Update test_pseu.py --- tests/keras_contrib/layers/advanced_activations/test_pseu.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py index 9d193d0e7..8cf3c39f6 100644 --- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py +++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py @@ -5,8 +5,7 @@ @pytest.mark.parametrize('trainable', [True, False]) -def test_pseu(trainable, - alpha_init): +def test_pseu(trainable): layer_test(PSEU, kwargs={'trainable': trainable}, input_shape=(2, 3, 4)) From 326bfa3cf08afe9e9ab145bd77733e2eb80dee09 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Mon, 11 Feb 2019 00:55:21 +0530 Subject: [PATCH 17/59] Fix silly error --- keras_contrib/layers/advanced_activations/pseu.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index 
f98526ce8..da889a38a 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -47,6 +47,7 @@ def __init__(self, trainable=True, **kwargs): + super(PSEU, self).__init__(**kwargs) self.supports_masking = True self.alpha_init = alpha_init self.initializer = initializers.get(initializer) @@ -54,8 +55,6 @@ def __init__(self, self.constraint = constraints.get(constraint) self.trainable = trainable - super(PSEU, self).__init__(**kwargs) - def build(self, input_shape): new_input_shape = input_shape[1:] self.alphas = self.add_weight(shape=new_input_shape, From d28a5441687383e154876d917983a6839c0ebabb Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Mon, 11 Feb 2019 00:56:42 +0530 Subject: [PATCH 18/59] Update test_pseu.py --- .../keras_contrib/layers/advanced_activations/test_pseu.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py index 8cf3c39f6..60be2a42e 100644 --- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py +++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py @@ -5,9 +5,12 @@ @pytest.mark.parametrize('trainable', [True, False]) -def test_pseu(trainable): +@pytest.mark.parametrize('alpha_init', [-0.1, 0., 0.1]) +def test_pseu(trainable, + alpha_init): layer_test(PSEU, - kwargs={'trainable': trainable}, + kwargs={'trainable': trainable, + 'alpha_init': alpha_init}, input_shape=(2, 3, 4)) From df9e9643932d45c1bea664b24d586e0b964f3a7b Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Mon, 11 Feb 2019 00:58:41 +0530 Subject: [PATCH 19/59] Update pseu.py --- keras_contrib/layers/advanced_activations/pseu.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index da889a38a..cfe01d490 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -72,9 +72,9 @@ def build(self, input_shape): self.build = True def call(self, x, mask=None): - if K.eval(self.alphas).all() < 0: + if self.alpha_init is not None and self.alpha_init < 0: return - K.log(1 - self.alphas * (x + self.alphas)) / self.alphas - elif K.eval(self.alphas).all() > 0: + elif self.alpha_init is not None and self.alpha_init > 0: return self.alphas + (K.exp(self.alphas * x) - 1.) 
/ self.alphas else: return x From 51ef0c8ae61e49e73287cff7d70ea03d24f07a07 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Mon, 11 Feb 2019 01:10:27 +0530 Subject: [PATCH 20/59] Update pseu.py --- keras_contrib/layers/advanced_activations/pseu.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index cfe01d490..d0a0c954d 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -84,12 +84,12 @@ def compute_output_shape(self, input_shape): def get_config(self): if self.alpha_init is None: - config = {'alpha_initializer': initializers.serialize(self.initializer), + config = {'alpha_init': initializers.serialize(self.initializer), 'regularizer': regularizers.serialize(self.regularizer), 'constraint': constraints.serialize(self.constraint), 'trainable': self.trainable} else: - config = {'alpha_initial': float(self.alpha_init), + config = {'alpha_init': float(self.alpha_init), 'regularizer': regularizers.serialize(self.regularizer), 'constraint': constraints.serialize(self.constraint), 'trainable': self.trainable} From a3bccfbe1b415db9dc91e06f7b9abf333e2d9b78 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Mon, 11 Feb 2019 08:54:17 +0530 Subject: [PATCH 21/59] Remove unnecessary initializer --- .../layers/advanced_activations/pseu.py | 26 +++++-------------- 1 file changed, 7 insertions(+), 19 deletions(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index d0a0c954d..7f9dbdcab 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -from keras import initializers from keras import regularizers from keras import constraints from keras.layers import Layer @@ -18,11 +17,7 @@ class PSEU(Layer): # Output shape Same shape as the input. # Arguments - initializer: Initializer for alpha weights. alpha_init: Initial value of the alpha weights (float) - This value overrides any specified initializer - by default, but, one can use their initializer - of choice by specifying alpha_init=None. regularizer: Regularizer for alpha weights. constraint: Constraint for alpha weights. 
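Because every alpha in the tensor starts from the same alpha_init, a single Python branch over the sign is enough for the layer's call(). A fully elementwise variant would instead have to select per element with backend ops; the sketch below is one way to do that using only documented keras.backend calls, and is an editor's illustration, not what any patch in this series does:

    from keras import backend as K

    def soft_exp_elementwise(alphas, x):
        alphas = alphas + K.zeros_like(x)        # broadcast alphas to x's shape
        safe = K.switch(K.equal(alphas, 0.0),    # dodge division by zero
                        K.ones_like(alphas), alphas)
        pos = safe + (K.exp(safe * x) - 1.0) / safe
        neg = -K.log(K.maximum(1.0 - safe * (x + safe), K.epsilon())) / safe
        out = K.switch(K.greater(alphas, 0.0), pos, neg)
        return K.switch(K.equal(alphas, 0.0), x, out)

Both branches are always computed, so the negative branch clamps the log's argument with K.epsilon() to stay finite even where it is not selected.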
trainable: Whether the alpha weights are trainable or not @@ -41,7 +36,6 @@ class PSEU(Layer): """ def __init__(self, alpha_init=0.1, - initializer='glorot_uniform', regularizer=None, constraint=None, trainable=True, @@ -50,22 +44,22 @@ def __init__(self, super(PSEU, self).__init__(**kwargs) self.supports_masking = True self.alpha_init = alpha_init - self.initializer = initializers.get(initializer) + self.initializer = initializers.get('glorot_uniform') + # Add random initializer self.regularizer = regularizers.get(regularizer) self.constraint = constraints.get(constraint) self.trainable = trainable def build(self, input_shape): new_input_shape = input_shape[1:] + self.alphas = self.add_weight(shape=new_input_shape, name='{}_alphas'.format(self.name), initializer=self.initializer, regularizer=self.regularizer, constraint=self.constraint) - if self.trainable: self.trainable_weights = [self.alphas] - if self.alpha_init is not None: self.set_weights([self.alpha_init * np.ones(new_input_shape)]) @@ -83,16 +77,10 @@ def compute_output_shape(self, input_shape): return input_shape def get_config(self): - if self.alpha_init is None: - config = {'alpha_init': initializers.serialize(self.initializer), - 'regularizer': regularizers.serialize(self.regularizer), - 'constraint': constraints.serialize(self.constraint), - 'trainable': self.trainable} - else: - config = {'alpha_init': float(self.alpha_init), - 'regularizer': regularizers.serialize(self.regularizer), - 'constraint': constraints.serialize(self.constraint), - 'trainable': self.trainable} + config = {'alpha_init': float(self.alpha_init), + 'regularizer': regularizers.serialize(self.regularizer), + 'constraint': constraints.serialize(self.constraint), + 'trainable': self.trainable} base_config = super(PSEU, self).get_config() return dict(list(base_config.items()) + list(config.items())) From a880bd07b50e81fe1aff1a31ab10dc8e6c6068cf Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Mon, 11 Feb 2019 13:55:14 +0530 Subject: [PATCH 22/59] Add missing import --- keras_contrib/layers/advanced_activations/pseu.py | 1 + 1 file changed, 1 insertion(+) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index 7f9dbdcab..3fcad1ea2 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +from keras import initializers from keras import regularizers from keras import constraints from keras.layers import Layer From 3fd819e64d923318f5eb5a6aecc46a83b1ef0143 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Mon, 11 Feb 2019 15:28:33 +0530 Subject: [PATCH 23/59] Fix import order --- keras_contrib/layers/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/keras_contrib/layers/__init__.py b/keras_contrib/layers/__init__.py index 705adc093..0607d41f8 100644 --- a/keras_contrib/layers/__init__.py +++ b/keras_contrib/layers/__init__.py @@ -1,10 +1,10 @@ from __future__ import absolute_import from .advanced_activations.pelu import PELU +from .advanced_activations.pseu import PSEU from .advanced_activations.srelu import SReLU from .advanced_activations.swish import Swish from .advanced_activations.sinerelu import SineReLU -from .advanced_activations.pseu import PSEU from .convolutional.cosineconvolution2d import CosineConv2D from .convolutional.cosineconvolution2d import CosineConvolution2D From 55676cd67660be2562a38459238a3369a146e62d Mon Sep 17 00:00:00 2001 From: Tarun 
S Paparaju Date: Mon, 11 Feb 2019 15:31:00 +0530 Subject: [PATCH 24/59] Fix import order --- keras_contrib/layers/advanced_activations/pseu.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index 3fcad1ea2..15f3f6154 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -1,10 +1,10 @@ # -*- coding: utf-8 -*- +import numpy as np from keras import initializers from keras import regularizers from keras import constraints from keras.layers import Layer from keras import backend as K -import numpy as np class PSEU(Layer): From be42270463d3810d8badee16db17771a4b8c71f3 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Mon, 11 Feb 2019 15:34:13 +0530 Subject: [PATCH 25/59] Fix small problems --- keras_contrib/layers/advanced_activations/pseu.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index 15f3f6154..d0cbae8b7 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -66,10 +66,10 @@ def build(self, input_shape): self.build = True - def call(self, x, mask=None): - if self.alpha_init is not None and self.alpha_init < 0: + def call(self, x): + if self.alpha_init < 0: return - K.log(1 - self.alphas * (x + self.alphas)) / self.alphas - elif self.alpha_init is not None and self.alpha_init > 0: + elif self.alpha_init > 0: return self.alphas + (K.exp(self.alphas * x) - 1.) / self.alphas else: return x From df24e8f721c1f5a56a919d142afe55f90ea1d3aa Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Mon, 11 Feb 2019 15:57:58 +0530 Subject: [PATCH 26/59] Sort imports in alphabetical order --- keras_contrib/layers/advanced_activations/pseu.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index d0cbae8b7..1b298dfea 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -1,10 +1,10 @@ # -*- coding: utf-8 -*- import numpy as np -from keras import initializers -from keras import regularizers +from keras import backend as K from keras import constraints +from keras import initializers from keras.layers import Layer -from keras import backend as K +from keras import regularizers class PSEU(Layer): From d57d963719fdc808ef33a861fbd8a281073c6dc6 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Tue, 12 Feb 2019 08:09:11 +0530 Subject: [PATCH 27/59] Remove unnecessary condition --- keras_contrib/layers/advanced_activations/pseu.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index 1b298dfea..95d1809d7 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -61,8 +61,7 @@ def build(self, input_shape): constraint=self.constraint) if self.trainable: self.trainable_weights = [self.alphas] - if self.alpha_init is not None: - self.set_weights([self.alpha_init * np.ones(new_input_shape)]) + self.set_weights([self.alpha_init * np.ones(new_input_shape)]) self.build = True From e6456e1da8329f9799964f5a84d1cf94537422e5 Mon Sep 17 00:00:00 2001 From: Tarun S 
Paparaju Date: Tue, 12 Feb 2019 09:49:08 +0530 Subject: [PATCH 28/59] Remove unnecessary comment in docstring --- keras_contrib/layers/advanced_activations/pseu.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index 95d1809d7..a266f3c15 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -28,8 +28,6 @@ class PSEU(Layer): model.add(Dense(10)) model.add(PSEU()) - Note : Specify alpha_init=None to use other intializers - Soft Exponential f(α, x): α == 0: x α > 0: (exp(αx)-1) / α + α From aa03980e6ca4856af2a50a09eb170751794be47b Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Tue, 12 Feb 2019 14:37:57 +0530 Subject: [PATCH 29/59] Add option to specify initializer for alpha weights --- .../layers/advanced_activations/pseu.py | 30 +++++++++++++++---- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index a266f3c15..97c64c641 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -19,10 +19,17 @@ class PSEU(Layer): Same shape as the input. # Arguments alpha_init: Initial value of the alpha weights (float) + (0.1 by default) + initializer: The initializer for the alpha weights. + Any initializer specified here overrides + the value of alpha_init. + (None by default) regularizer: Regularizer for alpha weights. constraint: Constraint for alpha weights. trainable: Whether the alpha weights are trainable or not + NOTE : Do not set both alpha_init and initializer to None. + # Example model = Sequential() model.add(Dense(10)) @@ -35,6 +42,7 @@ class PSEU(Layer): """ def __init__(self, alpha_init=0.1, + initializer=None, regularizer=None, constraint=None, trainable=True, @@ -43,8 +51,13 @@ def __init__(self, super(PSEU, self).__init__(**kwargs) self.supports_masking = True self.alpha_init = alpha_init - self.initializer = initializers.get('glorot_uniform') - # Add random initializer + + if initializer is None: + self.initializer = initializer + else: + self.initializer = initializers.get(initializer) + self.alpha_init = None + self.regularizer = regularizers.get(regularizer) self.constraint = constraints.get(constraint) self.trainable = trainable @@ -54,12 +67,17 @@ def build(self, input_shape): self.alphas = self.add_weight(shape=new_input_shape, name='{}_alphas'.format(self.name), - initializer=self.initializer, regularizer=self.regularizer, constraint=self.constraint) + if self.trainable: self.trainable_weights = [self.alphas] - self.set_weights([self.alpha_init * np.ones(new_input_shape)]) + + if self.initializer is None: + self.set_weights([self.alpha_init * np.ones(new_input_shape)]) + else: + self.initializer = initializers.get(initializer) + self.alphas.initializer = self.initializer self.build = True @@ -75,7 +93,9 @@ def compute_output_shape(self, input_shape): return input_shape def get_config(self): - config = {'alpha_init': float(self.alpha_init), + + config = {'alpha_init': self.alpha_init, + 'initializer': initializers.serialize(self.initializer), 'regularizer': regularizers.serialize(self.regularizer), 'constraint': constraints.serialize(self.constraint), 'trainable': self.trainable} From 839be17e9be958e3bf5b30a2c195db48e4c9c712 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Tue, 12 Feb 2019 14:44:38 +0530 Subject: [PATCH 
30/59] Add initializer parameter to test --- .../keras_contrib/layers/advanced_activations/test_pseu.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py index 60be2a42e..3a93e0e61 100644 --- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py +++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py @@ -6,11 +6,14 @@ @pytest.mark.parametrize('trainable', [True, False]) @pytest.mark.parametrize('alpha_init', [-0.1, 0., 0.1]) +@pytest.mark.parametrize('initializer', [None, 'glorot_uniform', 'ones']) def test_pseu(trainable, - alpha_init): + alpha_init, + initializer): layer_test(PSEU, kwargs={'trainable': trainable, - 'alpha_init': alpha_init}, + 'alpha_init': alpha_init, + 'initializer': initializer}, input_shape=(2, 3, 4)) From 9813264ae27760d52df4c5d4851389c7a0913f93 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Tue, 12 Feb 2019 14:48:08 +0530 Subject: [PATCH 31/59] Remove unnecessary line --- keras_contrib/layers/advanced_activations/pseu.py | 1 - 1 file changed, 1 deletion(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index 97c64c641..5973201d0 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -76,7 +76,6 @@ def build(self, input_shape): if self.initializer is None: self.set_weights([self.alpha_init * np.ones(new_input_shape)]) else: - self.initializer = initializers.get(initializer) self.alphas.initializer = self.initializer self.build = True From 8a34e8f9342712972b9f5385e2b07e6713e67277 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Tue, 12 Feb 2019 14:59:22 +0530 Subject: [PATCH 32/59] Remove useless spaces --- keras_contrib/layers/advanced_activations/pseu.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index 5973201d0..696898ac1 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -49,15 +49,12 @@ def __init__(self, **kwargs): super(PSEU, self).__init__(**kwargs) - self.supports_masking = True self.alpha_init = alpha_init - if initializer is None: self.initializer = initializer else: self.initializer = initializers.get(initializer) self.alpha_init = None - self.regularizer = regularizers.get(regularizer) self.constraint = constraints.get(constraint) self.trainable = trainable From dd84064ca98243de71b2818dd5f619655645fd26 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Tue, 12 Feb 2019 15:00:15 +0530 Subject: [PATCH 33/59] Remove unnecessary initializer parameter from test --- tests/keras_contrib/layers/advanced_activations/test_pseu.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py index 3a93e0e61..c135159b0 100644 --- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py +++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py @@ -6,14 +6,12 @@ @pytest.mark.parametrize('trainable', [True, False]) @pytest.mark.parametrize('alpha_init', [-0.1, 0., 0.1]) -@pytest.mark.parametrize('initializer', [None, 'glorot_uniform', 'ones']) def test_pseu(trainable, alpha_init, initializer): layer_test(PSEU, kwargs={'trainable': 
trainable, - 'alpha_init': alpha_init, - 'initializer': initializer}, + 'alpha_init': alpha_init}, input_shape=(2, 3, 4)) From f2c5d1a52920ffc2e248446f6ab059e6dea5fd40 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Tue, 12 Feb 2019 15:00:49 +0530 Subject: [PATCH 34/59] Remove unnecessary initializer parameter from test --- tests/keras_contrib/layers/advanced_activations/test_pseu.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py index c135159b0..186ced217 100644 --- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py +++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py @@ -6,8 +6,7 @@ @pytest.mark.parametrize('trainable', [True, False]) @pytest.mark.parametrize('alpha_init', [-0.1, 0., 0.1]) -def test_pseu(trainable, - alpha_init, +def test_pseu(alpha_init, initializer): layer_test(PSEU, kwargs={'trainable': trainable, From f2b0f971a934b2522baecf03ddb137535e5e3e02 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Tue, 12 Feb 2019 15:08:11 +0530 Subject: [PATCH 35/59] Fix silly mistake --- tests/keras_contrib/layers/advanced_activations/test_pseu.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py index 186ced217..083b7d956 100644 --- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py +++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py @@ -7,7 +7,7 @@ @pytest.mark.parametrize('trainable', [True, False]) @pytest.mark.parametrize('alpha_init', [-0.1, 0., 0.1]) def test_pseu(alpha_init, - initializer): + trainable): layer_test(PSEU, kwargs={'trainable': trainable, 'alpha_init': alpha_init}, From 6bdf84316a10b1c41cfd808be14aef16d32edb98 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Tue, 12 Feb 2019 15:22:54 +0530 Subject: [PATCH 36/59] Fix initializer parameter in test --- .../keras_contrib/layers/advanced_activations/test_pseu.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py index 083b7d956..ed52e9b59 100644 --- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py +++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py @@ -6,11 +6,14 @@ @pytest.mark.parametrize('trainable', [True, False]) @pytest.mark.parametrize('alpha_init', [-0.1, 0., 0.1]) +@pytest.mark.parametrize('initializer', ['glorot_uniform', 'glorot_normal']) def test_pseu(alpha_init, - trainable): + trainable, + initializer): layer_test(PSEU, kwargs={'trainable': trainable, - 'alpha_init': alpha_init}, + 'alpha_init': alpha_init, + 'initializer': initializer}, input_shape=(2, 3, 4)) From 6d7b8aa84c471521881ebb80903113be76b4b22a Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Tue, 12 Feb 2019 16:45:10 +0530 Subject: [PATCH 37/59] Add necessary sign parameter and add overrides --- .../layers/advanced_activations/pseu.py | 43 ++++++++++++++----- 1 file changed, 33 insertions(+), 10 deletions(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index 696898ac1..a5a281fe7 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -15,20 +15,33 @@ class PSEU(Layer): Arbitrary. 
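The test churn in patches 30 through 36 above comes down to one rule: stacked pytest.mark.parametrize decorators generate the cross-product of their arguments, and every parametrized name must appear in the test function's signature. A minimal self-contained illustration, independent of keras:

    import pytest

    @pytest.mark.parametrize('trainable', [True, False])
    @pytest.mark.parametrize('alpha_init', [-0.1, 0.0, 0.1])
    def test_cross_product(trainable, alpha_init):
        # pytest generates 2 * 3 = 6 cases; dropping a decorator while
        # keeping its name in the signature (or vice versa) fails collection.
        assert isinstance(trainable, bool)
        assert alpha_init in (-0.1, 0.0, 0.1)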
Use the keyword argument `input_shape` (tuple of integers, does not include the samples axis) when using this layer as the first layer in a model. + # Output shape Same shape as the input. + # Arguments - alpha_init: Initial value of the alpha weights (float) - (0.1 by default) + alpha_init: Initial values of the alpha weights (float) + (0.1 by default). initializer: The initializer for the alpha weights. Any initializer specified here overrides - the value of alpha_init. - (None by default) + the value of alpha_init. + Note that even if the initializer is specified, + the alpha_init value controls the sign + of the weights (α > 0, α < 0 or α = 0). It + is glorot_uniform by default. + alpha_sign: The sign (negative, positive or 0) that + is taken into consideration when deciding which + formula to use (the formula is differet for + α > 0, α < 0 and α = 0). It is set to positive + by default, when the initializer is not None. + The sign of alpha_init overrides alpha_sign when + provided. regularizer: Regularizer for alpha weights. constraint: Constraint for alpha weights. trainable: Whether the alpha weights are trainable or not NOTE : Do not set both alpha_init and initializer to None. + Use # Example model = Sequential() @@ -41,8 +54,9 @@ class PSEU(Layer): α < 0: -ln(1-α(x + α)) / α """ def __init__(self, - alpha_init=0.1, - initializer=None, + alpha_init=None, + alpha_sign=None, + initializer='glorot_uniform', regularizer=None, constraint=None, trainable=True, @@ -50,11 +64,19 @@ def __init__(self, super(PSEU, self).__init__(**kwargs) self.alpha_init = alpha_init + + self.alpha_sign = 'positive' # positive by default + if self.alpha_init is not None: + self.alpha_sign = 'positive' if self.alpha_init > 0 else 'negative' + if self.alpha_init == 0: self.alpha_sign = None + if initializer is None: self.initializer = initializer + self.alpha_init = 0.1 # default α when initializer is None else: self.initializer = initializers.get(initializer) self.alpha_init = None + self.regularizer = regularizers.get(regularizer) self.constraint = constraints.get(constraint) self.trainable = trainable @@ -78,19 +100,20 @@ def build(self, input_shape): self.build = True def call(self, x): - if self.alpha_init < 0: + if self.alpha_sign is 'negative': return - K.log(1 - self.alphas * (x + self.alphas)) / self.alphas - elif self.alpha_init > 0: + if self.alpha_sign is 'positive': return self.alphas + (K.exp(self.alphas * x) - 1.) 
/ self.alphas - else: + if self.alpha_sign is None: return x def compute_output_shape(self, input_shape): return input_shape def get_config(self): - + alpha_init = self.alpha_init if self.initializer is None else None config = {'alpha_init': self.alpha_init, + 'alpha_sign': self.alpha_sign, 'initializer': initializers.serialize(self.initializer), 'regularizer': regularizers.serialize(self.regularizer), 'constraint': constraints.serialize(self.constraint), From 0aa0b40bdb7cc7653fa7c6498589284b354251fa Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Tue, 12 Feb 2019 16:45:40 +0530 Subject: [PATCH 38/59] Fix test --- tests/keras_contrib/layers/advanced_activations/test_pseu.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py index ed52e9b59..ceb28ebb8 100644 --- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py +++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py @@ -6,7 +6,7 @@ @pytest.mark.parametrize('trainable', [True, False]) @pytest.mark.parametrize('alpha_init', [-0.1, 0., 0.1]) -@pytest.mark.parametrize('initializer', ['glorot_uniform', 'glorot_normal']) +@pytest.mark.parametrize('initializer', ['glorot_uniform', None]) def test_pseu(alpha_init, trainable, initializer): From 7d52579bdf88b7e1a0232d315376b925c03febe0 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Tue, 12 Feb 2019 16:49:30 +0530 Subject: [PATCH 39/59] Fix pep8 problems --- keras_contrib/layers/advanced_activations/pseu.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index a5a281fe7..c5ea5c5fb 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -24,7 +24,7 @@ class PSEU(Layer): (0.1 by default). initializer: The initializer for the alpha weights. Any initializer specified here overrides - the value of alpha_init. + the value of alpha_init. Note that even if the initializer is specified, the alpha_init value controls the sign of the weights (α > 0, α < 0 or α = 0). It @@ -32,16 +32,16 @@ class PSEU(Layer): alpha_sign: The sign (negative, positive or 0) that is taken into consideration when deciding which formula to use (the formula is differet for - α > 0, α < 0 and α = 0). It is set to positive + α > 0, α < 0 and α = 0). It is set to 'positive' by default, when the initializer is not None. The sign of alpha_init overrides alpha_sign when provided. + ('positive', 'negative' or None) regularizer: Regularizer for alpha weights. constraint: Constraint for alpha weights. trainable: Whether the alpha weights are trainable or not NOTE : Do not set both alpha_init and initializer to None. 
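One reason the get_config bookkeeping keeps changing across these patches: keras reconstructs a layer from its own config when a model is saved and when layer_test round-trips it, so every constructor argument has to survive cls(**config). A sketch of that invariant, assuming the series' eventual signature (alpha_init, regularizer, constraint):

    from keras_contrib.layers import PSEU

    layer = PSEU(alpha_init=0.1)
    rebuilt = PSEU.from_config(layer.get_config())   # roughly PSEU(**config)
    assert rebuilt.alpha_init == layer.alpha_init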
- Use # Example model = Sequential() @@ -65,14 +65,15 @@ def __init__(self, super(PSEU, self).__init__(**kwargs) self.alpha_init = alpha_init - self.alpha_sign = 'positive' # positive by default + self.alpha_sign = 'positive' # positive by default if self.alpha_init is not None: self.alpha_sign = 'positive' if self.alpha_init > 0 else 'negative' - if self.alpha_init == 0: self.alpha_sign = None + if self.alpha_init == 0: + self.alpha_sign = None if initializer is None: self.initializer = initializer - self.alpha_init = 0.1 # default α when initializer is None + self.alpha_init = 0.1 # default α when initializer is None else: self.initializer = initializers.get(initializer) self.alpha_init = None From df63af43eb18439cde4e10fc2de8abed1c81429b Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Tue, 12 Feb 2019 17:15:03 +0530 Subject: [PATCH 40/59] Add default initializer --- keras_contrib/layers/advanced_activations/pseu.py | 1 + 1 file changed, 1 insertion(+) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index c5ea5c5fb..670333d59 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -87,6 +87,7 @@ def build(self, input_shape): self.alphas = self.add_weight(shape=new_input_shape, name='{}_alphas'.format(self.name), + initializer='glorot_uniform', regularizer=self.regularizer, constraint=self.constraint) From 3736b4288bf544b59cc13560fe3efff9465a12e7 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Tue, 12 Feb 2019 17:31:35 +0530 Subject: [PATCH 41/59] Fix silly errors --- .../layers/advanced_activations/pseu.py | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py index 670333d59..e27577008 100644 --- a/keras_contrib/layers/advanced_activations/pseu.py +++ b/keras_contrib/layers/advanced_activations/pseu.py @@ -72,7 +72,7 @@ def __init__(self, self.alpha_sign = None if initializer is None: - self.initializer = initializer + self.initializer = None self.alpha_init = 0.1 # default α when initializer is None else: self.initializer = initializers.get(initializer) @@ -84,20 +84,20 @@ def __init__(self, def build(self, input_shape): new_input_shape = input_shape[1:] - - self.alphas = self.add_weight(shape=new_input_shape, - name='{}_alphas'.format(self.name), - initializer='glorot_uniform', - regularizer=self.regularizer, - constraint=self.constraint) - - if self.trainable: - self.trainable_weights = [self.alphas] - if self.initializer is None: + self.alphas = self.add_weight(shape=new_input_shape, + name='{}_alphas'.format(self.name), + regularizer=self.regularizer, + constraint=self.constraint) self.set_weights([self.alpha_init * np.ones(new_input_shape)]) else: - self.alphas.initializer = self.initializer + self.alphas = self.add_weight(shape=new_input_shape, + name='{}_alphas'.format(self.name), + initializer=self.initializer, + regularizer=self.regularizer, + constraint=self.constraint) + if self.trainable: + self.trainable_weights = [self.alphas] self.build = True From f7758b9177c403c9c6251a484d88f4fd149ebcb4 Mon Sep 17 00:00:00 2001 From: Tarun S Paparaju Date: Tue, 12 Feb 2019 18:19:10 +0530 Subject: [PATCH 42/59] REVERT TO NO-INITIALIZER VERSION --- .../layers/advanced_activations/pseu.py | 75 ++++--------------- 1 file changed, 16 insertions(+), 59 deletions(-) diff --git 
a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py
index e27577008..9ad24a751 100644
--- a/keras_contrib/layers/advanced_activations/pseu.py
+++ b/keras_contrib/layers/advanced_activations/pseu.py
@@ -15,108 +15,65 @@ class PSEU(Layer):
         Arbitrary. Use the keyword argument `input_shape`
         (tuple of integers, does not include the samples axis)
         when using this layer as the first layer in a model.
-
     # Output shape
         Same shape as the input.
-
     # Arguments
-        alpha_init: Initial values of the alpha weights (float)
-                    (0.1 by default).
-        initializer: The initializer for the alpha weights.
-                     Any initializer specified here overrides
-                     the value of alpha_init.
-                     Note that even if the initializer is specified,
-                     the alpha_init value controls the sign
-                     of the weights (α > 0, α < 0 or α = 0). It
-                     is glorot_uniform by default.
-        alpha_sign: The sign (negative, positive or 0) that
-                    is taken into consideration when deciding which
-                    formula to use (the formula is differet for
-                    α > 0, α < 0 and α = 0). It is set to 'positive'
-                    by default, when the initializer is not None.
-                    The sign of alpha_init overrides alpha_sign when
-                    provided.
-                    ('positive', 'negative' or None)
+        alpha_init: Initial value of the alpha weights (float)
         regularizer: Regularizer for alpha weights.
         constraint: Constraint for alpha weights.
         trainable: Whether the alpha weights are trainable or not
-
-    NOTE : Do not set both alpha_init and initializer to None.
-
     # Example
         model = Sequential()
         model.add(Dense(10))
         model.add(PSEU())
-
     Soft Exponential f(α, x):
         α == 0: x
         α > 0:  (exp(αx)-1) / α + α
         α < 0:  -ln(1-α(x + α)) / α
     """
     def __init__(self,
-                 alpha_init=None,
-                 alpha_sign=None,
-                 initializer='glorot_uniform',
+                 alpha_init=0.1,
                  regularizer=None,
                  constraint=None,
                  trainable=True,
                  **kwargs):
         super(PSEU, self).__init__(**kwargs)
+        self.supports_masking = True
         self.alpha_init = alpha_init
-
-        self.alpha_sign = 'positive'  # positive by default
-        if self.alpha_init is not None:
-            self.alpha_sign = 'positive' if self.alpha_init > 0 else 'negative'
-            if self.alpha_init == 0:
-                self.alpha_sign = None
-
-        if initializer is None:
-            self.initializer = None
-            self.alpha_init = 0.1  # default α when initializer is None
-        else:
-            self.initializer = initializers.get(initializer)
-            self.alpha_init = None
-
+        self.initializer = initializers.get('glorot_uniform')
+        # Add random initializer
         self.regularizer = regularizers.get(regularizer)
         self.constraint = constraints.get(constraint)
         self.trainable = trainable

     def build(self, input_shape):
         new_input_shape = input_shape[1:]
-        if self.initializer is None:
-            self.alphas = self.add_weight(shape=new_input_shape,
-                                          name='{}_alphas'.format(self.name),
-                                          regularizer=self.regularizer,
-                                          constraint=self.constraint)
-            self.set_weights([self.alpha_init * np.ones(new_input_shape)])
-        else:
-            self.alphas = self.add_weight(shape=new_input_shape,
-                                          name='{}_alphas'.format(self.name),
-                                          initializer=self.initializer,
-                                          regularizer=self.regularizer,
-                                          constraint=self.constraint)
+
+        self.alphas = self.add_weight(shape=new_input_shape,
+                                      name='{}_alphas'.format(self.name),
+                                      initializer=self.initializer,
+                                      regularizer=self.regularizer,
+                                      constraint=self.constraint)
         if self.trainable:
             self.trainable_weights = [self.alphas]
+        self.set_weights([self.alpha_init * np.ones(new_input_shape)])
         self.build = True

     def call(self, x):
-        if self.alpha_sign is 'negative':
+        if self.alpha_init < 0:
             return - K.log(1 - self.alphas * (x + self.alphas)) / self.alphas
-        if self.alpha_sign is 'positive':
+        elif self.alpha_init > 0:
             return self.alphas + (K.exp(self.alphas * x) - 1.) / self.alphas
-        if self.alpha_sign is None:
+        else:
             return x

     def compute_output_shape(self, input_shape):
         return input_shape

     def get_config(self):
-        alpha_init = self.alpha_init if self.initializer is None else None
-        config = {'alpha_init': self.alpha_init,
-                  'alpha_sign': self.alpha_sign,
-                  'initializer': initializers.serialize(self.initializer),
+        config = {'alpha_init': float(self.alpha_init),
                   'regularizer': regularizers.serialize(self.regularizer),
                   'constraint': constraints.serialize(self.constraint),
                   'trainable': self.trainable}

From ed77ff770c2709ceb5a65422d99750d6a03381b7 Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Tue, 12 Feb 2019 18:21:22 +0530
Subject: [PATCH 43/59] Remove initializer param from test

---
 .../keras_contrib/layers/advanced_activations/test_pseu.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py
index ceb28ebb8..083b7d956 100644
--- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py
+++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py
@@ -6,14 +6,11 @@

 @pytest.mark.parametrize('trainable', [True, False])
 @pytest.mark.parametrize('alpha_init', [-0.1, 0., 0.1])
-@pytest.mark.parametrize('initializer', ['glorot_uniform', None])
 def test_pseu(alpha_init,
-              trainable,
-              initializer):
+              trainable):
     layer_test(PSEU,
                kwargs={'trainable': trainable,
-                       'alpha_init': alpha_init,
-                       'initializer': initializer},
+                       'alpha_init': alpha_init},
                input_shape=(2, 3, 4))

From 7749c679ee99416ca8fa23d59cb487de50fd6da3 Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Tue, 12 Feb 2019 18:25:55 +0530
Subject: [PATCH 44/59] Add custom initializer to work correctly

---
 keras_contrib/layers/advanced_activations/pseu.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py
index 9ad24a751..d02816ea8 100644
--- a/keras_contrib/layers/advanced_activations/pseu.py
+++ b/keras_contrib/layers/advanced_activations/pseu.py
@@ -39,9 +39,7 @@ def __init__(self,
                  **kwargs):
         super(PSEU, self).__init__(**kwargs)
-        self.supports_masking = True
         self.alpha_init = alpha_init
-        self.initializer = initializers.get('glorot_uniform')
         # Add random initializer
         self.regularizer = regularizers.get(regularizer)
         self.constraint = constraints.get(constraint)
@@ -49,10 +47,13 @@ def __init__(self,

     def build(self, input_shape):
         new_input_shape = input_shape[1:]
+
+        def alpha_init(input_shape):
+            return self.alpha_init * K.ones(input_shape)

         self.alphas = self.add_weight(shape=new_input_shape,
                                       name='{}_alphas'.format(self.name),
-                                      initializer=self.initializer,
+                                      initializer=alpha_init,
                                       regularizer=self.regularizer,
                                       constraint=self.constraint)
         if self.trainable:

From 19d978bd896138c84596d0c3a0daa017a622d4f3 Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Tue, 12 Feb 2019 18:26:48 +0530
Subject: [PATCH 45/59] Fix pep8 problems

---
 keras_contrib/layers/advanced_activations/pseu.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py
index d02816ea8..08995048b 100644
--- a/keras_contrib/layers/advanced_activations/pseu.py
+++ b/keras_contrib/layers/advanced_activations/pseu.py
@@ -15,17 +15,21 @@ class PSEU(Layer):
         Arbitrary. Use the keyword argument `input_shape`
         (tuple of integers, does not include the samples axis)
         when using this layer as the first layer in a model.
+
     # Output shape
         Same shape as the input.
+
     # Arguments
         alpha_init: Initial value of the alpha weights (float)
         regularizer: Regularizer for alpha weights.
         constraint: Constraint for alpha weights.
         trainable: Whether the alpha weights are trainable or not
+
     # Example
         model = Sequential()
         model.add(Dense(10))
         model.add(PSEU())
+
     Soft Exponential f(α, x):
         α == 0: x
         α > 0:  (exp(αx)-1) / α + α
@@ -47,7 +51,7 @@ def __init__(self,

     def build(self, input_shape):
         new_input_shape = input_shape[1:]
-
+
         def alpha_init(input_shape):
             return self.alpha_init * K.ones(input_shape)

From 75c83d293b8cbb38fbadb5ddb28884ebee14a4d9 Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Tue, 12 Feb 2019 18:30:39 +0530
Subject: [PATCH 46/59] Remove unnecessary lines and trainability

---
 keras_contrib/layers/advanced_activations/pseu.py | 13 ++-----------
 1 file changed, 2 insertions(+), 11 deletions(-)

diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py
index 08995048b..6c84dc8b3 100644
--- a/keras_contrib/layers/advanced_activations/pseu.py
+++ b/keras_contrib/layers/advanced_activations/pseu.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-import numpy as np
 from keras import backend as K
 from keras import constraints
 from keras import initializers
@@ -8,7 +7,7 @@

 class PSEU(Layer):
-    """Parametric Soft Exponential Unit with trainable alpha
+    """Parametric Soft Exponential Unit
     See: https://arxiv.org/pdf/1602.01321.pdf by Godfrey and Gashler
     Reference: https://github.com/keras-team/keras/issues/3842 (@hobson)
     # Input shape
@@ -23,7 +22,6 @@ class PSEU(Layer):
         alpha_init: Initial value of the alpha weights (float)
         regularizer: Regularizer for alpha weights.
         constraint: Constraint for alpha weights.
-        trainable: Whether the alpha weights are trainable or not

     # Example
         model = Sequential()
@@ -39,7 +37,6 @@ def __init__(self,
                  alpha_init=0.1,
                  regularizer=None,
                  constraint=None,
-                 trainable=True,
                  **kwargs):
         super(PSEU, self).__init__(**kwargs)
@@ -47,7 +44,6 @@ def __init__(self,
         # Add random initializer
         self.regularizer = regularizers.get(regularizer)
         self.constraint = constraints.get(constraint)
-        self.trainable = trainable

     def build(self, input_shape):
         new_input_shape = input_shape[1:]
@@ -60,10 +56,6 @@ def alpha_init(input_shape):
                                       initializer=alpha_init,
                                       regularizer=self.regularizer,
                                       constraint=self.constraint)
-        if self.trainable:
-            self.trainable_weights = [self.alphas]
-        self.set_weights([self.alpha_init * np.ones(new_input_shape)])
-
         self.build = True

     def call(self, x):
@@ -80,8 +72,7 @@ def compute_output_shape(self, input_shape):

     def get_config(self):
         config = {'alpha_init': float(self.alpha_init),
                   'regularizer': regularizers.serialize(self.regularizer),
-                  'constraint': constraints.serialize(self.constraint),
-                  'trainable': self.trainable}
+                  'constraint': constraints.serialize(self.constraint)}
         base_config = super(PSEU, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))

From 8791343eb738d7c8051a4728edcfefa1aeaa9b06 Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Tue, 12 Feb 2019 18:31:32 +0530
Subject: [PATCH 47/59] Remove trainable param from test

---
 .../keras_contrib/layers/advanced_activations/test_pseu.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py
index 083b7d956..6898d5ac9 100644
--- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py
+++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py
@@ -4,13 +4,10 @@
 from keras_contrib.layers import PSEU

-@pytest.mark.parametrize('trainable', [True, False])
 @pytest.mark.parametrize('alpha_init', [-0.1, 0., 0.1])
-def test_pseu(alpha_init,
-              trainable):
+def test_pseu(alpha_init):
     layer_test(PSEU,
-               kwargs={'trainable': trainable,
-                       'alpha_init': alpha_init},
+               kwargs={'alpha_init': alpha_init},
                input_shape=(2, 3, 4))

From 605ddc1fe6ef1a8327262bd674b62436eceffc07 Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Tue, 12 Feb 2019 18:54:28 +0530
Subject: [PATCH 48/59] Add PSEU to CODEOWNERS

---
 CODEOWNERS | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CODEOWNERS b/CODEOWNERS
index afa98df95..a233ecdef 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -26,6 +26,7 @@ keras_contrib/callbacks/snapshot.py @titu1994

 # layers
+keras_contrib/layers/advanced_activations/pseu.py @SriRangaTarun
 keras_contrib/layers/advanced_activations/sinerelu.py @wilderrodrigues
 keras_contrib/layers/advanced_activations/swish.py @gabrieldemarmiesse
 keras_contrib/layers/convolutional/subpixelupscaling.py @titu1994

From e7223722852174a3c2ce2ebe42c4d9c7d475ce5e Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Tue, 12 Feb 2019 19:03:25 +0530
Subject: [PATCH 49/59] Avoid calling alpha_init inside build function

---
 keras_contrib/layers/advanced_activations/pseu.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py
index 6c84dc8b3..ce821686d 100644
--- a/keras_contrib/layers/advanced_activations/pseu.py
+++ b/keras_contrib/layers/advanced_activations/pseu.py
@@ -45,15 +45,14 @@ def __init__(self,
         self.regularizer = regularizers.get(regularizer)
         self.constraint = constraints.get(constraint)

+    def alpha_initializer(self, input_shape):
+        return self.alpha_init * K.ones(input_shape)
+
     def build(self, input_shape):
         new_input_shape = input_shape[1:]
-
-        def alpha_init(input_shape):
-            return self.alpha_init * K.ones(input_shape)
-
         self.alphas = self.add_weight(shape=new_input_shape,
                                       name='{}_alphas'.format(self.name),
-                                      initializer=alpha_init,
+                                      initializer=self.alpha_initializer,
                                       regularizer=self.regularizer,
                                       constraint=self.constraint)
         self.build = True

From 351ef64ab4a649390c543bb815fa35f0963a80d7 Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Tue, 12 Feb 2019 20:10:01 +0530
Subject: [PATCH 50/59] Clean up code and remove unnecessary lines

---
 .../layers/advanced_activations/pseu.py | 33 +++++--------------
 1 file changed, 9 insertions(+), 24 deletions(-)

diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py
index ce821686d..c6ea87851 100644
--- a/keras_contrib/layers/advanced_activations/pseu.py
+++ b/keras_contrib/layers/advanced_activations/pseu.py
@@ -1,9 +1,6 @@
 # -*- coding: utf-8 -*-
 from keras import backend as K
-from keras import constraints
-from keras import initializers
 from keras.layers import Layer
-from keras import regularizers

 class PSEU(Layer):
@@ -19,14 +16,12 @@ class PSEU(Layer):
         Same shape as the input.

     # Arguments
-        alpha_init: Initial value of the alpha weights (float)
-        regularizer: Regularizer for alpha weights.
-        constraint: Constraint for alpha weights.
+        alpha_initial: Initial value of the alpha weights (float)

     # Example
         model = Sequential()
         model.add(Dense(10))
-        model.add(PSEU())
+        model.add(PSEU(alpha_initial=0.2))

     Soft Exponential f(α, x):
         α == 0: x
@@ -34,33 +29,26 @@ class PSEU(Layer):
         α < 0:  -ln(1-α(x + α)) / α
     """
     def __init__(self,
-                 alpha_init=0.1,
-                 regularizer=None,
-                 constraint=None,
+                 alpha_initial=0.1,
                  **kwargs):
         super(PSEU, self).__init__(**kwargs)
-        self.alpha_init = alpha_init
-        # Add random initializer
-        self.regularizer = regularizers.get(regularizer)
-        self.constraint = constraints.get(constraint)
+        self.alpha_initial = alpha_initial

     def alpha_initializer(self, input_shape):
-        return self.alpha_init * K.ones(input_shape)
+        return self.alpha_initial * K.ones(input_shape)

     def build(self, input_shape):
         new_input_shape = input_shape[1:]
         self.alphas = self.add_weight(shape=new_input_shape,
                                       name='{}_alphas'.format(self.name),
-                                      initializer=self.alpha_initializer,
-                                      regularizer=self.regularizer,
-                                      constraint=self.constraint)
+                                      initializer=self.alpha_initializer)
         self.build = True

     def call(self, x):
-        if self.alpha_init < 0:
+        if self.alpha_initial < 0:
             return - K.log(1 - self.alphas * (x + self.alphas)) / self.alphas
-        elif self.alpha_init > 0:
+        elif self.alpha_initial > 0:
             return self.alphas + (K.exp(self.alphas * x) - 1.) / self.alphas
         else:
             return x
@@ -69,9 +57,6 @@ def compute_output_shape(self, input_shape):
         return input_shape

     def get_config(self):
-        config = {'alpha_init': float(self.alpha_init),
-                  'regularizer': regularizers.serialize(self.regularizer),
-                  'constraint': constraints.serialize(self.constraint)}
-
+        config = {'alpha_initial': self.alpha_initial}
         base_config = super(PSEU, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))

From 4d8f0fb81b7fd644b52897d0be46044c9f37ee77 Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Tue, 12 Feb 2019 20:22:34 +0530
Subject: [PATCH 51/59] Fix silly mistake

---
 .../keras_contrib/layers/advanced_activations/test_pseu.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py
index 6898d5ac9..bd12d0db8 100644
--- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py
+++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py
@@ -4,10 +4,10 @@
 from keras_contrib.layers import PSEU

-@pytest.mark.parametrize('alpha_init', [-0.1, 0., 0.1])
-def test_pseu(alpha_init):
+@pytest.mark.parametrize('alpha_initial', [-0.1, 0., 0.1])
+def test_pseu(alpha_initial):
     layer_test(PSEU,
-               kwargs={'alpha_init': alpha_init},
+               kwargs={'alpha_initial': alpha_initial},
                input_shape=(2, 3, 4))

From ef4a20fe64c394099936e54c64786e9f71a5a8dd Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Tue, 12 Feb 2019 20:49:08 +0530
Subject: [PATCH 52/59] Simplify code

---
 .../layers/advanced_activations/pseu.py | 29 ++++++++++---------
 1 file changed, 15 insertions(+), 14 deletions(-)

diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py
index c6ea87851..b8ef0e7c6 100644
--- a/keras_contrib/layers/advanced_activations/pseu.py
+++ b/keras_contrib/layers/advanced_activations/pseu.py
@@ -7,6 +7,12 @@ class PSEU(Layer):
     """Parametric Soft Exponential Unit
     See: https://arxiv.org/pdf/1602.01321.pdf by Godfrey and Gashler
     Reference: https://github.com/keras-team/keras/issues/3842 (@hobson)
+
+    Soft Exponential f(α, x):
+        α == 0: x
+        α > 0:  (exp(αx)-1) / α + α
+        α < 0:  -ln(1-α(x + α)) / α
+
     # Input shape
         Arbitrary. Use the keyword argument `input_shape`
         (tuple of integers, does not include the samples axis)
         when using this layer as the first layer in a model.
@@ -16,27 +22,22 @@
         Same shape as the input.

     # Arguments
-        alpha_initial: Initial value of the alpha weights (float)
+        alpha: Value of the alpha weights (float)

     # Example
         model = Sequential()
-        model.add(Dense(10))
-        model.add(PSEU(alpha_initial=0.2))
-
-    Soft Exponential f(α, x):
-        α == 0: x
-        α > 0:  (exp(αx)-1) / α + α
-        α < 0:  -ln(1-α(x + α)) / α
+        model.add(Dense(10, input_shape=(5,)))
+        model.add(PSEU(alpha=0.2))
     """
     def __init__(self,
-                 alpha_initial=0.1,
+                 alpha=0.1,
                  **kwargs):
         super(PSEU, self).__init__(**kwargs)
-        self.alpha_initial = alpha_initial
+        self.alpha = alpha

     def alpha_initializer(self, input_shape):
-        return self.alpha_initial * K.ones(input_shape)
+        return self.alpha * K.ones(input_shape)

     def build(self, input_shape):
         new_input_shape = input_shape[1:]
@@ -46,9 +47,9 @@ def build(self, input_shape):
         self.build = True

     def call(self, x):
-        if self.alpha_initial < 0:
+        if self.alpha < 0:
             return - K.log(1 - self.alphas * (x + self.alphas)) / self.alphas
-        elif self.alpha_initial > 0:
+        elif self.alpha > 0:
             return self.alphas + (K.exp(self.alphas * x) - 1.) / self.alphas
         else:
             return x
@@ -57,6 +58,6 @@ def compute_output_shape(self, input_shape):
         return input_shape

     def get_config(self):
-        config = {'alpha_initial': self.alpha_initial}
+        config = {'alpha': self.alpha}
         base_config = super(PSEU, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))

From 4d8f0fb81b7fd644b52897d0be46044c9f37ee77 Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Tue, 12 Feb 2019 20:49:54 +0530
Subject: [PATCH 53/59] Simplify code

---
 .../keras_contrib/layers/advanced_activations/test_pseu.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/keras_contrib/layers/advanced_activations/test_pseu.py b/tests/keras_contrib/layers/advanced_activations/test_pseu.py
index bd12d0db8..8fea9d295 100644
--- a/tests/keras_contrib/layers/advanced_activations/test_pseu.py
+++ b/tests/keras_contrib/layers/advanced_activations/test_pseu.py
@@ -4,10 +4,10 @@
 from keras_contrib.layers import PSEU

-@pytest.mark.parametrize('alpha_initial', [-0.1, 0., 0.1])
-def test_pseu(alpha_initial):
+@pytest.mark.parametrize('alpha', [-0.1, 0., 0.1])
+def test_pseu(alpha):
     layer_test(PSEU,
-               kwargs={'alpha_initial': alpha_initial},
+               kwargs={'alpha': alpha},
                input_shape=(2, 3, 4))

From dd22dd8227856ccda1c849b6cacc1923f58f9cf0 Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Wed, 13 Feb 2019 08:44:51 +0530
Subject: [PATCH 54/59] Add important note in docstring

---
 keras_contrib/layers/advanced_activations/pseu.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py
index b8ef0e7c6..0e0ef9f53 100644
--- a/keras_contrib/layers/advanced_activations/pseu.py
+++ b/keras_contrib/layers/advanced_activations/pseu.py
@@ -23,6 +23,15 @@ class PSEU(Layer):

     # Arguments
         alpha: Value of the alpha weights (float)
+               NOTE : This function can become unstable for
+               very negative values of α (like -2. or
+               -3). If the function starts returning
+               NaNs for α < 0, try decreasing the magnitude
+               of α. Alternatively, you can normalize the data
+               into fixed ranges before passing it to PSEU. Note
+               that PSEU returns NaNs for α < 0 only when large
+               negative values are passed to the layer. Adjust α
+               based on your specific dataset and use-case.
     # Example
         model = Sequential()

From 49ec6c8d8693cc1b5b1feb8b053fd5a5e668e19f Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Wed, 13 Feb 2019 10:25:03 +0530
Subject: [PATCH 55/59] Set trainable=False

---
 keras_contrib/layers/advanced_activations/pseu.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py
index 0e0ef9f53..5f9f8c1a8 100644
--- a/keras_contrib/layers/advanced_activations/pseu.py
+++ b/keras_contrib/layers/advanced_activations/pseu.py
@@ -52,7 +52,8 @@ def build(self, input_shape):
         new_input_shape = input_shape[1:]
         self.alphas = self.add_weight(shape=new_input_shape,
                                       name='{}_alphas'.format(self.name),
-                                      initializer=self.alpha_initializer)
+                                      initializer=self.alpha_initializer,
+                                      trainable=False)
         self.build = True

     def call(self, x):

From 3e53036f06b3f4e9a555c67421b459b1996446f8 Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Wed, 13 Feb 2019 15:32:58 +0530
Subject: [PATCH 56/59] Adjust alpha docs

---
 keras_contrib/layers/advanced_activations/pseu.py | 14 ++++++--------
 1 file changed, 6 insertions(+), 8 deletions(-)

diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py
index 5f9f8c1a8..a7f427ef4 100644
--- a/keras_contrib/layers/advanced_activations/pseu.py
+++ b/keras_contrib/layers/advanced_activations/pseu.py
@@ -24,14 +24,12 @@ class PSEU(Layer):
     # Arguments
         alpha: Value of the alpha weights (float)
                NOTE : This function can become unstable for
-               very negative values of α (like -2. or
-               -3). If the function starts returning
-               NaNs for α < 0, try decreasing the magnitude
-               of α. Alternatively, you can normalize the data
-               into fixed ranges before passing it to PSEU. Note
-               that PSEU returns NaNs for α < 0 only when large
-               negative values are passed to the layer. Adjust α
-               based on your specific dataset and use-case.
+               very negative values of α. If the function
+               starts returning NaNs for α < 0, try decreasing
+               the magnitude of α. Alternatively, you can
+               normalize the data into fixed ranges before
+               passing it to PSEU. Adjust α based on your
+               specific dataset and use-case.

From 5ce52c33d55a393a61768d522b6ccddb0aa10b8e Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Wed, 13 Feb 2019 15:38:40 +0530
Subject: [PATCH 57/59] Update alpha docstring

---
 keras_contrib/layers/advanced_activations/pseu.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py
index a7f427ef4..b7e53b043 100644
--- a/keras_contrib/layers/advanced_activations/pseu.py
+++ b/keras_contrib/layers/advanced_activations/pseu.py
@@ -28,8 +28,9 @@ class PSEU(Layer):
                starts returning NaNs for α < 0, try decreasing
                the magnitude of α. Alternatively, you can
                normalize the data into fixed ranges before
-               passing it to PSEU. Adjust α based on your
-               specific dataset and use-case.
+               passing it to PSEU. Note that numerical instability
+               may occur only for large negative inputs when α < 0.
+               Adjust α based on your specific dataset and use-case.

     # Example
         model = Sequential()

From b919451825aebf4182a993547760c79c50e3bfa3 Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Wed, 13 Feb 2019 16:56:01 +0530
Subject: [PATCH 58/59] Improve docs

---
 .../layers/advanced_activations/pseu.py | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)

diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py
index b7e53b043..f0b7c1e1e 100644
--- a/keras_contrib/layers/advanced_activations/pseu.py
+++ b/keras_contrib/layers/advanced_activations/pseu.py
@@ -23,14 +23,17 @@ class PSEU(Layer):

     # Arguments
         alpha: Value of the alpha weights (float)
+
                NOTE : This function can become unstable for
-               very negative values of α. If the function
-               starts returning NaNs for α < 0, try decreasing
-               the magnitude of α. Alternatively, you can
-               normalize the data into fixed ranges before
-               passing it to PSEU. Note that numerical instability
-               may occur only for large negative inputs when α < 0.
-               Adjust α based on your specific dataset and use-case.
+               negative values of α. In particular, the
+               function returns NaNs when α < 0 and x < 1/α
+               (where x is the input).
+               If the function starts returning NaNs for α < 0,
+               try decreasing the magnitude of α.
+               Alternatively, you can normalize the data into fixed
+               ranges before passing it to PSEU.
+               Adjust α based on your specific dataset
+               and use-case.

From f4e1c65d84a5b9a37630d2aaf87324f2a76d4451 Mon Sep 17 00:00:00 2001
From: Tarun S Paparaju
Date: Wed, 13 Feb 2019 18:03:57 +0530
Subject: [PATCH 59/59] Fix small typo in docs

---
 keras_contrib/layers/advanced_activations/pseu.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/keras_contrib/layers/advanced_activations/pseu.py b/keras_contrib/layers/advanced_activations/pseu.py
index f0b7c1e1e..482a5e22e 100644
--- a/keras_contrib/layers/advanced_activations/pseu.py
+++ b/keras_contrib/layers/advanced_activations/pseu.py
@@ -26,7 +26,7 @@ class PSEU(Layer):
                NOTE : This function can become unstable for
                negative values of α. In particular, the
-               function returns NaNs when α < 0 and x < 1/α
+               function returns NaNs when α < 0 and x <= 1/α
                (where x is the input).
                If the function starts returning NaNs for α < 0,
                try decreasing the magnitude of α.
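With PATCH 59 the series is complete: PSEU ends up as a fixed, non-trainable per-element alpha applied through the three-branch soft exponential. The sketch below is an editorial addition, not part of the patch series; it cross-checks each branch of the final layer against plain NumPy, assuming keras-contrib with this series applied and the same PSEU import path used in test_pseu.py.

# Editorial sketch, not part of the patch series: verifies the three
# soft exponential branches of the final PSEU layer against NumPy.
import numpy as np
from keras.models import Sequential
from keras_contrib.layers import PSEU

# Inputs in [-1, 1] keep the log argument positive for alpha = -0.1,
# so the alpha < 0 branch stays finite on this data.
x = np.random.uniform(-1., 1., size=(3, 4)).astype('float32')

for alpha in (-0.1, 0., 0.1):  # the same values test_pseu.py parametrizes
    model = Sequential()
    model.add(PSEU(alpha=alpha, input_shape=(4,)))
    y = model.predict(x)

    if alpha > 0:    # f = alpha + (exp(alpha * x) - 1) / alpha
        expected = alpha + (np.exp(alpha * x) - 1.) / alpha
    elif alpha < 0:  # f = -ln(1 - alpha * (x + alpha)) / alpha
        expected = -np.log(1. - alpha * (x + alpha)) / alpha
    else:            # f = x
        expected = x
    assert np.allclose(y, expected, atol=1e-5)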
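The instability documented across PATCH 54 to PATCH 59 is also easy to reproduce without Keras. The following NumPy sketch (again an editorial addition, with assumed example values) mirrors the α < 0 branch and shows the log argument crossing zero once the input is sufficiently negative; the exact threshold is x = 1/α - α, which the final docstring approximates as 1/α for small |α|.

# Editorial sketch of the NaN behaviour the final docstring warns about:
# for alpha < 0 the log argument 1 - alpha * (x + alpha) is zero or
# negative once x <= 1/alpha - alpha, and the branch then returns NaN.
import numpy as np

alpha = -0.5                                # 1/alpha - alpha = -1.5
x = np.array([-5.0, -2.0, -1.0, 0.0, 1.0])

log_arg = 1. - alpha * (x + alpha)
with np.errstate(invalid='ignore'):         # silence the NaN warning from log
    y = -np.log(log_arg) / alpha

print(log_arg)  # [-1.75 -0.25  0.25  0.75  1.25]
print(y)        # NaN for x = -5 and x = -2; finite elsewhere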