    Searched refs:ReLU (Results 1 - 4 of 4)

  /external/tensorflow/tensorflow/python/keras/layers/
advanced_activations_test.py
     65    testing_utils.layer_test(keras.layers.ReLU,
     72    'LeakyRelu' in keras.layers.ReLU(negative_slope=0.2)(x).name)
     73    # Test that we use `relu` when appropriate in graph mode.
     74    self.assertTrue('Relu' in keras.layers.ReLU()(x).name)
     76    self.assertTrue('Relu6' in keras.layers.ReLU(max_value=6)(x).name)
     80    ValueError, 'max_value of Relu layer cannot be negative value: -10'):
     81    testing_utils.layer_test(keras.layers.ReLU,
     86    'negative_slope of Relu layer cannot be negative value: -2'):
     89    keras.layers.ReLU,
    [all...]
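The matches above exercise two things: the specialized op the ReLU layer emits in graph mode for each configuration, and the validation of its constructor arguments. A minimal sketch of that behavior, assuming the TF 1.x tf.keras API in this tree (the op-name suffixes and error text come from the snippets above):

    import tensorflow as tf

    x = tf.keras.Input(shape=(4,))

    # Graph mode picks a specialized op per configuration.
    print(tf.keras.layers.ReLU()(x).name)                    # contains 'Relu'
    print(tf.keras.layers.ReLU(max_value=6)(x).name)         # contains 'Relu6'
    print(tf.keras.layers.ReLU(negative_slope=0.2)(x).name)  # contains 'LeakyRelu'

    # Invalid arguments are rejected at construction time.
    try:
        tf.keras.layers.ReLU(max_value=-10)
    except ValueError as e:
        print(e)  # max_value of Relu layer cannot be negative value: -10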
advanced_activations.py
     59    return K.relu(inputs, alpha=self.alpha)
    142    pos = K.relu(inputs)
    143    neg = -self.alpha * K.relu(-inputs)
    272    @keras_export('keras.layers.ReLU')
    273    class ReLU(Layer):
    298    super(ReLU, self).__init__(**kwargs)
    300    raise ValueError('max_value of Relu layer '
    303    raise ValueError('negative_slope of Relu layer '
    314    # alpha is used for leaky relu slope in activations instead of
    316    return K.relu(inputs
    [all...]
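All of the activation layers in this file reduce to the backend's K.relu: ReLU.call forwards negative_slope as K.relu's alpha (the comment at line 314), and PReLU (lines 142-143) splits the input into a positive part plus a learned-slope negative part. A NumPy sketch of those semantics, for intuition only; the real layers defer to the backend:

    import numpy as np

    def relu(x, alpha=0.0, max_value=None, threshold=0.0):
        # Same shape as K.relu: identity above `threshold`, slope `alpha`
        # below it, optionally saturated at `max_value`.
        y = np.where(x >= threshold, x, alpha * (x - threshold))
        return y if max_value is None else np.minimum(y, max_value)

    x = np.array([-2.0, -0.5, 0.5, 8.0])
    print(relu(x))                 # plain ReLU
    print(relu(x, max_value=6.0))  # ReLU6-style cap
    print(relu(x, alpha=0.2))      # leaky slope, as ReLU(negative_slope=0.2)

    # PReLU decomposition from lines 142-143: pos + neg with learned alpha.
    alpha = 0.25
    pos = relu(x)             # K.relu(inputs)
    neg = -alpha * relu(-x)   # -self.alpha * K.relu(-inputs)
    print(pos + neg)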
__init__.py
     32    from tensorflow.python.keras.layers.advanced_activations import ReLU
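That re-export is what makes the layer reachable at the public package path; assuming the usual keras packaging in this tree:

    from tensorflow.python.keras import layers
    capped = layers.ReLU(max_value=6)  # same class as tf.keras.layers.ReLU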
  /external/tensorflow/tensorflow/python/keras/engine/
base_layer_test.py
    225    keras.layers.Dense(3, activation='relu', kernel_initializer='ones'),
    598    10, activation=keras.layers.ReLU(name='MyAct'), name='MyName2')
    602    self.assertEqual(y.name, 'MyName2/MyAct/Relu:0')
    607    10, activation=keras.layers.ReLU(name='MyAct'), name='MyName3')
    616    collections.OrderedDict(activation=['relu'])),
    630    activation=[None, 'relu'],
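These engine tests pass a ReLU layer instance, rather than the string 'relu', as a Dense activation and assert how the resulting op names nest. A sketch of that pattern, assuming TF 1.x graph mode as in the snippet at line 602:

    import tensorflow as tf

    x = tf.keras.Input(shape=(10,))
    dense = tf.keras.layers.Dense(
        10, activation=tf.keras.layers.ReLU(name='MyAct'), name='MyName2')
    y = dense(x)
    # The activation layer's ops nest under the Dense layer's name scope,
    # so in graph mode y.name comes out as 'MyName2/MyAct/Relu:0'.
    print(y.name)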
