    Searched refs: softplus (Results 1 - 25 of 45)

  /external/tensorflow/tensorflow/contrib/distributions/python/ops/bijectors/
scale_tril.py 24 from tensorflow.contrib.distributions.python.ops.bijectors import softplus
39 Softplus transformation followed by a small shift (`1e-5`) which
76 tfb.Softplus(),
101 Default value: `None` (i.e., `tfb.Softplus()`).
116 diag_bijector = softplus.Softplus(validate_args=validate_args)
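The constraint described in scale_tril.py above is easy to state outside TensorFlow: softplus maps the unconstrained diagonal to positive values, and the small shift keeps it bounded away from zero. A minimal NumPy sketch (not the file's code):

    import numpy as np

    def softplus(x):
        # Numerically stable log(1 + exp(x)).
        return np.logaddexp(0.0, x)

    unconstrained_diag = np.array([-8.0, 0.0, 3.0])
    constrained_diag = softplus(unconstrained_diag) + 1e-5  # strictly positive
    print(constrained_diag)
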
sigmoid.py 59 return -nn_ops.softplus(-x) - nn_ops.softplus(x)
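The sigmoid.py line above is the numerically stable form of the Sigmoid bijector's log-derivative. A hedged NumPy check of the identity log sigmoid'(x) = -softplus(-x) - softplus(x), which holds because sigmoid'(x) = sigmoid(x) * sigmoid(-x):

    import numpy as np

    def softplus(x):
        return np.logaddexp(0.0, x)  # stable log(1 + exp(x))

    def sigmoid(x):
        return 1.0 / (1.0 + np.exp(-x))

    x = np.array([-4.0, 0.0, 4.0])
    print(np.log(sigmoid(x) * sigmoid(-x)))  # direct, overflow-prone form
    print(-softplus(-x) - softplus(x))       # the snippet's stable form
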
softplus.py 15 """Softplus bijector."""
32 "Softplus",
36 class Softplus(bijector.Bijector):
39 The softplus `Bijector` has the following two useful properties:
42 * `softplus(x) approx x`, for large `x`, so it does not overflow as easily as
51 so the behavior for large `x` is the same as the standard softplus.
64 # Create the Y=g(X)=softplus(X) transform which works only on Tensors with 1
66 softplus = Softplus()
71 log(1 + exp(x)) == softplus.forward(x)
    [all...]
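The softplus.py docstring excerpted above names two useful properties: the output is always positive, and softplus(x) approx x for large x, so it does not overflow as easily as exp. A minimal NumPy sketch (not the TensorFlow implementation) of the forward map, its inverse, and both properties:

    import numpy as np

    def softplus(x):
        # Stable log(1 + exp(x)): max(x, 0) + log1p(exp(-|x|)).
        return np.maximum(x, 0.0) + np.log1p(np.exp(-np.abs(x)))

    def softplus_inverse(y):
        # Inverse for y > 0: log(exp(y) - 1).
        return np.log(np.expm1(y))

    x = np.array([-10.0, 0.0, 10.0, 100.0])
    y = softplus(x)
    print(y)                    # always positive; y approx x for large x
    print(softplus_inverse(y))  # recovers x up to floating-point error
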
softmax_centered.py 170 log_normalization = nn_ops.softplus(
__init__.py 46 @@Softplus
88 from tensorflow.contrib.distributions.python.ops.bijectors.softplus import *
  /external/tensorflow/tensorflow/contrib/labeled_tensor/python/ops/
nn.py 29 softplus = core.define_unary_op('softplus', nn.softplus) variable
nn_test.py 41 ('softplus', nn_ops.softplus, nn.softplus),
  /external/tensorflow/tensorflow/python/kernel_tests/
softplus_op_test.py 15 """Tests for Softplus and SoftplusGrad."""
42 softplus = nn_ops.softplus(np_features)
43 tf_softplus = self.evaluate(softplus)
46 self.assertShapeEqual(np_softplus, softplus)
81 y = nn_ops.softplus(x, name="softplus")
88 print("softplus (float) gradient err = ", err)
98 y = nn_ops.softplus(x, name="softplus")
    [all...]
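The kernel test above compares nn_ops.softplus and its gradient against a NumPy reference. A hedged sketch of such a reference (the helper names here are illustrative, not the test's), using the stable formulation log(1 + exp(x)) = logaddexp(0, x) and the fact that the derivative of softplus is the sigmoid:

    import numpy as np

    def np_softplus(x):
        return np.logaddexp(0.0, x)        # log(1 + exp(x)), computed stably

    def np_softplus_grad(x):
        return 1.0 / (1.0 + np.exp(-x))    # d/dx softplus(x) = sigmoid(x)

    x = np.linspace(-5.0, 5.0, 11)
    eps = 1e-6
    numeric = (np_softplus(x + eps) - np_softplus(x - eps)) / (2.0 * eps)
    print(np.max(np.abs(numeric - np_softplus_grad(x))))  # tiny gradient error
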
  /external/tensorflow/tensorflow/contrib/distributions/python/kernel_tests/
estimator_test.py 51 def softplus(x): function in function:EstimatorHeadDistributionRegressionTest.testNormalLocScaleLogits
66 return softplus(logits[..., 1] + scale_bias)
71 scale=nn_ops.softplus(logits[..., 1] + scale_bias))
104 expected_stddev = softplus(logits[..., 1] + scale_bias)
inverse_gamma_test.py 315 self.assertAllClose(nn_ops.softplus(alpha).eval(),
317 self.assertAllClose(nn_ops.softplus(beta).eval(),
  /external/tensorflow/tensorflow/python/ops/distributions/
bernoulli.py 156 nn.softplus(-self.logits))
184 delta_probs0 = nn.softplus(-b.logits) - nn.softplus(-a.logits)
185 delta_probs1 = nn.softplus(b.logits) - nn.softplus(a.logits)
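The bernoulli.py lines above compute the KL divergence between two Bernoullis purely from softplus of logits, using log p = -softplus(-s) and log(1 - p) = -softplus(s): the two softplus differences are exactly the log-probability ratios. A hedged NumPy check (helper names are illustrative) that the softplus form matches the textbook KL:

    import numpy as np

    def softplus(x):
        return np.logaddexp(0.0, x)

    def bernoulli_kl_via_softplus(logits_a, logits_b):
        p_a = 1.0 / (1.0 + np.exp(-logits_a))
        delta0 = softplus(-logits_b) - softplus(-logits_a)  # log p_a - log p_b
        delta1 = softplus(logits_b) - softplus(logits_a)    # log(1-p_a) - log(1-p_b)
        return p_a * delta0 + (1.0 - p_a) * delta1

    def bernoulli_kl_naive(logits_a, logits_b):
        p = 1.0 / (1.0 + np.exp(-logits_a))
        q = 1.0 / (1.0 + np.exp(-logits_b))
        return p * np.log(p / q) + (1.0 - p) * np.log((1.0 - p) / (1.0 - q))

    print(bernoulli_kl_via_softplus(0.3, -1.2))
    print(bernoulli_kl_naive(0.3, -1.2))  # should agree
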
exponential.py 148 """Exponential with softplus transform on `rate`."""
152 "Use `tfd.Exponential(tf.nn.softplus(rate))`.",
162 rate=nn.softplus(rate, name="softplus_rate"),
beta.py 351 """Beta with softplus transform of `concentration1` and `concentration0`."""
355 "Use `tfd.Beta(tf.nn.softplus(concentration1), "
356 "tf.nn.softplus(concentration0))` instead.",
368 concentration1=nn.softplus(concentration1,
370 concentration0=nn.softplus(concentration0,
gamma.py 289 """`Gamma` with softplus of `concentration` and `rate`."""
293 "Use `tfd.Gamma(tf.nn.softplus(concentration), "
294 "tf.nn.softplus(rate))` instead.",
305 concentration=nn.softplus(concentration,
307 rate=nn.softplus(rate, name="softplus_rate"),
laplace.py 221 """Laplace with softplus applied to `scale`."""
225 "Use `tfd.Laplace(loc, tf.nn.softplus(scale)) "
238 scale=nn.softplus(scale, name="softplus_scale"),
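exponential.py, beta.py, gamma.py, and laplace.py above all deprecate the same pattern: a *WithSoftplus* class that constrained its parameters internally, replaced by applying softplus explicitly at the call site. A hedged sketch of the recommended form, assuming `tfd` in those messages refers to a distributions namespace such as tfp.distributions:

    import tensorflow as tf
    import tensorflow_probability as tfp
    tfd = tfp.distributions  # assumption: the `tfd` the messages abbreviate

    unconstrained = tf.constant([-1.0, 0.0, 2.0])
    # Deprecated: ExponentialWithSoftplusRate(rate=unconstrained)
    # Recommended: constrain the parameter explicitly, then construct.
    dist = tfd.Exponential(rate=tf.nn.softplus(unconstrained))
    print(dist.rate)
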
  /external/tensorflow/tensorflow/contrib/keras/api/keras/activations/
__init__.py 29 from tensorflow.python.keras.activations import softplus
  /external/tensorflow/tensorflow/python/keras/
activations.py 117 @keras_export('keras.activations.softplus')
118 def softplus(x): function
119 """Softplus activation function.
125 The softplus activation: `log(exp(x) + 1)`.
127 return nn.softplus(x)
138 The softplus activation: `x / (abs(x) + 1)`.
activations_test.py 41 'softplus', 'softsign', 'selu']
95 def softplus(x): function in function:KerasActivationsTest.test_softplus
99 f = keras.backend.function([x], [keras.activations.softplus(x)])
102 expected = softplus(test_values)
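A short usage sketch of the Keras activation shown above, checking it against the docstring's log(exp(x) + 1) (assumes an eager TF 2.x environment):

    import numpy as np
    import tensorflow as tf

    x = tf.constant([-20.0, -1.0, 0.0, 1.0, 20.0])
    y = tf.keras.activations.softplus(x)
    print(y.numpy())
    print(np.log(np.exp(x.numpy()) + 1.0))  # the docstring's log(exp(x) + 1)
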
  /external/tensorflow/tensorflow/contrib/distributions/python/ops/
mvn_diag.py 229 """MultivariateNormalDiag with `diag_stddev = softplus(diag_stddev)`."""
249 scale_diag=nn.softplus(scale_diag),
logistic.py 199 return -nn_ops.softplus(-self._z(x))
205 return -nn_ops.softplus(self._z(x))
212 return - z - 2. * nn_ops.softplus(-z)
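The logistic.py lines above lean on three identities for a standardized logistic variable z: log CDF(z) = -softplus(-z), log survival = -softplus(z), and an unnormalized log density of -z - 2*softplus(-z). A hedged NumPy check:

    import numpy as np

    def softplus(x):
        return np.logaddexp(0.0, x)

    z = np.array([-3.0, 0.0, 3.0])
    cdf = 1.0 / (1.0 + np.exp(-z))            # logistic CDF is the sigmoid
    print(np.log(cdf), -softplus(-z))         # agree
    print(np.log1p(-cdf), -softplus(z))       # agree
    pdf = np.exp(-z) / (1.0 + np.exp(-z))**2  # logistic density
    print(np.log(pdf), -z - 2.0 * softplus(-z))  # agree
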
inverse_gamma.py 286 """`InverseGamma` with softplus of `concentration` and `rate`."""
305 concentration=nn.softplus(concentration,
307 rate=nn.softplus(rate, name="softplus_rate"),
geometric.py 191 # Claim: entropy(p) = softplus(s)/p - s
199 # = -[-softplus(s) + ps]/p
200 # = softplus(s)/p - s
206 # = -softplus(s)
210 return nn.softplus(self.logits) / probs - self.logits
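The geometric.py comments above derive entropy(p) = softplus(s)/p - s for logits s and p = sigmoid(s). A hedged brute-force check of the claim over a truncated support {0, 1, 2, ...}:

    import numpy as np

    def softplus(x):
        return np.logaddexp(0.0, x)

    s = 0.7                                 # logits
    p = 1.0 / (1.0 + np.exp(-s))            # success probability
    k = np.arange(0, 2000)                  # failures before first success
    pmf = (1.0 - p) ** k * p
    entropy_brute = -np.sum(pmf * np.log(pmf))
    print(entropy_brute, softplus(s) / p - s)  # should agree closely
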
  /external/tensorflow/tensorflow/contrib/nn/python/ops/
scaled_softplus.py 15 """Support for scaled softplus, a smoothed version of ReLU."""
40 This can be seen as a softplus applied to the scaled input, with the output
66 y = alpha * nn.softplus(x / alpha)
72 """Backprop for scaled softplus, with optional clipping."""
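scaled_softplus.py above defines y = alpha * softplus(x / alpha), a smoothed ReLU. A minimal NumPy sketch showing that it recovers the ordinary softplus at alpha = 1 and approaches relu(x) as alpha -> 0:

    import numpy as np

    def softplus(x):
        return np.logaddexp(0.0, x)

    def scaled_softplus(x, alpha):
        return alpha * softplus(x / alpha)

    x = np.linspace(-2.0, 2.0, 5)
    for alpha in (1.0, 0.1, 0.01):
        print(alpha, scaled_softplus(x, alpha))
    print(np.maximum(x, 0.0))  # the ReLU limit
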
  /external/tensorflow/tensorflow/contrib/distributions/python/kernel_tests/bijectors/
softplus_test.py 23 from tensorflow.contrib.distributions.python.ops.bijectors.softplus import Softplus
46 bijector = Softplus(hinge_softness=0., validate_args=True)
52 bijector = Softplus()
53 self.assertEqual("softplus", bijector.name)
62 bijector = Softplus(hinge_softness=1.5)
71 bijector = Softplus()
81 bijector = Softplus()
82 self.assertEqual("softplus", bijector.name)
91 bijector = Softplus()
    [all...]
  /external/tensorflow/tensorflow/python/kernel_tests/distributions/
util_test.py     [all...]
