    Searched refs: max_norm (Results 1 - 22 of 22)

  /external/tensorflow/tensorflow/contrib/gan/python/features/python/
clip_weights_impl.py 78 max_norm=weight_clip,
  /external/tensorflow/tensorflow/python/ops/
clip_ops_test.py 32 def _testClipByNorm(self, inputs, max_norm, expected):
35 clipped = clip_ops.clip_by_norm(input_op, max_norm)
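
For reference, a minimal sketch of the clip_by_norm call these tests exercise; the input values are illustrative and not taken from clip_ops_test.py:

    import tensorflow as tf

    # A tensor whose L2 norm (5.0) exceeds max_norm.
    inputs = tf.constant([[3.0, 4.0]])
    max_norm = 1.0

    # clip_by_norm rescales the tensor so its L2 norm is at most max_norm.
    clipped = tf.clip_by_norm(inputs, max_norm)

    with tf.Session() as sess:
        print(sess.run(clipped))  # [[0.6, 0.8]]
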
embedding_ops.py 58 def _clip(params, ids, max_norm):
61 This function optionally clips embeddings to an l2-norm of max_norm.
66 max_norm: If provided, the embeddings are l2-normalized to the value of
67 max_norm.
90 if max_norm is None:
96 max_norm,
106 max_norm=None,
125 max_norm: See embedding_lookup.
127 If max_norm is provided, transform_fn is applied to the norm-limited
151 result = _clip(_gather(params[0], ids, name=name), ids, max_norm)
    [all...]
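
The _clip helper above is what tf.nn.embedding_lookup applies when max_norm is passed. A hedged sketch of the public call (variable names and ids are illustrative):

    import tensorflow as tf

    params = tf.get_variable("embeddings", shape=[100, 16])
    ids = tf.constant([3, 7, 42])

    # Each looked-up row is l2-clipped so its norm does not exceed 2.0.
    embedded = tf.nn.embedding_lookup(params, ids, max_norm=2.0)
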
  /external/tensorflow/tensorflow/python/keras/_impl/keras/layers/
local_test.py 75 k_constraint = keras.constraints.max_norm(0.01)
76 b_constraint = keras.constraints.max_norm(0.01)
155 k_constraint = keras.constraints.max_norm(0.01)
156 b_constraint = keras.constraints.max_norm(0.01)
normalization_test.py 74 max_norm = keras.constraints.max_norm
76 gamma_constraint=max_norm, beta_constraint=max_norm)
78 self.assertEqual(layer.gamma.constraint, max_norm)
79 self.assertEqual(layer.beta.constraint, max_norm)
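
The normalization test attaches max_norm to the gamma and beta weights of a BatchNormalization layer. A minimal sketch of that wiring (the 2.0 limit is illustrative; the test itself passes the constraint callable directly):

    import tensorflow as tf

    max_norm = tf.keras.constraints.max_norm(2.0)

    layer = tf.keras.layers.BatchNormalization(
        gamma_constraint=max_norm,  # applied to the scale weights after each update
        beta_constraint=max_norm)   # applied to the shift weights after each update
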
gru_test.py 168 k_constraint = keras.constraints.max_norm(0.01)
169 r_constraint = keras.constraints.max_norm(0.01)
170 b_constraint = keras.constraints.max_norm(0.01)
simplernn_test.py 168 k_constraint = keras.constraints.max_norm(0.01)
169 r_constraint = keras.constraints.max_norm(0.01)
170 b_constraint = keras.constraints.max_norm(0.01)
core_test.py 207 k_constraint = keras.constraints.max_norm(0.01)
208 b_constraint = keras.constraints.max_norm(0.01)
lstm_test.py 183 k_constraint = keras.constraints.max_norm(0.01)
184 r_constraint = keras.constraints.max_norm(0.01)
185 b_constraint = keras.constraints.max_norm(0.01)
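
The GRU, SimpleRNN, Dense (core_test) and LSTM tests all pass max_norm(0.01) as the kernel, recurrent and bias constraints. A hedged sketch for the LSTM case (the unit count is illustrative):

    import tensorflow as tf

    constraint = tf.keras.constraints.max_norm(0.01)

    layer = tf.keras.layers.LSTM(
        units=8,
        kernel_constraint=constraint,     # input-to-hidden weights
        recurrent_constraint=constraint,  # hidden-to-hidden weights
        bias_constraint=constraint)       # bias vector
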
  /external/tensorflow/tensorflow/contrib/keras/api/keras/constraints/
__init__.py 23 from tensorflow.python.keras._impl.keras.constraints import max_norm
  /external/tensorflow/tensorflow/contrib/opt/python/training/
variable_clipping_optimizer.py 56 max_norm,
68 max_norm: The L2-norm to clip to, for all variables specified.
80 self._max_norm = max_norm
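
VariableClippingOptimizer wraps another optimizer and clips the specified variables to max_norm after each update. A hedged sketch; the argument names other than max_norm are recalled from the contrib API rather than taken from the hits above:

    import tensorflow as tf

    var = tf.get_variable("embedding", shape=[1000, 64])
    base_opt = tf.train.GradientDescentOptimizer(learning_rate=0.1)

    # After each step, clip `var` to an L2 norm of 1.0 along dimension 1.
    clipping_opt = tf.contrib.opt.VariableClippingOptimizer(
        base_opt, vars_to_clip_dims={var: [1]}, max_norm=1.0)
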
  /external/tensorflow/tensorflow/python/keras/_impl/keras/
constraints.py 40 @tf_export('keras.constraints.MaxNorm', 'keras.constraints.max_norm')
168 max_norm = MaxNorm variable
174 maxnorm = max_norm
constraints_test.py 41 all_activations = ['max_norm', 'non_neg',
55 norm_instance = keras.constraints.max_norm(m)
60 norm_instance = keras.constraints.max_norm(2.0)
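
constraints.py exports MaxNorm under both spellings (max_norm, plus the legacy alias maxnorm), and the test instantiates it directly. A hedged sketch of calling the constraint on a raw weight tensor (shapes and the 2.0 limit are illustrative):

    import numpy as np
    import tensorflow as tf

    norm_instance = tf.keras.constraints.max_norm(2.0)

    weights = tf.constant(3.0 * np.random.randn(4, 3), dtype=tf.float32)
    constrained = norm_instance(weights)  # columns with L2 norm > 2.0 are rescaled

    with tf.Session() as sess:
        print(sess.run(tf.norm(constrained, axis=0)))  # each column norm <= ~2.0
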
  /external/tensorflow/tensorflow/python/keras/constraints/
__init__.py 23 from tensorflow.python.keras._impl.keras.constraints import max_norm
  /external/tensorflow/tensorflow/contrib/layers/python/layers/
embedding_ops.py 55 max_norm=None):
89 max_norm: If not None, all embeddings are l2-normalized to max_norm before
158 max_norm=max_norm)
573 max_norm=None):
601 max_norm: If not None, each embedding is normalized to have l2 norm equal
602 to max_norm before combining.
657 max_norm=max_norm,
    [all...]
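
These contrib lookup wrappers l2-normalize every embedding to max_norm before the combiner runs. A hedged sketch assuming the hits belong to safe_embedding_lookup_sparse (ids and shapes are illustrative):

    import tensorflow as tf

    embedding_weights = [tf.get_variable("w", shape=[50, 8])]
    sparse_ids = tf.SparseTensor(
        indices=[[0, 0], [0, 1], [1, 0]],
        values=tf.constant([3, 7, 12], dtype=tf.int64),
        dense_shape=[2, 2])

    # Each looked-up embedding is clipped to L2 norm 1.0, then mean-combined per row.
    combined = tf.contrib.layers.safe_embedding_lookup_sparse(
        embedding_weights, sparse_ids, combiner="mean", max_norm=1.0)
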
optimizers.py 313 """Find max_norm given norm and previous average."""
360 `max_norm = exp(mean + std_factor*std)`
366 report_summary: If `True`, will add histogram summaries of the `max_norm`.
380 max_norm, log_mean = _adaptive_max_norm(norm, std_factor, decay,
385 summary.scalar("global_norm/adaptive_max_gradient_norm", max_norm)
387 # factor will be 1. if norm is smaller than max_norm
388 factor = array_ops.where(norm < max_norm,
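
The adaptive clipping code above tracks moving statistics of log(global_norm) and derives max_norm = exp(mean + std_factor*std), scaling gradients only when the current norm exceeds that bound. A small numpy sketch of that rule, not the library code itself (the bias correction is my approximation):

    import numpy as np

    def adaptive_max_norm(norms, std_factor=2.0, decay=0.95):
        """Yield max_norm = exp(mean + std_factor*std) of log(norm) per step."""
        log_mean, log_sq_mean = 0.0, 0.0
        for i, norm in enumerate(norms):
            log_norm = np.log(norm)
            log_mean = decay * log_mean + (1.0 - decay) * log_norm
            log_sq_mean = decay * log_sq_mean + (1.0 - decay) * log_norm ** 2
            correction = 1.0 - decay ** (i + 1)      # de-bias the moving averages
            mean = log_mean / correction
            std = np.sqrt(max(log_sq_mean / correction - mean ** 2, 0.0))
            yield np.exp(mean + std_factor * std)

    for max_norm in adaptive_max_norm([5.0, 4.5, 50.0, 4.0]):
        print(max_norm)  # a gradient norm above this value would be scaled down
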
feature_column.py 188 "max_norm",
    [all...]
feature_column_ops_test.py     [all...]
  /external/tensorflow/tensorflow/python/feature_column/
feature_column.py 569 ckpt_to_load_from=None, tensor_name_in_ckpt=None, max_norm=None,
629 max_norm: If not `None`, embedding values are l2-normalized to this value.
663 max_norm=max_norm,
671 tensor_name_in_ckpt=None, max_norm=None, trainable=True):
751 max_norm: If not `None`, embedding values are l2-normalized to this value.
    [all...]
feature_column_test.py     [all...]
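
In the core feature_column API, max_norm on an embedding_column l2-normalizes the embedding values before they are combined. A hedged sketch (the column name, vocabulary and dimension are illustrative):

    import tensorflow as tf

    colors = tf.feature_column.categorical_column_with_vocabulary_list(
        "color", vocabulary_list=["red", "green", "blue"])

    # Embedding values are l2-normalized to 1.0 before the "mean" combiner is applied.
    color_embedding = tf.feature_column.embedding_column(
        colors, dimension=4, combiner="mean", max_norm=1.0)
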
  /external/tensorflow/tensorflow/python/kernel_tests/
embedding_ops_test.py 264 [embeddings], ids, max_norm=1.0)
274 [embeddings], ids, max_norm=2.0)
563 params, ids, max_norm=1.0).eval()
572 split_params, ids, max_norm=1.0).eval()
579 # It always applies max_norm.
598 params, ids, max_norm=l2_norm, transform_fn=transform).eval()
607 split_params, ids, max_norm=l2_norm,
    [all...]
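
The kernel tests cover max_norm for both the dense and the sparse lookup. A hedged sketch of the sparse variant (ids, shapes and the combiner are illustrative):

    import tensorflow as tf

    embeddings = tf.get_variable("emb", shape=[10, 4])
    sp_ids = tf.SparseTensor(
        indices=[[0, 0], [1, 0], [1, 1]],
        values=tf.constant([1, 3, 5], dtype=tf.int64),
        dense_shape=[2, 2])

    # Each embedding row is clipped to L2 norm 1.0 before the "sum" combiner runs.
    result = tf.nn.embedding_lookup_sparse(
        [embeddings], sp_ids, sp_weights=None, combiner="sum", max_norm=1.0)
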
  /external/tensorflow/tensorflow/contrib/slim/python/slim/
learning.py 280 def clip_gradient_norms(gradients_to_variables, max_norm):
285 max_norm: the maximum norm value.
294 tmp = clip_ops.clip_by_norm(grad.values, max_norm)
297 grad = clip_ops.clip_by_norm(grad, max_norm)
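
clip_gradient_norms applies clip_by_norm to each gradient (dense tensor or IndexedSlices) individually rather than to the global norm. A hedged sketch of how it slots into a training step (loss and variables are illustrative):

    import tensorflow as tf

    slim = tf.contrib.slim

    var = tf.get_variable("w", shape=[3], initializer=tf.ones_initializer())
    loss = tf.reduce_sum(tf.square(10.0 * var))
    optimizer = tf.train.GradientDescentOptimizer(0.1)

    grads_and_vars = optimizer.compute_gradients(loss)
    # Each gradient is clipped to an L2 norm of at most 5.0, one tensor at a time.
    clipped = slim.learning.clip_gradient_norms(grads_and_vars, max_norm=5.0)
    train_op = optimizer.apply_gradients(clipped)
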

Completed in 260 milliseconds