    Searched refs:backprop (Results 1 - 25 of 32)

  /external/tensorflow/tensorflow/core/kernels/
xent_op.h 29 // Computes Cross Entropy loss and backprop.
35 // backprop: output tensor for the backprop, dims: batch_size, num_classes.
40 typename TTypes<T>::Matrix backprop);
52 typename TTypes<T>::Matrix backprop) {
90 backprop.device(d) = logits - scratch.broadcast(one_by_class);
93 scratch.reshape(batch_only).device(d) = backprop.exp().sum(along_class);
103 (labels * (scratch.log().eval().broadcast(one_by_class) - backprop))
107 // backprop: prob - labels, where
109 backprop.device(d)
    [all...]
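
The xent_op.h snippets above trace the numerically stable softmax cross-entropy kernel: shift the logits by the per-row max, sum the exponentials into a scratch buffer, form the loss from labels * (log(scratch) - shifted logits), then overwrite backprop with prob - labels. A minimal NumPy sketch of the same sequence (illustrative only, not the TF kernel):

    import numpy as np

    def xent_with_backprop(logits, labels):
        # backprop = logits - max_logits (row-wise shift for stability)
        shifted = logits - logits.max(axis=1, keepdims=True)
        # scratch = sum(exp(backprop)) along the class dimension
        scratch = np.exp(shifted).sum(axis=1, keepdims=True)
        # loss = sum(labels * (log(scratch) - backprop)) per example
        loss = (labels * (np.log(scratch) - shifted)).sum(axis=1)
        # backprop = prob - labels, where prob = softmax(logits)
        backprop = np.exp(shifted) / scratch - labels
        return loss, backprop
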
sparse_xent_op.h 134 // Computes Cross Entropy loss and backprop.
140 // backprop: output tensor for the backprop, dims: batch_size, num_classes.
144 typename TTypes<T>::Matrix backprop);
156 typename TTypes<T>::Matrix backprop) {
193 // backprop = logits - max_logits.
194 To32Bit(backprop).device(d) =
199 To32Bit(scratch).device(d) = To32Bit(backprop).exp().sum(along_class);
205 sparse_xent_helpers::To32BitConst<T>(backprop),
207 backprop.dimension(1) /* max_depth */)
    [all...]
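
sparse_xent_op.h implements the same computation for integer class indices instead of one-hot label matrices. A sketch of the sparse variant, assuming the same stabilized math as above:

    import numpy as np

    def sparse_xent_with_backprop(logits, label_ids):
        shifted = logits - logits.max(axis=1, keepdims=True)
        log_z = np.log(np.exp(shifted).sum(axis=1))      # per-row log-normalizer
        rows = np.arange(logits.shape[0])
        loss = log_z - shifted[rows, label_ids]          # -log softmax at the label
        backprop = np.exp(shifted - log_z[:, None])      # softmax(logits)
        backprop[rows, label_ids] -= 1.0                 # prob - one_hot(label)
        return loss, backprop
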
sparse_xent_op_gpu.cu.cc 37 typename TTypes<T>::Matrix backprop) {
39 scratch, loss, backprop);
xent_op_gpu.cu.cc 38 typename TTypes<T>::Matrix backprop) {
40 backprop);
sparse_xent_op.cc 108 typename TTypes<T>::Matrix backprop) {
110 scratch, loss, backprop);
xent_op.cc 67 // Try to reuse the logits_in buffer for the backprop output.
88 typename TTypes<T>::Matrix backprop) {
90 backprop);
  /external/tensorflow/tensorflow/python/eager/
backprop_test.py 24 from tensorflow.python.eager import backprop
64 grad = backprop.gradients_function(fn, [0])(var)[0]
94 grads_and_vars = backprop.implicit_grad(fn)()
103 grad_fn = backprop.gradients_function(f)
116 backprop.gradients_function(f)(constant_op.constant(1.0))
134 grad = backprop.implicit_grad(f)()[0][0]
165 grads = backprop.implicit_grad(f)()
182 g, = backprop.gradients_function(loss, [0])(logits, labels)
195 grad = backprop.gradients_function(first, [0])(x)[0]
199 grad = backprop.gradients_function(second, [0])(f)[0]
    [all...]
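
backprop_test.py exercises backprop.gradients_function, which wraps a Python function and returns its derivatives with respect to the listed positional arguments. A hedged usage sketch, assuming a TF 1.x eager build matching this tree:

    import tensorflow as tf
    from tensorflow.python.eager import backprop

    tf.enable_eager_execution()

    def f(x):
        return x * x

    # Differentiate f with respect to positional argument 0.
    grad_f = backprop.gradients_function(f, [0])
    print(grad_f(tf.constant(3.0))[0])   # 2 * x = 6.0
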
tape_test.py 22 from tensorflow.python.eager import backprop
74 da, db = backprop.gradients_function(fn, [0, 1])(a, b)
94 da, = backprop.gradients_function(forward, ['a'])(aa, bb)
108 da, = backprop.gradients_function(forward, [0])(aa, bb)
122 val, (da,) = backprop.val_and_grad_function(forward, ['a'])(aa, bb)
137 da, db = backprop.gradients_function(fn, [0, 1])(a, b)
156 grad, = backprop.gradients_function(fn, [0])(logits, labels)
165 g, = backprop.gradients_function(fn, [0])(t)
pywrap_tfe_test.py 22 from tensorflow.python.eager import backprop
63 with backprop.GradientTape(persistent=True) as tape:
95 with backprop.GradientTape(persistent=True) as tape:
128 with backprop.GradientTape(persistent=True) as tape:
158 ctx_handle, ctx_handle, "Identity", backprop._record_gradient, None,
164 ctx_handle, ctx.device_name, ctx_handle, backprop._record_gradient,
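
The persistent GradientTape pattern seen in pywrap_tfe_test.py records operations so the tape can be queried more than once. A minimal sketch under the same TF 1.x eager assumption:

    import tensorflow as tf
    from tensorflow.python.eager import backprop

    tf.enable_eager_execution()

    x = tf.constant(3.0)
    with backprop.GradientTape(persistent=True) as tape:
        tape.watch(x)            # constants must be watched explicitly
        y = x * x
        z = y * y
    print(tape.gradient(z, x))   # 4 * x**3 = 108.0
    print(tape.gradient(y, x))   # 2 * x    = 6.0 (persistent=True allows reuse)
    del tape                     # release the tape's recorded state
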
function_test.py 22 from tensorflow.python.eager import backprop
85 return backprop.implicit_grad(inner)()[0][0]
145 return backprop.implicit_grad(inner)()[0][0]
226 self.assertAllEqual(backprop.implicit_grad(f)()[0][0], 2.0)
235 self.assertAllEqual(backprop.implicit_grad(f)()[0][0], 2.0)
237 self.assertAllEqual(backprop.implicit_grad(f)()[0][0], 2.0)
305 return backprop.gradients_function(f, [0])(x)[0]
317 backprop.implicit_val_and_grad(f)()
323 return backprop.gradients_function(math_ops.multiply, [0, 1])(x, x)
357 g = backprop.implicit_grad(g)(constant_op.constant(1.0))[0][0]
    [all...]
benchmarks_test.py 35 from tensorflow.python.eager import backprop # pylint: disable=unused-import
212 lambda: backprop.gradients_function(gen_array_ops.identity, [0])(m),
216 with backprop.GradientTape() as tape:
224 with backprop.GradientTape():
231 lambda: backprop.gradients_function(lambda x: x, [0])(m),
279 with backprop.GradientTape() as tape:
graph_callable_test.py 20 from tensorflow.python.eager import backprop
243 grad_fn = backprop.implicit_grad(my_function)
  /external/tensorflow/tensorflow/contrib/eager/python/
tfe.py 85 from tensorflow.python.eager import backprop
113 implicit_gradients = backprop.implicit_grad
114 implicit_value_and_gradients = backprop.implicit_val_and_grad
115 gradients_function = backprop.gradients_function
116 value_and_gradients_function = backprop.val_and_grad_function
117 GradientTape = backprop.GradientTape # pylint: disable=invalid-name
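
tfe.py simply re-exports the eager backprop entry points under the public tensorflow.contrib.eager namespace, so user code reaches the same objects through tfe:

    import tensorflow.contrib.eager as tfe

    # tfe.gradients_function is the same object as
    # backprop.gradients_function, per the aliases above.
    grad_fn = tfe.gradients_function(lambda x: x * x)
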
  /external/tensorflow/tensorflow/compiler/tf2xla/kernels/
softmax_op.cc 117 // backprop: prob - labels, where
120 xla::ComputationDataHandle backprop =
122 return {loss, backprop};
147 xla::ComputationDataHandle loss, backprop;
148 std::tie(loss, backprop) =
151 ctx->SetOutput(1, backprop);
215 xla::ComputationDataHandle loss, backprop;
216 std::tie(loss, backprop) =
219 ctx->SetOutput(1, backprop);
  /external/tensorflow/tensorflow/python/kernel_tests/
depthwise_conv_op_test.py 464 backprop = nn_ops.depthwise_conv2d_native_backprop_input(
466 ret = backprop.eval()
467 self.assertShapeEqual(ret, backprop)
484 backprop = nn_ops.depthwise_conv2d_native_backprop_input(
486 ret = backprop.eval()
487 self.assertShapeEqual(ret, backprop)
515 backprop = nn_ops.depthwise_conv2d_native_backprop_filter(
517 ret = backprop.eval()
518 self.assertShapeEqual(ret, backprop)
535 backprop = nn_ops.depthwise_conv2d_native_backprop_filter
    [all...]
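
depthwise_conv_op_test.py drives the explicit gradient kernels directly: depthwise_conv2d_native_backprop_input recovers the gradient with respect to the input, and the _backprop_filter variant the gradient with respect to the filter. A sketch with hypothetical shapes (NHWC 1x5x5x2 input, 3x3 filter, channel multiplier 1):

    import tensorflow as tf
    from tensorflow.python.ops import nn_ops

    input_sizes = tf.constant([1, 5, 5, 2], dtype=tf.int32)
    filt = tf.random_normal([3, 3, 2, 1])          # HWIO with multiplier 1
    out_backprop = tf.random_normal([1, 5, 5, 2])  # upstream gradient

    in_grad = nn_ops.depthwise_conv2d_native_backprop_input(
        input_sizes, filt, out_backprop,
        strides=[1, 1, 1, 1], padding="SAME")
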
sparse_xent_op_test.py 67 loss, backprop = gen_nn_ops._sparse_softmax_cross_entropy_with_logits(
69 tf_loss, tf_backprop = sess.run([loss, backprop])
76 loss, backprop = gen_nn_ops._sparse_softmax_cross_entropy_with_logits(
79 tf_loss, tf_backprop = sess.run([loss, backprop])
90 loss, backprop = (gen_nn_ops._sparse_softmax_cross_entropy_with_logits(
92 tf_loss, tf_backprop = sess.run([loss, backprop])
103 loss, backprop = (gen_nn_ops._sparse_softmax_cross_entropy_with_logits(
106 sess.run([loss, backprop])
116 # With a hard target 3, the backprop is [0.25, 0.25, 0.25, -0.75]
130 # With a hard 1, the backprop is [0.032 - 1.0 = -0.968, 0.087, 0.237, 0.644]
    [all...]
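
The "hard target 3" comment is easy to verify by hand: with equal logits the softmax is uniform, so backprop = prob - one_hot(3). Assuming logits of all ones (consistent with the expected values):

    import numpy as np

    logits = np.array([1., 1., 1., 1.])
    prob = np.exp(logits) / np.exp(logits).sum()   # uniform: [0.25] * 4
    one_hot = np.array([0., 0., 0., 1.])           # hard target 3
    print(prob - one_hot)                          # [0.25, 0.25, 0.25, -0.75]
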
list_ops_test.py 25 from tensorflow.python.eager import backprop
158 with backprop.GradientTape() as tape:
169 with backprop.GradientTape() as tape:
179 with backprop.GradientTape() as tape:
xent_op_test.py 51 loss, backprop = gen_nn_ops._softmax_cross_entropy_with_logits(
53 tf_loss, tf_backprop = sess.run([loss, backprop])
74 loss, backprop = gen_nn_ops._softmax_cross_entropy_with_logits(
77 tf_loss, tf_backprop = sess.run([loss, backprop])
103 # With a hard target 3, the backprop is [0.25, 0.25, 0.25, -0.75]
117 # With a soft target (1, 2), the backprop is
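
The soft-target comment follows the same prob - labels rule. Assuming logits [1, 2, 3, 4] (which give softmax ~[0.032, 0.087, 0.237, 0.644], matching the hard-target values quoted above) with the label mass split between classes 1 and 2:

    import numpy as np

    logits = np.array([1., 2., 3., 4.])
    prob = np.exp(logits) / np.exp(logits).sum()   # ~[0.032, 0.087, 0.237, 0.644]
    soft = np.array([0., 0.5, 0.5, 0.])            # soft target on classes (1, 2)
    print(prob - soft)                             # ~[0.032, -0.413, -0.263, 0.644]
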
  /external/tensorflow/tensorflow/compiler/tests/
depthwise_conv_op_test.py 330 backprop = nn_ops.depthwise_conv2d_native_backprop_input(
333 backprop = nn_ops.depthwise_conv2d_native_backprop_input(
336 ret = backprop.eval({t1: x1, t2: x2})
337 self.assertShapeEqual(ret, backprop)
365 backprop = nn_ops.depthwise_conv2d_native_backprop_filter(
368 backprop = nn_ops.depthwise_conv2d_native_backprop_filter(
370 ret = backprop.eval({t0: x0, t2: x2})
371 self.assertShapeEqual(ret, backprop)
randomized_tests.cc     [all...]
  /external/tensorflow/tensorflow/contrib/framework/python/ops/
accumulate_n_v2_eager_test.py 29 from tensorflow.python.eager import backprop
70 grad_fn = backprop.gradients_function(fn)
  /external/tensorflow/tensorflow/python/ops/
gradient_checker.py 97 # everything else to be 0 and compute the backprop -- this will give us one
114 backprop = sess.run(
116 jacobian[:, col] = backprop.ravel().view(jacobian.dtype)
122 backprop = sess.run(
124 if backprop.shape != x_data.shape:
126 (x_data.shape, backprop.shape))
127 if np.any(backprop):
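
gradient_checker.py builds the theoretical Jacobian one column at a time: it sets a single element of the output gradient to 1, runs backprop, and stores the flattened result as that column. A schematic sketch, where backprop_fn is a hypothetical stand-in for the session run:

    import numpy as np

    def jacobian_via_backprop(backprop_fn, x, output_size):
        # backprop_fn(x, dy) -> dL/dx for upstream gradient dy (hypothetical).
        jacobian = np.zeros((x.size, output_size))
        for col in range(output_size):
            dy = np.zeros(output_size)
            dy[col] = 1.0                       # probe one output element
            jacobian[:, col] = backprop_fn(x, dy).ravel()
        return jacobian
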
control_flow_ops.py 788 corresponding while loop in backprop. This gives us access to both
789 the forward and the backprop WhileContexts.
792 a forward value that is needed for backprop, we create a history
793 accumulator and add it to `history_map`. Any time when we backprop
812 # The while loop context for backprop.
    [all...]
nn_batchnorm_test.py 193 # If scale_after_normalization is False, backprop for gamma in v1
221 backprop = constant_op.constant(backprop_val, name="backprop")
227 x, m, v, gamma, backprop, epsilon, scale_after_normalization)
238 [on], [x, m, v, beta, gamma], [backprop])
  /external/tensorflow/tensorflow/compiler/xla/tests/
client_library_test_base.cc 487 auto backprop = builder.Parameter(1, shape, "backprop");
492 builder.Select(activation_gtz, /*on_true=*/backprop, /*on_false=*/zero);
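
The Select(activation_gtz, backprop, zero) pattern here is the ReLU gradient rule: pass the incoming gradient through only where the forward activation was positive. In NumPy terms:

    import numpy as np

    activation = np.array([-1.0, 0.0, 2.0, 3.0])
    backprop = np.array([10.0, 10.0, 10.0, 10.0])   # incoming gradient
    grad = np.where(activation > 0, backprop, 0.0)  # Select(activation_gtz, ...)
    print(grad)                                     # [ 0.  0. 10. 10.]
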
