OpenGrok
Searched full:backprops (Results 1 - 25 of 27)
/external/tensorflow/tensorflow/core/kernels/
relu_op_functor.h
  42: // Computes ReluGrad backprops.
  47: // backprops: gradients to backpropagate to the Relu inputs.
  50: typename TTypes<T>::Tensor backprops) {
  54: backprops.device(d) =
  76: // Computes Relu6Grad backprops.
  80: // backprops: gradients to backpropagate to the Relu6 inputs.
  83: typename TTypes<T>::Tensor backprops) {
  88: backprops.device(d) = gradients * ((features > static_cast<T>(0)) *
  114: // Computes EluGrad backprops.
  118: // backprops: gradients to backpropagate to the Elu inputs
  [all ...]
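The functor snippets above all share one pattern: the incoming gradients are masked by a predicate on the forward inputs (line 88 shows Relu6Grad starting the mask 0 < x < 6 rather than x > 0). As a minimal standalone sketch, using plain Eigen tensors instead of TensorFlow's TTypes<T> maps and illustrative variable names, ReluGrad reduces to:

    #include <unsupported/Eigen/CXX11/Tensor>
    #include <iostream>

    int main() {
      Eigen::Tensor<float, 1> features(4), gradients(4), backprops(4);
      features.setValues({-1.f, 0.f, 2.f, 3.f});
      gradients.setValues({10.f, 10.f, 10.f, 10.f});
      // Pass each gradient through only where the forward input was
      // positive, since d/dx relu(x) is 1 for x > 0 and 0 otherwise.
      backprops = gradients * (features > 0.f).cast<float>();
      for (int i = 0; i < 4; ++i) std::cout << backprops(i) << " ";
      std::cout << "\n";  // prints: 0 0 10 10
    }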
softsign_op.h
  44: // Computes SoftsignGrad backprops.
  48: // backprops: gradients to backpropagate to the Softsign inputs.
  51: typename TTypes<T>::Tensor backprops) {
  52: backprops.device(d) =
softplus_op.h
  60: // Computes SoftplusGrad backprops.
  64: // backprops: gradients to backpropagate to the Softplus inputs.
  67: typename TTypes<T>::Tensor backprops) {
  68: backprops.device(d) =
relu_op.cc
  85: typename TTypes<T>::Tensor backprops); \
  98: typename TTypes<T>::Tensor backprops); \
  111: typename TTypes<T>::Tensor backprops); \
  124: typename TTypes<T>::Tensor backprops); \
conv_grad_ops.h
  22: // And we need to compute two backprops: one for input and one for filter. We
  25: // Both backprops can be computed as straightforward conv2d.
  56: // So when we have backprops for the outputs (we denote them by
  59: // The backprops for the input are:
  75: // The backprops for the filter are:
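The conv_grad_ops.h comments state the standard result: given backprops for the outputs, the backprops for the input are a convolution of the output gradients with the reversed filter, and the backprops for the filter are a correlation of the input with the output gradients. A minimal 1-D, stride-1, no-padding sketch of both (hypothetical names, not the TensorFlow implementation):

    #include <cstdio>
    #include <vector>

    int main() {
      std::vector<double> x = {1, 2, 3, 4};   // input
      std::vector<double> w = {0.5, -1};      // filter
      const int n = 4, k = 2, m = n - k + 1;  // output size 3

      // Forward convention: y[i] = sum_j x[i + j] * w[j] (a correlation).
      std::vector<double> dy = {1, 1, 1};     // backprops for the outputs

      // Backprops for the filter: dw[j] = sum_i dy[i] * x[i + j],
      // a correlation of the input with the output gradients.
      std::vector<double> dw(k, 0);
      for (int j = 0; j < k; ++j)
        for (int i = 0; i < m; ++i) dw[j] += dy[i] * x[i + j];

      // Backprops for the input: dx[p] = sum_j dy[p - j] * w[j], a full
      // convolution of dy with w, i.e. a correlation with the reversed
      // filter (compare the "reverse the filter" remark in
      // conv_grad_ops_3d.cc below).
      std::vector<double> dx(n, 0);
      for (int p = 0; p < n; ++p)
        for (int j = 0; j < k; ++j)
          if (p - j >= 0 && p - j < m) dx[p] += dy[p - j] * w[j];

      for (double v : dw) std::printf("%g ", v);  // 6 9
      std::printf("\n");
      for (double v : dx) std::printf("%g ", v);  // 0.5 -0.5 -0.5 -1
      std::printf("\n");
    }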
softplus_op.cc
  109: typename TTypes<T>::Tensor backprops); \
softsign_op.cc
  110: typename TTypes<T>::Tensor backprops); \
fake_quant_ops_functor.h
  107: Flat<float> backprops) {
  119: backprops.device(d) = gradients * between_nudged_min_max;
fake_quant_ops.cc
  164: typename TTypes<float>::Flat backprops);
conv_grad_ops_3d.cc
  349: // And we need to reverse the filter backprops.
  [all ...]
/external/tensorflow/tensorflow/core/api_def/base_api/
api_def_EluGrad.pbtxt
  17: name: "backprops"
api_def_FakeQuantWithMinMaxArgsGradient.pbtxt
  16: name: "backprops"
api_def_Relu6Grad.pbtxt
  18: name: "backprops"
api_def_ReluGrad.pbtxt
  18: name: "backprops"
api_def_SeluGrad.pbtxt
  17: name: "backprops"
api_def_SoftplusGrad.pbtxt
  17: name: "backprops"
api_def_SoftsignGrad.pbtxt
  17: name: "backprops"
/external/tensorflow/tensorflow/cc/framework/
gradients.cc
  118: // backprops. When pending[i] becomes zero, we collected all
  406: std::map<Node*, Output>& backprops = while_backprops_[while_ctx];
  407: DCHECK(backprops.find(exit_node) == backprops.end());
  408: backprops[exit_node] = summed_grads;
  410: // Wait until we have all exit nodes' backprops collected before processing
  413: if (backprops.size() < while_ctx->exit_nodes().size()) return Status::OK();
  416: // backprops. Create the gradient graph for the while loop.
  420: for (Node* n : while_ctx->exit_nodes()) dy.push_back(backprops[n]);
  438: // Initialize backprops
  [all ...]
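The gradients.cc comments describe a counting scheme: each node's summed gradient accumulates in backprops while pending[i] tracks how many contributions are still outstanding, and processing continues only once the count reaches zero (the exit-node size check on line 413 is the while-loop variant of the same idea). A toy illustration of that pattern, with hypothetical names rather than TensorFlow's Node/Output types:

    #include <cstdio>
    #include <map>
    #include <string>

    struct GradAccumulator {
      std::map<std::string, double> backprops;  // summed gradient per node
      std::map<std::string, int> pending;       // contributions still expected

      void Expect(const std::string& node, int count) {
        pending[node] = count;
        backprops[node] = 0.0;
      }

      // Adds one incoming gradient; returns true once every expected
      // contribution has arrived and the node is ready to propagate.
      bool Add(const std::string& node, double grad) {
        backprops[node] += grad;
        return --pending[node] == 0;
      }
    };

    int main() {
      GradAccumulator acc;
      acc.Expect("x", 2);                      // x fans out to two consumers
      std::printf("%d\n", acc.Add("x", 1.5));  // 0: still waiting
      std::printf("%d\n", acc.Add("x", 2.5));  // 1: ready
      std::printf("sum = %g\n", acc.backprops["x"]);  // sum = 4
    }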
/external/tensorflow/tensorflow/core/graph/
gradients.cc
  151: // backprops. When pending[i] becomes zero, we collected all
  306: // Initialize backprops.
  356: // Backprops along the in edges.
/external/tensorflow/tensorflow/compiler/tests/
fake_quant_ops_test.py
  214: backprops = session.run(outputs, {
  219: backprops,
/external/tensorflow/tensorflow/python/kernel_tests/
conv_ops_test.py
  519: # Testing for backprops
  547: # "values" consists of two tensors for two backprops
  693: # Testing for backprops
  [all ...]
/external/tensorflow/tensorflow/core/ops/
nn_ops.cc
  966: .Output("backprops: T")
  [all ...]
ops.pbtxt
  [all ...]
/external/tensorflow/tensorflow/core/ops/compat/
ops_history.v0.pbtxt
  [all ...]
ops_history.v1.pbtxt
  [all ...]
Completed in 908 milliseconds