    Searched full:use_locking (Results 1 - 25 of 144)


  /external/tensorflow/tensorflow/python/training/
proximal_gradient_descent.py 39 l2_regularization_strength=0.0, use_locking=False,
50 use_locking: If True use locks for update operations.
54 super(ProximalGradientDescentOptimizer, self).__init__(use_locking, name)
68 use_locking=self._use_locking).op
77 use_locking=self._use_locking)
87 use_locking=self._use_locking).op
97 use_locking=self._use_locking)
gradient_descent.py 34 def __init__(self, learning_rate, use_locking=False, name="GradientDescent"):
40 use_locking: If True use locks for update operations.
44 super(GradientDescentOptimizer, self).__init__(use_locking, name)
52 use_locking=self._use_locking).op
58 grad, use_locking=self._use_locking)
69 return var.scatter_sub(delta, use_locking=self._use_locking)
adadelta.py 37 use_locking=False, name="Adadelta"):
46 use_locking: If `True` use locks for update operations.
50 super(AdadeltaOptimizer, self).__init__(use_locking, name)
81 use_locking=self._use_locking)
94 use_locking=self._use_locking)
108 use_locking=self._use_locking)
122 use_locking=self._use_locking)
adagrad.py 41 use_locking=False, name="Adagrad"):
48 use_locking: If `True` use locks for update operations.
58 super(AdagradOptimizer, self).__init__(use_locking, name)
91 use_locking=self._use_locking)
100 use_locking=self._use_locking)
110 use_locking=self._use_locking)
120 use_locking=self._use_locking)
momentum.py 47 use_locking=False, name="Momentum", use_nesterov=False):
53 use_locking: If `True` use locks for update operations.
70 super(MomentumOptimizer, self).__init__(use_locking, name)
97 use_locking=self._use_locking,
107 use_locking=self._use_locking,
117 use_locking=self._use_locking,
127 use_locking=self._use_locking,
proximal_adagrad.py 39 use_locking=False, name="ProximalAdagrad"):
50 use_locking: If `True` use locks for update operations.
60 super(ProximalAdagradOptimizer, self).__init__(use_locking, name)
94 grad, use_locking=self._use_locking)
102 grad, use_locking=self._use_locking)
111 use_locking=self._use_locking)
121 use_locking=self._use_locking)
adam.py 41 use_locking=False, name="Adam"):
89 use_locking: If True use locks for update operations.
93 super(AdamOptimizer, self).__init__(use_locking, name)
152 grad, use_locking=self._use_locking).op
166 grad, use_locking=self._use_locking)
181 use_locking=self._use_locking)
187 v_t = state_ops.assign(v, v * beta2_t, use_locking=self._use_locking)
193 use_locking=self._use_locking)
200 x, i, v, use_locking=self._use_locking))
218 beta1_power * self._beta1_t, use_locking=self._use_locking
    [all...]
ftrl.py 45 use_locking=False,
61 use_locking: If `True` use locks for update operations.
85 super(FtrlOptimizer, self).__init__(use_locking, name)
158 use_locking=self._use_locking)
173 use_locking=self._use_locking)
190 use_locking=self._use_locking)
205 use_locking=self._use_locking)
223 use_locking=self._use_locking)
239 use_locking=self._use_locking)
255 use_locking=self._use_locking
    [all...]
rmsprop.py 66 use_locking=False,
88 use_locking: If True use locks for update operation.
96 super(RMSPropOptimizer, self).__init__(use_locking, name)
145 use_locking=self._use_locking).op
156 use_locking=self._use_locking).op
173 use_locking=self._use_locking)
184 use_locking=self._use_locking)
202 use_locking=self._use_locking)
214 use_locking=self._use_locking)
232 use_locking=self._use_locking
    [all...]
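The tf.train optimizer hits above all follow the same pattern: a use_locking constructor argument is stored on the base Optimizer and forwarded into every apply/scatter update op. A minimal TensorFlow 1.x sketch of setting it from user code (the variable and loss below are made up purely for illustration):

    import tensorflow as tf  # TensorFlow 1.x API

    # Toy variable and loss, purely for illustration.
    w = tf.Variable(1.0, name="w")
    loss = tf.square(w - 3.0)

    # use_locking=True asks the underlying ApplyGradientDescent op to take a
    # lock around the variable update; the default (False) allows lock-free,
    # potentially racy updates when several threads touch the same variable.
    opt = tf.train.GradientDescentOptimizer(learning_rate=0.1, use_locking=True)
    train_op = opt.minimize(loss)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        sess.run(train_op)
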
  /external/tensorflow/tensorflow/core/ops/
state_ops.cc 85 .Attr("use_locking: bool = true")
103 .Attr("use_locking: bool = false")
111 .Attr("use_locking: bool = false")
140 .Attr("use_locking: bool = true")
150 .Attr("use_locking: bool = false")
160 .Attr("use_locking: bool = false")
170 .Attr("use_locking: bool = false")
180 .Attr("use_locking: bool = false")
190 .Attr("use_locking: bool = true")
199 .Attr("use_locking: bool = true"
    [all...]
training_ops.cc 76 .Attr("use_locking: bool = false")
84 .Attr("use_locking: bool = false")
110 .Attr("use_locking: bool = false")
125 .Attr("use_locking: bool = false")
137 .Attr("use_locking: bool = false")
151 .Attr("use_locking: bool = false")
183 .Attr("use_locking: bool = false")
200 .Attr("use_locking: bool = false")
214 .Attr("use_locking: bool = false")
230 .Attr("use_locking: bool = false"
    [all...]
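state_ops.cc and training_ops.cc register use_locking as an op attribute; note that several Assign-style state ops default it to true while the Apply* training ops default to false. In the Python API the same attribute surfaces as a keyword argument on the state ops; a minimal TensorFlow 1.x sketch:

    import tensorflow as tf  # TensorFlow 1.x API

    counter = tf.Variable(0, dtype=tf.int32, name="counter")

    # The use_locking keyword maps onto the op attribute registered in
    # state_ops.cc; with True the increment happens under a lock.
    increment = tf.assign_add(counter, 1, use_locking=True)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        print(sess.run(increment))  # 1
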
  /external/tensorflow/tensorflow/core/api_def/base_api/
api_def_ApplyGradientDescent.pbtxt 28 name: "use_locking"
api_def_ResourceApplyGradientDescent.pbtxt 22 name: "use_locking"
api_def_ApplyAdagrad.pbtxt 34 name: "use_locking"
api_def_ApplyProximalGradientDescent.pbtxt 40 name: "use_locking"
api_def_AssignAdd.pbtxt 23 name: "use_locking"
api_def_AssignSub.pbtxt 23 name: "use_locking"
api_def_ResourceApplyAdagrad.pbtxt 28 name: "use_locking"
api_def_ResourceApplyProximalGradientDescent.pbtxt 34 name: "use_locking"
api_def_ResourceSparseApplyAdadelta.pbtxt 46 name: "use_locking"
api_def_ResourceSparseApplyAdagrad.pbtxt 34 name: "use_locking"
  /external/tensorflow/tensorflow/tools/api/golden/
tensorflow.-variable.pbtxt 52 argspec: "args=[\'self\', \'value\', \'use_locking\'], varargs=None, keywords=None, defaults=[\'False\'], "
56 argspec: "args=[\'self\', \'delta\', \'use_locking\'], varargs=None, keywords=None, defaults=[\'False\'], "
60 argspec: "args=[\'self\', \'delta\', \'use_locking\'], varargs=None, keywords=None, defaults=[\'False\'], "
92 argspec: "args=[\'self\', \'sparse_delta\', \'use_locking\'], varargs=None, keywords=None, defaults=[\'False\'], "
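The golden API file records a use_locking argument with a False default on tf.Variable's assign, assign_add, assign_sub and scatter_sub methods. A short TensorFlow 1.x sketch of those methods (the values are arbitrary examples):

    import tensorflow as tf  # TensorFlow 1.x API

    v = tf.Variable([1.0, 2.0, 3.0])

    # Each mutation method takes an optional use_locking flag (default False),
    # matching the argspecs recorded in tensorflow.-variable.pbtxt above.
    a = v.assign([4.0, 5.0, 6.0], use_locking=True)
    b = v.assign_add([1.0, 1.0, 1.0], use_locking=True)
    c = v.scatter_sub(
        tf.IndexedSlices(values=tf.constant([0.5]), indices=tf.constant([0])),
        use_locking=True)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        sess.run([a, b, c])  # run order among a, b, c is not deterministic
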
  /external/tensorflow/tensorflow/contrib/opt/python/training/
addsign.py 42 use_locking=False,
76 use_locking: If True, use locks for update operations.
80 super(AddSignOptimizer, self).__init__(use_locking, name)
121 use_locking=self._use_locking).op
133 use_locking=self._use_locking)
142 m, (m * beta_t) + (grad * (1 - beta_t)), use_locking=self._use_locking)
166 use_locking=self._use_locking)
powersign.py 44 use_locking=False,
78 use_locking: If True, use locks for update operations.
82 super(PowerSignOptimizer, self).__init__(use_locking, name)
123 use_locking=self._use_locking).op
135 use_locking=self._use_locking)
145 m, (m * beta_t) + (grad * (1 - beta_t)), use_locking=self._use_locking)
170 use_locking=self._use_locking)
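The contrib AddSign and PowerSign optimizers thread use_locking through in the same way as the core optimizers. A hedged sketch, assuming tf.contrib.opt.AddSignOptimizer accepts learning_rate and use_locking as keyword arguments (the rest of its constructor signature is not shown in the hits and is assumed here):

    import tensorflow as tf  # TensorFlow 1.x, with tf.contrib available

    w = tf.Variable(0.5)
    loss = tf.square(w)

    # use_locking=True requests locked update ops, as with tf.train optimizers;
    # other constructor arguments are left at their defaults.
    opt = tf.contrib.opt.AddSignOptimizer(learning_rate=0.1, use_locking=True)
    train_op = opt.minimize(loss)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        sess.run(train_op)
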
  /external/valgrind/drd/tests/
circular_buffer.c 52 static int use_locking = 1; variable
106 if (use_locking)
115 if (use_locking)
129 if (use_locking)
138 if (use_locking)
195 use_locking = 0;
