Home | History | Annotate | Download | results restricted to directory: optimizers

Lines matching references to the symbol `output`

32   Output constant_a = ops::Const(s.WithOpName("constant_a"), 1.0f, {1});
33 Output constant_b = ops::Const(s.WithOpName("constant_b"), 1, {1});
34 Output var = ops::Variable(s.WithOpName("var"), {1}, DT_FLOAT);
35 Output assign = ops::Assign(s.WithOpName("assign"), {var}, {constant_a});
36 Output identity = ops::Identity(s.WithOpName("identity"), {var});
37 Output fifo_queue = ops::FIFOQueue(s.WithOpName("fifo_queue"), {DT_FLOAT});
40 Output add = ops::AddN(s.WithOpName("add"), {constant_a, dequeue[0]});
41 Output learning_rate = ops::Const(s.WithOpName("learning_rate"), 0.01f, {1});
42 Output apply_gradient = ops::ApplyGradientDescent(
52 GraphDef output;
53 Status status = parallel.Optimize(nullptr, item, &output);
55 EXPECT_EQ(21, output.node_size());
57 const NodeDef& node_assign = output.node(0);
61 const NodeDef& node_constant_b = output.node(1);
64 const NodeDef& node_fifo_queue = output.node(2);
67 const NodeDef& node_identity = output.node(3);
71 const NodeDef& node_var = output.node(4);
74 const NodeDef& node_div_const0 = output.node(5);
78 const NodeDef& node_div0 = output.node(6);
81 const NodeDef& node_add0 = output.node(7);
84 const NodeDef& node_gradient0 = output.node(8);
87 const NodeDef& node_constant_a0 = output.node(9);
90 const NodeDef& node_dequeue0 = output.node(10);
93 const NodeDef& node_learning_rate0 = output.node(11);
96 const NodeDef& node_div_const1 = output.node(12);
100 const NodeDef& node_div1 = output.node(13);
104 const NodeDef& node_add1 = output.node(14);
107 const NodeDef& node_gradient1 = output.node(15);
110 const NodeDef& node_constant_a1 = output.node(16);
113 const NodeDef& node_dequeue1 = output.node(17);
116 const NodeDef& node_learning_rate1 = output.node(18);
119 const NodeDef& node_fetch = output.node(19);
124 const NodeDef& node_gradient = output.node(20);