Home | Sort by relevance | Sort by last modified time
    Searched refs:train (Results 26 - 50 of 171) sorted by null

1 2 3 4 5 6 7

  /external/tensorflow/tensorflow/examples/learn/
text_classification_cnn.py 91 if mode == tf.estimator.ModeKeys.TRAIN:
92 optimizer = tf.train.AdamOptimizer(learning_rate=0.01)
93 train_op = optimizer.minimize(loss, global_step=tf.train.get_global_step())
109 x_train = pandas.DataFrame(dbpedia.train.data)[1]
110 y_train = pandas.Series(dbpedia.train.target)
125 # Train.
132 classifier.train(input_fn=train_input_fn, steps=100)
iris_custom_decay_dnn.py 53 if mode == tf.estimator.ModeKeys.TRAIN:
54 global_step = tf.train.get_global_step()
55 learning_rate = tf.train.exponential_decay(
58 optimizer = tf.train.AdagradOptimizer(learning_rate=learning_rate)
78 # Train.
81 classifier.train(input_fn=train_input_fn, steps=1000)
resnet.py 157 if mode == tf.estimator.ModeKeys.TRAIN:
158 optimizer = tf.train.AdagradOptimizer(learning_rate=0.01)
159 train_op = optimizer.minimize(loss, global_step=tf.train.get_global_step())
180 # Train model and save summaries into logdir.
182 x={X_FEATURE: mnist.train.images},
183 y=mnist.train.labels.astype(np.int32),
187 classifier.train(input_fn=train_input_fn, steps=100)
text_classification.py 49 if mode == tf.estimator.ModeKeys.TRAIN:
50 optimizer = tf.train.AdamOptimizer(learning_rate=0.01)
51 train_op = optimizer.minimize(loss, global_step=tf.train.get_global_step())
111 x_train = pandas.Series(dbpedia.train.data[:, 1])
112 y_train = pandas.Series(dbpedia.train.target)
142 # Train.
149 classifier.train(input_fn=train_input_fn, steps=100)
text_classification_character_cnn.py 92 if mode == tf.estimator.ModeKeys.TRAIN:
93 optimizer = tf.train.AdamOptimizer(learning_rate=0.01)
94 train_op = optimizer.minimize(loss, global_step=tf.train.get_global_step())
111 x_train = pandas.DataFrame(dbpedia.train.data)[1]
112 y_train = pandas.Series(dbpedia.train.target)
128 # Train.
135 classifier.train(input_fn=train_input_fn, steps=100)
iris_custom_model.py 54 if mode == tf.estimator.ModeKeys.TRAIN:
55 optimizer = tf.train.AdagradOptimizer(learning_rate=0.1)
56 train_op = optimizer.minimize(loss, global_step=tf.train.get_global_step())
75 # Train.
78 classifier.train(input_fn=train_input_fn, steps=1000)
multiple_gpu.py 72 if mode == tf.estimator.ModeKeys.TRAIN:
73 optimizer = tf.train.AdagradOptimizer(learning_rate=0.1)
75 loss, global_step=tf.train.get_global_step())
94 # Train.
97 classifier.train(input_fn=train_input_fn, steps=100)
  /external/tensorflow/tensorflow/contrib/kfac/examples/
mlp.py 15 r"""Train an MLP on MNIST using K-FAC.
120 # Train with K-FAC. We'll use a decreasing learning rate that's cut in 1/2
123 global_step = tf.train.get_or_create_global_step()
125 learning_rate=tf.train.exponential_decay(
134 with tf.train.MonitoredTrainingSession(config=session_config) as sess:
158 """Train an MLP on MNIST.
190 """Train an MLP on MNIST, splitting the minibatch across multiple towers.
245 """Train an MLP on MNIST using tf.estimator.
272 mode: tf.estimator.ModeKey. Must be TRAIN.
279 ValueError: If 'mode' is anything other than TRAIN
    [all...]
convnet.py 15 r"""Train a ConvNet on MNIST using K-FAC.
199 # Train with K-FAC.
200 global_step = tf.train.get_or_create_global_step()
210 with tf.train.MonitoredTrainingSession(config=session_config) as sess:
281 with tf.device(tf.train.replica_device_setter(num_ps_tasks)):
282 global_step = tf.train.get_or_create_global_step()
290 sync_optimizer = tf.train.SyncReplicasOptimizer(
298 with tf.train.MonitoredTrainingSession(
328 """Train a ConvNet on MNIST.
358 """Train a ConvNet on MNIST
    [all...]
  /external/tensorflow/tensorflow/core/kernels/
sdca_ops_test.cc 233 Graph* train = nullptr; local
236 20 /* dense features per group */, &init, &train);
238 test::Benchmark("cpu", train, GetSingleThreadedOptions(), init).Run(iters);
244 Graph* train = nullptr; local
247 200000 /* dense features per group */, &init, &train);
249 test::Benchmark("cpu", train, GetSingleThreadedOptions(), init).Run(iters);
255 Graph* train = nullptr; local
258 0 /* dense features per group */, &init, &train);
260 test::Benchmark("cpu", train, GetMultiThreadedOptions(), init).Run(iters);
  /external/tensorflow/tensorflow/contrib/estimator/python/estimator/
extenders_test.py 61 estimator.train(input_fn=input_fn)
91 estimator.train(input_fn=input_fn)
107 estimator.train(input_fn=input_fn)
119 estimator.train(input_fn=input_fn)
126 estimator.train(input_fn=input_fn)
172 estimator.train(input_fn=input_fn, steps=1)
186 estimator.train(input_fn=input_fn, steps=1)
202 estimator.train(input_fn=input_fn, steps=1)
229 estimator.train(input_fn=input_fn, steps=1)
249 estimator.train(input_fn=input_fn, steps=1
    [all...]
  /external/tensorflow/tensorflow/contrib/layers/python/layers/
optimizers_test.py 70 train = optimizers_lib.optimize_loss(
73 session.run(train, feed_dict={x: 5})
86 train = optimizers_lib.optimize_loss(
89 session.run(train, feed_dict={x: 5})
169 train = optimizers_lib.optimize_loss(
176 session.run(train, feed_dict={x: 5})
186 train = optimizers_lib.optimize_loss(
194 session.run(train, feed_dict={x: 5})
202 train = optimizers_lib.optimize_loss(
209 session.run(train, feed_dict={x: 5}
    [all...]
optimizers.py 38 from tensorflow.python.training import training as train
41 "Adagrad": train.AdagradOptimizer,
42 "Adam": train.AdamOptimizer,
43 "Ftrl": train.FtrlOptimizer,
44 "Momentum": lambda lr: train.MomentumOptimizer(lr, momentum=0.9),
45 "RMSProp": train.RMSPropOptimizer,
46 "SGD": train.GradientDescentOptimizer,
80 optimizer=lambda lr: tf.train.MomentumOptimizer(lr, momentum=0.5))`.
83 optimizer=lambda: tf.train.MomentumOptimizer(0.5, momentum=0.5))`.
87 optimizer=tf.train.AdagradOptimizer)`
    [all...]
  /external/tensorflow/tensorflow/contrib/model_pruning/examples/cifar10/
cifar10_eval.py 28 data set, compile the program and train the model.
60 ckpt = tf.train.get_checkpoint_state(FLAGS.checkpoint_dir)
73 coord = tf.train.Coordinator()
119 variable_averages = tf.train.ExponentialMovingAverage(
122 saver = tf.train.Saver(variables_to_restore)
cifar10_input.py 121 images, label_batch = tf.train.shuffle_batch(
128 images, label_batch = tf.train.batch(
159 filename_queue = tf.train.string_input_producer(filenames)
194 print('Filling queue with %d CIFAR images before starting to train. '
210 eval_data: bool, indicating if one should use the train or eval data set.
232 filename_queue = tf.train.string_input_producer(filenames)
  /external/tensorflow/tensorflow/contrib/model_pruning/python/
learning.py 31 optimizer = tf.train.MomentumOptimizer(FLAGS.learning_rate, FLAGS.momentum)
43 learning.train(train_op,
58 def train(train_op, function
83 """Wrapper around tf-slim's train function.
136 sync_optimizer: an instance of tf.train.SyncReplicasOptimizer, or a list of
162 total_loss, _ = _slim.learning.train(
  /external/tensorflow/tensorflow/examples/tutorials/mnist/
mnist_softmax.py 57 train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
61 # Train
63 batch_xs, batch_ys = mnist.train.next_batch(100)
mnist_with_summaries.py 38 def train(): function
127 with tf.name_scope('train'):
128 train_step = tf.train.AdamOptimizer(FLAGS.learning_rate).minimize(
141 train_writer = tf.summary.FileWriter(FLAGS.log_dir + '/train', sess.graph)
145 # Train the model, and also write summaries.
149 def feed_dict(train):
151 if train or FLAGS.fake_data:
152 xs, ys = mnist.train.next_batch(100, fake_data=FLAGS.fake_data)
164 else: # Record train set summaries, and train
    [all...]
  /external/tensorflow/tensorflow/contrib/training/python/training/
training.py 36 optimizer = tf.train.MomentumOptimizer(FLAGS.learning_rate, FLAGS.momentum)
42 tf.contrib.training.train(train_op, my_log_dir)
48 In order to use the `train` function, one needs a train_op: an `Operation` that
144 tf.contrib.training.train(train_op, my_log_dir, scaffold=scaffold)
177 tf.contrib.training.train(train_op, my_log_dir, scaffold=scaffold)
208 tf.contrib.training.train(train_op, my_log_dir, scaffold=scaffold)
240 tf.contrib.training.train(train_op, my_log_dir, scaffold=scaffold)
267 'train',
393 variables_to_train: an optional list of variables to train. If None, it will
477 def train(train_op function
    [all...]
  /external/tensorflow/tensorflow/python/estimator/
estimator_test.py 413 model_fn_lib.ModeKeys.TRAIN)
419 est.train(
424 expected_mode = model_fn_lib.ModeKeys.TRAIN
448 est.train(InputFn(), steps=1)
452 expected_mode = model_fn_lib.ModeKeys.TRAIN
472 est.train(_input_fn, steps=1)
495 est.train(input_fn=_input_fn, steps=1)
513 est.train(input_fn=_input_fn_with_labels, steps=1)
525 est.train(input_fn=_input_fn, steps=1)
542 self.assertEqual(model_fn_lib.ModeKeys.TRAIN, mode
    [all...]
  /external/tensorflow/tensorflow/contrib/eager/python/examples/gan/
mnist_test.py 65 tf.train.get_or_create_global_step()
71 generator_optimizer = tf.train.AdamOptimizer(0.001)
73 discriminator_optimizer = tf.train.AdamOptimizer(0.001)
89 self._report('train', start, measure_batches, batch_size)
  /external/tensorflow/tensorflow/contrib/eager/python/examples/linear_regression/
linear_regression.py 86 tf.train.get_or_create_global_step()
98 optimizer.apply_gradients(grads, global_step=tf.train.get_global_step())
149 optimizer = tf.train.GradientDescentOptimizer(learning_rate)
  /external/tensorflow/tensorflow/contrib/predictor/
testing_common.py 81 def get_arithmetic_input_fn(core=True, train=False):
90 if train:
96 if train:
  /external/tensorflow/tensorflow/contrib/session_bundle/example/
export_half_plus_two.py 63 save = tf.train.Saver(
69 write_version=tf.train.SaverDef.V2 if use_checkpoint_v2 else
70 tf.train.SaverDef.V1)
  /external/tensorflow/tensorflow/examples/how_tos/reading_data/
fully_connected_reader.py 15 """Train and Eval the MNIST network.
18 to a TFRecords file containing tf.train.Example protocol buffers.
43 TRAIN_FILE = 'train.tfrecords'
83 def inputs(train, batch_size, num_epochs):
87 train: Selects between the training (True) and validation (False) data.
90 train forever.
106 if train else VALIDATION_FILE)
128 """Train MNIST for a number of steps."""
134 train=True, batch_size=FLAGS.batch_size, num_epochs=FLAGS.num_epochs)
142 # Add to the Graph operations that train the model
    [all...]

Completed in 2095 milliseconds

1 2 3 4 5 6 7