    Searched refs: cluster_spec (Results 1 - 25 of 52)


  /external/tensorflow/tensorflow/python/distribute/
multi_worker_util.py 25 def normalize_cluster_spec(cluster_spec):
26 """Makes `cluster_spec` into a `ClusterSpec` object.
29 cluster_spec: a dict, ClusterDef or ClusterSpec object specifying the
36 ValueError: if `cluster_spec` is not a dict or a `ClusterSpec` or a
39 if isinstance(cluster_spec, (dict, cluster_pb2.ClusterDef)):
40 return server_lib.ClusterSpec(cluster_spec)
41 elif not isinstance(cluster_spec, server_lib.ClusterSpec):
43 "`cluster_spec' should be dict or a `tf.train.ClusterSpec` or a "
45 return cluster_spec
49 def _validate_cluster_spec(cluster_spec, task_type, task_id)
    [all...]
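
The normalize_cluster_spec helper above accepts a plain dict, a ClusterDef proto, or an existing ClusterSpec. A minimal sketch of the same idea using only the public tf.train.ClusterSpec API (the cluster layout below is made up for illustration):

import tensorflow as tf

cluster_dict = {
    "chief": ["localhost:2222"],
    "worker": ["localhost:2223", "localhost:2224"],
    "ps": ["localhost:2225"],
}

# tf.train.ClusterSpec can be built from a dict or a ClusterDef proto,
# which is what normalize_cluster_spec relies on.
cluster_spec = tf.train.ClusterSpec(cluster_dict)

print(sorted(cluster_spec.jobs))               # ['chief', 'ps', 'worker']
print(cluster_spec.task_address("worker", 1))  # 'localhost:2224'
print(cluster_spec.num_tasks("ps"))            # 1
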
multi_worker_util_test.py 35 cluster_spec = {
41 cluster_spec, multi_worker_util.normalize_cluster_spec(cluster_spec))
63 cluster_spec = server_lib.ClusterSpec({
69 cluster_spec, multi_worker_util.normalize_cluster_spec(cluster_spec))
72 cluster_spec = ["127.0.0.1:8964", "127.0.0.1:2333"]
76 "`cluster_spec' should be dict or a `tf.train.ClusterSpec` or a "
78 multi_worker_util.normalize_cluster_spec(cluster_spec)
84 cluster_spec =
    [all...]
distribute_coordinator.py 95 def _get_num_workers(cluster_spec):
97 if not cluster_spec:
99 return len(cluster_spec.as_dict().get(_TaskType.WORKER, [])) + len(
100 cluster_spec.as_dict().get(_TaskType.CHIEF, []))
114 cluster_spec,
124 cluster_spec: a ClusterSpec object. It can be empty or None in the local
133 with worker masters. If None or empty, hosts in the `cluster_spec` will
138 self._cluster_spec = cluster_spec
145 self._num_workers = _get_num_workers(cluster_spec)
150 return "[cluster_spec: %r, task_type: %r, task_id: %r]" %
281 def cluster_spec(self): member in class:_WorkerContext
438 cluster_spec = _split_cluster_for_evaluator(cluster_spec, task_type) variable in class:_run_std_server._FakeServer
    [all...]
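
The _get_num_workers logic above counts "worker" tasks plus the "chief" task. An illustrative re-implementation against the public ClusterSpec API rather than the coordinator's internal _TaskType constants:

import tensorflow as tf

def get_num_workers(cluster_spec):
    # An empty or None cluster_spec means local mode: no remote workers.
    if not cluster_spec:
        return 0
    d = cluster_spec.as_dict()
    return len(d.get("worker", [])) + len(d.get("chief", []))

spec = tf.train.ClusterSpec({"chief": ["localhost:2222"],
                             "worker": ["localhost:2223", "localhost:2224"]})
assert get_num_workers(spec) == 3
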
estimator_training.py 40 def _count_ps(cluster_spec):
41 """Counts the number of parameter servers in cluster_spec."""
42 if not cluster_spec:
44 'Internal error: `_count_ps` does not expect empty cluster_spec.')
46 return len(cluster_spec.as_dict().get(PS, []))
49 def _count_worker(cluster_spec, chief_task_type):
50 """Counts the number of workers (including chief) in cluster_spec."""
51 if not cluster_spec:
53 'Internal error: `_count_worker` does not expect empty cluster_spec.')
55 return (len(cluster_spec.as_dict().get(WORKER, [])) + len
    [all...]
distribute_coordinator_test.py 75 # cluster_spec expects "host:port" strings.
107 cluster_spec=None,
127 if (cluster_spec and task_type and task_id is not None and
201 cluster_spec = {}
203 cluster_spec[CHIEF] = ["localhost:%s" % portpicker.pick_unused_port()]
205 cluster_spec[WORKER] = [
210 cluster_spec[PS] = [
214 cluster_spec[EVALUATOR] = ["localhost:%s" % portpicker.pick_unused_port()]
215 return cluster_spec
248 cluster_spec, **kwargs)
    [all...]
parameter_server_strategy.py 110 if cluster_resolver.cluster_spec().as_dict():
140 cluster_spec = cluster_resolver.cluster_spec()
144 raise ValueError("When `cluster_spec` is given, you must also specify "
146 cluster_spec = multi_worker_util.normalize_cluster_spec(cluster_spec)
147 assert cluster_spec.as_dict()
173 num_ps_replicas = len(cluster_spec.as_dict().get("ps", []))
180 cluster=cluster_spec)
192 self._is_chief = multi_worker_util.is_chief(cluster_spec, task_type
    [all...]
collective_all_reduce_strategy.py 56 When 'TF_CONFIG' environment variable is given, it parses cluster_spec,
101 if cluster_resolver.cluster_spec().as_dict():
162 cluster_spec = multi_worker_util.normalize_cluster_spec(
163 cluster_resolver.cluster_spec())
167 raise ValueError("When `cluster_spec` is given, you must also specify "
174 self._num_workers = multi_worker_util.worker_count(cluster_spec, task_type)
177 "`cluster_spec`.")
179 self._is_chief = multi_worker_util.is_chief(cluster_spec, task_type,
203 self._cluster_spec = cluster_spec
212 "Multi-worker CollectiveAllReduceStrategy with cluster_spec = %r,
    [all...]
  /external/tensorflow/tensorflow/python/distribute/cluster_resolver/
cluster_resolver_test.py 35 def cluster_spec(self): member in class:MockBaseClusterResolver
126 def _verifyClusterSpecEquality(self, cluster_spec, expected_proto):
127 self.assertProtoEquals(expected_proto, cluster_spec.as_cluster_def())
129 expected_proto, server_lib.ClusterSpec(cluster_spec).as_cluster_def())
132 server_lib.ClusterSpec(cluster_spec.as_cluster_def()).as_cluster_def())
135 server_lib.ClusterSpec(cluster_spec.as_dict()).as_cluster_def())
152 actual_cluster_spec = union_resolver.cluster_spec()
274 cluster_spec = union_cluster.cluster_spec()
283 self._verifyClusterSpecEquality(cluster_spec, expected_proto
    [all...]
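
The _verifyClusterSpecEquality helpers that recur in these resolver tests all assert the same invariant: a cluster spec round-trips losslessly through both its proto (ClusterDef) and dict forms. A standalone sketch of that invariant:

import tensorflow as tf
from tensorflow.python.training import server_lib  # same module the tests use

spec = server_lib.ClusterSpec({"worker": ["host1:2222", "host2:2222"]})
proto = spec.as_cluster_def()

# Rebuilding from either representation yields the same ClusterDef.
assert server_lib.ClusterSpec(proto).as_cluster_def() == proto
assert server_lib.ClusterSpec(spec.as_dict()).as_cluster_def() == proto
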
kubernetes_cluster_resolver_test.py 53 def _verifyClusterSpecEquality(self, cluster_spec, expected_proto):
61 cluster_spec: ClusterSpec returned by the TPUClusterResolver
64 self.assertProtoEquals(expected_proto, cluster_spec.as_cluster_def())
67 server_lib.ClusterSpec(cluster_spec).as_cluster_def())
70 cluster_spec.as_cluster_def()).as_cluster_def())
73 cluster_spec.as_dict()).as_cluster_def())
82 actual_cluster_spec = cluster_resolver.cluster_spec()
101 actual_cluster_spec = cluster_resolver.cluster_spec()
138 cluster_resolver.cluster_spec()
164 actual_cluster_spec = cluster_resolver.cluster_spec()
    [all...]
slurm_cluster_resolver_test.py 35 def _verifyClusterSpecEquality(self, cluster_spec, expected_proto):
36 self.assertProtoEquals(expected_proto, cluster_spec.as_cluster_def())
39 server_lib.ClusterSpec(cluster_spec).as_cluster_def())
42 server_lib.ClusterSpec(cluster_spec.as_cluster_def()).as_cluster_def())
45 server_lib.ClusterSpec(cluster_spec.as_dict()).as_cluster_def())
62 actual_cluster_spec = slurm_cluster_resolver.cluster_spec()
113 actual_cluster_spec = slurm_cluster_resolver.cluster_spec()
141 actual_cluster_spec = slurm_cluster_resolver.cluster_spec()
173 actual_cluster_spec = slurm_cluster_resolver.cluster_spec()
tfconfig_cluster_resolver.py 124 def cluster_spec(self): member in class:TFConfigClusterResolver
160 cluster_spec = self.cluster_spec()
161 if (not cluster_spec.jobs or
162 (len(cluster_spec.jobs) == 1 and
163 len(cluster_spec.job_tasks(cluster_spec.jobs[0])) == 1)):
171 return format_master_url(cluster_spec.task_address(task_type, task_id),
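
TFConfigClusterResolver derives its cluster_spec() from the TF_CONFIG environment variable. A sketch of the variable's layout and of parsing it by hand (the hostnames are placeholders):

import json
import os
import tensorflow as tf

os.environ["TF_CONFIG"] = json.dumps({
    "cluster": {
        "chief": ["host0:2222"],
        "worker": ["host1:2222", "host2:2222"],
        "ps": ["host3:2222"],
    },
    "task": {"type": "worker", "index": 1},
})

tf_config = json.loads(os.environ["TF_CONFIG"])
cluster_spec = tf.train.ClusterSpec(tf_config["cluster"])  # the "cluster" key
task = tf_config["task"]                                   # this process's role
print(cluster_spec.task_address(task["type"], task["index"]))  # 'host1:2222'
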
cluster_resolver.py 92 def cluster_spec(self): member in class:ClusterResolver
103 a cluster_spec, rather than attempting to cache anything.
185 def __init__(self, cluster_spec, master='', task_type=None, task_id=None,
198 if not isinstance(cluster_spec, ClusterSpec):
199 raise TypeError('cluster_spec must be a ClusterSpec.')
200 self._cluster_spec = cluster_spec
206 def cluster_spec(self): member in class:SimpleClusterResolver
225 master = self.cluster_spec().task_address(task_type, task_id)
288 when cluster_spec is called. The details of the merge function is
289 documented in the cluster_spec function
329 def cluster_spec(self): member in class:UnionClusterResolver
    [all...]
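
SimpleClusterResolver, whose constructor appears in the snippet above, just wraps a static ClusterSpec. A minimal usage sketch; the tf.distribute.cluster_resolver import path is the publicly exported location in recent TF releases and is an assumption here:

import tensorflow as tf

# Assumed public import path; the snippet above is the internal implementation.
resolver = tf.distribute.cluster_resolver.SimpleClusterResolver(
    tf.train.ClusterSpec({"worker": ["localhost:2222", "localhost:2223"]}),
    task_type="worker",
    task_id=0)

print(resolver.cluster_spec().jobs)                    # ['worker']
print(resolver.master(task_type="worker", task_id=1))  # 'localhost:2223'
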
gce_cluster_resolver_test.py 32 def _verifyClusterSpecEquality(self, cluster_spec, expected_proto):
33 self.assertProtoEquals(expected_proto, cluster_spec.as_cluster_def())
35 expected_proto, server_lib.ClusterSpec(cluster_spec).as_cluster_def())
38 server_lib.ClusterSpec(cluster_spec.as_cluster_def()).as_cluster_def())
41 server_lib.ClusterSpec(cluster_spec.as_dict()).as_cluster_def())
132 actual_cluster_spec = gce_cluster_resolver.cluster_spec()
222 actual_cluster_spec = gce_cluster_resolver.cluster_spec()
243 actual_cluster_spec = gce_cluster_resolver.cluster_spec()
300 actual_cluster_spec = union_cluster_resolver.cluster_spec()
tfconfig_cluster_resolver_test.py 36 def _verifyClusterSpecEquality(self, cluster_spec, expected_proto):
37 self.assertProtoEquals(expected_proto, cluster_spec.as_cluster_def())
39 expected_proto, server_lib.ClusterSpec(cluster_spec).as_cluster_def())
42 server_lib.ClusterSpec(cluster_spec.as_cluster_def()).as_cluster_def())
45 server_lib.ClusterSpec(cluster_spec.as_dict()).as_cluster_def())
69 actual_cluster_spec = cluster_resolver.cluster_spec()
tpu_cluster_resolver_test.py 96 def _verifyClusterSpecEquality(self, cluster_spec, expected_proto):
104 cluster_spec: ClusterSpec returned by the TPUClusterResolver
107 self.assertProtoEquals(expected_proto, cluster_spec.as_cluster_def())
110 server_lib.ClusterSpec(cluster_spec).as_cluster_def())
113 cluster_spec.as_cluster_def()).as_cluster_def())
116 cluster_spec.as_dict()).as_cluster_def())
173 actual_cluster_spec = resolver.cluster_spec()
207 actual_cluster_spec = resolver.cluster_spec()
235 resolver.cluster_spec()
255 actual_cluster_spec = resolver.cluster_spec()
    [all...]
tpu_cluster_resolver.py 335 first instance in the ClusterSpec returned by the cluster_spec function.
358 cluster_spec = self.cluster_spec()
361 master = cluster_spec.task_address(task_type, task_id)
364 master = cluster_spec.task_address(self.task_type, self.task_id)
367 job_tasks = cluster_spec.job_tasks(self.task_type)
386 def cluster_spec(self): member in class:TPUClusterResolver
432 cluster_spec = {self.task_type: worker_list}
448 cluster_spec = {self.task_type: tpus}
452 cluster_spec[self._coordinator_name] = [self._coordinator_address
    [all...]
  /external/tensorflow/tensorflow/contrib/learn/python/learn/estimators/
run_config.py 84 Sets the properties `cluster_spec`, `is_chief`, `master` (if `None` in the
103 * `cluster_spec` is parsed from `TF_CONFIG['cluster']`. Defaults to {}.
105 `cluster_spec`. Defaults to ''.
107 in the `ps` attribute of `cluster_spec`. Defaults to 0.
109 in the `worker` attribute of `cluster_spec`. Defaults to 0.
125 assert config.cluster_spec == server_lib.ClusterSpec(cluster)
135 # environment variable is present, load cluster_spec from TF_CONFIG.
169 def cluster_spec(self): member in class:ClusterConfig
249 The superclass `ClusterConfig` may set properties like `cluster_spec`,
411 def _count_ps(cluster_spec)
    [all...]
  /external/tensorflow/tensorflow/python/training/
server_lib_test.py 358 cluster_spec = server_lib.ClusterSpec(cluster_def)
359 self.assertProtoEquals(cluster_def, cluster_spec.as_cluster_def())
377 cluster_spec = server_lib.ClusterSpec(cluster_def)
378 self.assertProtoEquals(cluster_def, cluster_spec.as_cluster_def())
400 cluster_spec = server_lib.ClusterSpec(cluster_def)
401 self.assertProtoEquals(cluster_def, cluster_spec.as_cluster_def())
425 cluster_spec = server_lib.ClusterSpec(cluster_def)
426 self.assertProtoEquals(cluster_def, cluster_spec.as_cluster_def())
432 cluster_spec = server_lib.ClusterSpec({
440 self.assertEqual(expected_str, str(cluster_spec))
    [all...]
device_setter.py 158 cluster_spec = {
161 with tf.device(tf.train.replica_device_setter(cluster=cluster_spec)):
196 cluster_spec = cluster.as_dict()
198 cluster_spec = server_lib.ClusterSpec(cluster).as_dict()
201 if ps_job_name not in cluster_spec or cluster_spec[ps_job_name] is None:
203 ps_tasks = len(cluster_spec[ps_job_name])
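
The docstring fragment above is the canonical replica_device_setter pattern. A runnable sketch of it; it only assigns device strings, so no servers need to exist, and it assumes the TF 1.x graph-mode API (tf.compat.v1 under TF 2.x):

import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

cluster_spec = {
    "ps": ["ps0:2222", "ps1:2222"],
    "worker": ["worker0:2222", "worker1:2222", "worker2:2222"],
}

# Variables are placed on the ps tasks (round-robin by default); every other
# op falls through to the worker device.
with tf.device(tf.train.replica_device_setter(cluster=cluster_spec)):
    v = tf.get_variable("v", shape=[10])
    w = tf.get_variable("w", shape=[10])
    y = v + w

print(v.device)  # /job:ps/task:0
print(w.device)  # /job:ps/task:1
print(y.device)  # /job:worker
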
  /external/tensorflow/tensorflow/python/debug/lib/
grpc_tensorflow_server.py 17 Takes input arguments cluster_spec, job_name and task_id, and start a blocking
21 grpc_tensorflow_server.py --cluster_spec=SPEC --job_name=NAME --task_id=ID
45 def parse_cluster_spec(cluster_spec, cluster, verbose=False):
46 """Parse content of cluster_spec string and inject info into cluster protobuf.
49 cluster_spec: cluster specification string, e.g.,
55 ValueError: if the cluster_spec string is invalid.
58 job_strings = cluster_spec.split(",")
60 if not cluster_spec:
61 raise ValueError("Empty cluster_spec string")
67 raise ValueError("Not exactly one instance of '|' in cluster_spec")
    [all...]
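
parse_cluster_spec above consumes the --cluster_spec flag, whose value packs jobs as "name|addr;addr" entries separated by commas. An illustrative standalone parser following the checks visible in the snippet (the ';' task separator is taken from the script's documented example format):

def parse_cluster_spec(cluster_spec):
    """Parses "job1|host:port;host:port,job2|host:port" into a dict."""
    if not cluster_spec:
        raise ValueError("Empty cluster_spec string")
    cluster = {}
    for job_str in cluster_spec.split(","):
        if job_str.count("|") != 1:
            raise ValueError("Not exactly one instance of '|' in cluster_spec")
        job_name, addresses = job_str.split("|")
        cluster[job_name] = addresses.split(";")
    return cluster

print(parse_cluster_spec("worker|localhost:2222;localhost:2223,ps|localhost:2224"))
# {'worker': ['localhost:2222', 'localhost:2223'], 'ps': ['localhost:2224']}
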
  /external/tensorflow/tensorflow/contrib/distribute/python/
multi_worker_test_base.py 76 """Creates and starts local servers and returns the cluster_spec dict."""
183 cluster_spec = {}
185 cluster_spec['chief'] = ['localhost:%s' % pick_unused_port()]
187 cluster_spec['worker'] = [
191 cluster_spec['ps'] = [
195 cluster_spec['evaluator'] = ['localhost:%s' % pick_unused_port()]
196 return cluster_spec
289 def _run_between_graph_clients(self, client_fn, cluster_spec, num_gpus, *args,
296 cluster_spec: a dict specifying jobs in a cluster.
303 for task_id in range(len(cluster_spec.get(task_type, [])))
    [all...]
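
The test base above builds its cluster_spec from freshly picked local ports. A trimmed sketch of that helper; portpicker is assumed to be installed (it is the dependency the test itself uses):

import portpicker

def create_cluster_spec(has_chief=False, num_workers=1, num_ps=0):
    """Returns a cluster_spec dict with 'localhost:<free port>' addresses."""
    cluster_spec = {}
    if has_chief:
        cluster_spec["chief"] = ["localhost:%d" % portpicker.pick_unused_port()]
    if num_workers:
        cluster_spec["worker"] = [
            "localhost:%d" % portpicker.pick_unused_port()
            for _ in range(num_workers)]
    if num_ps:
        cluster_spec["ps"] = [
            "localhost:%d" % portpicker.pick_unused_port()
            for _ in range(num_ps)]
    return cluster_spec

print(create_cluster_spec(has_chief=True, num_workers=2, num_ps=1))
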
collective_all_reduce_strategy.py 37 When `cluster_spec` is given by the `configure` method, it turns into the
78 cluster_spec=tfconfig.cluster_spec(),
parameter_server_strategy.py 40 training for multiple workers. If `cluster_spec` is specified, either passed
45 other operations are assigned to workers. If `cluster_spec` is not set, it
86 ValueError: if `cluster_spec` is given but `task_type` or `task_id` is
158 cluster_spec=tfconfig.cluster_spec(),
  /external/tensorflow/tensorflow/core/distributed_runtime/rpc/
grpc_tensorflow_server.cc 41 Status FillServerDef(const string& cluster_spec, const string& job_name,
51 for (const string& job_str : str_util::Split(cluster_spec, ',')) {
89 << " --cluster_spec=SPEC --job_name=NAME --task_id=ID" << std::endl;
99 tensorflow::string cluster_spec; local
103 tensorflow::Flag("cluster_spec", &cluster_spec, "cluster spec"),
116 tensorflow::Status s = tensorflow::FillServerDef(cluster_spec, job_name,
  /external/tensorflow/tensorflow/contrib/tpu/profiler/pip_package/cloud_tpu_profiler/
main.py 76 cluster_spec = cluster_resolver.cluster_spec()
77 task_indices = cluster_spec.task_indices(JOB_NAME)
79 cluster_spec.task_address(JOB_NAME, i).split(':')[0] for i in task_indices
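
The profiler snippet above extracts bare hostnames from the resolved cluster spec. The same extraction against a hand-built ClusterSpec (addresses are placeholders):

import tensorflow as tf

JOB_NAME = "worker"
cluster_spec = tf.train.ClusterSpec({JOB_NAME: ["10.0.0.2:8470", "10.0.0.3:8470"]})

task_indices = cluster_spec.task_indices(JOB_NAME)
hostnames = [cluster_spec.task_address(JOB_NAME, i).split(":")[0]
             for i in task_indices]
print(hostnames)  # ['10.0.0.2', '10.0.0.3']
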

