Home | Sort by relevance | Sort by last modified time
    Searched refs:optimization_parameters (Results 1 - 2 of 2) sorted by relevance

  /external/tensorflow/tensorflow/python/tpu/
_tpu_estimator_embedding.py 60 def _get_slot_variable_names(scope_name, var_name, optimization_parameters):
62 if isinstance(optimization_parameters, tpu_embedding.AdagradParameters):
66 elif isinstance(optimization_parameters, tpu_embedding.AdamParameters):
71 elif isinstance(optimization_parameters,
77 .format(optimization_parameters))
81 graph, table_to_config_dict, optimization_parameters=None):
97 if optimization_parameters:
99 scope_name, var_name, optimization_parameters)
150 'feature_columns', 'optimization_parameters', 'clipping_limit',
156 optimization_parameters,
    [all...]
tpu_embedding.py 139 `optimization_parameters.proto` for details.
169 Please see `optimization_parameters.proto` for details.
171 `optimization_parameters.proto` for details.
174 `optimization_parameters.proto` for details.
223 optimization_parameters = tpu_embedding.AdagradParameters(1., 1.)
227 batch_size, num_hosts, mode, optimization_parameters)
289 # we can add `optimization_parameters` to `TableConfig` to override this
297 optimization_parameters=None,
312 optimization_parameters: `AdagradParameters`, `AdamParameters`,
360 _validate_optimization_parameters(optimization_parameters)
449 def optimization_parameters(self): member in class:TPUEmbedding
    [all...]

Completed in 192 milliseconds