/prebuilts/ndk/r16/sources/third_party/shaderc/third_party/spirv-tools/include/spirv-tools/ |
optimizer.hpp | 32 class Optimizer { 60 explicit Optimizer(spv_target_env env); 63 Optimizer(const Optimizer&) = delete; 64 Optimizer(Optimizer&&) = delete; 65 Optimizer& operator=(const Optimizer&) = delete; 66 Optimizer& operator=(Optimizer&&) = delete [all...] |
/prebuilts/ndk/r16/sources/third_party/shaderc/third_party/spirv-tools/source/opt/ |
optimizer.cpp | 15 #include "spirv-tools/optimizer.hpp" 24 struct Optimizer::PassToken::Impl { 30 Optimizer::PassToken::PassToken( 31 std::unique_ptr<Optimizer::PassToken::Impl> impl) 33 Optimizer::PassToken::PassToken(PassToken&& that) 36 Optimizer::PassToken& Optimizer::PassToken::operator=(PassToken&& that) { 41 Optimizer::PassToken::~PassToken() {} 43 struct Optimizer::Impl { 50 Optimizer::Optimizer(spv_target_env env) : impl_(new Impl(env)) { [all...] |
/dalvik/dx/src/com/android/dx/dex/cf/ |
OptimizerOptions.java | 21 import com.android.dx.ssa.Optimizer; 113 * Compares the output of the optimizer run normally with a run skipping 126 EnumSet<Optimizer.OptionalStep> steps; 128 steps = EnumSet.allOf(Optimizer.OptionalStep.class); 131 steps.remove(Optimizer.OptionalStep.CONST_COLLECTOR); 134 = Optimizer.optimize(nonOptRmeth,
/external/swiftshader/src/Reactor/ |
Optimizer.cpp | 15 #include "Optimizer.hpp" 25 class Optimizer 69 void Optimizer::run(Ice::Cfg *function) 83 void Optimizer::eliminateDeadCode() 109 void Optimizer::eliminateUnitializedLoads() 164 void Optimizer::eliminateLoadsFollowingSingleStore() 265 void Optimizer::optimizeStoresInSingleBasicBlock() 360 void Optimizer::analyzeUses(Ice::Cfg *function) 399 void Optimizer::replace(Ice::Inst *instruction, Ice::Operand *newValue) 428 void Optimizer::deleteInstruction(Ice::Inst *instruction 698 Optimizer optimizer; local [all...] |
/external/libmojo/third_party/jinja2/ |
optimizer.py | 3 jinja2.optimizer 6 The jinja optimizer is currently trying to constant fold a few expressions 26 optimizer = Optimizer(environment) 27 return optimizer.visit(node) 30 class Optimizer(NodeTransformer):
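The folding pass this module implements is small enough to sketch. The following is a hedged reconstruction of the idea: fold children first, then try to evaluate the node as a constant and substitute a Const node, keeping the original node when evaluation is impossible. The class name is illustrative; the real class is jinja2.optimizer.Optimizer, which wires a method like this up as its visit handlers.

    from jinja2 import nodes
    from jinja2.visitor import NodeTransformer

    class FoldingTransformer(NodeTransformer):
        """Illustrative constant folder sketching jinja2.optimizer.Optimizer."""

        def fold(self, node):
            node = self.generic_visit(node)  # fold children first
            try:
                # as_const() raises nodes.Impossible when the expression
                # depends on runtime state and cannot be folded.
                return nodes.Const.from_untrusted(node.as_const(),
                                                  lineno=node.lineno)
            except nodes.Impossible:
                return node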
/dalvik/dx/src/com/android/dx/command/dump/ |
SsaDumper.java | 27 import com.android.dx.ssa.Optimizer; 99 ssaMeth = Optimizer.debugNoRegisterAllocation(rmeth, 101 EnumSet.allOf(Optimizer.OptionalStep.class)); 103 ssaMeth = Optimizer.debugEdgeSplit(rmeth, paramWidth, 106 ssaMeth = Optimizer.debugPhiPlacement( 109 ssaMeth = Optimizer.debugRenaming( 112 ssaMeth = Optimizer.debugDeadCodeRemover(
DotDumper.java | 33 import com.android.dx.ssa.Optimizer; 129 rmeth = Optimizer.optimize(rmeth,
/external/proguard/src/proguard/gui/ |
OptimizationsDialog.java | 23 import proguard.optimize.Optimizer; 50 private final JCheckBox[] optimizationCheckBoxes = new JCheckBox[Optimizer.OPTIMIZATION_NAMES.length]; 107 for (int index = 0; index < Optimizer.OPTIMIZATION_NAMES.length; index++) 109 String optimizationName = Optimizer.OPTIMIZATION_NAMES[index]; 195 for (int index = 0; index < Optimizer.OPTIMIZATION_NAMES.length; index++) 197 optimizationCheckBoxes[index].setSelected(filter.matches(Optimizer.OPTIMIZATION_NAMES[index]));
/external/tensorflow/tensorflow/python/estimator/canned/ |
optimizers.py | 28 from tensorflow.python.training import optimizer as optimizer_lib 42 """Returns an optimizer instance. 45 * An `Optimizer` instance: Returns the given `opt`. 46 * A string: Creates an `Optimizer` subclass with the given `learning_rate`. 55 opt: An `Optimizer` instance, or string, as discussed above. 59 An `Optimizer` instance. 73 'Unsupported optimizer name: {}. Supported names are: {}'.format( 75 if not isinstance(opt, optimizer_lib.Optimizer): 77 'The given object is not an Optimizer instance. Given: {}'.format(opt))
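The behavior spelled out in this docstring (an Optimizer instance passes through, a string constructs a subclass with the given learning rate, anything else raises) is easy to sketch. A minimal, hedged version follows; the mapping and the function name below are assumptions standing in for the module's real table of supported names.

    from tensorflow.python.training import adagrad
    from tensorflow.python.training import optimizer as optimizer_lib

    # Assumed stand-in for the module's real name -> class table.
    _NAME_TO_CLASS = {'Adagrad': adagrad.AdagradOptimizer}

    def resolve_optimizer(opt, learning_rate):
        """Illustrative resolver following the docstring's three cases."""
        if isinstance(opt, str):
            if opt not in _NAME_TO_CLASS:
                raise ValueError(
                    'Unsupported optimizer name: {}. Supported names are: {}'
                    .format(opt, sorted(_NAME_TO_CLASS)))
            return _NAME_TO_CLASS[opt](learning_rate=learning_rate)
        if not isinstance(opt, optimizer_lib.Optimizer):
            raise ValueError(
                'The given object is not an Optimizer instance. Given: {}'
                .format(opt))
        return opt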
optimizers_test.py | 27 from tensorflow.python.training import optimizer as optimizer_lib 35 ValueError, 'Unsupported optimizer name: unsupported_name'): 69 class _TestOptimizer(optimizer_lib.Optimizer): 80 ValueError, 'The given object is not an Optimizer instance'):
/external/tensorflow/tensorflow/contrib/keras/api/keras/optimizers/ |
__init__.py | 21 # Optimizer classes. 27 from tensorflow.python.keras._impl.keras.optimizers import Optimizer
/external/tensorflow/tensorflow/python/keras/optimizers/ |
__init__.py | 21 # Optimizer classes. 27 from tensorflow.python.keras._impl.keras.optimizers import Optimizer
/external/tensorflow/tensorflow/python/training/ |
gradient_descent.py | 24 from tensorflow.python.training import optimizer 30 class GradientDescentOptimizer(optimizer.Optimizer): 31 """Optimizer that implements the gradient descent algorithm. 35 """Construct a new gradient descent optimizer.
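The class above implements the plain update theta <- theta - learning_rate * gradient. A small self-contained usage sketch in TF1-style graph mode (the toy loss is illustrative):

    import tensorflow as tf

    w = tf.Variable(5.0)
    loss = tf.square(w - 3.0)  # minimized at w == 3.0
    opt = tf.train.GradientDescentOptimizer(learning_rate=0.1)
    train_op = opt.minimize(loss)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        for _ in range(100):
            sess.run(train_op)
        print(sess.run(w))  # approaches 3.0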
proximal_gradient_descent.py | 25 from tensorflow.python.training import optimizer 31 class ProximalGradientDescentOptimizer(optimizer.Optimizer): 33 """Optimizer that implements the proximal gradient descent algorithm. 41 """Construct a new proximal gradient descent optimizer.
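The proximal variant performs the same gradient step followed by a proximal operator; with L1 regularization that operator is soft-thresholding, which drives small weights exactly to zero. Construction mirrors the plain version (the regularization strengths shown are illustrative):

    import tensorflow as tf

    opt = tf.train.ProximalGradientDescentOptimizer(
        learning_rate=0.1,
        l1_regularization_strength=0.01,   # soft-thresholding; promotes sparsity
        l2_regularization_strength=0.001)  # shrinkage toward zero
    train_op = opt.minimize(loss)  # `loss` as in the previous sketch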
/external/tensorflow/tensorflow/contrib/opt/python/training/ |
multitask_optimizer_wrapper.py | 15 """An optimizer wrapper for stateful optimizers with multitask loss.""" 29 from tensorflow.python.training import optimizer 51 """Optimizer wrapper making all-zero gradients harmless. 57 which would normally affect the optimizer state 64 therefore preserving the optimizer state. 73 are delegated to an underlying optimizer. 95 opt: an instance of a class that implements tf.train.Optimizer. 97 if not isinstance(opt, optimizer.Optimizer): 99 'Supplied optimizer must be an instance of tf.train.Optimizer' [all...] |
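The core idea in this docstring, that an all-zero gradient should leave slot variables such as momentum untouched, can be sketched as a guard on each (gradient, variable) pair. This is only the masking idea with an illustrative helper name; the real wrapper guards the underlying apply ops so the optimizer state is genuinely preserved.

    import tensorflow as tf

    def mask_all_zero(grad, var):
        """Illustrative guard: scale the update to zero when grad is all zeros."""
        nonzero = tf.cast(tf.count_nonzero(grad) > 0, grad.dtype)
        return grad * nonzero, var

    # grads_and_vars = [mask_all_zero(g, v)
    #                   for g, v in opt.compute_gradients(loss) if g is not None]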
drop_stale_gradient_optimizer.py | 16 """Wrapper optimizer for checking and dropping stale gradients.""" 31 from tensorflow.python.training import optimizer 35 class DropStaleGradientOptimizer(optimizer.Optimizer): 36 """Wrapper optimizer that checks and drops stale gradient. 38 This optimizer records the global step for each worker before computing 52 opt: The actual optimizer that will be used to compute and apply the 53 gradients. Must be one of the Optimizer classes. 54 staleness: The maximum staleness allowed for the optimizer.
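The staleness test this docstring describes reduces to comparing the global step recorded when a worker computed its gradients against the step at apply time; updates whose gap exceeds the threshold are dropped rather than applied. A one-function sketch with illustrative names:

    def is_stale(step_at_compute, step_at_apply, staleness):
        """True when the gradient was computed too many steps ago to apply."""
        return (step_at_apply - step_at_compute) > staleness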
/external/tensorflow/tensorflow/contrib/tpu/python/tpu/ |
tpu_optimizer.py | 16 """Optimizer that implements cross-shard gradient reduction for TPU.""" 26 from tensorflow.python.training import optimizer 29 class CrossShardOptimizer(optimizer.Optimizer): 30 """An optimizer that averages gradients across TPU shards.""" 36 """Construct a new cross-shard optimizer. 39 opt: An existing `Optimizer` to encapsulate. 57 This simply wraps the compute_gradients() from the real optimizer. The 91 replicas, and then applies the real optimizer. 99 name passed to the Optimizer constructor [all...] |
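The reduction described above (sum each gradient across shards, divide by the shard count, then hand the result to the wrapped optimizer) can be sketched with the contrib TPU cross_replica_sum op. The helper name and the num_shards parameter are assumptions:

    from tensorflow.contrib.tpu.python.ops import tpu_ops

    def average_across_shards(grads_and_vars, num_shards):
        """Illustrative cross-shard mean before the real optimizer applies it."""
        scale = 1.0 / num_shards
        return [(tpu_ops.cross_replica_sum(g * scale), v)
                for g, v in grads_and_vars if g is not None]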
/prebuilts/ndk/r16/sources/third_party/shaderc/libshaderc_util/src/ |
spirv_tools_wrapper.cc | 20 #include "spirv-tools/optimizer.hpp" 94 spvtools::Optimizer optimizer(GetSpirvToolsTargetEnv(env)); 96 optimizer.SetMessageConsumer( 106 optimizer.RegisterPass(spvtools::CreateStripDebugInfoPass()); 109 optimizer.RegisterPass(spvtools::CreateUnifyConstantPass()); 114 if (!optimizer.Run(binary->data(), binary->size(), binary)) {
/external/tensorflow/tensorflow/python/keras/_impl/keras/ |
optimizers.py | 16 """Built-in optimizer classes. 34 from tensorflow.python.training import optimizer as tf_optimizer_module 69 @tf_export('keras.optimizers.Optimizer') 70 class Optimizer(object): 71 """Abstract optimizer base class. 73 Note: this is the parent class of all optimizers, not an actual optimizer 89 'passed to optimizer: ' + str(k)) 107 """Sets the weights of the optimizer, from Numpy arrays. 110 (otherwise the optimizer has no weights). 116 of the optimizer (i.e. it should match th [all...] |
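Concrete optimizers subclass this base and supply the parameter updates. A minimal hedged sketch of plain SGD follows; it assumes the Keras 2 hook get_updates(loss, params), so check this file for the exact signature in this snapshot.

    from tensorflow.python.keras._impl.keras import backend as K
    from tensorflow.python.keras._impl.keras.optimizers import Optimizer

    class PlainSGD(Optimizer):
        """Illustrative subclass: p <- p - lr * grad, no momentum or decay."""

        def __init__(self, lr=0.01, **kwargs):
            super(PlainSGD, self).__init__(**kwargs)
            self.lr = K.variable(lr, name='lr')

        def get_updates(self, loss, params):
            grads = self.get_gradients(loss, params)
            self.updates = [K.update_sub(p, self.lr * g)
                            for p, g in zip(params, grads)]
            return self.updates

        def get_config(self):
            config = {'lr': float(K.get_value(self.lr))}
            base_config = super(PlainSGD, self).get_config()
            return dict(list(base_config.items()) + list(config.items()))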
/external/proguard/src/proguard/ |
ProGuard.java | 28 import proguard.optimize.Optimizer; 317 return new Optimizer(configuration).execute(programClassPool, libraryClassPool);
/external/tensorflow/tensorflow/contrib/layers/python/layers/ |
optimizers.py | 15 """Optimizer ops for use in layers and tf.learn.""" 37 from tensorflow.python.training import optimizer as optimizer_ 61 optimizer, 72 """Given loss and parameters for optimizer, returns a training op. 76 - by string specifying the name of the optimizer. See OPTIMIZER_CLS_NAMES 77 for full list. E.g. `optimize_loss(..., optimizer='Adam')`. 79 `Optimizer` instance. E.g. `optimize_loss(..., 80 optimizer=lambda lr: tf.train.MomentumOptimizer(lr, momentum=0.5))`. 83 optimizer=lambda: tf.train.MomentumOptimizer(0.5, momentum=0.5))`. 84 - by a subclass of `Optimizer` having a single-argument constructo [all...] |
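A usage sketch covering the simplest of the four spellings the docstring lists, the string form; the toy loss is illustrative, and the string could equally be replaced by an Optimizer instance, class, or callable:

    import tensorflow as tf
    from tensorflow.contrib.layers import optimize_loss

    w = tf.Variable(5.0)
    loss = tf.square(w - 3.0)  # toy loss, as in the earlier sketch
    train_op = optimize_loss(
        loss,
        global_step=tf.train.get_or_create_global_step(),
        learning_rate=0.1,
        optimizer='SGD')  # one of the names in OPTIMIZER_CLS_NAMES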
/dalvik/dx/src/com/android/dx/ssa/ |
NormalSsaInsn.java | 231 = Optimizer.getPreserveLocals() && getLocalAssignment() != null;
Optimizer.java | 29 public class Optimizer { 34 /** optional optimizer steps */ 113 * Runs the optimizer with a strategy to minimize the number of rop-form
/external/tensorflow/tensorflow/contrib/bayesflow/python/ops/ |
variational_sgd_optimizer.py | 15 """An optimizer module for constant stochastic gradient descent.""" 29 from tensorflow.python.training import optimizer 33 class VariationalSGDOptimizer(optimizer.Optimizer): 34 """An optimizer module for constant stochastic gradient descent. 36 This implements an optimizer module for the constant stochastic gradient
/external/tensorflow/tensorflow/contrib/mpi_collectives/ |
__init__.py | 97 optimizer = mpi.DistributedOptimizer(tf.train.AdamOptimizer()) 98 train = optimizer.minimize(loss) 101 # Do not pass this to an optimizer! 169 class DistributedOptimizer(tf.train.Optimizer): 170 """An optimizer that wraps another tf.Optimizer, using an MPI allreduce to 173 def __init__(self, optimizer, name=None, use_locking=False): 174 """Construct a new DistributedOptimizer, which uses another optimizer 180 optimizer: Optimizer to use for computing gradients and applying updates [all...] |
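The delegation this class describes can be sketched in a few lines: let the wrapped optimizer compute gradients, then allreduce-average each one before it is applied. The class and attribute names below are assumptions; mpi.allreduce is used as in this module's own example above.

    import tensorflow as tf
    import tensorflow.contrib.mpi_collectives as mpi

    class AveragingWrapper(tf.train.Optimizer):
        """Illustrative stand-in for the DistributedOptimizer delegation."""

        def __init__(self, opt, name='AveragingWrapper', use_locking=False):
            super(AveragingWrapper, self).__init__(use_locking, name)
            self._optimizer = opt  # assumed attribute name

        def compute_gradients(self, *args, **kwargs):
            grads_and_vars = self._optimizer.compute_gradients(*args, **kwargs)
            return [(mpi.allreduce(g, average=True) if g is not None else g, v)
                    for g, v in grads_and_vars]

        def apply_gradients(self, *args, **kwargs):
            return self._optimizer.apply_gradients(*args, **kwargs)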