    Searched defs:softmax (Results 1 - 16 of 16)

  /external/libtextclassifier/util/math/
softmax.cc 17 #include "util/math/softmax.h"
33 // Standard softmax formula for label's probability is
77 std::vector<float> softmax; local
80 softmax.reserve(scores_size);
99 softmax.push_back(exp_scores[i] / denominator);
101 return softmax;
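
  The fragment above follows the usual recipe: exponentiate each score, accumulate a denominator, and divide. A minimal Python sketch of that recipe, assuming the customary shift-by-max for numerical stability (the visible lines only show the reserve/exp/divide sequence, so the shift is an assumption):

      import math

      def stable_softmax(scores):
          # Shift by the max so exp() cannot overflow for large scores.
          m = max(scores)
          exp_scores = [math.exp(s - m) for s in scores]
          denominator = sum(exp_scores)
          return [e / denominator for e in exp_scores]

      print(stable_softmax([1.0, 2.0, 3.0]))  # ~[0.090, 0.245, 0.665]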
  /external/tensorflow/tensorflow/contrib/labeled_tensor/python/ops/
nn.py 35 softmax = core.define_unary_op('softmax', nn.softmax) variable
  /external/tensorflow/tensorflow/python/keras/_impl/keras/
activations.py 30 @tf_export('keras.activations.softmax')
31 def softmax(x, axis=-1): function
32 """Softmax activation function.
36 axis: Integer, axis along which the softmax normalization is applied.
39 Tensor, output of softmax transformation.
46 return K.softmax(x)
52 raise ValueError('Cannot apply softmax to a tensor that is 1D')
backend.py 3272 def softmax(x): function
    [all...]
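
  The Keras activation above normalizes along a caller-chosen axis and rejects 1-D input. A minimal NumPy sketch of that contract (the real function dispatches to K.softmax for the 2-D case; this standalone version is an assumption for illustration):

      import numpy as np

      def softmax(x, axis=-1):
          if x.ndim < 2:
              raise ValueError('Cannot apply softmax to a tensor that is 1D')
          shifted = x - np.max(x, axis=axis, keepdims=True)  # stability shift
          e = np.exp(shifted)
          return e / np.sum(e, axis=axis, keepdims=True)

      batch = np.array([[1.0, 2.0, 3.0], [1.0, 1.0, 1.0]])
      print(softmax(batch).sum(axis=-1))  # each row sums to 1.0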
  /external/tensorflow/tensorflow/cc/gradients/
nn_grad.cc 30 // Softmax gradient function.
31 // p = softmax(x) maps from [batch, n] to [batch, m]
48 REGISTER_GRADIENT_OP("Softmax", SoftmaxGrad);
53 auto softmax = Exp(scope, op.output(0)); local
55 auto mul = Mul(scope, sum, softmax);
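
  The file registers SoftmaxGrad for the Softmax op; the Exp/Sum/Mul lines visible above recover a softmax from a log-softmax output, so they likely belong to the neighboring LogSoftmax gradient. For plain Softmax the well-known identity is dx = (dy - sum(dy * y)) * y where y = softmax(x). A sketch with a finite-difference check (the check harness is illustrative, not from the source):

      import numpy as np

      def softmax(x):
          e = np.exp(x - x.max(-1, keepdims=True))
          return e / e.sum(-1, keepdims=True)

      def softmax_grad(dy, y):
          # dx = (dy - sum(dy * y, last axis)) * y
          return (dy - np.sum(dy * y, axis=-1, keepdims=True)) * y

      x = np.array([[0.5, -1.0, 2.0]])
      dy = np.array([[1.0, 0.0, 0.0]])  # gradient w.r.t. y[0, 0] only
      y = softmax(x)
      eps, num = 1e-6, np.zeros_like(x)
      for i in range(x.shape[1]):
          xp, xm = x.copy(), x.copy()
          xp[0, i] += eps
          xm[0, i] -= eps
          num[0, i] = (softmax(xp) - softmax(xm))[0, 0] / (2 * eps)
      print(np.allclose(softmax_grad(dy, y), num, atol=1e-5))  # True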
  /external/tensorflow/tensorflow/compiler/tf2xla/kernels/
softmax_op.cc 16 // XLA-specific Ops for softmax.
55 xla::ComputationDataHandle softmax; variable
57 // softmax = shifted_logits - log(sum(exp(shifted_logits)))
61 softmax = b->Sub(shifted_logits, log_sum_exp, {kBatchDim});
63 // softmax = exp(shifted_logits) / sum(exp(shifted_logits))
67 softmax = b->Div(exp_shifted, sum_exp, {kBatchDim});
70 ctx->SetOutput(0, softmax);
77 REGISTER_XLA_OP(Name("Softmax"), SoftmaxOp);
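
  The XLA kernel computes both variants from the same max-shifted logits, switching between a log-softmax path and a softmax path. A NumPy sketch of the two formulas shown in the comments above:

      import numpy as np

      def xla_style_softmax(logits, log=False):
          shifted = logits - logits.max(-1, keepdims=True)
          if log:
              # log-softmax = shifted_logits - log(sum(exp(shifted_logits)))
              return shifted - np.log(np.exp(shifted).sum(-1, keepdims=True))
          # softmax = exp(shifted_logits) / sum(exp(shifted_logits))
          e = np.exp(shifted)
          return e / e.sum(-1, keepdims=True)

      x = np.array([[1.0, 2.0, 3.0]])
      print(np.allclose(np.exp(xla_style_softmax(x, log=True)),
                        xla_style_softmax(x)))  # True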
  /external/tensorflow/tensorflow/contrib/distributions/python/ops/
vector_diffeomixture.py 103 grid = softmax(
949 def softmax(x, axis, name=None): function
    [all...]
  /external/tensorflow/tensorflow/core/grappler/costs/
analytical_cost_estimator_test.cc 82 auto softmax = ops::Softmax(s.WithOpName("softmax"), logits); local
83 auto lsm = ops::Log(s.WithOpName("lsm"), softmax);
109 // TODO(http://b/70031363): Accurate estimator for Softmax needed
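
  The test graph chains Log onto Softmax (lsm = log(softmax(logits))). That composition is exactly what a fused log-softmax avoids: once a logit is large enough to overflow exp, the composed form produces NaN while the direct form stays finite. A small demonstration (illustrative, not from the test):

      import numpy as np

      x = np.array([[1000.0, 0.0]])
      shifted = x - x.max(-1, keepdims=True)
      direct = shifted - np.log(np.exp(shifted).sum(-1, keepdims=True))
      with np.errstate(over='ignore', invalid='ignore', divide='ignore'):
          e = np.exp(x)                                  # overflows to inf
          composed = np.log(e / e.sum(-1, keepdims=True))
      print(direct)    # finite: [[    0. -1000.]]
      print(composed)  # [[nan -inf]]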
  /external/tensorflow/tensorflow/examples/android/src/org/tensorflow/demo/
TensorFlowYoloDetector.java 118 private void softmax(final float[] vals) { method in class:TensorFlowYoloDetector
216 softmax(classes);
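
  The detector applies softmax in place to a flat array of per-class scores before thresholding. A Python sketch of that in-place style, assuming the usual shift-by-max (the Java method body is not shown in the snippet):

      import math

      def softmax_inplace(vals):
          m = max(vals)          # shift so exp() stays bounded
          total = 0.0
          for i, v in enumerate(vals):
              vals[i] = math.exp(v - m)
              total += vals[i]
          for i in range(len(vals)):
              vals[i] /= total

      classes = [2.0, 1.0, 0.1]
      softmax_inplace(classes)
      print(classes)  # ~[0.659, 0.242, 0.099]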
  /external/tensorflow/tensorflow/python/ops/
nn_ops.py 1717 def softmax(logits, axis=None, name=None, dim=None): function
    [all...]
  /hardware/qcom/neuralnetworks/hvxservice/1.0/
HexagonOperationsCheck.cpp 359 bool softmax(const std::vector<uint32_t>& ins, const std::vector<uint32_t>& outs, function in namespace:android::hardware::neuralnetworks::V1_0::implementation::hexagon::__anon52541
361 return activation(ins, outs, model, 2, OperationType::SOFTMAX);
451 {{OperationType::SOFTMAX, OperandType::TENSOR_FLOAT32}, softmax},
476 {{OperationType::SOFTMAX, OperandType::TENSOR_QUANT8_ASYMM}, softmax},
HexagonOperationsPrepare.cpp 455 bool softmax(const std::vector<uint32_t>& ins, const std::vector<uint32_t>& outs, function in namespace:android::hardware::neuralnetworks::V1_0::implementation::hexagon::__anon52542::float32
457 HEXAGON_SOFT_ASSERT_EQ(2, ins.size(), "Need 2 inputs for float32::softmax");
458 HEXAGON_SOFT_ASSERT_EQ(1, outs.size(), "Need 1 output for float32::softmax");
893 bool softmax(const std::vector<uint32_t>& ins, const std::vector<uint32_t>& outs, function in namespace:android::hardware::neuralnetworks::V1_0::implementation::hexagon::__anon52542::quant8_asym
    [all...]
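
  The Hexagon checks above encode the NNAPI SOFTMAX signature: two inputs (the tensor plus a scalar beta) and one output, registered for both float32 and quantized uint8 tensors. A float32 sketch following the NNAPI definition output = exp(beta * (input - max)) / sum(...) (the formula is NNAPI's, not visible in these lines):

      import numpy as np

      def nnapi_softmax(input_tensor, beta):
          # Two inputs (tensor, scalar beta), one output.
          shifted = input_tensor - input_tensor.max(-1, keepdims=True)
          e = np.exp(beta * shifted)
          return e / e.sum(-1, keepdims=True)

      x = np.array([[1.0, 2.0, 3.0]])
      print(nnapi_softmax(x, 1.0))   # plain softmax
      print(nnapi_softmax(x, 10.0))  # larger beta sharpens the distribution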
  /external/tensorflow/tensorflow/contrib/layers/python/layers/
layers.py 67 'sequence_to_images', 'softmax', 'spatial_softmax', 'stack', 'unit_norm',
2732 def softmax(logits, scope=None): function
    [all...]
  /external/tensorflow/tensorflow/core/kernels/hexagon/
graph_transferer_test.cc 68 "Const", "Softmax", "Identity"};
91 const auto unique_name = scope.GetUniqueNameForOp("Softmax");
92 auto builder = NodeBuilder(unique_name, "Softmax").Input(_logits);
167 Output softmax = BuildSoftmaxOps(root.WithOpName("softmax"), conv); local
188 Output softmax = BuildSoftmaxOps(root.WithOpName("softmax"), max_pool); local
320 const std::vector<string> output_node_names = {"softmax"};
346 const std::vector<string> output_node_names = {"softmax"};
389 // output_node_names.emplace_back("softmax");
    [all...]
  /external/tensorflow/tensorflow/core/kernels/
nn_ops_test.cc 1265 auto softmax = ops::Softmax(root, input); local
    [all...]
  /external/tensorflow/tensorflow/contrib/lite/toco/
import_tensorflow.cc 847 CHECK_EQ(node.op(), "Softmax");
850 auto* softmax = new SoftmaxOperator; local
851 softmax->inputs.push_back(input_name);
852 softmax->outputs.push_back(node.name());
853 // TensorFlow's Softmax doesn't seem to admit a 'beta' parameter.
855 softmax->beta = 1.f;
856 model->operators.emplace_back(softmax);
    [all...]
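
  TOCO's SoftmaxOperator carries a beta field, but since TensorFlow's Softmax node has no such attribute the importer pins beta = 1.0. A sketch of what beta means, showing that beta = 1 reduces to plain softmax (softmax_beta is an illustrative name, not from the source):

      import numpy as np

      def softmax(x):
          e = np.exp(x - x.max(-1, keepdims=True))
          return e / e.sum(-1, keepdims=True)

      def softmax_beta(x, beta=1.0):
          # beta scales the logits before normalization.
          return softmax(beta * x)

      x = np.array([[0.5, 1.5, -0.5]])
      print(np.allclose(softmax_beta(x, 1.0), softmax(x)))  # True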
