    Searched refs: FusedActivationFunctionType (Results 1 - 25 of 34)


  /external/tensorflow/tensorflow/contrib/lite/toco/runtime/
types.h 27 using tflite::FusedActivationFunctionType;
  /external/tensorflow/tensorflow/contrib/lite/toco/graph_transformations/
unfuse_activation_functions.cc 39 case FusedActivationFunctionType::kRelu:
42 case FusedActivationFunctionType::kRelu6:
45 case FusedActivationFunctionType::kRelu1:
59 op->fused_activation_function = FusedActivationFunctionType::kNone;
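The unfuse pass above follows one pattern: switch on the operator's fused activation, decide which standalone activation op should be inserted after it, then reset the fused field to kNone (line 59). A minimal sketch of that pattern; the Operator and OperatorType types are simplified stand-ins for the real toco classes, and the graph-editing side (actually inserting the new op) is omitted.

    #include <cstdint>

    // Stand-in for the enum declared in kernels/internal/types.h above.
    enum class FusedActivationFunctionType : std::uint8_t { kNone, kRelu6, kRelu1, kRelu };

    // Hypothetical, simplified operator node (the real toco::Operator is richer).
    struct Operator {
      FusedActivationFunctionType fused_activation_function = FusedActivationFunctionType::kNone;
    };

    // Hypothetical tags for the standalone activation ops a graph could contain.
    enum class OperatorType { kRelu, kRelu6, kRelu1 };

    // Unfuse: pick the standalone activation op to insert, then clear the fused
    // field, mirroring the switch in unfuse_activation_functions.cc.
    bool UnfuseActivation(Operator* op, OperatorType* activation_to_insert) {
      switch (op->fused_activation_function) {
        case FusedActivationFunctionType::kRelu:
          *activation_to_insert = OperatorType::kRelu;
          break;
        case FusedActivationFunctionType::kRelu6:
          *activation_to_insert = OperatorType::kRelu6;
          break;
        case FusedActivationFunctionType::kRelu1:
          *activation_to_insert = OperatorType::kRelu1;
          break;
        default:
          return false;  // Nothing fused, nothing to unfuse.
      }
      op->fused_activation_function = FusedActivationFunctionType::kNone;
      return true;
    }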
remove_trivial_quantized_activation_func.cc 32 if (op->fused_activation_function != FusedActivationFunctionType::kRelu &&
33 op->fused_activation_function != FusedActivationFunctionType::kRelu6) {
48 if (op->fused_activation_function == FusedActivationFunctionType::kRelu ||
49 op->fused_activation_function == FusedActivationFunctionType::kRelu6) {
61 if (op->fused_activation_function == FusedActivationFunctionType::kRelu6) {
78 op->fused_activation_function = FusedActivationFunctionType::kNone;
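remove_trivial_quantized_activation_func.cc drops a fused ReLU/ReLU6 when the quantized output range already clamps at least as tightly, so the activation cannot change any value. A rough sketch of that idea, assuming ordinary affine uint8 quantization (real = scale * (q - zero_point)); the QuantizationParams struct and the exact bounds check are illustrative, not the precise toco logic.

    #include <cstdint>

    enum class FusedActivationFunctionType : std::uint8_t { kNone, kRelu6, kRelu1, kRelu };

    // Hypothetical affine quantization parameters for a uint8 output tensor.
    struct QuantizationParams {
      double scale = 0.0;
      int zero_point = 0;
    };

    // Smallest / largest real values representable by a uint8 tensor with these params.
    inline double QuantizedMin(const QuantizationParams& q) { return q.scale * (0 - q.zero_point); }
    inline double QuantizedMax(const QuantizationParams& q) { return q.scale * (255 - q.zero_point); }

    // Returns true (and clears the fused field) when the representable range is
    // already inside the activation's clamp range, making the activation a no-op.
    bool RemoveTrivialQuantizedActivation(FusedActivationFunctionType* fused,
                                          const QuantizationParams& out_quant) {
      double lo, hi;
      if (*fused == FusedActivationFunctionType::kRelu) {
        lo = 0.0; hi = 1e38;            // ReLU: clamp below at zero only.
      } else if (*fused == FusedActivationFunctionType::kRelu6) {
        lo = 0.0; hi = 6.0;             // ReLU6: clamp to [0, 6].
      } else {
        return false;                   // Only kRelu / kRelu6 are handled, as in the pass above.
      }
      if (QuantizedMin(out_quant) >= lo && QuantizedMax(out_quant) <= hi) {
        *fused = FusedActivationFunctionType::kNone;
        return true;
      }
      return false;
    }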
fuse_activation_functions.cc 63 if (op->fused_activation_function != FusedActivationFunctionType::kNone) {
81 op->fused_activation_function = FusedActivationFunctionType::kRelu6;
83 op->fused_activation_function = FusedActivationFunctionType::kRelu1;
85 op->fused_activation_function = FusedActivationFunctionType::kRelu;
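fuse_activation_functions.cc goes the other way: if the producing op has nothing fused yet (the kNone guard on line 63), a following Relu/Relu6/Relu1 op can be absorbed by setting the corresponding enum value. A sketch of that direction, reusing the hypothetical Operator and OperatorType stand-ins from the first sketch above; removing the now-redundant activation op from the graph is left to the caller.

    // Hypothetical sketch of the fusing direction: absorb a trailing activation op.
    bool FuseActivation(Operator* producer, OperatorType following_activation) {
      // Matches the guard in fuse_activation_functions.cc: never overwrite an
      // activation that is already fused.
      if (producer->fused_activation_function != FusedActivationFunctionType::kNone) {
        return false;
      }
      switch (following_activation) {
        case OperatorType::kRelu6:
          producer->fused_activation_function = FusedActivationFunctionType::kRelu6;
          break;
        case OperatorType::kRelu1:
          producer->fused_activation_function = FusedActivationFunctionType::kRelu1;
          break;
        case OperatorType::kRelu:
          producer->fused_activation_function = FusedActivationFunctionType::kRelu;
          break;
      }
      return true;  // Caller then removes the now-redundant activation op.
    }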
resolve_constant_binary.cc 69 FusedActivationFunctionType::kNone);
228 FusedActivationFunctionType::kNone) {
resolve_constant_unary.cc 71 FusedActivationFunctionType::kNone) {
  /external/tensorflow/tensorflow/contrib/lite/kernels/internal/
common.h 55 inline void GetActivationMinMax(FusedActivationFunctionType ac,
59 case FusedActivationFunctionType::kNone:
63 case FusedActivationFunctionType::kRelu:
67 case FusedActivationFunctionType::kRelu1:
71 case FusedActivationFunctionType::kRelu6:
84 template <FusedActivationFunctionType Ac>
types.h 22 enum class FusedActivationFunctionType : uint8 { kNone, kRelu6, kRelu1, kRelu };
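common.h turns the enum into a clamping range that kernels apply after the main computation; the templated overload on line 84 does the same selection at compile time. A minimal sketch of that mapping, assuming the usual semantics (kNone unbounded, kRelu [0, +max], kRelu1 [-1, 1], kRelu6 [0, 6]):

    #include <algorithm>
    #include <cfloat>
    #include <cstdint>

    enum class FusedActivationFunctionType : std::uint8_t { kNone, kRelu6, kRelu1, kRelu };

    // Sketch of GetActivationMinMax: translate the fused activation into a
    // [min, max] clamp range (assumed semantics, see lead-in above).
    inline void GetActivationMinMax(FusedActivationFunctionType ac,
                                    float* output_activation_min,
                                    float* output_activation_max) {
      switch (ac) {
        case FusedActivationFunctionType::kNone:
          *output_activation_min = -FLT_MAX;
          *output_activation_max = FLT_MAX;
          break;
        case FusedActivationFunctionType::kRelu:
          *output_activation_min = 0.f;
          *output_activation_max = FLT_MAX;
          break;
        case FusedActivationFunctionType::kRelu1:
          *output_activation_min = -1.f;
          *output_activation_max = 1.f;
          break;
        case FusedActivationFunctionType::kRelu6:
          *output_activation_min = 0.f;
          *output_activation_max = 6.f;
          break;
      }
    }

    // Kernels then clamp each output value into that range.
    inline float ActivationFunctionWithMinMax(float x, float min, float max) {
      return std::min(std::max(x, min), max);
    }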
  /external/tensorflow/tensorflow/contrib/lite/toco/tflite/
types.h 50 FusedActivationFunctionType faf_type);
51 static FusedActivationFunctionType Deserialize(int activation_function);
types.cc 146 FusedActivationFunctionType faf_type) {
148 case FusedActivationFunctionType::kNone:
150 case FusedActivationFunctionType::kRelu:
152 case FusedActivationFunctionType::kRelu6:
154 case FusedActivationFunctionType::kRelu1:
161 FusedActivationFunctionType ActivationFunction::Deserialize(
165 return FusedActivationFunctionType::kNone;
167 return FusedActivationFunctionType::kRelu;
169 return FusedActivationFunctionType::kRelu6;
171 return FusedActivationFunctionType::kRelu1
    [all...]
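types.cc converts between toco's enum and the ActivationFunctionType enum stored in the .tflite flatbuffer. A hedged sketch of that round trip; the stand-in ActivationFunctionType values below assume the usual schema ordering (NONE=0, RELU=1, RELU_N1_TO_1=2, RELU6=3), so check the generated schema header before relying on them.

    #include <cstdint>
    #include <stdexcept>

    enum class FusedActivationFunctionType : std::uint8_t { kNone, kRelu6, kRelu1, kRelu };

    // Stand-in for the flatbuffer enum generated from the TFLite schema (values assumed).
    enum class ActivationFunctionType : int { NONE = 0, RELU = 1, RELU_N1_TO_1 = 2, RELU6 = 3 };

    // Sketch of ActivationFunction::Serialize.
    ActivationFunctionType Serialize(FusedActivationFunctionType faf_type) {
      switch (faf_type) {
        case FusedActivationFunctionType::kNone:  return ActivationFunctionType::NONE;
        case FusedActivationFunctionType::kRelu:  return ActivationFunctionType::RELU;
        case FusedActivationFunctionType::kRelu6: return ActivationFunctionType::RELU6;
        case FusedActivationFunctionType::kRelu1: return ActivationFunctionType::RELU_N1_TO_1;
      }
      throw std::runtime_error("unhandled fused activation");
    }

    // Sketch of ActivationFunction::Deserialize (the inverse mapping).
    FusedActivationFunctionType Deserialize(int activation_function) {
      switch (static_cast<ActivationFunctionType>(activation_function)) {
        case ActivationFunctionType::NONE:         return FusedActivationFunctionType::kNone;
        case ActivationFunctionType::RELU:         return FusedActivationFunctionType::kRelu;
        case ActivationFunctionType::RELU6:        return FusedActivationFunctionType::kRelu6;
        case ActivationFunctionType::RELU_N1_TO_1: return FusedActivationFunctionType::kRelu1;
      }
      throw std::runtime_error("unsupported activation in model");
    }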
types_test.cc 168 std::pair<FusedActivationFunctionType, ::tflite::ActivationFunctionType>>
169 testdata = {{FusedActivationFunctionType::kNone,
171 {FusedActivationFunctionType::kRelu,
173 {FusedActivationFunctionType::kRelu6,
175 {FusedActivationFunctionType::kRelu1,
183 static_cast<FusedActivationFunctionType>(10000)),
operator_test.cc 114 op.fused_activation_function = FusedActivationFunctionType::kRelu6;
170 op.fused_activation_function = FusedActivationFunctionType::kRelu6;
266 op.fused_activation_function = FusedActivationFunctionType::kRelu6;
288 op.fused_activation_function = FusedActivationFunctionType::kRelu6;
304 op.fused_activation_function = FusedActivationFunctionType::kRelu6;
317 op.fused_activation_function = FusedActivationFunctionType::kRelu6;
326 op.fused_activation_function = FusedActivationFunctionType::kRelu6;
343 op.fused_activation_function = FusedActivationFunctionType::kRelu;
  /frameworks/ml/nn/common/operations/
Concatenation.cpp 36 tflite::optimized_ops::Concatenation<tflite::FusedActivationFunctionType::kNone, float>(
55 tflite::optimized_ops::Concatenation<tflite::FusedActivationFunctionType::kNone, uint8_t>(
Normalization.cpp 27 tflite::optimized_ops::L2Normalization<tflite::FusedActivationFunctionType::kNone>(
SimpleMath.cpp 57 tflite::optimized_ops::BroadcastAdd<tflite::FusedActivationFunctionType::activation>( \
132 tflite::optimized_ops::Add<tflite::FusedActivationFunctionType::activation>( \
157 tflite::optimized_ops::BroadcastMul<tflite::FusedActivationFunctionType::activation>( \
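The NNAPI wrappers above pick the TFLite optimized kernel at compile time by passing the activation as a template argument, and SimpleMath.cpp uses macros to dispatch a runtime activation value onto the right instantiation. A simplified sketch of that runtime-to-template dispatch; AddKernel is a hypothetical stand-in for tflite::optimized_ops::Add, not its real signature.

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>

    enum class FusedActivationFunctionType : std::uint8_t { kNone, kRelu6, kRelu1, kRelu };

    // Hypothetical kernel: the activation is a compile-time parameter, so the
    // clamp can be folded into the inner loop without a runtime branch.
    template <FusedActivationFunctionType Ac>
    void AddKernel(const float* a, const float* b, float* out, std::size_t n) {
      for (std::size_t i = 0; i < n; ++i) {
        float v = a[i] + b[i];
        if (Ac == FusedActivationFunctionType::kRelu)  v = std::max(v, 0.f);
        if (Ac == FusedActivationFunctionType::kRelu6) v = std::min(std::max(v, 0.f), 6.f);
        if (Ac == FusedActivationFunctionType::kRelu1) v = std::min(std::max(v, -1.f), 1.f);
        out[i] = v;
      }
    }

    // Runtime dispatch in the spirit of the macros in SimpleMath.cpp: map the
    // activation value carried by the model onto one template instantiation.
    void Add(FusedActivationFunctionType ac, const float* a, const float* b,
             float* out, std::size_t n) {
      switch (ac) {
        case FusedActivationFunctionType::kNone:  AddKernel<FusedActivationFunctionType::kNone>(a, b, out, n);  break;
        case FusedActivationFunctionType::kRelu:  AddKernel<FusedActivationFunctionType::kRelu>(a, b, out, n);  break;
        case FusedActivationFunctionType::kRelu6: AddKernel<FusedActivationFunctionType::kRelu6>(a, b, out, n); break;
        case FusedActivationFunctionType::kRelu1: AddKernel<FusedActivationFunctionType::kRelu1>(a, b, out, n); break;
      }
    }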
  /external/tensorflow/tensorflow/contrib/lite/kernels/internal/reference/
depthwiseconv_uint8.h 94 template <FusedActivationFunctionType Ac>
104 if (Ac == FusedActivationFunctionType::kNone) {
117 template <FusedActivationFunctionType Ac>
depthwiseconv_float.h 85 template <FusedActivationFunctionType Ac>
101 template <FusedActivationFunctionType Ac>
reference_ops.h 220 template <FusedActivationFunctionType Ac>
236 template <FusedActivationFunctionType Ac>
317 template <FusedActivationFunctionType Ac>
329 static_assert(Ac == FusedActivationFunctionType::kNone ||
330 Ac == FusedActivationFunctionType::kRelu ||
331 Ac == FusedActivationFunctionType::kRelu6 ||
332 Ac == FusedActivationFunctionType::kRelu1,
334 if (Ac == FusedActivationFunctionType::kNone) {
346 template <FusedActivationFunctionType Ac>
481 template <FusedActivationFunctionType Ac
    [all...]
  /external/tensorflow/tensorflow/core/kernels/neon/
depthwiseconv_float.h 552 template <FusedActivationFunctionType Ac>
559 static_assert(Ac == FusedActivationFunctionType::kNone ||
560 Ac == FusedActivationFunctionType::kRelu ||
561 Ac == FusedActivationFunctionType::kRelu6 ||
562 Ac == FusedActivationFunctionType::kRelu1,
670 if (Ac == FusedActivationFunctionType::kRelu) {
674 } else if (Ac == FusedActivationFunctionType::kRelu6) {
679 } else if (Ac == FusedActivationFunctionType::kRelu1) {
693 if (Ac == FusedActivationFunctionType::kRelu) {
695 } else if (Ac == FusedActivationFunctionType::kRelu6)
    [all...]
types.h 23 enum class FusedActivationFunctionType { kNone, kRelu6, kRelu1, kRelu };
  /external/tensorflow/tensorflow/contrib/lite/kernels/
l2norm.cc 67 type::L2Normalization<FusedActivationFunctionType::kNone>( \
concatenation.cc 113 type::Concatenation<FusedActivationFunctionType::kNone, scalar>( \
split.cc 120 optimized_ops::TensorFlowSplit<FusedActivationFunctionType::kNone, \
  /external/tensorflow/tensorflow/contrib/lite/kernels/internal/optimized/
optimized_ops.h 303 template <FusedActivationFunctionType Ac>
358 template <FusedActivationFunctionType Ac>
614 template <FusedActivationFunctionType Ac>
624 static_assert(Ac == FusedActivationFunctionType::kNone ||
625 Ac == FusedActivationFunctionType::kRelu ||
626 Ac == FusedActivationFunctionType::kRelu6 ||
627 Ac == FusedActivationFunctionType::kRelu1,
    [all...]
  /external/tensorflow/tensorflow/contrib/lite/toco/
dump_graphviz.cc 230 case FusedActivationFunctionType::kRelu:
233 case FusedActivationFunctionType::kRelu6:
236 case FusedActivationFunctionType::kRelu1:
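dump_graphviz.cc only needs a human-readable label for the fused activation when rendering the graph. A tiny sketch of that mapping; the function name and label strings are illustrative, not the exact ones the tool emits.

    #include <cstdint>
    #include <string>

    enum class FusedActivationFunctionType : std::uint8_t { kNone, kRelu6, kRelu1, kRelu };

    // Illustrative label mapping in the spirit of the switch in dump_graphviz.cc.
    std::string FusedActivationLabel(FusedActivationFunctionType ac) {
      switch (ac) {
        case FusedActivationFunctionType::kRelu:  return "ReLU";
        case FusedActivationFunctionType::kRelu6: return "ReLU6";
        case FusedActivationFunctionType::kRelu1: return "ReLU1";
        default:                                  return "";  // kNone: no annotation.
      }
    }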

