    Searched full:variance (Results 101 - 125 of 662) sorted by null

  /external/tensorflow/tensorflow/tools/api/golden/
tensorflow.distributions.-student-t.pbtxt 144 name: "variance"
145 argspec: "args=[\'self\', \'name\'], varargs=None, keywords=None, defaults=[\'variance\'], "
tensorflow.distributions.-uniform.pbtxt 144 name: "variance"
145 argspec: "args=[\'self\', \'name\'], varargs=None, keywords=None, defaults=[\'variance\'], "
  /frameworks/opt/net/wifi/tests/wifitests/src/com/android/server/wifi/util/
KalmanFilterTest.java 123 double variance = (sumSquares - sum * sum) / (n * n); local
125 assertTrue(variance < 1.5);
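
The KalmanFilterTest snippet above estimates a variance from running sums and asserts it stays below a threshold. A minimal sketch of that identity, Var(x) = E[x^2] - (E[x])^2, with hypothetical sample data standing in for the filter's per-sample error (the 1.5 threshold mirrors the assertion in the snippet):

```python
import random

def variance_from_sums(total, total_squares, n):
    """Population variance via Var(x) = E[x^2] - (E[x])^2."""
    mean = total / n
    return total_squares / n - mean * mean

# Hypothetical data standing in for the filter's per-sample error.
samples = [random.gauss(0.0, 1.0) for _ in range(1000)]
s = sum(samples)
ss = sum(x * x for x in samples)
assert variance_from_sums(s, ss, len(samples)) < 1.5
```
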
  /art/libartbase/base/
histogram_test.cc 57 double variance; local
62 variance = hist->Variance();
63 EXPECT_DOUBLE_EQ(64.25, variance);
  /external/ImageMagick/MagickCore/
statistic.h 42 variance, member in struct:_ChannelStatistics
  /external/libvpx/libvpx/vp8/encoder/
mcomp.h 15 #include "vpx_dsp/variance.h"
  /external/skia/tools/
Stats.h 73 double var; // Estimate of population variance.
  /external/skqp/tools/
Stats.h 73 double var; // Estimate of population variance.
  /external/tensorflow/tensorflow/compiler/tf2xla/kernels/
batch_norm_op.cc 69 // calculated mean and variance.
77 // variance to the gradient. Here we maintain the same behavior by setting
78 // them to the mean and variance calculated by BatchNormTraining.
86 // Directly send input to output as mean and variance in inference mode.
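
As the batch_norm_op.cc comments above note, in inference mode the op does not recompute statistics; it normalizes with the mean and variance it is handed (typically moving averages) and forwards them unchanged. A minimal NumPy sketch of that inference path, not the XLA kernel itself:

```python
import numpy as np

def batch_norm_inference(x, mean, variance, gamma, beta, epsilon=1e-3):
    """Normalize with supplied statistics; nothing is recomputed from the batch."""
    inv_std = 1.0 / np.sqrt(variance + epsilon)
    return gamma * (x - mean) * inv_std + beta
```
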
  /external/webrtc/webrtc/modules/remote_bitrate_estimator/
overuse_estimator.h 33 // Returns the estimated noise/jitter variance in ms^2.
  /cts/tests/tests/uirendering/src/android/uirendering/cts/bitmapcomparers/
MSSIMComparer.java 120 * Finds the variance of the two sets of pixels, as well as the covariance of the windows. The
121 * return value is an array of doubles, the first is the variance of the first set of pixels,
122 * the second is the variance of the second set of pixels, and the third is the covariance.
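
A hedged sketch of the window statistics the MSSIMComparer comment describes: the variance of each pixel window plus their covariance, returned as a three-element array. The function and argument names are illustrative, not the CTS code:

```python
import numpy as np

def window_stats(pixels_a, pixels_b):
    """Return [var(a), var(b), cov(a, b)] for two equally sized pixel windows."""
    a = np.asarray(pixels_a, dtype=np.float64)
    b = np.asarray(pixels_b, dtype=np.float64)
    mean_a, mean_b = a.mean(), b.mean()
    var_a = np.mean((a - mean_a) ** 2)
    var_b = np.mean((b - mean_b) ** 2)
    cov_ab = np.mean((a - mean_a) * (b - mean_b))
    return [var_a, var_b, cov_ab]
```

These three quantities, together with the two window means, are exactly what the SSIM formula consumes.
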
  /external/ImageMagick/Magick++/lib/Magick++/
Statistic.h 175 // Standard deviation, sqrt(variance)
190 // Variance
191 double variance() const;
  /external/icu/icu4c/source/test/perf/howExpensiveIs/
readme.txt 72 Intel(R) Core(TM) i7-2720QM CPU @ 2.20GHz",MacBook 2.4ghz (Core2D),MacBook 2GhzCore2,AIX Power,MB 2.4 Variance,MB 2 variance,AIX Variance
  /external/tensorflow/tensorflow/core/kernels/
debug_ops_test.cc 297 8.97959183673, // variance of non-inf and non-nan elements.
336 8.97959183673, // variance of non-inf and non-nan elements.
364 7.33333333333, // variance of non-inf and non-nan elements.
431 0.0, // variance of non-inf and non-nan elements.
463 0.0, // variance of non-inf and non-nan elements.
488 14.75, // variance of non-inf and non-nan elements.
515 7.33333333333, // variance of non-inf and non-nan elements.
541 6.25, // variance of non-inf and non-nan elements.
567 576.0, // variance of non-inf and non-nan elements.
594 0.25, // variance of non-inf and non-nan elements
    [all...]
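
The debug_ops_test expectations above are variances taken over only the finite elements of a tensor. A small NumPy sketch of that reduction (the specific expected values, e.g. 8.97959..., come from the test's own input tensors):

```python
import numpy as np

def finite_variance(tensor):
    """Population variance over elements that are neither inf nor NaN."""
    values = np.asarray(tensor, dtype=np.float64)
    finite = values[np.isfinite(values)]
    return float(np.var(finite))  # ddof=0, i.e. a population variance

print(finite_variance([np.inf, np.nan, 1.0, 3.0, 7.0, 9.0]))  # -> 10.0
```
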
  /external/tensorflow/tensorflow/python/kernel_tests/distributions/
student_t_test.py 269 self.assertEqual(student.variance().get_shape(), (3,))
352 # df = 0.5 ==> undefined mean ==> undefined variance.
353 # df = 1.5 ==> infinite variance.
359 var = student.variance().eval()
360 ## scipy uses inf for variance when the mean is undefined. When mean is
361 # undefined we say variance is undefined as well. So test the first
377 # df = 1.5 ==> infinite variance.
382 var = student.variance().eval()
393 # df <= 1 ==> variance not defined
397 student.variance().eval(
    [all...]
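
The student_t_test comments above encode the standard piecewise result for a Student-t with df degrees of freedom: the variance is undefined for df <= 1, infinite for 1 < df <= 2, and df / (df - 2) times the squared scale otherwise. A sketch of that rule, independent of the TensorFlow API:

```python
def student_t_variance(df, scale=1.0):
    """Variance of a Student-t distribution with `df` degrees of freedom."""
    if df <= 1.0:
        return float("nan")   # mean undefined, so variance undefined
    if df <= 2.0:
        return float("inf")   # variance diverges
    return scale * scale * df / (df - 2.0)

print(student_t_variance(0.5), student_t_variance(1.5), student_t_variance(3.0))  # nan inf 3.0
```

Note the test's own caveat: SciPy reports inf when the mean is undefined (df <= 1), while the test treats that case as undefined; the sketch follows the latter convention.
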
  /external/webrtc/webrtc/modules/audio_processing/intelligibility/
intelligibility_utils.h 47 // The result is an array of variances per position: the i-th variance
48 // is the variance of the stream of data on the i-th positions in the
91 const float* variance() const { return variance_.get(); } function in class:webrtc::intelligibility::VarianceArray
intelligibility_enhancer.cc 184 clear_variance_.variance(), clear_variance_.variance() + freqs_, 0.f);
197 FilterVariance(clear_variance_.variance(), filtered_clear_var_.get());
198 FilterVariance(noise_variance_.variance(), filtered_noise_var_.get());
209 } // Else experiencing variance underflow, so do nothing.
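
The intelligibility_utils comment above describes an array of running variances, one per position (frequency bin) in the stream. One plausible shape for such a tracker, sketched with an exponential forgetting factor; the real VarianceArray supports several window types, and the class, method, and `decay` names here are assumptions:

```python
import numpy as np

class RunningVarianceArray:
    """Per-position running mean/variance with exponential forgetting."""

    def __init__(self, num_positions, decay=0.9):
        self.decay = decay
        self.mean = np.zeros(num_positions)
        self.var = np.zeros(num_positions)

    def step(self, frame):
        """frame holds one value per position (e.g. one magnitude per frequency bin)."""
        frame = np.asarray(frame, dtype=np.float64)
        delta = frame - self.mean
        self.mean += (1.0 - self.decay) * delta
        self.var = self.decay * (self.var + (1.0 - self.decay) * delta * delta)

    def variance(self):
        return self.var
```
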
  /external/webrtc/webrtc/modules/video_coding/
jitter_estimator.h 74 double _varNoise; // Variance of the time-deviation from the line
90 // Updates the random jitter estimate, i.e. the variance
141 double _varFrameSize; // Frame size variance
  /test/vti/dashboard/src/main/java/com/android/vts/util/
StatSummary.java 64 * <p>Sets the label as provided. Initializes the mean, variance, and n (number of values seen)
75 * Update the mean and variance using Welford's single-pass method.
89 * Combine the mean and variance with another StatSummary.
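
The StatSummary javadoc above names two standard techniques: Welford's single-pass update for the mean and variance, and merging one summary into another (the parallel combine formula of Chan et al.). A hedged Python sketch of both, not a translation of the VTS class:

```python
class StatSummary:
    """Running mean and population variance via Welford's single-pass method."""

    def __init__(self):
        self.n = 0
        self.mean = 0.0
        self.m2 = 0.0  # sum of squared deviations from the running mean

    def update(self, x):
        self.n += 1
        delta = x - self.mean
        self.mean += delta / self.n
        self.m2 += delta * (x - self.mean)

    def variance(self):
        return self.m2 / self.n if self.n else 0.0

    def merge(self, other):
        """Fold another summary into this one (Chan et al. combine step)."""
        if other.n == 0:
            return
        delta = other.mean - self.mean
        total = self.n + other.n
        self.m2 += other.m2 + delta * delta * self.n * other.n / total
        self.mean += delta * other.n / total
        self.n = total
```
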
  /external/tensorflow/tensorflow/contrib/quantize/python/
fold_batch_norms.py 47 and variance and using them for batch normalization. This value is used
70 and variance and using them for batch normalization.
83 # new weights = old weights * gamma / sqrt(variance + epsilon)
84 # new biases = -mean * gamma / sqrt(variance + epsilon) + beta
215 # empty 'mean' and empty 'variance', and produces the mean and the variance
231 # The batch variance used during forward and backward prop is biased,
233 # calculation, the variance is corrected by the term N/N-1 (Bessel's
234 # correction). The variance tensor read from FuseBatchNorm has bessel's
306 from regular batch norm to frozen mean and variance
    [all...]
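
The fold_batch_norms comments above give the folding identities directly: new_weights = weights * gamma / sqrt(variance + epsilon) and new_bias = beta - mean * gamma / sqrt(variance + epsilon). A minimal NumPy sketch of that rewrite for a per-output-channel batch norm, leaving aside the Bessel-correction adjustment the later comments discuss:

```python
import numpy as np

def fold_batch_norm(weights, gamma, beta, mean, variance, epsilon=1e-3):
    """Fold a frozen batch norm into the preceding layer's weights and bias.

    weights has shape (..., out_channels); gamma/beta/mean/variance have
    shape (out_channels,), so the multiply broadcasts over the channel axis.
    """
    scale = gamma / np.sqrt(variance + epsilon)
    folded_weights = weights * scale
    folded_bias = beta - mean * scale
    return folded_weights, folded_bias
```

Applying the folded layer to an input should then reproduce gamma * (Wx - mean) / sqrt(variance + epsilon) + beta from the original layer-plus-batch-norm pair.
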
  /external/apache-commons-math/src/main/java/org/apache/commons/math/stat/correlation/
Covariance.java 24 import org.apache.commons.math.stat.descriptive.moment.Variance;
162 Variance variance = new Variance(biasCorrected); local
170 outMatrix.setEntry(i, i, variance.evaluate(matrix.getColumn(i)));
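
The Covariance snippet above fills the diagonal of the covariance matrix with per-column variances (bias-corrected when requested), since cov(x, x) is just var(x). A small sketch of that structure, not the Commons Math implementation:

```python
import numpy as np

def covariance_matrix(matrix, bias_corrected=True):
    """Covariance of the columns of `matrix`; diagonal entries are the column variances."""
    data = np.asarray(matrix, dtype=np.float64)
    ddof = 1 if bias_corrected else 0          # Bessel's correction on or off
    centered = data - data.mean(axis=0)
    return centered.T @ centered / (data.shape[0] - ddof)
```
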
  /external/tensorflow/tensorflow/compiler/xla/service/gpu/
gpu_layout_assignment_test.cc 109 // The shape of the scale, offset, mean, and variance inputs to
130 auto* variance = builder.AddInstruction( local
131 HloInstruction::CreateParameter(4, aux_shape, "variance"));
141 {operand, scale, offset, mean, variance, epsilon, feature_index},
247 // The shape of the scale, mean, and variance inputs to BatchNormGrad. These
  /external/tensorflow/tensorflow/tools/graph_transforms/
fold_old_batch_norms.cc 73 Tensor variance = GetNodeTensorAttr(variance_node, "value"); local
80 TF_RETURN_IF_ERROR(ErrorIfNotVector(variance, "Variance", num_cols));
91 (1.0f / sqrtf(variance.flat<float>()(i) + variance_epsilon)) *
97 (1.0f / sqrtf(variance.flat<float>()(i) + variance_epsilon));
  /external/libvpx/libvpx/vp9/encoder/
vp9_speed_features.h 127 // Skips intra modes other than DC_PRED if the source variance is small
146 // Use an arbitrary partitioning scheme based on source variance within
150 // Use non-fixed partitions based on source variance
371 // A source variance threshold below which filter search is disabled
461 // variance.
465 // temporal variance. If the low temporal variance flag is set for a block,
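
Several of the vp9_speed_features flags above key shortcuts off the source variance of a block (skipping non-DC intra modes, picking a partitioning). A hedged sketch of the per-block quantity such thresholds compare against, in the usual sum / sum-of-squares form (libvpx computes this in optimized C and SIMD, not like this):

```python
import numpy as np

def block_source_variance(block):
    """Variance of a block of source pixels: sse / n - (sum / n)^2."""
    pixels = np.asarray(block, dtype=np.int64).ravel()
    n = pixels.size
    total = int(pixels.sum())
    sse = int((pixels * pixels).sum())
    return sse / n - (total / n) ** 2

# A flat block has near-zero variance, so a "low source variance" feature
# can, for example, skip the more expensive intra modes for that block.
```
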
  /external/tensorflow/tensorflow/contrib/kfac/python/ops/
loss_functions.py 455 """Negative log prob loss for a normal distribution with mean and variance.
459 assume the variance is held constant. The Fisher Information for n = 1
462 F = [[1 / variance, 0],
463 [ 0, 0.5 / variance^2]]
466 vector as [mean, variance]. For n > 1, the mean parameter vector is
467 concatenated with the variance parameter vector.
472 def __init__(self, mean, variance, targets=None, seed=None):
474 assert len(variance.shape) == 2, "Expect 2D variance tensor."
476 self._variance = variance
    [all...]
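
The loss_functions docstring above states the Fisher information of a single Gaussian observation with respect to the [mean, variance] parameters: F = [[1 / variance, 0], [0, 0.5 / variance^2]]. A quick Monte-Carlo sanity check of that matrix as the expected outer product of the score; the function names are mine, not the KFAC API:

```python
import numpy as np

def gaussian_fisher(variance):
    """Analytic Fisher information of N(mean, variance) w.r.t. [mean, variance]."""
    return np.array([[1.0 / variance, 0.0],
                     [0.0, 0.5 / variance ** 2]])

def monte_carlo_fisher(mean, variance, num_samples=200_000, seed=0):
    rng = np.random.default_rng(seed)
    x = rng.normal(mean, np.sqrt(variance), size=num_samples)
    # Score (gradient of the log density) w.r.t. the mean and w.r.t. the variance.
    score_mean = (x - mean) / variance
    score_var = -0.5 / variance + (x - mean) ** 2 / (2.0 * variance ** 2)
    scores = np.stack([score_mean, score_var], axis=1)
    return scores.T @ scores / num_samples

print(gaussian_fisher(2.0))
print(monte_carlo_fisher(1.0, 2.0))  # close to [[0.5, 0], [0, 0.125]]
```
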
