
Lines Matching refs:src1

60         worktype t0 = __op__((src1)[i], (src2)[i]);                 \
61 worktype t1 = __op__((src1)[i+1], (src2)[i+1]); \
66 t0 = __op__((src1)[i+2],(src2)[i+2]); \
67 t1 = __op__((src1)[i+3],(src2)[i+3]); \
75 worktype t0 = __op__((src1)[i],(src2)[i]); \
82 ( const type* src1, int step1, const type* src2, int step2, \
84 (src1, step1, src2, step2, dst, step, size) ) \
86 step1/=sizeof(src1[0]); step2/=sizeof(src2[0]); step/=sizeof(dst[0]); \
90 for( ; size.height--; src1 += step1, src2 += step2, dst += step ) \
92 worktype t0 = __op__((src1)[0],(src2)[0]); \
98 for( ; size.height--; src1 += step1, src2 += step2, dst += step ) \
111 ( const type* src1, int step1, const type* src2, int step2, \
113 (src1, step1, src2, step2, dst, step, size, 0) ) \
115 step1/=sizeof(src1[0]); step2/=sizeof(src2[0]); step/=sizeof(dst[0]); \
119 for( ; size.height--; src1 += step1, src2 += step2, dst += step ) \
121 worktype t0 = __op__((src1)[0],(src2)[0]); \
127 for( ; size.height--; src1 += step1, src2 += step2, dst += step ) \
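The fragments above (lines 60-127 of the source) come from a macro that instantiates element-wise binary operations over two strided planes: the inner loop is unrolled four columns at a time, a tail loop handles the remaining 0-3 columns, and the row pointers advance by element steps. A minimal self-contained sketch of that loop shape for int addition; the names (add_rows_i32, SizeWH) are illustrative stand-ins for the macro parameters and CvSize, not identifiers from the source.

    /* Width/height pair mirroring the role of CvSize, kept local so the
       sketch is self-contained. */
    typedef struct { int width, height; } SizeWH;

    /* Element-wise addition of two strided int planes, unrolled by four the
       way the macro-generated loops above are.  Steps are in elements here
       (the real code divides byte steps by sizeof(src[0]) first). */
    static void add_rows_i32( const int* src1, int step1,
                              const int* src2, int step2,
                              int* dst, int step, SizeWH size )
    {
        for( ; size.height--; src1 += step1, src2 += step2, dst += step )
        {
            int i;
            for( i = 0; i <= size.width - 4; i += 4 )
            {
                int t0 = src1[i]   + src2[i];
                int t1 = src1[i+1] + src2[i+1];
                dst[i]   = t0;
                dst[i+1] = t1;
                t0 = src1[i+2] + src2[i+2];
                t1 = src1[i+3] + src2[i+3];
                dst[i+2] = t0;
                dst[i+3] = t1;
            }
            for( ; i < size.width; i++ )      /* leftover 0..3 columns */
                dst[i] = src1[i] + src2[i];
        }
    }

Computing t0/t1 in pairs before the stores, as the original does, keeps two independent dependency chains in flight per iteration.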
286 CvMat srcstub1, srcstub2, *src1, *src2;
296 src1 = (CvMat*)srcarr1;
299 if( !CV_IS_MAT(src1) || !CV_IS_MAT(src2) || !CV_IS_MAT(dst))
301 if( CV_IS_MATND(src1) || CV_IS_MATND(src2) || CV_IS_MATND(dst))
303 CvArr* arrs[] = { src1, src2, dst };
359 CV_CALL( src1 = cvGetMat( src1, &srcstub1, &coi1 ));
367 if( !CV_ARE_TYPES_EQ( src1, src2 ) || !CV_ARE_TYPES_EQ( src1, dst ))
370 if( !CV_ARE_SIZES_EQ( src1, src2 ) || !CV_ARE_SIZES_EQ( src1, dst ))
373 type = CV_MAT_TYPE(src1->type);
374 size = cvGetMatSize( src1 );
380 if( CV_IS_MAT_CONT( src1->type & src2->type & dst->type ))
388 const float* src1data = (const float*)(src1->data.ptr);
403 const double* src1data = (const double*)(src1->data.ptr);
436 cont_flag = CV_IS_MAT_CONT( src1->type & src2->type & dst->type & mask->type );
480 src1_step = src1->step;
500 func_sfs( src1->data.ptr + y*src1->step, src1_step,
504 func( src1->data.ptr + y*src1->step, src1_step,
761 CvMat srcstub1, *src1 = (CvMat*)srcarr1;
771 if( !CV_IS_MAT(src1) || !CV_IS_MAT(src2) || !CV_IS_MAT(dst))
773 if( CV_IS_MATND(src1) || CV_IS_MATND(src2) || CV_IS_MATND(dst))
775 CvArr* arrs[] = { src1, src2, dst };
831 CV_CALL( src1 = cvGetMat( src1, &srcstub1, &coi1 ));
839 if( !CV_ARE_TYPES_EQ( src1, src2 ) || !CV_ARE_TYPES_EQ( src1, dst ))
842 if( !CV_ARE_SIZES_EQ( src1, src2 ) || !CV_ARE_SIZES_EQ( src1, dst ))
845 type = CV_MAT_TYPE(src1->type);
846 size = cvGetMatSize( src1 );
852 if( CV_IS_MAT_CONT( src1->type & src2->type & dst->type ))
860 const float* src1data = (const float*)(src1->data.ptr);
875 const double* src1data = (const double*)(src1->data.ptr);
908 cont_flag = CV_IS_MAT_CONT( src1->type & src2->type & dst->type & mask->type );
952 src1_step = src1->step;
972 func_sfs( src1->data.ptr + y*src1->step, src1_step,
976 func( src1->data.ptr + y*src1->step, src1_step,
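Lines 286-504 and 761-976 are two copies of the same wrapper pattern: get CvMat headers for the arguments, verify that both sources and the destination share element type and size, and, when all three are continuous, process the data as one long row instead of row by row. A condensed sketch of that pattern; MatLike and RowFunc are assumed stand-ins for CvMat and the macro-generated icv* kernels, and float elements are assumed for the continuity check.

    /* Illustrative stand-in for CvMat: type id, dimensions, byte step, data. */
    typedef struct { int type; int rows, cols, step; unsigned char* data; } MatLike;

    typedef void (*RowFunc)( const void* src1, int step1, const void* src2, int step2,
                             void* dst, int step, int width, int height );

    /* Check formats and sizes, collapse to a single "row" when the data of all
       three matrices is continuous, then hand the byte steps to the kernel. */
    static int binary_op_f32( const MatLike* a, const MatLike* b,
                              MatLike* dst, RowFunc func )
    {
        int width, height, continuous;

        if( a->type != b->type || a->type != dst->type )
            return -1;                            /* unmatched formats */
        if( a->rows != b->rows || a->cols != b->cols ||
            a->rows != dst->rows || a->cols != dst->cols )
            return -1;                            /* unmatched sizes */

        width = a->cols; height = a->rows;
        continuous = a->step == b->step && a->step == dst->step &&
                     a->step == width * (int)sizeof(float);
        if( continuous )                          /* one long row, one call */
        {
            width *= height;
            height = 1;
        }
        func( a->data, a->step, b->data, b->step, dst->data, dst->step, width, height );
        return 0;
    }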
1221 icvMul_##flavor##_C1R( const arrtype* src1, int step1, \
1226 step1 /= sizeof(src1[0]); step2 /= sizeof(src2[0]); step /= sizeof(dst[0]); \
1230 for( ; size.height--; src1+=step1, src2+=step2, dst+=step ) \
1235 worktype t0 = src1[i] * src2[i]; \
1236 worktype t1 = src1[i+1] * src2[i+1]; \
1241 t0 = src1[i+2] * src2[i+2]; \
1242 t1 = src1[i+3] * src2[i+3]; \
1250 worktype t0 = src1[i] * src2[i]; \
1257 for( ; size.height--; src1+=step1, src2+=step2, dst+=step ) \
1262 double ft0 = scale*_cvt_macro_(src1[i])*_cvt_macro_(src2[i]); \
1263 double ft1 = scale*_cvt_macro_(src1[i+1])*_cvt_macro_(src2[i+1]); \
1270 ft0 = scale*_cvt_macro_(src1[i+2])*_cvt_macro_(src2[i+2]); \
1271 ft1 = scale*_cvt_macro_(src1[i+3])*_cvt_macro_(src2[i+3]); \
1282 t0 = _cast_macro1_(scale*_cvt_macro_(src1[i])*_cvt_macro_(src2[i])); \
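The icvMul_<flavor>_C1R listing (lines 1221-1282) has two branches: an integer fast path when scale == 1 and a scaled path that promotes to double, multiplies by scale, and saturate-casts back. A small sketch of the scaled branch for 16-bit signed data; saturate_s16 is an illustrative stand-in for the _cast_macro1_ parameter, not the source's cast macro.

    #include <limits.h>

    /* Saturate a double product into the signed 16-bit range, with simple
       round-half-away rounding. */
    static short saturate_s16( double v )
    {
        if( v < SHRT_MIN ) return SHRT_MIN;
        if( v > SHRT_MAX ) return SHRT_MAX;
        return (short)(v + (v >= 0 ? 0.5 : -0.5));
    }

    /* One row of dst[i] = saturate(scale*src1[i]*src2[i]); the unscaled branch
       of the macro skips the double round-trip entirely. */
    static void mul_row_s16( const short* src1, const short* src2,
                             short* dst, int width, double scale )
    {
        int i;
        for( i = 0; i < width; i++ )
        {
            double ft0 = scale * (double)src1[i] * (double)src2[i];
            dst[i] = saturate_s16( ft0 );
        }
    }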
1303 typedef CvStatus (CV_STDCALL * CvScaledElWiseFunc)( const void* src1, int step1,
1321 CvMat srcstub1, *src1 = (CvMat*)srcarr1;
1333 if( !CV_IS_MAT(src1) )
1335 if( CV_IS_MATND(src1) )
1339 CV_CALL( src1 = cvGetMat( src1, &srcstub1, &coi ));
1371 CvArr* arrs[] = { src1, src2, dst };
1395 if( !CV_ARE_TYPES_EQ( src1, src2 ) || !CV_ARE_TYPES_EQ( src1, dst ))
1398 if( !CV_ARE_SIZES_EQ( src1, src2 ) || !CV_ARE_SIZES_EQ( src1, dst ))
1401 type = CV_MAT_TYPE(src1->type);
1402 size = cvGetMatSize( src1 );
1407 if( CV_IS_MAT_CONT( src1->type & src2->type & dst->type ))
1415 const float* src1data = (const float*)(src1->data.ptr);
1431 const double* src1data = (const double*)(src1->data.ptr);
1451 src1_step = src1->step;
1461 IPPI_CALL( func( src1->data.ptr, src1_step, src2->data.ptr, src2_step,
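CvScaledElWiseFunc (line 1303) is the common per-depth kernel signature the wrapper dispatches through. A sketch of that dispatch idea with a depth-indexed function table; the depth enum, mul_tab and mul_dispatch are assumptions for illustration (the real table holds the macro-generated icvMul_<flavor>_C1R functions and the signature takes a CvSize), and only a float kernel is actually defined here.

    /* Assumed per-depth kernel signature: two strided sources, one strided
       destination, a size and a scale factor. */
    typedef int (*ScaledElWiseFunc)( const void* src1, int step1,
                                     const void* src2, int step2,
                                     void* dst, int step,
                                     int width, int height, double scale );

    /* One concrete kernel (float), standing in for the macro-generated family. */
    static int mul_32f( const void* src1_, int step1, const void* src2_, int step2,
                        void* dst_, int step, int width, int height, double scale )
    {
        const float* src1 = (const float*)src1_;
        const float* src2 = (const float*)src2_;
        float* dst = (float*)dst_;
        int i;
        step1 /= (int)sizeof(float); step2 /= (int)sizeof(float); step /= (int)sizeof(float);
        for( ; height--; src1 += step1, src2 += step2, dst += step )
            for( i = 0; i < width; i++ )
                dst[i] = (float)(scale * src1[i] * src2[i]);
        return 0;
    }

    enum { DEPTH_8U, DEPTH_16S, DEPTH_32F, DEPTH_COUNT };

    /* Depth-indexed table; the 0 slots would hold the other flavors. */
    static ScaledElWiseFunc mul_tab[DEPTH_COUNT] = { 0, 0, mul_32f };

    /* Pick the kernel by element depth and run it over the whole image. */
    static int mul_dispatch( int depth, const void* a, int astep, const void* b, int bstep,
                             void* d, int dstep, int width, int height, double scale )
    {
        return (unsigned)depth < DEPTH_COUNT && mul_tab[depth]
            ? mul_tab[depth]( a, astep, b, bstep, d, dstep, width, height, scale )
            : -1;   /* unsupported format */
    }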
1474 icvDiv_##flavor##_C1R( const arrtype* src1, int step1, \
1479 step1 /= sizeof(src1[0]); step2 /= sizeof(src2[0]); step /= sizeof(dst[0]); \
1481 for( ; size.height--; src1+=step1, src2+=step2, dst+=step ) \
1496 worktype z0 = _cast_macro1_(src2[i+1] * _cvt_macro_(src1[i]) * b); \
1497 worktype z1 = _cast_macro1_(src2[i] * _cvt_macro_(src1[i+1]) * b); \
1498 worktype z2 = _cast_macro1_(src2[i+3] * _cvt_macro_(src1[i+2]) * a); \
1499 worktype z3 = _cast_macro1_(src2[i+2] * _cvt_macro_(src1[i+3]) * a); \
1509 _cast_macro1_(_cvt_macro_(src1[i])*scale/_cvt_macro_(src2[i])) : 0; \
1511 _cast_macro1_(_cvt_macro_(src1[i+1])*scale/_cvt_macro_(src2[i+1])):0;\
1513 _cast_macro1_(_cvt_macro_(src1[i+2])*scale/_cvt_macro_(src2[i+2])):0;\
1515 _cast_macro1_(_cvt_macro_(src1[i+3])*scale/_cvt_macro_(src2[i+3])):0;\
1527 _cast_macro1_(_cvt_macro_(src1[i])*scale/_cvt_macro_(src2[i])) : 0; \
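The icvDiv listing (lines 1474-1527) trades four divisions for one: the four divisors are multiplied pairwise, a single scale/(a*b) reciprocal is computed, and each quotient is recovered by multiplying the numerator by the complementary divisors; elements whose divisor is zero are written as 0. A float sketch of one row of that scheme (the real macro also saturate-casts through _cast_macro1_):

    /* One row of dst[i] = scale*src1[i]/src2[i] for float data, four columns at
       a time.  When all four divisors are nonzero, one division scale/(a*b)
       replaces four; otherwise each element falls back to a guarded division,
       writing 0 where the divisor is 0. */
    static void div_row_32f( const float* src1, const float* src2,
                             float* dst, int width, double scale )
    {
        int i = 0;
        for( ; i <= width - 4; i += 4 )
        {
            if( src2[i] != 0 && src2[i+1] != 0 && src2[i+2] != 0 && src2[i+3] != 0 )
            {
                double a = (double)src2[i]   * src2[i+1];
                double b = (double)src2[i+2] * src2[i+3];
                double d = scale/(a * b);
                b *= d;                      /* b = scale/(src2[i]*src2[i+1])   */
                a *= d;                      /* a = scale/(src2[i+2]*src2[i+3]) */
                dst[i]   = (float)(src2[i+1] * src1[i]   * b);
                dst[i+1] = (float)(src2[i]   * src1[i+1] * b);
                dst[i+2] = (float)(src2[i+3] * src1[i+2] * a);
                dst[i+3] = (float)(src2[i+2] * src1[i+3] * a);
            }
            else
            {
                int j;
                for( j = i; j < i + 4; j++ )
                    dst[j] = src2[j] != 0 ? (float)(scale * src1[j] / src2[j]) : 0.f;
            }
        }
        for( ; i < width; i++ )              /* leftover columns */
            dst[i] = src2[i] != 0 ? (float)(scale * src1[i] / src2[i]) : 0.f;
    }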
1669 CvMat srcstub1, *src1 = (CvMat*)srcarr1;
1693 if( src1 )
1695 if( CV_IS_MATND(src1))
1699 if( !CV_IS_MAT(src1) )
1701 CV_CALL( src1 = cvGetMat( src1, &srcstub1, &coi ));
1706 if( !CV_ARE_TYPES_EQ( src1, src2 ))
1709 if( !CV_ARE_SIZES_EQ( src1, src2 ))
1711 src1_cont_flag = src1->type;
1729 CvArr* arrs[] = { dst, src2, src1 };
1733 CV_CALL( cvInitNArrayIterator( 2 + (src1 != 0), arrs, 0, stubs, &iterator ));
1738 if( src1 )
1790 src1_step = src1 ? src1->step : 0;
1795 if( src1 )
1802 IPPI_CALL( func( src1->data.ptr, src1_step, src2->data.ptr, src2_step,
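In the cvDiv wrapper above (lines 1669-1802) src1 is optional: when no numerator array is supplied the per-element result becomes scale/src2 and src1_step degenerates to 0. A per-row float sketch of both branches, keeping the zero-divisor guard from the kernels; the function name is illustrative.

    /* dst[i] = scale*src1[i]/src2[i], or scale/src2[i] when src1 is NULL;
       0 is written wherever the divisor is 0. */
    static void div_or_recip_row_32f( const float* src1 /* may be NULL */,
                                      const float* src2, float* dst,
                                      int width, double scale )
    {
        int i;
        if( !src1 )
        {
            for( i = 0; i < width; i++ )
                dst[i] = src2[i] != 0 ? (float)(scale / src2[i]) : 0.f;
        }
        else
        {
            for( i = 0; i < width; i++ )
                dst[i] = src2[i] != 0 ? (float)(scale * src1[i] / src2[i]) : 0.f;
        }
    }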
1824 icvAddWeighted_##flavor##_C1R( const arrtype* src1, int step1, double alpha, \
1828 step1 /= sizeof(src1[0]); step2 /= sizeof(src2[0]); step /= sizeof(dst[0]); \
1830 for( ; size.height--; src1 += step1, src2 += step2, dst += step ) \
1836 worktype t0 = cast_macro1(load_macro((src1)[i])*alpha + \
1838 worktype t1 = cast_macro1(load_macro((src1)[i+1])*alpha + \
1844 t0 = cast_macro1(load_macro((src1)[i+2])*alpha + \
1846 t1 = cast_macro1(load_macro((src1)[i+3])*alpha + \
1855 worktype t0 = cast_macro1(load_macro((src1)[i])*alpha + \
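The generic icvAddWeighted macro (lines 1824-1855) evaluates src1*alpha + src2*beta + gamma in a wider work type and casts each result back. The same formula written out directly for float rows, as a sketch:

    /* dst[i] = src1[i]*alpha + src2[i]*beta + gamma, the formula the macro
       above instantiates per depth (no saturating cast needed for float). */
    static void add_weighted_row_32f( const float* src1, double alpha,
                                      const float* src2, double beta,
                                      double gamma, float* dst, int width )
    {
        int i;
        for( i = 0; i < width; i++ )
            dst[i] = (float)(src1[i]*alpha + src2[i]*beta + gamma);
    }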
1869 icvAddWeighted_8u_fast_C1R( const uchar* src1, int step1, double alpha,
1898 for( ; size.height--; src1 += step1, src2 += step2, dst += step )
1904 t0 = CV_FAST_CAST_8U((tab1[src1[i]] + tab2[src2[i]]) >> shift);
1905 t1 = CV_FAST_CAST_8U((tab1[src1[i+1]] + tab2[src2[i+1]]) >> shift);
1910 t0 = CV_FAST_CAST_8U((tab1[src1[i+2]] + tab2[src2[i+2]]) >> shift);
1911 t1 = CV_FAST_CAST_8U((tab1[src1[i+3]] + tab2[src2[i+3]]) >> shift);
1919 t0 = CV_FAST_CAST_8U((tab1[src1[i]] + tab2[src2[i]]) >> shift);
1927 for( ; size.height--; src1 += step1, src2 += step2, dst += step )
1933 t0 = (tab1[src1[i]] + tab2[src2[i]]) >> shift;
1934 t1 = (tab1[src1[i+1]] + tab2[src2[i+1]]) >> shift;
1939 t0 = (tab1[src1[i+2]] + tab2[src2[i+2]]) >> shift;
1940 t1 = (tab1[src1[i+3]] + tab2[src2[i+3]]) >> shift;
1948 t0 = (tab1[src1[i]] + tab2[src2[i]]) >> shift;
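icvAddWeighted_8u_fast_C1R (lines 1869-1948) replaces the per-pixel floating-point math with two 256-entry fixed-point tables, so the loop body is two lookups, an add, a shift, and a saturating cast. A standalone sketch of that idea; the table layout, the WSHIFT value and the rounding are assumptions rather than copies of the source, and the weights are assumed moderate enough that the 32-bit fixed-point sums do not overflow.

    #include <math.h>

    #define WSHIFT 16                 /* fixed-point fraction bits (assumed) */

    /* Clamp an int to [0,255], standing in for CV_FAST_CAST_8U. */
    static unsigned char sat_u8( int v )
    {
        return (unsigned char)(v < 0 ? 0 : v > 255 ? 255 : v);
    }

    /* Fixed-point LUT variant of dst = src1*alpha + src2*beta + gamma for
       8-bit data: tab1[v] = round(v*alpha*2^WSHIFT), tab2[v] folds in beta,
       gamma and the rounding bias, so each pixel needs only two lookups,
       one add and one shift. */
    static void add_weighted_8u_lut( const unsigned char* src1, const unsigned char* src2,
                                     unsigned char* dst, int width,
                                     double alpha, double beta, double gamma )
    {
        int tab1[256], tab2[256], v, i;
        for( v = 0; v < 256; v++ )
        {
            tab1[v] = (int)floor( v*alpha*(1 << WSHIFT) + 0.5 );
            tab2[v] = (int)floor( (v*beta + gamma)*(1 << WSHIFT) + 0.5 )
                      + (1 << (WSHIFT - 1));   /* +0.5 in fixed point, so >> rounds */
        }
        for( i = 0; i < width; i++ )
            dst[i] = sat_u8( (tab1[src1[i]] + tab2[src2[i]]) >> WSHIFT );
    }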
1968 typedef CvStatus (CV_STDCALL *CvAddWeightedFunc)( const void* src1, int step1, double alpha,