Made the random generators of the MLL classes depend on the default RNG (theRNG) (#205).

This commit is contained in:
Maria Dimashova 2010-11-29 14:04:08 +00:00
parent 7860d52e14
commit c104cdce96
10 changed files with 53 additions and 55 deletions

View File

@ -913,7 +913,7 @@ struct CV_EXPORTS CvDTreeTrainData
CvSet* cv_heap; CvSet* cv_heap;
CvSet* nv_heap; CvSet* nv_heap;
CvRNG rng; cv::RNG* rng;
}; };
class CvDTree; class CvDTree;
@ -1147,7 +1147,7 @@ protected:
CvMat* var_importance; CvMat* var_importance;
int nsamples; int nsamples;
CvRNG rng; cv::RNG* rng;
CvMat* active_var_mask; CvMat* active_var_mask;
}; };
@ -1908,7 +1908,7 @@ protected:
CvMat* missing; CvMat* missing;
CvMat* class_labels; CvMat* class_labels;
CvRNG rng; cv::RNG* rng;
int class_count; int class_count;
float delta; float delta;
@ -2034,7 +2034,7 @@ protected:
int activ_func; int activ_func;
int max_count, max_buf_sz; int max_count, max_buf_sz;
CvANN_MLP_TrainParams params; CvANN_MLP_TrainParams params;
CvRNG rng; cv::RNG* rng;
}; };
/****************************************************************************************\ /****************************************************************************************\
@ -2177,7 +2177,7 @@ protected:
CvMat* test_sample_idx; CvMat* test_sample_idx;
int* sample_idx; // data of train_sample_idx and test_sample_idx int* sample_idx; // data of train_sample_idx and test_sample_idx
CvRNG rng; cv::RNG* rng;
}; };

View File

@ -95,7 +95,7 @@ CvANN_MLP::CvANN_MLP()
layer_sizes = wbuf = 0; layer_sizes = wbuf = 0;
min_val = max_val = min_val1 = max_val1 = 0.; min_val = max_val = min_val1 = max_val1 = 0.;
weights = 0; weights = 0;
rng = cvRNG(-1); rng = &cv::theRNG();
default_model_name = "my_nn"; default_model_name = "my_nn";
clear(); clear();
} }
@ -108,7 +108,7 @@ CvANN_MLP::CvANN_MLP( const CvMat* _layer_sizes,
layer_sizes = wbuf = 0; layer_sizes = wbuf = 0;
min_val = max_val = min_val1 = max_val1 = 0.; min_val = max_val = min_val1 = max_val1 = 0.;
weights = 0; weights = 0;
rng = cvRNG(-1); rng = &cv::theRNG();
default_model_name = "my_nn"; default_model_name = "my_nn";
create( _layer_sizes, _activ_func, _f_param1, _f_param2 ); create( _layer_sizes, _activ_func, _f_param1, _f_param2 );
} }
@ -190,7 +190,7 @@ void CvANN_MLP::init_weights()
double s = 0; double s = 0;
for( k = 0; k <= n1; k++ ) for( k = 0; k <= n1; k++ )
{ {
val = cvRandReal(&rng)*2-1.; val = rng->uniform(0., 1.)*2-1.;
w[k*n2 + j] = val; w[k*n2 + j] = val;
s += fabs(val); s += fabs(val);
} }
@ -928,8 +928,8 @@ int CvANN_MLP::train_backprop( CvVectors x0, CvVectors u, const double* sw )
for( i = 0; i < count; i++ ) for( i = 0; i < count; i++ )
{ {
int tt; int tt;
j = (unsigned)cvRandInt(&rng) % count; j = (*rng)(count);
k = (unsigned)cvRandInt(&rng) % count; k = (*rng)(count);
CV_SWAP( _idx->data.i[j], _idx->data.i[k], tt ); CV_SWAP( _idx->data.i[j], _idx->data.i[k], tt );
} }
} }
@ -1507,7 +1507,7 @@ CvANN_MLP::CvANN_MLP( const Mat& _layer_sizes, int _activ_func,
layer_sizes = wbuf = 0; layer_sizes = wbuf = 0;
min_val = max_val = min_val1 = max_val1 = 0.; min_val = max_val = min_val1 = max_val1 = 0.;
weights = 0; weights = 0;
rng = cvRNG(-1); rng = &cv::theRNG();
default_model_name = "my_nn"; default_model_name = "my_nn";
create( _layer_sizes, _activ_func, _f_param1, _f_param2 ); create( _layer_sizes, _activ_func, _f_param1, _f_param2 );
} }

View File

@ -84,7 +84,7 @@ CvMLData :: CvMLData()
//flt_separator = '.'; //flt_separator = '.';
class_map = new std::map<std::string, int>(); class_map = new std::map<std::string, int>();
rng = cvRNG( -cvGetTickCount() ); rng = &cv::theRNG();
} }
CvMLData :: ~CvMLData() CvMLData :: ~CvMLData()
@ -608,8 +608,8 @@ void CvMLData :: mix_train_and_test_idx()
int n = values->rows; int n = values->rows;
for (int i = 0; i < n; i++) for (int i = 0; i < n; i++)
{ {
int a = cvRandInt( &rng ) % n; int a = (*rng)(n);
int b = cvRandInt( &rng ) % n; int b = (*rng)(n);
int t; int t;
CV_SWAP( sample_idx[a], sample_idx[b], t ); CV_SWAP( sample_idx[a], sample_idx[b], t );
} }

View File

@ -581,7 +581,7 @@ void CvEM::kmeans( const CvVectors& train_data, int nclusters, CvMat* labels,
__BEGIN__; __BEGIN__;
CvRNG rng = cvRNG(-1); cv::RNG* rng = &cv::theRNG();
int i, j, k, nsamples, dims; int i, j, k, nsamples, dims;
int iter = 0; int iter = 0;
double max_dist = DBL_MAX; double max_dist = DBL_MAX;
@ -605,7 +605,7 @@ void CvEM::kmeans( const CvVectors& train_data, int nclusters, CvMat* labels,
{ {
for( i = 0; i < nsamples; i++ ) for( i = 0; i < nsamples; i++ )
labels->data.i[i] = i*nclusters/nsamples; labels->data.i[i] = i*nclusters/nsamples;
cvRandShuffle( labels, &rng ); cvRandShuffle( labels, &rng->state );
} }
for( ;; ) for( ;; )
@ -702,7 +702,7 @@ void CvEM::kmeans( const CvVectors& train_data, int nclusters, CvMat* labels,
const float* s; const float* s;
for( j = 0; j < 10; j++ ) for( j = 0; j < 10; j++ )
{ {
i = cvRandInt( &rng ) % nsamples; i = (*rng)(nsamples);
if( counters->data.i[labels->data.i[i]] > 1 ) if( counters->data.i[labels->data.i[i]] > 1 )
break; break;
} }
@ -738,7 +738,7 @@ void CvEM::kmeans( const CvVectors& train_data, int nclusters, CvMat* labels,
if( counters->data.i[k] == 0 ) if( counters->data.i[k] == 0 )
for(;;) for(;;)
{ {
i = cvRandInt(&rng) % nsamples; i = (*rng)(nsamples);
j = labels->data.i[i]; j = labels->data.i[i];
if( counters->data.i[j] > 1 ) if( counters->data.i[j] > 1 )
{ {

View File

@ -91,7 +91,7 @@ void CvERTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
clear(); clear();
var_all = 0; var_all = 0;
rng = cvRNG(-1); rng = &cv::theRNG();
CV_CALL( set_params( _params )); CV_CALL( set_params( _params ));
@ -444,7 +444,6 @@ void CvERTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
{ {
unsigned short* udst = 0; unsigned short* udst = 0;
int* idst = 0; int* idst = 0;
CvRNG* r = &rng;
if (is_buf_16u) if (is_buf_16u)
{ {
@ -457,8 +456,8 @@ void CvERTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
for( i = 0; i < sample_count; i++ ) for( i = 0; i < sample_count; i++ )
{ {
int a = cvRandInt(r) % sample_count; int a = (*rng)(sample_count);
int b = cvRandInt(r) % sample_count; int b = (*rng)(sample_count);
unsigned short unsh = (unsigned short)vi; unsigned short unsh = (unsigned short)vi;
CV_SWAP( udst[a], udst[b], unsh ); CV_SWAP( udst[a], udst[b], unsh );
} }
@ -474,8 +473,8 @@ void CvERTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
for( i = 0; i < sample_count; i++ ) for( i = 0; i < sample_count; i++ )
{ {
int a = cvRandInt(r) % sample_count; int a = (*rng)(sample_count);
int b = cvRandInt(r) % sample_count; int b = (*rng)(sample_count);
CV_SWAP( idst[a], idst[b], vi ); CV_SWAP( idst[a], idst[b], vi );
} }
} }
@ -894,8 +893,8 @@ CvDTreeSplit* CvForestERTree::find_split_ord_class( CvDTreeNode* node, int vi, f
if (fdiff > epsilon) if (fdiff > epsilon)
{ {
is_find_split = true; is_find_split = true;
CvRNG* rng = &data->rng; cv::RNG* rng = data->rng;
split_val = pmin + cvRandReal(rng) * fdiff ; split_val = pmin + rng->uniform(0.f, 1.f) * fdiff ;
if (split_val - pmin <= FLT_EPSILON) if (split_val - pmin <= FLT_EPSILON)
split_val = pmin + split_delta; split_val = pmin + split_delta;
if (pmax - split_val <= FLT_EPSILON) if (pmax - split_val <= FLT_EPSILON)
@ -1189,8 +1188,8 @@ CvDTreeSplit* CvForestERTree::find_split_ord_reg( CvDTreeNode* node, int vi, flo
if (fdiff > epsilon) if (fdiff > epsilon)
{ {
is_find_split = true; is_find_split = true;
CvRNG* rng = &data->rng; cv::RNG* rng = data->rng;
split_val = pmin + cvRandReal(rng) * fdiff ; split_val = pmin + rng->uniform(0.f, 1.f) * fdiff ;
if (split_val - pmin <= FLT_EPSILON) if (split_val - pmin <= FLT_EPSILON)
split_val = pmin + split_delta; split_val = pmin + split_delta;
if (pmax - split_val <= FLT_EPSILON) if (pmax - split_val <= FLT_EPSILON)
@ -1745,8 +1744,8 @@ bool CvERTrees::grow_forest( const CvTermCriteria term_crit )
int i1, i2; int i1, i2;
float temp; float temp;
i1 = cvRandInt( &rng ) % nsamples; i1 = (*rng)(nsamples);
i2 = cvRandInt( &rng ) % nsamples; i2 = (*rng)(nsamples);
CV_SWAP( mth_var_ptr[i1*dims], mth_var_ptr[i2*dims], temp ); CV_SWAP( mth_var_ptr[i1*dims], mth_var_ptr[i2*dims], temp );
// turn values of (m-1)-th variable, that were permuted // turn values of (m-1)-th variable, that were permuted

View File

@ -268,7 +268,7 @@ CvGBTrees::train( const CvMat* _train_data, int _tflag,
} }
// subsample params and data // subsample params and data
rng = CvRNG(time(0)); rng = &cv::theRNG();
int samples_count = get_len(sample_idx); int samples_count = get_len(sample_idx);
@ -698,8 +698,8 @@ void CvGBTrees::do_subsample()
if (subsample_test) if (subsample_test)
for (int i = 0; i < n; i++) for (int i = 0; i < n; i++)
{ {
int a = cvRandInt( &rng ) % n; int a = (*rng)(n);
int b = cvRandInt( &rng ) % n; int b = (*rng)(n);
int t; int t;
CV_SWAP( idx[a], idx[b], t ); CV_SWAP( idx[a], idx[b], t );
} }

View File

@ -200,7 +200,7 @@ CvRTrees::CvRTrees()
data = NULL; data = NULL;
active_var_mask = NULL; active_var_mask = NULL;
var_importance = NULL; var_importance = NULL;
rng = cvRNG(0xffffffff); rng = &cv::theRNG();
default_model_name = "my_random_trees"; default_model_name = "my_random_trees";
} }
@ -235,7 +235,7 @@ CvMat* CvRTrees::get_active_var_mask()
CvRNG* CvRTrees::get_rng() CvRNG* CvRTrees::get_rng()
{ {
return &rng; return &rng->state;
} }
bool CvRTrees::train( const CvMat* _train_data, int _tflag, bool CvRTrees::train( const CvMat* _train_data, int _tflag,
@ -375,7 +375,7 @@ bool CvRTrees::grow_forest( const CvTermCriteria term_crit )
cvZero( sample_idx_mask_for_tree ); cvZero( sample_idx_mask_for_tree );
for(i = 0; i < nsamples; i++ ) //form sample for creation one tree for(i = 0; i < nsamples; i++ ) //form sample for creation one tree
{ {
int idx = cvRandInt( &rng ) % nsamples; int idx = (*rng)(nsamples);
sample_idx_for_tree->data.i[i] = idx; sample_idx_for_tree->data.i[i] = idx;
sample_idx_mask_for_tree->data.ptr[idx] = 0xFF; sample_idx_mask_for_tree->data.ptr[idx] = 0xFF;
} }
@ -458,8 +458,8 @@ bool CvRTrees::grow_forest( const CvTermCriteria term_crit )
if( sample_idx_mask_for_tree->data.ptr[i] ) //the sample is not OOB if( sample_idx_mask_for_tree->data.ptr[i] ) //the sample is not OOB
continue; continue;
i1 = cvRandInt( &rng ) % nsamples; i1 = (*rng)(nsamples);
i2 = cvRandInt( &rng ) % nsamples; i2 = (*rng)(nsamples);
CV_SWAP( mth_var_ptr[i1*dims], mth_var_ptr[i2*dims], temp ); CV_SWAP( mth_var_ptr[i1*dims], mth_var_ptr[i2*dims], temp );
// turn values of (m-1)-th variable, that were permuted // turn values of (m-1)-th variable, that were permuted
@ -762,7 +762,7 @@ void CvRTrees::read( CvFileStorage* fs, CvFileNode* fnode )
CV_Error( CV_StsParseError, "Some <nclasses>, <nsamples>, <var_count>, " CV_Error( CV_StsParseError, "Some <nclasses>, <nsamples>, <var_count>, "
"<nactive_vars>, <oob_error>, <ntrees> of tags are missing" ); "<nactive_vars>, <oob_error>, <ntrees> of tags are missing" );
rng = CvRNG( -1 ); rng = &cv::theRNG();
trees = (CvForestTree**)cvAlloc( sizeof(trees[0])*ntrees ); trees = (CvForestTree**)cvAlloc( sizeof(trees[0])*ntrees );
memset( trees, 0, sizeof(trees[0])*ntrees ); memset( trees, 0, sizeof(trees[0])*ntrees );

View File

@ -1612,7 +1612,7 @@ bool CvSVM::train_auto( const CvMat* _train_data, const CvMat* _responses,
int block_size = 1 << 16; int block_size = 1 << 16;
double* alpha; double* alpha;
int i, k; int i, k;
CvRNG rng = cvRNG(-1); RNG* rng = &theRNG();
// all steps are logarithmic and must be > 1 // all steps are logarithmic and must be > 1
double degree_step = 10, g_step = 10, coef_step = 10, C_step = 10, nu_step = 10, p_step = 10; double degree_step = 10, g_step = 10, coef_step = 10, C_step = 10, nu_step = 10, p_step = 10;
@ -1745,8 +1745,8 @@ bool CvSVM::train_auto( const CvMat* _train_data, const CvMat* _responses,
// randomly permute samples and responses // randomly permute samples and responses
for( i = 0; i < sample_count; i++ ) for( i = 0; i < sample_count; i++ )
{ {
int i1 = cvRandInt( &rng ) % sample_count; int i1 = (*rng)(sample_count);
int i2 = cvRandInt( &rng ) % sample_count; int i2 = (*rng)(sample_count);
const float* temp; const float* temp;
float t; float t;
int y; int y;

View File

@ -196,7 +196,7 @@ void CvDTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
clear(); clear();
var_all = 0; var_all = 0;
rng = cvRNG(-1); rng = &cv::theRNG();
CV_CALL( set_params( _params )); CV_CALL( set_params( _params ));
@ -566,7 +566,6 @@ void CvDTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
{ {
unsigned short* udst = 0; unsigned short* udst = 0;
int* idst = 0; int* idst = 0;
CvRNG* r = &rng;
if (is_buf_16u) if (is_buf_16u)
{ {
@ -579,8 +578,8 @@ void CvDTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
for( i = 0; i < sample_count; i++ ) for( i = 0; i < sample_count; i++ )
{ {
int a = cvRandInt(r) % sample_count; int a = (*rng)(sample_count);
int b = cvRandInt(r) % sample_count; int b = (*rng)(sample_count);
unsigned short unsh = (unsigned short)vi; unsigned short unsh = (unsigned short)vi;
CV_SWAP( udst[a], udst[b], unsh ); CV_SWAP( udst[a], udst[b], unsh );
} }
@ -596,8 +595,8 @@ void CvDTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
for( i = 0; i < sample_count; i++ ) for( i = 0; i < sample_count; i++ )
{ {
int a = cvRandInt(r) % sample_count; int a = (*rng)(sample_count);
int b = cvRandInt(r) % sample_count; int b = (*rng)(sample_count);
CV_SWAP( idst[a], idst[b], vi ); CV_SWAP( idst[a], idst[b], vi );
} }
} }
@ -1134,7 +1133,7 @@ void CvDTreeTrainData::clear()
data_root = 0; data_root = 0;
rng = cvRNG(-1); rng = &cv::theRNG();
} }
@ -2040,14 +2039,14 @@ void CvDTree::cluster_categories( const int* vectors, int n, int m,
double* buf = (double*)cvStackAlloc( (n + k)*sizeof(buf[0]) ); double* buf = (double*)cvStackAlloc( (n + k)*sizeof(buf[0]) );
double *v_weights = buf, *c_weights = buf + n; double *v_weights = buf, *c_weights = buf + n;
bool modified = true; bool modified = true;
CvRNG* r = &data->rng; RNG* r = data->rng;
// assign labels randomly // assign labels randomly
for( i = 0; i < n; i++ ) for( i = 0; i < n; i++ )
{ {
int sum = 0; int sum = 0;
const int* v = vectors + i*m; const int* v = vectors + i*m;
labels[i] = i < k ? i : (cvRandInt(r) % k); labels[i] = i < k ? i : (*r)(k);
// compute weight of each vector // compute weight of each vector
for( j = 0; j < m; j++ ) for( j = 0; j < m; j++ )
@ -2057,8 +2056,8 @@ void CvDTree::cluster_categories( const int* vectors, int n, int m,
for( i = 0; i < n; i++ ) for( i = 0; i < n; i++ )
{ {
int i1 = cvRandInt(r) % n; int i1 = (*r)(n);
int i2 = cvRandInt(r) % n; int i2 = (*r)(n);
CV_SWAP( labels[i1], labels[i2], j ); CV_SWAP( labels[i1], labels[i2], j );
} }

View File

@ -218,7 +218,7 @@ void CvCascadeBoostTrainData::setData( const CvFeatureEvaluator* _featureEvaluat
have_priors = false; have_priors = false;
is_classifier = true; is_classifier = true;
rng = cvRNG(-1); rng = &cv::theRNG();
set_params( _params ); set_params( _params );