#include "agglomerative_clustering.h"
void write(FileStorage& fs, const std::string&, const HCluster& x)
{
x.write(fs);
}
// OpenCV FileStorage deserialization hook for HCluster.
// Reads the cluster from `node` when it exists; otherwise falls back to the
// caller-supplied default value, per the cv::read() convention.
void read(const FileNode& node, HCluster& x, const HCluster& default_value)
{
    if (!node.empty())
        x.read(node);
    else
        x = default_value;
}
// Constructor: keeps a reference to the candidate regions and, outside of
// training builds, loads the pre-trained boosted classifier used later to
// score group hypotheses in build_merge_info().
//
// NOTE(review): the source as received read "vector &_regions" and
// "StatModel::load( ... )" — the template arguments appear to have been
// stripped by an HTML/text filter. They are restored here as
// vector<Region> / StatModel::load<Boost>, matching the OpenCV 3.x ml API
// and the upstream TextProposals code; confirm against the header.
HierarchicalClustering::HierarchicalClustering(vector<Region> &_regions): regions(_regions)
{
#ifndef _TRAIN_
    // Fail hard if the classifier is missing: every call to operator()
    // depends on it for group probability estimates.
    boost = StatModel::load<Boost>( "./trained_boost_groups.xml" );
    if( boost.empty() )
    {
        cout << "Could not read the classifier ./trained_boost_groups.xml" << endl;
        CV_Error(Error::StsBadArg, "Could not read the default classifier!");
    }
#endif
}
//For feature space
void HierarchicalClustering::operator()(t_float *data, unsigned int num, int dim, unsigned char method, unsigned char metric, vector &merge_info, float x_coord_mult, cv::Size imsize)
{
t_float *Z = (t_float*)malloc(((num-1)*4) * sizeof(t_float)); // we need 4 floats foreach merge
linkage_vector(data, (int)num, dim, Z, method, metric);
build_merge_info(Z, data, (int)num, dim, merge_info, x_coord_mult, imsize);
free(Z);
}
// Walks the dendrogram produced by linkage_vector (4 floats per merge in Z:
// child id 1, child id 2, merge distance, element count) and builds one
// HCluster record per merge, computing an NFA-based meaningfulness score and
// a feature vector that the boosted classifier turns into a text-group
// probability.
//
// NOTE(review): this function body is corrupted — several lines below
// (marked individually) are fragments left over after an HTML/text filter
// stripped everything between '<' and '>' and fused the remainders. The
// code is preserved byte-for-byte; it will NOT compile as-is and must be
// restored from the upstream source before use.
void HierarchicalClustering::build_merge_info(t_float *Z, t_float *X, int N, int dim, vector &merge_info, float x_coord_mult, cv::Size imsize)
{
int this_node = 0;
// walk the whole dendogram
for (int i=0; i<(N-1)*4; i=i+4)
{
HCluster cluster;
cluster.childs.push_back(this_node);
cluster.num_elem = Z[i+3]; //number of elements
// Children with id < N are original data points; ids >= N refer to the
// merge produced at step (id - N).
int node1 = Z[i];
int node2 = Z[i+1];
float dist = Z[i+2];
// NOTE(review): the next five lines are corrupted. They appear to be the
// remains of two if/else branches (one per child) of the shape
// "if (node1 < N) { vector<float> point; for (int n=0; n<dim; n++) ... }"
// that gathered each child's points/elements into `cluster`, plus the
// comparisons "(node1 < N)" / "(node2 < N)" feeding
// inherit_cnn_probability. Restore from upstream — do not trust the fused
// text below.
if (node1 point;
//for (int n=0; n point;
//for (int n=0; nN) && (cluster.rect == merge_info.at(node1-N).rect)) )
cluster.inherit_cnn_probability = 1;
else if ( ((node2N) && (cluster.rect == merge_info.at(node2-N).rect)) )
cluster.inherit_cnn_probability = 2;
else
cluster.inherit_cnn_probability = 0;
// Child indices stored relative to the merge table (negative for leaves).
cluster.node1 = node1-N;
cluster.node2 = node2-N;
// NOTE(review): corrupted span. The commented-out Minibox code and the
// computation of `volume` / `ext_volume` / `centers_rect` that the lines
// below depend on were lost here; only the volume-clamping fragments
// remain ("if (volume >= 1) ..." / "if (volume == 0) ...", presumably).
//Minibox mb;
//for (int i=0; i= 1) volume = 0.999999; //TODO this may never happen!!!
if (volume == 0) volume = 0.000001; //TODO is this the minimum we can get? // better if we just quantize to a given grid of possible volumes ...
cluster.nfa = (int)NFA( N, cluster.points.size(), (double) volume, 0); //this uses an approximation for the nfa calculations (faster)
/* predict group class with boost */
// Second NFA over the extended volume; the ratio of the two is a feature.
int nfa2 = (int)NFA( N, cluster.points.size(), (double) ext_volume, 0); //this uses an approximation for the nfa calculations (faster)
// Feature vector layout: sizes/NFA scores, volume ratio, horizontal and
// vertical extent/offset ratios of the element-centers box vs the cluster
// box, then dispersion statistics of per-element measurements.
cluster.feature_vector.push_back(cluster.elements.size());
cluster.feature_vector.push_back(cluster.nfa);
cluster.feature_vector.push_back(nfa2);
if (cluster.nfa == 0)
cluster.feature_vector.push_back(-1.0); // sentinel: avoid division by zero
else
cluster.feature_vector.push_back((float)nfa2/cluster.nfa);
cluster.feature_vector.push_back((float)(volume/ext_volume));
cluster.feature_vector.push_back((float)centers_rect.width/cluster.rect.width);
int left_diff = centers_rect.x - cluster.rect.x;
int right_diff = (cluster.rect.x+cluster.rect.width) - (centers_rect.x+centers_rect.width);
cluster.feature_vector.push_back((float)left_diff/cluster.rect.width);
cluster.feature_vector.push_back((float)right_diff/cluster.rect.width);
if (max(left_diff,right_diff) == 0)
cluster.feature_vector.push_back(1.0); // perfectly centered horizontally
else
cluster.feature_vector.push_back((float)min(left_diff,right_diff)/max(left_diff,right_diff));
cluster.feature_vector.push_back((float)centers_rect.height/cluster.rect.height);
int top_diff = centers_rect.y - cluster.rect.y;
int bottom_diff = (cluster.rect.y+cluster.rect.height) - (centers_rect.y+centers_rect.height);
cluster.feature_vector.push_back((float)top_diff/cluster.rect.height);
cluster.feature_vector.push_back((float)bottom_diff/cluster.rect.height);
if (max(top_diff,bottom_diff) == 0)
cluster.feature_vector.push_back(1.0); // perfectly centered vertically
else
cluster.feature_vector.push_back((float)min(top_diff,bottom_diff)/max(top_diff,bottom_diff));
// Dispersion features: coefficient of variation for diameters/strokes,
// plain std-dev for gradients and (background) intensities.
Scalar mean,std;
meanStdDev( cluster.elements_diameters, mean, std );
cluster.feature_vector.push_back(std[0]/mean[0]);
meanStdDev( cluster.elements_strokes, mean, std );
cluster.feature_vector.push_back(std[0]/mean[0]);
meanStdDev( cluster.elements_gradients, mean, std );
cluster.feature_vector.push_back(std[0]);
meanStdDev( cluster.elements_intensities, mean, std );
cluster.feature_vector.push_back(std[0]);
meanStdDev( cluster.elements_b_intensities, mean, std );
cluster.feature_vector.push_back(std[0]);
#ifndef _TRAIN_
// Raw boosted-tree vote sum mapped to (0,1) with a logistic transform.
float votes_group = boost->predict( Mat(cluster.feature_vector), noArray(), DTrees::PREDICT_SUM | StatModel::RAW_OUTPUT);
cluster.probability = (double)1-(double)1/(1+exp(-2*votes_group));
#endif
merge_info.push_back(cluster);
this_node++;
}
}