mirror of https://github.com/CGAL/cgal
fix warnings (std::size_t to int conversions), MAD: 1 -> 1.5
This commit is contained in:
parent 1cae7388d0
commit 914ee9e6b4
@@ -314,7 +314,7 @@ Comparison of biased toward the center and uniform disk sampling for 64 rays.
We generate a set of sample points in a unit circle and place it tangent to the cone, with its supporting plane orthogonal to the cone direction. Then we combine each point with the apex of the cone to construct the rays.
The sampling method is biased toward the center \cite Vogel1979Sampling in order to make the sampling uniform with respect to the angle.
As a result, we do not need the weighting scheme of the original algorithm, which reduces the contributions of rays with larger angles.
-A comparison with biased and uniform sampling of points can be seen in \cgalFigureRef{Segmentation_vogel}. The final SDF value of a facet is then calculated by averaging the ray lengths which fall into one Median Absolute Deviation (MAD) from the median of all lengths.
+A comparison with biased and uniform sampling of points can be seen in \cgalFigureRef{Segmentation_vogel}. The final SDF value of a facet is then calculated by averaging the ray lengths which fall into 1.5 Median Absolute Deviation (MAD) from the median of all lengths.
\section Surface_mesh_segmentationImplementationhistory Implementation History
The initial implementation of this package is the result of the work of Ilker during the 2012 season
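A minimal standalone sketch of the outlier filtering described in the documentation hunk above: compute the median ray length, discard rays farther than CGAL_NUMBER_OF_MAD (now 1.5) times the MAD from that median, and take the weighted average of the remaining rays, as the code in SDF_calculation.h does. The helper names median_of and filtered_sdf are illustrative only, not the package's API.

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <utility>
#include <vector>

// Hypothetical helper: median of a list of values (the copy is sorted in place).
double median_of(std::vector<double> values) {
  std::sort(values.begin(), values.end());
  const std::size_t n = values.size();
  return (n % 2 == 1) ? values[n / 2]
                      : (values[n / 2 - 1] + values[n / 2]) / 2.0;
}

// MAD-based filtering as described above: pairs are (ray length, ray weight).
double filtered_sdf(const std::vector<std::pair<double, double> >& ray_distances,
                    double number_of_mad = 1.5) {
  if (ray_distances.empty()) return 0.0;

  std::vector<double> lengths;
  for (std::size_t i = 0; i < ray_distances.size(); ++i)
    lengths.push_back(ray_distances[i].first);
  const double median_sdf = median_of(lengths);

  std::vector<double> deviations;
  for (std::size_t i = 0; i < lengths.size(); ++i)
    deviations.push_back(std::abs(lengths[i] - median_sdf));
  const double mad = median_of(deviations);  // Median Absolute Deviation

  double total_distance = 0.0, total_weight = 0.0;
  for (std::size_t i = 0; i < ray_distances.size(); ++i) {
    if (std::abs(ray_distances[i].first - median_sdf) > number_of_mad * mad)
      continue;  // outlier: more than 1.5 MAD away from the median
    total_distance += ray_distances[i].first * ray_distances[i].second;
    total_weight   += ray_distances[i].second;
  }
  return total_weight > 0.0 ? total_distance / total_weight : median_sdf;
}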
@@ -366,16 +366,16 @@ public:
(std::numeric_limits<double>::max)()
: probability_matrix[labels[vertex_i]][vertex_i];
-add_edge_and_reverse(cluster_source, vertex_i + 2, source_weight, 0.0, edge_map,
-edge_map_weights);
-add_edge_and_reverse(vertex_i + 2, cluster_sink, sink_weight, 0.0, edge_map,
-edge_map_weights);
+add_edge_and_reverse(cluster_source, static_cast<int>(vertex_i) + 2,
+source_weight, 0.0, edge_map, edge_map_weights);
+add_edge_and_reverse(static_cast<int>(vertex_i) + 2, cluster_sink, sink_weight,
+0.0, edge_map, edge_map_weights);
}
vertex_creation_time += timer.time();
timer.reset();
// For E-Smooth
// add edge between every vertex,
-int num_vert = labels.size() + 2;
+int num_vert = static_cast<int>(labels.size()) + 2;
std::vector<double>::const_iterator weight_it = edge_weights.begin();
for(std::vector<std::pair<int, int> >::const_iterator edge_it = edges.begin();
edge_it != edges.end();
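The casts introduced in this hunk all follow the same pattern: loop indices and container sizes are std::size_t, while the graph helpers take int vertex ids, so the implicit 64-bit to 32-bit conversion triggers compiler warnings. A small illustration of that pattern, where add_edge_stub and build_edges are made-up names and not CGAL functions:

#include <cstddef>
#include <vector>

// Illustrative stub standing in for an int-based graph interface; not CGAL API.
void add_edge_stub(int source, int target, double weight) {
  (void)source; (void)target; (void)weight;
}

void build_edges(const std::vector<int>& labels) {
  for (std::size_t vertex_i = 0; vertex_i < labels.size(); ++vertex_i) {
    // add_edge_stub(0, vertex_i + 2, 1.0);                  // warns: std::size_t -> int
    add_edge_stub(0, static_cast<int>(vertex_i) + 2, 1.0);   // explicit cast, warning-free
  }
  const int num_vert = static_cast<int>(labels.size()) + 2;  // same pattern for sizes
  (void)num_vert;
}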
@@ -214,7 +214,7 @@ private:
double distance = std::abs(centers[i].mean - *it);
if(distance < min_distance) {
min_distance = distance;
-closest_center = i;
+closest_center = static_cast<int>(i);
}
}
member_count[closest_center]++;
@@ -47,7 +47,7 @@ public:
std::size_t random_range = points.size() - number_of_centers +
i; // activate one more element in each iteration as selectable
std::size_t random_index = random.get_int(0,
-random_range + 1); // [0, random_range];
+static_cast<int>(random_range) + 1); // [0, random_range];
std::pair<std::set<std::size_t>::iterator, bool> random_index_unique =
selected.insert(random_index);
@@ -107,7 +107,7 @@ public:
// say, "distance_square" -> [ 0.1, 0.2, 0.3, 0.0, 0.2 ... ]
// then distance_square_cumulative -> [ 0.1, 0.3, 0.6, 0.6, 0.8 ... ]
std::size_t initial_index = random.get_int(0,
-points.size()); // [0, points size)
+static_cast<int>(points.size())); // [0, points size)
centers.push_back(points[initial_index]);
for(int i = 1; i < number_of_centers; ++i) {
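The comments in this hunk describe how the next cluster center is drawn using a cumulative array of squared distances to the closest existing center. A rough sketch of that selection step follows; pick_next_center and uniform01 are illustrative stand-ins (the package draws values through its own random.get_int), not CGAL API:

#include <cstddef>
#include <cstdlib>
#include <vector>

// Stand-in random source returning a value in [0, 1).
double uniform01() { return std::rand() / (RAND_MAX + 1.0); }

// Pick the next center index with probability proportional to its squared
// distance, using the cumulative array the comments above describe.
std::size_t pick_next_center(const std::vector<double>& distance_square) {
  if (distance_square.empty()) return 0;

  std::vector<double> cumulative(distance_square.size());
  double sum = 0.0;
  for (std::size_t i = 0; i < distance_square.size(); ++i) {
    sum += distance_square[i];
    cumulative[i] = sum;               // e.g. [ 0.1, 0.3, 0.6, 0.6, 0.8 ... ]
  }

  const double r = uniform01() * sum;  // value in [0, sum)
  for (std::size_t i = 0; i < cumulative.size(); ++i)
    if (r < cumulative[i]) return i;   // first bucket whose cumulative sum passes r
  return cumulative.size() - 1;        // only reached if every distance is zero
}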
@@ -240,7 +240,7 @@ inline bool K_means_point::calculate_new_center(std::vector<K_means_center>&
for(std::size_t i = 1; i < centers.size(); ++i) {
double new_distance = std::abs(centers[i].mean - data);
if(new_distance < min_distance) {
-new_center_id = i;
+new_center_id = static_cast<int>(i);
min_distance = new_distance;
}
}
@@ -12,7 +12,7 @@
#include <boost/tuple/tuple.hpp>
#include <boost/optional.hpp>
-#define CGAL_ST_DEV_MULTIPLIER 1 //0.75
+#define CGAL_NUMBER_OF_MAD 1.5
namespace CGAL
{
@@ -456,7 +456,7 @@ private:
}
/**
-* Uses Median Absolute Deviation and removes rays which don't fall into `CGAL_ST_DEV_MULTIPLIER` * MAD.
+* Uses Median Absolute Deviation and removes rays which don't fall into `CGAL_NUMBER_OF_MAD` * MAD.
* Also takes the weighted average of accepted rays and calculates the final sdf value.
* @param ray_distances contains distance & weight pairs for each ray
* @return outlier removed and averaged sdf value
@@ -465,7 +465,7 @@ private:
std::vector<std::pair<double, double> >& ray_distances) const {
// pair first -> distance, second -> weight
-const int accepted_ray_count = ray_distances.size();
+const int accepted_ray_count = static_cast<int>(ray_distances.size());
if(accepted_ray_count == 0) {
return 0.0;
} else if(accepted_ray_count == 1) {
@@ -504,8 +504,7 @@ private:
double total_weights = 0.0, total_distance = 0.0;
for(std::vector<std::pair<double, double> >::iterator it =
ray_distances.begin(); it != ray_distances.end(); ++it) {
-if(std::abs(it->first - median_sdf) > (median_deviation *
-CGAL_ST_DEV_MULTIPLIER)) {
+if(std::abs(it->first - median_sdf) > (median_deviation * CGAL_NUMBER_OF_MAD)) {
continue;
}
total_distance += it->first * it->second;
@@ -522,7 +521,6 @@ private:
}//namespace internal
/// @endcond
}//namespace CGAL
-#undef CGAL_ST_DEV_MULTIPLIER
#undef CGAL_ACCEPTANCE_RATE_THRESHOLD
#undef CGAL_NUMBER_OF_MAD
#endif //CGAL_SURFACE_MESH_SEGMENTATION_SDF_CALCULATION_H
@@ -451,7 +451,7 @@ private:
++index) {
int segment_id = segments_with_average_sdf_values[index].first -
number_of_clusters;
-segment_id_to_sorted_id_map[segment_id] = index;
+segment_id_to_sorted_id_map[segment_id] = static_cast<int>(index);
}
// make one-pass on facets. First make segment-id zero based by subtracting number_of_clusters
// . Then place its sorted index to pmap