mirror of https://github.com/CGAL/cgal
Alpha_expansion_graph_cut includes simple graph-cut only.
parent 3b2fd53b48
commit 9d28b10663
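For orientation, and not part of this commit: a hypothetical stand-alone driver showing how the Alpha_expansion_graph_cut helper introduced below is meant to be called. The constructor signature is taken from the new header; the toy inputs and the main() wrapper are invented.

// Hypothetical usage sketch, assuming the new header is on the include path.
#include <CGAL/internal/Surface_mesh_segmentation/Alpha_expansion_graph_cut.h>

#include <utility>
#include <vector>

int main()
{
  // Two facets sharing one edge; a larger weight makes the edge harder to cut.
  std::vector<std::pair<int, int> > edges(1, std::make_pair(0, 1));
  std::vector<double> edge_weights(1, 0.5);

  // Initial cluster id per facet (kept for the interface; invented values).
  std::vector<int> labels(2, 0);

  // probability_matrix[c][f]: -log probability of facet f under center c.
  std::vector<std::vector<double> > probability_matrix(2, std::vector<double>(2, 2.0));
  probability_matrix[0][0] = 0.1; // facet 0 strongly prefers center 0
  probability_matrix[1][1] = 0.1; // facet 1 strongly prefers center 1

  // Filled by the constructor (per the header's own comment: 1 = sink side, 0 = source side).
  std::vector<int> center_ids;
  CGAL::internal::Alpha_expansion_graph_cut gc(edges, edge_weights, labels,
                                               probability_matrix, center_ids);
  return 0;
}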
@@ -4016,6 +4016,7 @@ Surface_mesh_segmentation/example/Surface_mesh_segmentation/segmentation_example
 Surface_mesh_segmentation/example/Surface_mesh_segmentation/simple_example.cpp -text
 Surface_mesh_segmentation/include/CGAL/Surface_mesh_segmentation.h -text
 Surface_mesh_segmentation/include/CGAL/internal/Surface_mesh_segmentation/AABB_traversal_traits.h -text
+Surface_mesh_segmentation/include/CGAL/internal/Surface_mesh_segmentation/Alpha_expansion_graph_cut.h -text
 Surface_mesh_segmentation/include/CGAL/internal/Surface_mesh_segmentation/Expectation_maximization.h -text
 Surface_mesh_segmentation/include/CGAL/internal/Surface_mesh_segmentation/K_means_clustering.h -text
 Surface_mesh_segmentation/package_info/Surface_mesh_segmentation/copyright -text
@@ -16,6 +16,7 @@
 
 #include <CGAL/internal/Surface_mesh_segmentation/Expectation_maximization.h>
 #include <CGAL/internal/Surface_mesh_segmentation/K_means_clustering.h>
+#include <CGAL/internal/Surface_mesh_segmentation/Alpha_expansion_graph_cut.h>
 
 //AF: This files does not use Simple_cartesian
 //IOY: Yes, not sure where it came from (with update may be), I am going to ask about it.
@@ -44,7 +45,7 @@
 #define CGAL_ANGLE_ST_DEV_DIVIDER 3.0
 
 //IOY: these are going to be removed at the end (no CGAL_ pref)
-//#define ACTIVATE_SEGMENTATION_DEBUG
+#define ACTIVATE_SEGMENTATION_DEBUG
 #ifdef ACTIVATE_SEGMENTATION_DEBUG
 #define SEG_DEBUG(x) x;
 #else
@@ -136,11 +137,11 @@ public:
 double calculate_sdf_value_of_facet (const Facet_handle& facet,
 const Tree& tree) const;
 template <class Query>
-boost::optional<double> cast_and_return_minimum(const Query& ray,
+boost::tuple<bool, bool, double> cast_and_return_minimum(const Query& ray,
 const Tree& tree, const Facet_handle& facet) const;
 template <class Query>
-boost::optional<double> cast_and_return_minimum_use_closest(const Query& ray,
-const Tree& tree, const Facet_handle& facet) const;
+boost::tuple<bool, bool, double> cast_and_return_minimum_use_closest(
+const Query& ray, const Tree& tree, const Facet_handle& facet) const;
 
 double calculate_sdf_value_from_rays (std::vector<double>& ray_distances,
 std::vector<double>& ray_weights) const;
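Both casting helpers now return boost::tuple<bool, bool, double> instead of boost::optional<double>, packing (intersection found, intersection is acute, distance). A small illustrative sketch of that convention, with an invented dummy function in place of the real ray cast:

#include <boost/tuple/tuple.hpp>
#include <iostream>

// Same convention as the new return type: (is_intersected, intersection_is_acute, distance).
boost::tuple<bool, bool, double> fake_cast_result()
{
  return boost::make_tuple(true, true, 0.42); // invented values
}

int main()
{
  bool is_intersected, intersection_is_acute;
  double min_distance;
  boost::tie(is_intersected, intersection_is_acute,
             min_distance) = fake_cast_result();
  if(is_intersected && intersection_is_acute) {
    std::cout << "usable distance: " << min_distance << std::endl;
  }
  return 0;
}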
@@ -165,6 +166,7 @@ public:
 void apply_GMM_fitting();
 void apply_K_means_clustering();
 void apply_GMM_fitting_with_K_means_init();
+void apply_graph_cut();
 
 void write_sdf_values(const char* file_name);
 void read_sdf_values(const char* file_name);
@@ -192,10 +194,12 @@ inline Surface_mesh_segmentation<Polyhedron>::Surface_mesh_segmentation(
 calculate_sdf_values();
 SEG_DEBUG(std::cout << t.time() << std::endl)
 #endif
-//apply_GMM_fitting();
+//
 //write_sdf_values("sdf_values_sample_dino_ws.txt");
-//read_sdf_values("sdf_values_sample_camel.txt");
-//calculate_dihedral_angles();
+//read_sdf_values("sdf_values_sample_elephant.txt");
+//apply_GMM_fitting();
+apply_graph_cut();
 
 }
 
 template <class Polyhedron>
@@ -207,7 +211,6 @@ inline void Surface_mesh_segmentation<Polyhedron>::calculate_sdf_values()
 facet_it != mesh->facets_end(); ++facet_it) {
 CGAL_precondition(facet_it->is_triangle()); //Mesh should contain triangles.
 
-//SL: cone angle and number of rays should be parameters.
 double sdf = calculate_sdf_value_of_facet(facet_it, tree);
 sdf_values.insert(std::pair<Facet_handle, double>(facet_it, sdf));
 }
@@ -244,7 +247,7 @@ Surface_mesh_segmentation<Polyhedron>::calculate_sdf_value_of_facet(
 v1 = v1 / sqrt(v1.squared_length());
 v2 = v2 / sqrt(v2.squared_length());
 
-arrange_center_orientation(plane, normal, center);
+//arrange_center_orientation(plane, normal, center);
 
 int ray_count = number_of_rays_sqrt * number_of_rays_sqrt;
 
@@ -265,22 +268,25 @@ Surface_mesh_segmentation<Polyhedron>::calculate_sdf_value_of_facet(
 #endif
 for(Disk_samples_list::const_iterator sample_it = disk_samples.begin();
 sample_it != disk_samples.end(); ++sample_it) {
-boost::optional<double> min_distance;
+bool is_intersected, intersection_is_acute;
+double min_distance;
 Vector disk_vector = v1 * sample_it->first + v2 * sample_it->second;
 Vector ray_direction = normal + disk_vector;
 
 #ifdef SHOOT_ONLY_RAYS
 Ray ray(center, ray_direction);
-min_distance = cast_and_return_minimum(ray, tree, facet);
-if(!min_distance) {
+boost::tie(is_intersected, intersection_is_acute,
+min_distance) = cast_and_return_minimum(ray, tree, facet);
+if(!intersection_is_acute) {
 continue;
 }
 #else
 // at first cast ray
 if(!segment_distance) {
 Ray ray(center, ray_direction);
-min_distance = cast_and_return_minimum(ray, tree, facet);
-if(!min_distance) {
+boost::tie(is_intersected, intersection_is_acute,
+min_distance) = cast_and_return_minimum(ray, tree, facet);
+if(!intersection_is_acute) {
 continue;
 }
 segment_distance = min_distance; //first assignment of the segment_distance
 
@@ -289,43 +295,54 @@ Surface_mesh_segmentation<Polyhedron>::calculate_sdf_value_of_facet(
 
 ray_direction = ray_direction * (*segment_distance * multiplier_for_segment);
 Segment segment(center, CGAL::operator+(center, ray_direction));
-min_distance = cast_and_return_minimum(segment, tree, facet);
-if(!min_distance) {
+boost::tie(is_intersected, intersection_is_acute,
+min_distance) = cast_and_return_minimum(segment, tree, facet);
+if(!is_intersected) {
 //continue; // for utopia case - just continue on miss
 ++miss_counter;
+//Ray ray(segment.target(), ray_direction);
 Ray ray(segment.target(), ray_direction);
-min_distance = cast_and_return_minimum(ray, tree, facet);
-if(!min_distance) {
+boost::tie(is_intersected, intersection_is_acute,
+min_distance) = cast_and_return_minimum(ray, tree, facet);
+if(!intersection_is_acute) {
 continue;
 }
+min_distance += CGAL::sqrt(
+segment.squared_length()); // since we our source is segment.target()
+} else if(!intersection_is_acute) {
+continue;
 }
 
 if(use_minimum_segment) {
-if(*min_distance <
+if(min_distance <
 *segment_distance) { // update segment_distance (minimum / maximum)
-*segment_distance = *min_distance;
+*segment_distance = min_distance;
 }
 } else {
-if(*min_distance >
+if(min_distance >
 *segment_distance) { // update segment_distance (minimum / maximum)
-*segment_distance = *min_distance;
+*segment_distance = min_distance;
 }
 }
 }
 #endif
 ray_weights.push_back(sample_it->third);
-ray_distances.push_back(*min_distance);
+ray_distances.push_back(min_distance);
 }
 return calculate_sdf_value_from_rays_with_trimmed_mean(ray_distances,
 ray_weights);
 }
 
+// just for Ray and Segment
 
 template <class Polyhedron>
-template <class Query> // just for Ray and Segment
-boost::optional<double>
+template <class Query>
+boost::tuple<bool, bool, double>
 Surface_mesh_segmentation<Polyhedron>::cast_and_return_minimum(
 const Query& query, const Tree& tree, const Facet_handle& facet) const
 {
-boost::optional<double> min_distance;
+boost::tuple<bool, bool, double> min_distance = boost::make_tuple(false, false,
+0);
 std::list<Object_and_primitive_id> intersections;
 #if 1
 //SL: the difference with all_intersections is that in the traversal traits, we do do_intersect before calling intersection.
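One detail of the segment-then-ray fallback above is easy to miss: when the shortened segment misses and a ray is re-cast from segment.target(), the segment length must be added back so the distance is still measured from the facet center. A tiny numeric illustration (not commit code):

#include <cmath>
#include <iostream>

int main()
{
  // Invented numbers: the probe segment has squared length 4 (length 2)
  // and the follow-up ray from segment.target() hits at distance 3.
  double segment_squared_length = 4.0;
  double ray_hit_distance = 3.0;

  // Same correction as in the diff: distance measured from the original source.
  double min_distance = ray_hit_distance + std::sqrt(segment_squared_length);
  std::cout << min_distance << std::endl; // 5
  return 0;
}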
@@ -359,13 +376,14 @@ Surface_mesh_segmentation<Polyhedron>::cast_and_return_minimum(
 
 Vector i_ray = (query.source() - *i_point);
 double new_distance = i_ray.squared_length();
-if(!min_distance || new_distance < min_distance) {
-min_distance = new_distance;
+if(!min_distance.get<0>() || new_distance < min_distance.get<2>()) {
+min_distance.get<2>() = new_distance;
+min_distance.get<0>() = true;
 min_id = id;
 min_i_ray = i_ray;
 }
 }
-if(!min_distance) {
+if(!min_distance.get<0>()) {
 return min_distance;
 }
 
@@ -376,14 +394,16 @@ Surface_mesh_segmentation<Polyhedron>::cast_and_return_minimum(
 
 if(CGAL::angle(CGAL::ORIGIN + min_i_ray, Point(CGAL::ORIGIN),
 CGAL::ORIGIN + min_normal) != CGAL::ACUTE) {
-return boost::optional<double>();
+return min_distance;
 }
-return (*min_distance = sqrt(*min_distance));
+min_distance.get<1>() = true; // founded intersection is acceptable.
+min_distance.get<2>() = sqrt(min_distance.get<2>());
+return min_distance;
 }
 
 template <class Polyhedron>
 template <class Query>
-boost::optional<double>
+boost::tuple<bool, bool, double>
 Surface_mesh_segmentation<Polyhedron>::cast_and_return_minimum_use_closest (
 const Query& ray, const Tree& tree,
 const Facet_handle& facet) const
@@ -391,7 +411,9 @@ Surface_mesh_segmentation<Polyhedron>::cast_and_return_minimum_use_closest (
 //static double dist = 0.1;
 //boost::optional<double> min_distance_2 = dist++;
 //return min_distance_2;
-boost::optional<double> min_distance;
+boost::tuple<bool, bool, double> min_distance = boost::make_tuple(false, false,
+0);
 #if 1
 Closest_intersection_traits<typename Tree::AABB_traits, Query> traversal_traits;
 tree.traversal(ray, traversal_traits);
@@ -404,6 +426,7 @@ Surface_mesh_segmentation<Polyhedron>::cast_and_return_minimum_use_closest (
 if(!intersection) {
 return min_distance;
 }
+min_distance.get<0>() = true; // intersection is found
 
 CGAL::Object object = intersection->first;
 Primitive_id min_id = intersection->second;
@@ -427,7 +450,9 @@ Surface_mesh_segmentation<Polyhedron>::cast_and_return_minimum_use_closest (
 CGAL::ORIGIN + min_normal) != CGAL::ACUTE) {
 return min_distance;
 }
-return (min_distance = sqrt(min_i_ray.squared_length()));
+min_distance.get<1>() = true;
+min_distance.get<2>() = sqrt(min_i_ray.squared_length());
+return min_distance;
 }
 
 template <class Polyhedron>
@@ -873,7 +898,7 @@ inline void Surface_mesh_segmentation<Polyhedron>::apply_GMM_fitting()
 SEG_DEBUG(CGAL::Timer t)
 SEG_DEBUG(t.start())
 // apply em with 5 runs, number of runs might become a parameter.
-internal::Expectation_maximization fitter(number_of_centers, sdf_vector, 5);
+internal::Expectation_maximization fitter(number_of_centers, sdf_vector, 10);
 SEG_DEBUG(std::cout << t.time() << std::endl)
 std::vector<int> center_memberships;
 fitter.fill_with_center_ids(center_memberships);
@@ -930,6 +955,59 @@ Surface_mesh_segmentation<Polyhedron>::apply_GMM_fitting_with_K_means_init()
 }
 }
 
+template <class Polyhedron>
+void Surface_mesh_segmentation<Polyhedron>::apply_graph_cut()
+{
+//assign an id for every facet (facet-id)
+std::map<Facet_handle, int> facet_indices;
+int index = 0;
+for(Facet_iterator facet_it = mesh->facets_begin();
+facet_it != mesh->facets_end();
+++facet_it, ++index) {
+facet_indices.insert(std::pair<Facet_handle, int>(facet_it, index));
+}
+//edges and their weights. pair<int, int> stores facet-id pairs (see above) (may be using CGAL::Triple can be more suitable)
+std::vector<std::pair<int, int> > edges;
+std::vector<double> edge_weights;
+for(Edge_iterator edge_it = mesh->edges_begin(); edge_it != mesh->edges_end();
+++edge_it) {
+double angle = calculate_dihedral_angle_of_edge(edge_it);
+int index_f1 = facet_indices[edge_it->facet()];
+int index_f2 = facet_indices[edge_it->opposite()->facet()];
+edges.push_back(std::pair<int, int>(index_f1, index_f2));
+angle = -log(angle);
+angle *= 15; //lambda, will be variable.
+// we may also want to consider edge lengths, also penalize convex angles.
+edge_weights.push_back(angle);
+}
+//apply gmm fitting
+std::vector<double> sdf_vector;
+sdf_vector.reserve(sdf_values.size());
+for(typename Face_value_map::iterator pair_it = sdf_values.begin();
+pair_it != sdf_values.end(); ++pair_it) {
+sdf_vector.push_back(pair_it->second);
+}
+internal::Expectation_maximization fitter(number_of_centers, sdf_vector, 50);
+//fill probability matrix.
+std::vector<std::vector<double> > probability_matrix;
+fitter.fill_with_minus_log_probabilities(probability_matrix);
+std::vector<int> labels;
+fitter.fill_with_center_ids(labels);
+
+//apply graph cut
+std::vector<int> center_ids;
+internal::Alpha_expansion_graph_cut gc(edges, edge_weights, labels,
+probability_matrix, center_ids);
+
+std::vector<int>::iterator center_it = center_ids.begin();
+centers.clear();
+for(Facet_iterator facet_it = mesh->facets_begin();
+facet_it != mesh->facets_end();
+++facet_it, ++center_it) {
+centers.insert(std::pair<Facet_handle, int>(facet_it, (*center_it)));
+}
+}
+
 //AF: it is not common in CGAL to have functions with a file name as argument
 //IOY: Yes, I am just using these read-write functions in development phase,
 // in order to not to compute sdf-values everytime program runs.
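Side note, not part of the commit: the edge weights above are simply -log of the per-edge dihedral term scaled by a fixed lambda of 15 (calculate_dihedral_angle_of_edge itself is not shown in this diff). A self-contained illustration with invented input values:

#include <cmath>
#include <iostream>
#include <vector>

int main()
{
  // Hypothetical per-edge dihedral terms in (0, 1].
  double angles[] = { 1.0, 0.5, 0.1 };
  const double lambda = 15.0; // fixed in the commit, noted there as "will be variable"

  std::vector<double> edge_weights;
  for(int i = 0; i < 3; ++i) {
    // Terms near 1 map to a weight near 0; terms near 0 map to large weights.
    edge_weights.push_back(-std::log(angles[i]) * lambda);
  }
  for(std::size_t i = 0; i < edge_weights.size(); ++i) {
    std::cout << edge_weights[i] << std::endl; // prints 0, ~10.4, ~34.5
  }
  return 0;
}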
@@ -0,0 +1,141 @@
+#ifndef CGAL_ALPHA_EXPANSION_GRAPH_CUT_H
+#define CGAL_ALPHA_EXPANSION_GRAPH_CUT_H
+
+#include <CGAL/assertions.h>
+
+#include <boost/graph/adjacency_list.hpp>
+#include <boost/graph/boykov_kolmogorov_max_flow.hpp>
+#include <boost/math/distributions/normal.hpp>
+
+#include <iostream>
+#include <vector>
+
+namespace CGAL
+{
+namespace internal
+{
+
+class Alpha_expansion_graph_cut
+{
+public:
+typedef boost::adjacency_list_traits<boost::vecS, boost::vecS, boost::directedS>
+Adjacency_list_traits;
+
+typedef boost::adjacency_list<boost::vecS, boost::vecS, boost::directedS,
+// 4 vertex properties (nested)
+//boost::no_property
+boost::property<boost::vertex_index_t, int,
+boost::property<boost::vertex_color_t, boost::default_color_type,
+boost::property<boost::vertex_distance_t, double,
+boost::property<boost::vertex_predecessor_t, Adjacency_list_traits::edge_descriptor >
+> > >,
+// 3 edge properties (nested)
+boost::property<boost::edge_capacity_t, double,
+boost::property<boost::edge_residual_capacity_t, double,
+boost::property<boost::edge_reverse_t, Adjacency_list_traits::edge_descriptor> >
+> > Graph;
+
+typedef boost::graph_traits<Graph> Traits;
+typedef boost::color_traits<boost::default_color_type> ColorTraits;
+
+typedef Traits::vertex_descriptor Vertex_descriptor;
+typedef Traits::edge_descriptor Edge_descriptor;
+typedef Traits::vertex_iterator Vertex_iterator;
+typedef Traits::edge_iterator Edge_iterator;
+
+Alpha_expansion_graph_cut(std::vector<std::pair<int, int> >& edges,
+std::vector<double>& edge_weights, std::vector<int>& labels,
+std::vector<std::vector<double> >& probability_matrix,
+std::vector<int>& center_ids) {
+//Graph graph(edges.begin(), edges.end(), vertex_weights.size(), edge_weights.size());
+Graph graph;
+Vertex_descriptor cluster_source = boost::add_vertex(graph);
+Vertex_descriptor cluster_sink = boost::add_vertex(graph);
+std::vector<Vertex_descriptor> inserted_vertices;
+//inserted_vertices.reserve(vertex_weights.size());
+for(std::size_t vertex_i = 0; vertex_i < probability_matrix[0].size();
+++vertex_i) {
+Vertex_descriptor new_vertex = boost::add_vertex(graph);
+inserted_vertices.push_back(new_vertex);
+
+Edge_descriptor source_e, source_e_rev, sink_e, sink_e_rev;
+bool source_e_added, source_e_rev_added, sink_e_added, sink_e_rev_added;
+
+tie(source_e, source_e_added) = boost::add_edge(cluster_source, new_vertex,
+graph);
+tie(source_e_rev, source_e_rev_added) = boost::add_edge(new_vertex,
+cluster_source, graph);
+
+tie(sink_e, sink_e_added) = boost::add_edge(new_vertex, cluster_sink, graph);
+tie(sink_e_rev, sink_e_rev_added) = boost::add_edge(cluster_sink, new_vertex,
+graph);
+
+CGAL_assertion(source_e_added && source_e_rev_added && sink_e_added
+&& sink_e_rev_added);
+
+//put vertex capacities (to edges between itself and source & sink)
+boost::put(boost::edge_capacity, graph, source_e,
+probability_matrix[0][vertex_i]);
+boost::put(boost::edge_capacity, graph, source_e_rev, 0);
+boost::put(boost::edge_capacity, graph, sink_e,
+probability_matrix[1][vertex_i]);
+boost::put(boost::edge_capacity, graph, sink_e_rev, 0);
+//map reverse edges
+boost::put(boost::edge_reverse, graph, source_e, source_e_rev);
+boost::put(boost::edge_reverse, graph, source_e_rev, source_e);
+boost::put(boost::edge_reverse, graph, sink_e, sink_e_rev);
+boost::put(boost::edge_reverse, graph, sink_e_rev, sink_e);
+}
+std::vector<double>::iterator weight_it = edge_weights.begin();
+for(std::vector<std::pair<int, int> >::iterator edge_it = edges.begin();
+edge_it != edges.end();
+++edge_it, ++weight_it) {
+Vertex_descriptor v1 = inserted_vertices[edge_it->first];
+Vertex_descriptor v2 = inserted_vertices[edge_it->second];
+
+bool edge_added, edge_rev_added;
+Edge_descriptor edge, edge_rev;
+tie(edge, edge_added) = boost::add_edge(v1, v2, graph);
+tie(edge_rev, edge_rev_added) = boost::add_edge(v2, v1, graph);
+
+CGAL_assertion(edge_added && edge_rev_added);
+
+//put edge capacities
+if(labels[edge_it->first] == labels[edge_it->second]) {
+boost::put(boost::edge_capacity, graph, edge, *weight_it);
+boost::put(boost::edge_capacity, graph, edge_rev, *weight_it);
+} else {
+boost::put(boost::edge_capacity, graph, edge, *weight_it);
+boost::put(boost::edge_capacity, graph, edge_rev, *weight_it);
+}
+
+//map reverse edges
+boost::put(boost::edge_reverse, graph, edge, edge_rev);
+boost::put(boost::edge_reverse, graph, edge_rev, edge);
+}
+double flow = boost::boykov_kolmogorov_max_flow(graph, cluster_source,
+cluster_sink);
+
+for(std::vector<Vertex_descriptor>::iterator vertex_it =
+inserted_vertices.begin();
+vertex_it != inserted_vertices.end(); ++vertex_it) {
+boost::default_color_type color = boost::get(boost::vertex_color, graph,
+*vertex_it);
+if(color == ColorTraits::black() ) { // in sink
+center_ids.push_back(1);
+} else {
+center_ids.push_back(0);
+}
+}
+std::cout << flow << std::endl;
+//for(std::pair<Vertex_iterator, Vertex_iterator> pair_v = boost::vertices(graph);
+// pair_v.first != pair_v.second; ++(pair_v.first))
+//{
+// if(cluster_source == (*pair_v.first) || cluster_sink == (*pair_v.first)) { continue; }
+//
+//}
+}
+};
+}//namespace internal
+}//namespace CGAL
+#endif //CGAL_ALPHA_EXPANSION_GRAPH_CUT_H
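The new header performs a single binary s-t min cut, which matches the commit message ("simple graph-cut only") rather than full alpha expansion. Below is a minimal, self-contained sketch of the same Boost pattern on a four-vertex toy graph; the graph typedef mirrors the header, while the example vertices and capacities are invented:

#include <boost/graph/adjacency_list.hpp>
#include <boost/graph/boykov_kolmogorov_max_flow.hpp>
#include <iostream>

typedef boost::adjacency_list_traits<boost::vecS, boost::vecS, boost::directedS>
Traits_;
typedef boost::adjacency_list<boost::vecS, boost::vecS, boost::directedS,
boost::property<boost::vertex_index_t, int,
boost::property<boost::vertex_color_t, boost::default_color_type,
boost::property<boost::vertex_distance_t, double,
boost::property<boost::vertex_predecessor_t, Traits_::edge_descriptor> > > >,
boost::property<boost::edge_capacity_t, double,
boost::property<boost::edge_residual_capacity_t, double,
boost::property<boost::edge_reverse_t, Traits_::edge_descriptor> > > > Graph;
typedef boost::graph_traits<Graph>::vertex_descriptor Vertex;
typedef boost::graph_traits<Graph>::edge_descriptor Edge;

// Adds the forward/backward edge pair that boykov_kolmogorov_max_flow expects.
static void add_edge_pair(Vertex u, Vertex v, double cap_uv, double cap_vu, Graph& g)
{
  Edge e = boost::add_edge(u, v, g).first;
  Edge e_rev = boost::add_edge(v, u, g).first;
  boost::put(boost::edge_capacity, g, e, cap_uv);
  boost::put(boost::edge_capacity, g, e_rev, cap_vu);
  boost::put(boost::edge_reverse, g, e, e_rev);
  boost::put(boost::edge_reverse, g, e_rev, e);
}

int main()
{
  Graph g;
  Vertex source = boost::add_vertex(g);
  Vertex sink = boost::add_vertex(g);
  Vertex a = boost::add_vertex(g); // "facet" strongly tied to the source label
  Vertex b = boost::add_vertex(g); // "facet" strongly tied to the sink label

  add_edge_pair(source, a, 4.0, 0.0, g); // data terms, like the probability_matrix rows
  add_edge_pair(a, sink, 1.0, 0.0, g);
  add_edge_pair(source, b, 1.0, 0.0, g);
  add_edge_pair(b, sink, 4.0, 0.0, g);
  add_edge_pair(a, b, 0.5, 0.5, g);      // smoothness term between neighbouring facets

  double flow = boost::boykov_kolmogorov_max_flow(g, source, sink);
  std::cout << "min-cut value: " << flow << std::endl; // 2.5 for this toy graph

  // The header above assigns each facet to a segment by inspecting its colour
  // after the cut; here we simply compare each vertex against the source's colour.
  bool a_with_source = boost::get(boost::vertex_color, g, a) ==
                       boost::get(boost::vertex_color, g, source);
  bool b_with_source = boost::get(boost::vertex_color, g, b) ==
                       boost::get(boost::vertex_color, g, source);
  std::cout << "a on source side: " << a_with_source
            << ", b on source side: " << b_with_source << std::endl; // 1, 0
  return 0;
}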
@@ -19,7 +19,8 @@
 #define DEF_MAX_ITER 100
 #define DEF_THRESHOLD 1e-4
 #define USE_MATRIX true
-//#define ACTIVATE_SEGMENTATION_EM_DEBUG
+#define ACTIVATE_SEGMENTATION_EM_DEBUG
 #ifdef ACTIVATE_SEGMENTATION_EM_DEBUG
 #define SEG_DEBUG(x) x;
 #else
@@ -111,7 +112,7 @@ public:
 int max_center = 0, center_counter = 0;
 for(std::vector<Gaussian_center>::iterator center_it = centers.begin();
 center_it != centers.end(); ++center_it, center_counter++) {
-double likelihood = center_it->probability_with_coef(*point_it);
+double likelihood = center_it->probability(*point_it);
 if(max_likelihood < likelihood) {
 max_likelihood = likelihood;
 max_center = center_counter;
@@ -120,6 +121,46 @@ public:
 data_centers.push_back(max_center);
 }
 }
 
+void fill_with_minus_log_probabilities(std::vector<std::vector<double> >&
+probabilities) {
+double epsilon = 1e-8; // this epsilon should be consistent with epsilon in calculate_dihedral_angle_of_edge!
+probabilities = std::vector<std::vector<double> >
+(centers.size(), std::vector<double>(points.size()));
+for(std::size_t center_i = 0; center_i < centers.size(); ++center_i) {
+double sum = 0.0;
+
+for(std::size_t point_i = 0; point_i < points.size(); ++point_i) {
+double probability = centers[center_i].probability(points[point_i]);
+sum += probability;
+probabilities[center_i][point_i] = probability;
+}
+#if 0
+// pdf values scaled so that their sum will equal to 1.
+for(std::size_t point_i = 0; point_i < points.size(); ++point_i) {
+double probability = probabilities[center_i][point_i] / sum;
+probability = (CGAL::max)(probability, epsilon);
+probabilities[center_i][point_i] = -log(probability);
+}
+#else
+//pdf values scaled between [0-1]
+std::pair<std::vector<double>::iterator, std::vector<double>::iterator>
+min_max_pair =
+CGAL::min_max_element(probabilities[center_i].begin(),
+probabilities[center_i].end());
+double max_value = *min_max_pair.second, min_value = *min_max_pair.first;
+double max_min_dif = max_value - min_value;
+for(std::size_t point_i = 0; point_i < points.size(); ++point_i) {
+double probability = probabilities[center_i][point_i];
+probability = (probability - min_value) / max_min_dif;
+probability = (CGAL::max)(probability, epsilon);
+probabilities[center_i][point_i] = -log(probability);
+}
+#endif
+
+}
+}
 protected:
 
 // Initialization functions for centers.
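A small numeric illustration (not from the commit) of the active [0,1]-scaling branch above: per-center pdf values are min-max scaled, clamped to epsilon, and turned into -log costs. std::min_element/std::max_element stand in for CGAL::min_max_element here:

#include <algorithm>
#include <cmath>
#include <iostream>
#include <vector>

int main()
{
  // Hypothetical pdf values of four points under one Gaussian center.
  std::vector<double> probabilities;
  probabilities.push_back(0.05);
  probabilities.push_back(0.20);
  probabilities.push_back(0.35);
  probabilities.push_back(0.35);

  const double epsilon = 1e-8; // same clamp as in the diff
  double min_value = *std::min_element(probabilities.begin(), probabilities.end());
  double max_value = *std::max_element(probabilities.begin(), probabilities.end());
  double max_min_dif = max_value - min_value;

  for(std::size_t i = 0; i < probabilities.size(); ++i) {
    double p = (probabilities[i] - min_value) / max_min_dif; // scaled into [0, 1]
    p = std::max(p, epsilon);                                // avoid -log(0)
    std::cout << -std::log(p) << std::endl;                  // costs: ~18.4, ~0.69, 0, 0
  }
  return 0;
}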
@@ -257,7 +298,7 @@ protected:
 likelihood = iterate(iteration_count == 1);
 is_converged = likelihood - prev_likelihood < threshold * fabs(likelihood);
 }
-SEG_DEBUG(std::cout << likelihood << " " << iteration_count << std::endl)
+//SEG_DEBUG(std::cout << likelihood << " " << iteration_count << std::endl)
 return likelihood;
 }
@@ -276,6 +317,7 @@ protected:
 max_likelihood = likelihood;
 }
 }
+SEG_DEBUG(std::cout << "max likelihood: " << max_likelihood << std::endl)
 centers = max_centers;
 }
 
@@ -152,7 +152,11 @@ public:
 
 initiate_centers_randomly(number_of_centers);
 calculate_clustering();
+#if 0
 double new_error = sum_of_squares();
+#else
+double new_error = within_cluster_sum_of_squares();
+#endif
 if(error > new_error) {
 error = new_error;
 min_centers = centers;
@@ -176,6 +180,15 @@ public:
 return sum;
 }
 
+double within_cluster_sum_of_squares() const {
+double sum = 0;
+for(std::vector<K_means_point>::const_iterator point_it = points.begin();
+point_it != points.end(); ++point_it) {
+sum += pow(centers[point_it->center_id].mean - point_it->data, 2);
+}
+return sum;
+}
+
 void clear_center_ids() {
 for(std::vector<K_means_point>::iterator point_it = points.begin();
 point_it != points.end(); ++point_it) {
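For reference (illustration only, not commit code), the new within_cluster_sum_of_squares() is the usual WCSS: the squared distance of each point to the mean of its assigned center. A toy computation with invented values:

#include <cmath>
#include <iostream>

int main()
{
  // Two 1-D clusters with hypothetical means and point assignments.
  double means[2]      = { 1.0, 10.0 };
  double points[4]     = { 0.5, 1.5, 9.0, 11.0 };
  int    assignment[4] = { 0,   0,   1,   1    };

  double sum = 0.0;
  for(int i = 0; i < 4; ++i) {
    sum += std::pow(means[assignment[i]] - points[i], 2); // squared distance to own center
  }
  std::cout << "within-cluster sum of squares: " << sum << std::endl; // 0.25 + 0.25 + 1 + 1 = 2.5
  return 0;
}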