mirror of https://github.com/CGAL/cgal

commit cff3cdb40e
Merge remote-tracking branch 'cgal/5.2.x-branch' into HEAD
@@ -24,7 +24,8 @@ typedef std::vector<Point_2> Polyline_2;
 // inserts a polyline into a graph
 void insert(const std::vector<Point_2>& poly, Graph& graph, Point_vertex_map& pvmap)
 {
-  vertex_descriptor u, v;
+  vertex_descriptor u = boost::graph_traits<Graph>::null_vertex();
+  vertex_descriptor v;
   for (std::size_t i = 0; i < poly.size(); i++) {
     // check if the point is not yet in the graph
     if (pvmap.find(poly[i]) == pvmap.end()) {
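For context, the routine touched by the hunk above can be completed into the following self-contained sketch. The kernel choice and the Graph and Point_vertex_map typedefs are assumptions made for illustration and may differ from the actual example file in the repository.

#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
#include <boost/graph/adjacency_list.hpp>
#include <map>
#include <vector>

typedef CGAL::Exact_predicates_inexact_constructions_kernel  Kernel;
typedef Kernel::Point_2                                      Point_2;
typedef boost::adjacency_list<boost::setS, boost::vecS,
                              boost::undirectedS, Point_2>   Graph;
typedef boost::graph_traits<Graph>::vertex_descriptor        vertex_descriptor;
typedef std::map<Point_2, vertex_descriptor>                 Point_vertex_map;

// inserts a polyline into a graph, reusing a vertex when a point was already seen
void insert(const std::vector<Point_2>& poly, Graph& graph, Point_vertex_map& pvmap)
{
  vertex_descriptor u = boost::graph_traits<Graph>::null_vertex();
  vertex_descriptor v;
  for (std::size_t i = 0; i < poly.size(); i++) {
    // check if the point is not yet in the graph
    if (pvmap.find(poly[i]) == pvmap.end()) {
      v = add_vertex(graph);
      graph[v] = poly[i];    // store the point as the bundled vertex property
      pvmap[poly[i]] = v;
    } else {
      v = pvmap[poly[i]];
    }
    // link consecutive polyline points by an edge
    if (i != 0 && u != v)
      add_edge(u, v, graph);
    u = v;
  }
}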
@@ -28,7 +28,7 @@ FAILURES=()
 for dir in $PATH_TO_DOC/*
 do
   OUTPUT=$(basename $dir)
-  python ../documentation_parser.py $dir/xml > ./"$OUTPUT.txt"
+  python3 ../documentation_parser.py $dir/xml > ./"$OUTPUT.txt"
   if [ $? -eq 0 ]; then
     echo "$dir OK"
   else
@@ -1,3 +1,5 @@
+#/usr/bin/env python3
+
 from pyquery import PyQuery as pq
 from collections import defaultdict
 from sys import argv
@@ -12,17 +14,17 @@ def check_type(_in, args):

 root_path=argv[1]
 d = pq(filename=op.join(op.sep, root_path,'index.xml'), parser="xml")
-compounds=[p.text() for p in d('compound').items()]
-types=[p.attr('kind') for p in d('compound').items()]
+compounds=[p.text() for p in list(d('compound').items())]
+types=[p.attr('kind') for p in list(d('compound').items())]
 type_map = defaultdict(list) #map <type, name>
 dict_map = defaultdict(dict)#map <name, map<member type, member name>>
 #FOREACH compounds : fill maps
-for i in xrange(0,len(compounds)):
+for i in range(0,len(compounds)):
   if check_type(types[i], "typedef"):
     types[i]="type"
   name=d('compound').children("name").eq(i).text()
-  members=[p.text() for p in d('compound').eq(i).children("member").items()]
-  m_types=[p.attr('kind') for p in d('compound').eq(i).children("member").items()]
+  members=[p.text() for p in list(d('compound').eq(i).children("member").items())]
+  m_types=[p.attr('kind') for p in list(d('compound').eq(i).children("member").items())]
   if (not check_type(types[i], ['example', 'file', 'dir', 'page', 'group']) and
       not (types[i] == "namespace" and len(members) == 0) and
       not (types[i] == "enum" and len(members) == 0) ):
@@ -32,7 +34,7 @@ for i in xrange(0,len(compounds)):
     total_path=op.join(op.sep, root_path,filepath)
     if(op.isfile(total_path)):
       e = pq(filename=total_path, parser="xml")
-      compoundnames=[p.text() for p in e('includes').items()]
+      compoundnames=[p.text() for p in list(e('includes').items())]

       if(len(compoundnames) > 1 and compoundnames[0].find("Concept") != -1):
         types[i] = 'Concept '+types[i].lower()
@@ -41,7 +43,7 @@ for i in xrange(0,len(compounds)):
     mtype_map = defaultdict(list)# map<member type, member name>

     #FOREACH member :
-    for j in xrange(0,len(members)):
+    for j in range(0,len(members)):
       if(check_type(types[i], ['class', 'Concept class'])
          and m_types[j] == "function"):
         m_types[j]="method"
@@ -62,7 +64,7 @@ for btype in type_map:
   out=btype
   if btype.endswith('s'):
     out+='e'
-  print out.title()+'s'
+  print(out.title()+'s')
   indent+="  "
   #FOREACH name
   for name in type_map[btype]:
@@ -74,7 +76,7 @@ for btype in type_map:
     templates=[]
     if op.isfile(op.join(op.sep, root_path,filepath)):
       f=pq(filename=op.join(op.sep, root_path,filepath), parser="xml")
-      templateparams=f("compounddef").children("templateparamlist").eq(0).children("param").items()
+      templateparams=list(f("compounddef").children("templateparamlist").eq(0).children("param").items())
       for param in templateparams:
         template_type=""
         template_name=""
@@ -91,7 +93,7 @@ for btype in type_map:
           complete_template+=' = '+template_defval
         templates.append(complete_template)
         if templates==[]:#if no child was found, just take param.text()
-          templates=[t.text() for t in param.items()]
+          templates=[t.text() for t in list(param.items())]
     suffix="<"
     #as template got type, defname and declname, name is twice in template. keep only one of them.
     to_remove=[""]
@@ -101,7 +103,7 @@ for btype in type_map:
       suffix=""
     if suffix.endswith(', '):
       suffix = suffix[:-2]+'>'
-    print indent+name+suffix
+    print(indent+name+suffix)

     indent+="  "
     #FOREACH mtype
@@ -109,7 +111,7 @@ for btype in type_map:
       out=mtype
       if mtype.endswith('s'):
         out+='e'
-      print indent+out.title()+'s'
+      print(indent+out.title()+'s')
       indent+="  "
       #FOREACH member
       overload_map = defaultdict(int) #contains the number of times a member has appeared (to manage the overloads)
@@ -123,16 +125,16 @@ for btype in type_map:
        if op.isfile(op.join(op.sep, root_path,filepath)):
          f=pq(filename=op.join(op.sep, root_path,filepath), parser="xml")
          index=0
-         memberdefs=[m.text() for m in f("memberdef").items()]
-         for i in xrange(0,len(memberdefs)):
-           member_names=[member_name.text() for member_name in f('memberdef').eq(i).children("name").items()]
+         memberdefs=[m.text() for m in list(f("memberdef").items())]
+         for i in range(0,len(memberdefs)):
+           member_names=[member_name.text() for member_name in list(f('memberdef').eq(i).children("name").items())]
           if f('memberdef').eq(i).children("name").text() == member:
             if (index < overload_map[member]):
               index+=1
             elif (index == overload_map[member]):
               if check_type(mtype, ['function', 'method']):
                 args=[f('memberdef').eq(i).children("argsstring").text()]
-                templateparams=f('memberdef').eq(i).children("templateparamlist").children("param").items()
+                templateparams=list(f('memberdef').eq(i).children("templateparamlist").children("param").items())
               if check_type(mtype, ['function', 'method', 'type', 'variable']):
                 return_type=[f('memberdef').eq(i).children("type").text()]
               break;
@@ -158,7 +160,7 @@ for btype in type_map:
           complete_template+=' = '+template_defval
         templates.append(complete_template)
         if templates==[]:#if no child was found, just take param.text()
-          templates=[t.text() for t in param.items()]
+          templates=[t.text() for t in list(param.items())]

       prefix="template <"
       for template in templates:
@@ -171,7 +173,7 @@ for btype in type_map:
         prefix+=definition
       if(prefix != ""):
         prefix+=" "
-      print indent+prefix+member+arguments
+      print(indent+prefix+member+arguments)
       overload_map[member]+=1
       #END foreach member
       indent=indent[:-2]
@@ -316,6 +316,7 @@ void split_along_edges(TriangleMesh& tm,
   std::set<halfedge_descriptor> extra_border_hedges;
   for(std::size_t k=0; k<nb_shared_edges; ++k)
   {
+    if (is_border(shared_edges[k], tm)) continue;
     for(halfedge_descriptor h : halfedges_around_target(target(shared_edges[k], tm), tm))
       if(is_border(h, tm))
         extra_border_hedges.insert(h);
@@ -334,6 +335,7 @@ void split_along_edges(TriangleMesh& tm,
   // now duplicate the edge and set its pointers
   for(std::size_t k=0; k<nb_shared_edges; ++k)
   {
+    if (is_border(shared_edges[k], tm)) continue;
     halfedge_descriptor h = halfedge(shared_edges[k], tm);
     face_descriptor fh = face(h, tm);
     //add edge
@@ -497,12 +497,70 @@ void test()
     PMP::clip(tm1, K::Plane_3(0,-1,0,0));
     assert(vertices(tm1).size() == 7);
   }

+  {
+    TriangleMesh tm1;
+    std::ifstream("data-coref/open_large_cube.off") >> tm1;
+    PMP::clip(tm1, K::Plane_3(0,0,1,-1), CGAL::parameters::use_compact_clipper(false));
+    assert(vertices(tm1).size()==753);
+  }
+
+  {
+    TriangleMesh tm1;
+    std::ifstream("data-coref/open_large_cube.off") >> tm1;
+    std::size_t nbv = vertices(tm1).size();
+    PMP::clip(tm1, K::Plane_3(0,0,1,-1), CGAL::parameters::use_compact_clipper(true));
+    assert(vertices(tm1).size()==nbv+2); // because of the plane diagonal
+  }
+
+  {
+    TriangleMesh tm1;
+    std::ifstream("data-coref/open_large_cube.off") >> tm1;
+    PMP::clip(tm1, K::Plane_3(0,0,1,-1), CGAL::parameters::use_compact_clipper(false).allow_self_intersections(true));
+    assert(vertices(tm1).size()==753);
+  }
+
+  {
+    TriangleMesh tm1;
+    std::ifstream("data-coref/open_large_cube.off") >> tm1;
+    std::size_t nbv = vertices(tm1).size();
+    PMP::clip(tm1, K::Plane_3(0,0,1,-1), CGAL::parameters::use_compact_clipper(true).allow_self_intersections(true));
+    assert(vertices(tm1).size()==nbv+2); // because of the plane diagonal
+  }
+
+  {
+    TriangleMesh tm1;
+    std::ifstream("data-coref/open_large_cube.off") >> tm1;
+    PMP::clip(tm1, K::Plane_3(0,0,-1,1), CGAL::parameters::use_compact_clipper(false));
+    assert(vertices(tm1).size()==0);
+  }
+
+  {
+    TriangleMesh tm1;
+    std::ifstream("data-coref/open_large_cube.off") >> tm1;
+    PMP::clip(tm1, K::Plane_3(0,0,-1,1), CGAL::parameters::use_compact_clipper(true));
+    assert(vertices(tm1).size()==176);
+  }
+
+  {
+    TriangleMesh tm1;
+    std::ifstream("data-coref/open_large_cube.off") >> tm1;
+    PMP::clip(tm1, K::Plane_3(0,0,-1,1), CGAL::parameters::use_compact_clipper(false).allow_self_intersections(true));
+    assert(vertices(tm1).size()==0);
+  }
+
+  {
+    TriangleMesh tm1;
+    std::ifstream("data-coref/open_large_cube.off") >> tm1;
+    PMP::clip(tm1, K::Plane_3(0,0,-1,1), CGAL::parameters::use_compact_clipper(true).allow_self_intersections(true));
+    assert(vertices(tm1).size()==176);
+  }
 }

 template <class Mesh>
 void test_split_plane()
 {
-  // test with a clipper mesh
+  //test with a splitter mesh
   Mesh tm1;
   std::ifstream input("data-coref/elephant.off");
   input >> tm1;
@@ -529,7 +587,45 @@ void test_split_plane()
   CGAL::clear(tm1);
   meshes.clear();

+  //test with a non-closed splitter mesh (border edges in the plane)
+  input.open("data-coref/open_large_cube.off");
+  input >> tm1;
+
+  if(!input)
+  {
+    std::cerr<<"File not found. Aborting."<<std::endl;
+    assert(false);
+    return ;
+  }
+  input.close();
+
+  PMP::split(tm1,K::Plane_3(0,0,1,-1));
+  PMP::split_connected_components(tm1, meshes, params::all_default());
+  assert(meshes.size() == 281);
+
+  CGAL::clear(tm1);
+  meshes.clear();
+
+  //test with a non-closed splitter mesh (border edges in the plane)
+  input.open("data-coref/open_large_cube.off");
+  input >> tm1;
+
+  if(!input)
+  {
+    std::cerr<<"File not found. Aborting."<<std::endl;
+    assert(false);
+    return ;
+  }
+  input.close();
+
+  PMP::split(tm1,K::Plane_3(0,-1,0,0.3));
+  PMP::split_connected_components(tm1, meshes, params::all_default());
+  assert(meshes.size() == 2);
+
+  CGAL::clear(tm1);
+  meshes.clear();
+
   //test with SI
   std::ifstream("data-clip/tet_si_to_split.off") >> tm1;
   if(num_vertices(tm1) == 0)
   {
@@ -735,12 +831,15 @@ void test_isocuboid()
                                      .allow_self_intersections(true));
   PMP::split_connected_components(tm, meshes, params::all_default());
   assert(meshes.size() == 4);
-  //if the order is not deterministc, put the num_vertices in a list and check
-  //if the list does contain all those numbers.
-  assert(vertices(meshes[0]).size() == 22);
-  assert(vertices(meshes[1]).size() == 23);
-  assert(vertices(meshes[2]).size() == 7);
-  assert(vertices(meshes[3]).size() == 4);
+  std::set<std::size_t> sizes;
+  for (int i=0; i<4; ++i)
+    sizes.insert(vertices(meshes[i]).size());
+
+  assert(sizes.count(22)==1);
+  assert(sizes.count(23)==1);
+  assert(sizes.count(7)==1);
+  assert(sizes.count(4)==1);

   CGAL::clear(tm);
   meshes.clear();
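The hunk above makes the assertions independent of the order in which split_connected_components() fills meshes. A minimal standalone sketch of the same idiom, with made-up component sizes, is:

#include <cassert>
#include <set>
#include <vector>

int main()
{
  // sizes of four components, in whatever order the algorithm happens to emit them
  std::vector<std::size_t> component_sizes = {23, 4, 22, 7};

  // collect them into a set and assert on membership instead of on positions
  std::set<std::size_t> sizes(component_sizes.begin(), component_sizes.end());
  assert(sizes.count(22) == 1);
  assert(sizes.count(23) == 1);
  assert(sizes.count(7) == 1);
  assert(sizes.count(4) == 1);
  return 0;
}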
@@ -110,7 +110,8 @@ public:

   // the degree of the vertex, i.e., edges emanating from this vertex
   std::size_t vertex_degree() const {
-      return this->halfedge()->vertex_degree();
+      return this->halfedge()!=Halfedge_const_handle()
+        ? this->halfedge()->vertex_degree() : 0;
   }
   size_type degree() const { return vertex_degree(); } //backwards compatible
@@ -252,10 +252,10 @@ for drawing operations in many \cgal output streams.

 Each color is defined by a triple of integers `(r,g,b)` with
 0 \f$ \le \f$ r,g,b \f$ \le \f$ 255, the so-called <I>rgb-value</I> of the color.
-There are a 11 predefined `Color` constants available:
-`BLACK`, `WHITE`, `GRAY`, `RED`, `GREEN`,
-`DEEPBLUE`, `BLUE`, `PURPLE`, `VIOLET`, `ORANGE`,
-and `YELLOW`.
+There are a 11 predefined `Color` functions available:
+`black()`, `white()`, `gray()`, `red()`, `green()`,
+`deep_blue()`, `blue()`, `purple()`, `violet()`, `orange()`,
+and `yellow()`.

 \subsection IOstreamStream Stream Support
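For readers of this documentation change, a minimal usage sketch of the renamed color functions follows. The header path and the namespace the functions live in are assumptions here and may differ between CGAL versions.

#include <CGAL/IO/Color.h>

int main()
{
  CGAL::Color custom(128, 64, 0);      // a color given by its rgb-value
  CGAL::Color named = CGAL::orange();  // one of the predefined color functions (namespace assumed)
  (void)custom;
  (void)named;
  return 0;
}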
@@ -175,8 +175,12 @@ public:
  bool has_simplex_specific_property(internal::PLY_read_number* property, Edge_index)
  {
    const std::string& name = property->name();
+    if(name == "vertex1" || name == "vertex2")
+      return true;
+#ifndef CGAL_NO_DEPRECATED_CODE
    if(name == "v0" || name == "v1")
      return true;
+#endif
    return false;
  }

@@ -365,8 +369,8 @@ public:
  void process_line(PLY_element& element, Edge_index& ei)
  {
    IntType v0, v1;
-    element.assign(v0, "v0");
-    element.assign(v1, "v1");
+    element.assign(v0, "vertex1");
+    element.assign(v1, "vertex2");

    Halfedge_index hi = m_mesh.halfedge(m_map_v2v[std::size_t(v0)],
                                        m_map_v2v[std::size_t(v1)]);
@@ -18,8 +18,8 @@ property uchar green
 property uchar blue
 property int label
 element edge 6
-property int v0
-property int v1
+property int vertex1
+property int vertex2
 property float confidence
 end_header
 0 0 0 -0.5 -0.5 -0.5 255 255 0 0
@@ -1997,13 +1997,16 @@ copy_tds(const TDS_src& tds_src,
   CGAL_triangulation_precondition( tds_src.is_vertex(vert));

   clear();
-  size_type n = tds_src.number_of_vertices();
   set_dimension(tds_src.dimension());

-  // Number of pointers to cell/vertex to copy per cell.
-  int dim = (std::max)(1, dimension() + 1);
+  if(tds_src.number_of_vertices() == 0)
+    return Vertex_handle();

-  if(n == 0) {return Vertex_handle();}
+  // Number of pointers to face/vertex to copy per face.
+  const int dim = (std::max)(1, dimension() + 1);
+
+  // Number of neighbors to set in each face (dim -1 has a single face)
+  const int nn = (std::max)(0, dimension() + 1);

   //initializes maps
   Unique_hash_map<typename TDS_src::Vertex_handle,Vertex_handle> vmap;
@@ -2025,7 +2028,7 @@ copy_tds(const TDS_src& tds_src,
     convert_face(*fit1, *fh);
   }

-  //link vertices to a cell
+  //link vertices to a face
   vit1 = tds_src.vertices_begin();
   for ( ; vit1 != tds_src.vertices_end(); vit1++) {
     vmap[vit1]->set_face(fmap[vit1->face()]);
@@ -2034,11 +2037,11 @@ copy_tds(const TDS_src& tds_src,
   //update vertices and neighbor pointers
   fit1 = tds_src.faces().begin();
   for ( ; fit1 != tds_src.faces_end(); ++fit1) {
-    for (int j = 0; j < dim ; ++j) {
+    for (int j = 0; j < dim ; ++j)
       fmap[fit1]->set_vertex(j, vmap[fit1->vertex(j)] );
+    for (int j = 0; j < nn ; ++j)
       fmap[fit1]->set_neighbor(j, fmap[fit1->neighbor(j)]);
-    }
   }

   // remove the post condition because it is false when copying the
   // TDS of a regular triangulation because of hidden vertices
@@ -240,7 +240,7 @@ Triangulation_ds_face_base_2<TDS> ::
 set_neighbor(int i, Face_handle n)
 {
   CGAL_triangulation_precondition( i == 0 || i == 1 || i == 2);
-  CGAL_triangulation_precondition( this != &*n );
+  CGAL_triangulation_precondition( this != n.operator->() );
   N[i] = n;
 }

@@ -4043,32 +4043,27 @@ copy_tds(const TDS_src& tds,
                                  || tds.is_vertex(vert) );

   clear();

-  size_type n = tds.number_of_vertices();
   set_dimension(tds.dimension());

-  if (n == 0) return Vertex_handle();
+  if(tds.number_of_vertices() == 0)
+    return Vertex_handle();

   // Number of pointers to cell/vertex to copy per cell.
-  int dim = (std::max)(1, dimension() + 1);
+  const int dim = (std::max)(1, dimension() + 1);

-  // Create the vertices.
-  std::vector<typename TDS_src::Vertex_handle> TV(n);
-  size_type i = 0;
-
-  for (typename TDS_src::Vertex_iterator vit = tds.vertices_begin();
-       vit != tds.vertices_end(); ++vit)
-    TV[i++] = vit;
-
-  CGAL_triangulation_assertion( i == n );
-
+  // Number of neighbors to set
+  const int nn = (std::max)(0, dimension() + 1);
+
+  // Initializes maps
   Unique_hash_map< typename TDS_src::Vertex_handle,Vertex_handle > V;
   Unique_hash_map< typename TDS_src::Cell_handle,Cell_handle > F;

-  for (i=0; i <= n-1; ++i){
-    Vertex_handle vh=create_vertex( convert_vertex(*TV[i]) );
-    V[ TV[i] ] = vh;
-    convert_vertex(*TV[i],*vh);
+  // Create the vertices.
+  for (typename TDS_src::Vertex_iterator vit = tds.vertices_begin();
+       vit != tds.vertices_end(); ++vit) {
+    Vertex_handle vh = create_vertex( convert_vertex(*vit) );
+    V[vit] = vh;
+    convert_vertex(*vit,*vh);
   }

   // Create the cells.
@@ -4089,7 +4084,7 @@ copy_tds(const TDS_src& tds,
   // Hook neighbor pointers of the cells.
   for (typename TDS_src::Cell_iterator cit2 = tds.cells().begin();
        cit2 != tds.cells_end(); ++cit2) {
-    for (int j = 0; j < dim; j++)
+    for (int j = 0; j < nn; j++)
       F[cit2]->set_neighbor(j, F[cit2->neighbor(j)] );
   }

@@ -132,9 +132,14 @@ _test_cls_triangulation_2( const Triangul & )
   assert( T1.number_of_vertices() == 0 );

   Triangul T3(T1);
-  Triangul T4 = T1;
-  T3.swap(T1);
+  assert(T3.tds().vertices().size() == T1.tds().vertices().size());
+  assert(T3.tds().faces().size() == T1.tds().faces().size());
+
+  Triangul T4 = T1;
+  assert(T4.tds().vertices().size() == T1.tds().vertices().size());
+  assert(T4.tds().faces().size() == T1.tds().faces().size());
+
+  T3.swap(T1);

   /**************************/
   /******* INSERTIONS *******/
@@ -162,6 +167,10 @@ _test_cls_triangulation_2( const Triangul & )
   assert( T0_1.number_of_faces() == 0);
   assert( T0_1.is_valid() );

+  Triangul T0_1b(T0_1);
+  assert(T0_1b.tds().vertices().size() == T0_1.tds().vertices().size());
+  assert(T0_1b.tds().faces().size() == T0_1.tds().faces().size());
+
   // test insert_first()
   Triangul T0_2;
   Vertex_handle v0_2_0 = T0_2.insert_first(p0);
@@ -184,6 +193,10 @@ _test_cls_triangulation_2( const Triangul & )
   assert( T1_2.number_of_faces() == 0 );
   assert( T1_2.is_valid() );

+  Triangul T1_2b(T1_2);
+  assert(T1_2b.tds().vertices().size() == T1_2.tds().vertices().size());
+  assert(T1_2b.tds().faces().size() == T1_2.tds().faces().size());
+
   // p1,p3,p2 [endpoints first]
   Triangul T1_3_0;
   Vertex_handle v1_3_0_1 = T1_3_0.insert(p1); assert( v1_3_0_1 != nullptr );
@@ -190,7 +190,21 @@ _test_cls_triangulation_3(const Triangulation &)
  //########################################################################


-  /**************CONSTRUCTORS (1)*********************/
+  /************** CONSTRUCTORS (1)********************/
+
+  Cls Tm1;
+  assert( Tm1.dimension() == -1 );
+  assert( Tm1.number_of_vertices() == 0 );
+
+  Cls Tm3(Tm1);
+  assert(Tm3 == Tm1);
+
+  Cls Tm4 = Tm1;
+  assert(Tm4 == Tm1);
+
+  Tm3.swap(Tm1);
+
+  /************** INSERTIONS *************************/
  /************** and I/O ****************************/

  std::cout << " Constructor " << std::endl;
@@ -214,6 +228,9 @@ _test_cls_triangulation_3(const Triangulation &)
  assert(T0.number_of_vertices() == 1);
  assert(T0.is_valid());

+  Cls T0d0(T0);
+  assert(T0 == T0d0);
+
  if (! del) // to avoid doing the following tests for both Delaunay
             // and non Delaunay triangulations
  {
@@ -228,6 +245,9 @@ _test_cls_triangulation_3(const Triangulation &)
  assert(T0.number_of_vertices() == 2);
  assert(T0.is_valid());

+  Cls T0d1(T0);
+  assert(T0 == T0d1);
+
  if (! del) // to avoid doing the following tests for both Delaunay
             // and non Delaunay triangulations
  {
@@ -242,6 +262,9 @@ _test_cls_triangulation_3(const Triangulation &)
  assert(T0.number_of_vertices() == 3);
  assert(T0.is_valid());

+  Cls T0d2(T0);
+  assert(T0 == T0d2);
+
  if (! del) // to avoid doing the following tests for both Delaunay
             // and non Delaunay triangulations
  {
@@ -256,6 +279,9 @@ _test_cls_triangulation_3(const Triangulation &)
  assert(T0.number_of_vertices() == 4);
  assert(T0.is_valid());

+  Cls T0d3(T0);
+  assert(T0 == T0d3);
+
  if (! del) // to avoid doing the following tests for both Delaunay
             // and non Delaunay triangulations
  {