Merge pull request #8523 from lrineau/CGAL_CGAL-protect_against_macro_free-GF

protect CGAL against macro `free`
This commit is contained in:
Sébastien Loriot 2024-10-18 18:00:37 +02:00
commit e7b8a4eb17
13 changed files with 38 additions and 31 deletions

View File

@@ -116,8 +116,8 @@ void partition_dual_graph(const TriangleMesh& tm,
delete[] eptr;
delete[] eind;
std::free(npart);
std::free(epart);
(std::free)(npart);
(std::free)(epart);
}
template<typename TriangleMesh, typename NamedParameters>

View File

@@ -151,8 +151,8 @@ void partition_graph(const TriangleMesh& tm,
delete[] eptr;
delete[] eind;
std::free(npart);
std::free(epart);
(std::free)(npart);
(std::free)(epart);
}
template<typename TriangleMesh, typename NamedParameters>

View File

@@ -595,7 +595,7 @@ public:
}
void operator delete( void *p, size_t ){
MemoryPool<ConstPolyRep>::global_allocator().free(p);
(MemoryPool<ConstPolyRep>::global_allocator().free)(p);
}
private:
@@ -1248,7 +1248,7 @@ void * AddSubRep<O>::operator new( size_t size)
template <typename O>
void AddSubRep<O>::operator delete( void *p, size_t )
{ MemoryPool<AddSubRep<O> >::global_allocator().free(p); }
{ (MemoryPool<AddSubRep<O> >::global_allocator().free)(p); }
/// \typedef AddRep

View File

@@ -51,14 +51,14 @@
CGAL_INLINE_FUNCTION void *T::operator new( size_t size) \
{ return MemoryPool<T>::global_allocator().allocate(size); } \
CGAL_INLINE_FUNCTION void T::operator delete( void *p, size_t ) \
{ MemoryPool<T>::global_allocator().free(p); }
{ (MemoryPool<T>::global_allocator().free)(p); }
#define CORE_MEMORY_IMPL_TEMPLATE_WITH_ONE_ARG(C) \
template <typename T> \
CGAL_INLINE_FUNCTION void *C<T>::operator new( size_t size) \
{ return MemoryPool<C<T> >::global_allocator().allocate(size); } \
template <typename T> \
CGAL_INLINE_FUNCTION void C<T>::operator delete( void *p, size_t ) \
{ MemoryPool<C<T> >::global_allocator().free(p); }
{ (MemoryPool<C<T> >::global_allocator().free)(p); }
#endif
// include some common header files

View File

@@ -73,7 +73,7 @@ public:
void* allocate(std::size_t size);
void free(void* p);
void free BOOST_PREVENT_MACRO_SUBSTITUTION (void* p);
// Access the corresponding static global allocator.
static MemoryPool<T,nObjects>& global_allocator() {
@@ -116,7 +116,7 @@ void* MemoryPool< T, nObjects >::allocate(std::size_t) {
}
template< class T, int nObjects >
void MemoryPool< T, nObjects >::free(void* t) {
void MemoryPool< T, nObjects >::free BOOST_PREVENT_MACRO_SUBSTITUTION (void* t) {
CGAL_assertion(t != 0);
if (t == 0) return; // for safety
if(blocks.empty()){

View File

@@ -154,7 +154,7 @@ void * Realbase_for<T>::operator new( size_t size)
template <class T>
void Realbase_for<T>::operator delete( void *p, size_t )
{ MemoryPool<Realbase_for<T> >::global_allocator().free(p); }
{ (MemoryPool<Realbase_for<T> >::global_allocator().free)(p); }
typedef Realbase_for<long> RealLong;
typedef Realbase_for<double> RealDouble;

View File

@@ -130,7 +130,7 @@ public:
std::nth_element (z.begin(), z.begin() + (z.size() / 10), z.end());
dtm_x(i,j) = z[z.size() / 10];
}
dem.free();
(dem.free)();
if (grid.width() * grid.height() > input.size())
values.resize (input.size(), compressed_float(0));
@@ -162,7 +162,7 @@ public:
values[*it] = v;
}
}
dtm_x.free();
(dtm_x.free)();
}

View File

@@ -100,7 +100,7 @@ public:
std::size_t J = grid.y(i);
values[i] = float(dtm(I,J) - get (point_map, *(input.begin() + i)).z());
}
dtm.free();
(dtm.free)();
}
}

View File

@@ -100,7 +100,7 @@ public:
std::size_t J = grid.y(i);
values[i] = float(get (point_map, *(input.begin() + i)).z() - dtm(I,J));
}
dtm.free();
(dtm.free)();
}
}

View File

@@ -102,7 +102,7 @@ public:
std::size_t J = grid.y(i);
values[i] = dtm(I,J);
}
dtm.free();
(dtm.free)();
}
}

View File

@@ -71,7 +71,7 @@ public:
{
}
void free()
void free BOOST_PREVENT_MACRO_SUBSTITUTION ()
{
m_raw.reset();
m_sparse.reset();

View File

@@ -45,7 +45,7 @@ class chained_map
chained_map_elem<T>* table;
chained_map_elem<T>* table_end;
chained_map_elem<T>* free;
chained_map_elem<T>* freelist;
std::size_t table_size;
std::size_t table_size_1;
@@ -144,10 +144,10 @@ void chained_map<T, Allocator>::init_table(std::size_t n)
std::allocator_traits<allocator_type>::construct(alloc,table + i);
}
free = table + t;
freelist = table + t;
table_end = table + t + t/2;
for (Item p = table; p < free; ++p)
for (Item p = table; p < freelist; ++p)
{ p->succ = nullptr;
p->k = nullkey;
}
@@ -161,10 +161,10 @@ inline void chained_map<T, Allocator>::insert(std::size_t x, T y)
q->k = x;
q->i = y;
} else {
free->k = x;
free->i = y;
free->succ = q->succ;
q->succ = free++;
freelist->k = x;
freelist->i = y;
freelist->succ = q->succ;
q->succ = freelist++;
}
}
@@ -214,7 +214,7 @@ T& chained_map<T, Allocator>::access(Item p, std::size_t x)
// index x not present, insert it
if (free == table_end) // table full: rehash
if (freelist == table_end) // table full: rehash
{ rehash();
p = HASH(x);
}
@@ -225,7 +225,7 @@ T& chained_map<T, Allocator>::access(Item p, std::size_t x)
return p->i;
}
q = free++;
q = freelist++;
q->k = x;
init_inf(q->i); // initializes q->i to xdef
q->succ = p->succ;
@@ -246,7 +246,7 @@ chained_map<T, Allocator>::chained_map(const chained_map<T, Allocator>& D)
{
init_table(D.table_size);
for(Item p = D.table; p < D.free; ++p)
for(Item p = D.table; p < D.freelist; ++p)
{ if (p->k != nullkey || p >= D.table + D.table_size)
{ insert(p->k,p->i);
//D.copy_inf(p->i); // see chapter Implementation
@@ -258,7 +258,7 @@ chained_map<T, Allocator>::chained_map(chained_map<T, Allocator>&& D)
noexcept(std::is_nothrow_move_constructible_v<Allocator> && std::is_nothrow_move_constructible_v<T>)
: table(std::exchange(D.table, nullptr))
, table_end(std::exchange(D.table_end, nullptr))
, free(std::exchange(D.free, nullptr))
, freelist(std::exchange(D.freelist, nullptr))
, table_size(std::exchange(D.table_size, 0))
, table_size_1(std::exchange(D.table_size_1, 0))
, alloc(std::move(D.alloc))
@@ -273,7 +273,7 @@ chained_map<T, Allocator>& chained_map<T, Allocator>::operator=(const chained_ma
init_table(D.table_size);
for(Item p = D.table; p < D.free; ++p)
for(Item p = D.table; p < D.freelist; ++p)
{ if (p->k != nullkey || p >= D.table + D.table_size)
{ insert(p->k,p->i);
//copy_inf(p->i); // see chapter Implementation
@@ -290,7 +290,7 @@ chained_map<T, Allocator>& chained_map<T, Allocator>::operator=(chained_map<T, A
table = std::exchange(D.table, nullptr);
table_end = std::exchange(D.table_end, nullptr);
free = std::exchange(D.free, nullptr);
freelist = std::exchange(D.freelist, nullptr);
table_size = std::exchange(D.table_size, 0);
table_size_1 = std::exchange(D.table_size_1, 0);
alloc = std::move(D.alloc);
@@ -339,7 +339,7 @@ void chained_map<T, Allocator>::statistics() const
std::size_t n = 0;
for (Item p = table; p < table + table_size; ++p)
if (p ->k != nullkey) ++n;
std::size_t used_in_overflow = free - (table + table_size );
std::size_t used_in_overflow = freelist - (table + table_size );
n += used_in_overflow;
std::cout << "number of entries: " << n << "\n";
std::cout << "fraction of entries in first position: " <<

View File

@@ -37,6 +37,13 @@
#endif
#ifdef CGAL_INCLUDE_WINDOWS_DOT_H
#if defined(_MSC_VER) && defined(_DEBUG)
// Include support for memory leak detection
// This is only available in debug mode and when _CRTDBG_MAP_ALLOC is defined.
// It will include <crtdbg.h> which will redefine `malloc` and `free`.
# define _CRTDBG_MAP_ALLOC 1
#endif
// Mimic users including this file which defines min max macros
// and other names leading to name clashes
#include <windows.h>