Benchmark script for Nicolas

Léo Valque 2025-05-20 18:36:33 +02:00
parent 549b8f1430
commit 100ab58f33
7 changed files with 364 additions and 0 deletions

@@ -0,0 +1,50 @@
#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
#include <CGAL/Polygon_mesh_processing/repair_polygon_soup.h>
#include <CGAL/Polygon_mesh_processing/autorefinement.h>
#include <CGAL/Polygon_mesh_processing/triangulate_faces.h>
#include <CGAL/IO/polygon_soup_io.h>
#include <boost/container/small_vector.hpp>
#include <iostream>
typedef CGAL::Exact_predicates_inexact_constructions_kernel Kernel;
typedef typename Kernel::Point_3 Point_3;
namespace PMP = CGAL::Polygon_mesh_processing;
enum EXIT_CODES { VALID_OUTPUT=0,
INVALID_INPUT=1,
ROUNDING_FAILED=2,
SIGSEGV=10,
SIGABRT=11,
SIGFPE=12,
TIMEOUT=13
};
int main(int argc, char** argv)
{
  if (argc < 4) {
    std::cerr << "Usage: " << argv[0] << " <input_file> <grid_size> <erase_all_duplicates: 0|1>" << std::endl;
    return INVALID_INPUT;
  }
  const std::string filename(argv[1]);
  const int grid_size = std::stoi(argv[2]);
  const bool erase_duplicate = std::stoi(argv[3]) == 1;

  std::vector<Point_3> points;
  std::vector<boost::container::small_vector<std::size_t, 3>> triangles;
  if (!CGAL::IO::read_polygon_soup(filename, points, triangles))
  {
    std::cerr << "Cannot read " << filename << "\n";
    return INVALID_INPUT;
  }

  // Clean and triangulate the soup before snap rounding
  PMP::repair_polygon_soup(points, triangles);
  PMP::triangulate_polygons(points, triangles);

  PMP::autorefine_triangle_soup(points, triangles,
                                CGAL::parameters::apply_iterative_snap_rounding(true)
                                                 .erase_all_duplicates(erase_duplicate)
                                                 .concurrency_tag(CGAL::Parallel_if_available_tag())
                                                 .snap_grid_size(grid_size)
                                                 .number_of_iterations(15));
  return VALID_OUTPUT;
}
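
For reference, a minimal sketch of how this driver can be invoked once built (the executable name matches the call in the wrapper script below; the input file and parameter values are only illustrative):

    # <input_file> <snap_grid_size> <erase_all_duplicates: 0 or 1>
    ./performance_snap_polygon_soup input.off 23 1
    echo $?   # 0 on success, otherwise a value from the EXIT_CODES enum above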

@@ -0,0 +1,28 @@
#!/bin/bash
set -e
if [ "$#" -lt 4 ]; then
echo "Usage: $0 <input_file> <timeout> [component_params...]"
exit 1
fi
INPUT_FILE=$1
TIMEOUT=$2
GRID_SIZE=$3
ERASE_ALL_DUPLICATE=$4
# Use /usr/bin/time for memory usage (maximum resident set size in KB)
TMP_LOG=$(mktemp)
# Run the benchmarked command; do not let a timeout or crash abort the script
# before the JSON line below is emitted (failures are classified by the robustness run)
/usr/bin/time -f "TIME:%e\nMEM:%M" timeout "$TIMEOUT"s performance_snap_polygon_soup "$INPUT_FILE" "$GRID_SIZE" "$ERASE_ALL_DUPLICATE" 2> "$TMP_LOG" || true
# Parse wall-clock time and peak memory (SECONDS is reserved by bash, so use a different variable name)
ELAPSED=$(grep "TIME" "$TMP_LOG" | cut -d':' -f2)
MEMORY=$(grep "MEM" "$TMP_LOG" | cut -d':' -f2)
rm -f "$TMP_LOG"
# Output JSON
echo "{\"seconds\": \"$ELAPSED\", \"memory_peaks\": \"$MEMORY\"}"

@@ -0,0 +1,60 @@
#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
#include <CGAL/Polygon_mesh_processing/repair_polygon_soup.h>
#include <CGAL/Polygon_mesh_processing/autorefinement.h>
#include <CGAL/Polygon_mesh_processing/triangulate_faces.h>
#include <CGAL/Polygon_mesh_processing/distance.h>
#include <CGAL/IO/polygon_soup_io.h>
#include <CGAL/Bbox_3.h>
#include <CGAL/boost/graph/helpers.h>
#include <CGAL/Surface_mesh.h>
#include <CGAL/Polygon_mesh_processing/orientation.h>
#include <CGAL/Polygon_mesh_processing/orient_polygon_soup.h>
#include <CGAL/Polygon_mesh_processing/polygon_soup_to_polygon_mesh.h>
#include <boost/container/small_vector.hpp>
#include <cmath>
#include <iomanip>
#include <iostream>
using Kernel = CGAL::Exact_predicates_inexact_constructions_kernel;
typedef typename Kernel::Point_3 Point_3;
namespace PMP = CGAL::Polygon_mesh_processing;
int main(int argc, char** argv)
{
  if (argc < 4) {
    std::cerr << "Usage: " << argv[0] << " <input_file> <grid_size> <erase_all_duplicates: 0|1>" << std::endl;
    return 1;
  }
  const std::string filename(argv[1]);
  const int grid_size = std::stoi(argv[2]);
  const bool erase_duplicate = std::stoi(argv[3]) == 1;

  std::vector<Point_3> points;
  std::vector<boost::container::small_vector<std::size_t, 3>> triangles;
  if (!CGAL::IO::read_polygon_soup(filename, points, triangles))
  {
    std::cerr << "Cannot read " << filename << "\n";
    return 1;
  }

  // Clean and triangulate the soup before snap rounding, as in the other drivers
  PMP::repair_polygon_soup(points, triangles);
  PMP::triangulate_polygons(points, triangles);

  // Bounding-box diagonal of the input, used to normalize the distance below
  // (must be computed after the soup has been read)
  CGAL::Bbox_3 bb = CGAL::bbox_3(points.begin(), points.end());
  const double diag_length = std::sqrt((bb.xmax()-bb.xmin())*(bb.xmax()-bb.xmin()) +
                                       (bb.ymax()-bb.ymin())*(bb.ymax()-bb.ymin()) +
                                       (bb.zmax()-bb.zmin())*(bb.zmax()-bb.zmin()));

  // Keep a copy of the input vertices to measure how far snap rounding moved the surface
  std::vector<Point_3> input_points(points.begin(), points.end());

  PMP::autorefine_triangle_soup(points, triangles,
                                CGAL::parameters::apply_iterative_snap_rounding(true)
                                                 .erase_all_duplicates(erase_duplicate)
                                                 .concurrency_tag(CGAL::Parallel_if_available_tag())
                                                 .snap_grid_size(grid_size)
                                                 .number_of_iterations(15));

  std::cout << "{" <<
    "\"Nb_output_points\": \"" << points.size() << "\",\n" <<
    "\"Nb_output_triangles\": \"" << triangles.size() << "\",\n" <<
    "\"Is_2_manifold\": \"" << (PMP::orient_polygon_soup(points, triangles) ? "True" : "False") << "\",\n";

  CGAL::Surface_mesh<Point_3> sm;
  PMP::polygon_soup_to_polygon_mesh(points, triangles, sm);

  const bool closed = CGAL::is_closed(sm);
  std::cout << std::setprecision(17) <<
    "\"Hausdorff_distance_output_to_input_(divide_by_bbox_diag)\": \"" << PMP::max_distance_to_triangle_mesh<CGAL::Parallel_if_available_tag>(input_points, sm) / diag_length << "\",\n" <<
    "\"Closed_output\": \"" << (closed ? "True" : "False") << "\",\n" <<
    "\"Output_bounds_a_volume\": \"" << ((closed && PMP::does_bound_a_volume(sm)) ? "True" : "False") << "\"\n}"
    << std::endl;

  return 0;
}
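
Since this driver assembles its JSON report by hand, a quick well-formedness check can be useful (executable name as used by the wrapper script below; input file and parameters are illustrative):

    ./quality_snap_polygon_soup input.off 23 1 | python3 -m json.tool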

@@ -0,0 +1,19 @@
#!/bin/bash
set -e
if [ "$#" -lt 4 ]; then
echo "Usage: $0 <input_file> <timeout> [component_params...]"
exit 1
fi
INPUT_FILE=$1
TIMEOUT=$2
GRID_SIZE=$3
ERASE_ALL_DUPLICATE=$4
TMP_LOG=$(mktemp)
timeout "$TIMEOUT"s quality_snap_polygon_soup "$INPUT_FILE" "$GRID_SIZE" "$ERASE_ALL_DUPLICATE" > "$TMP_LOG"
cat "$TMP_LOG"
rm -f "$TMP_LOG"

@@ -0,0 +1,53 @@
#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
#include <CGAL/Polygon_mesh_processing/repair_polygon_soup.h>
#include <CGAL/Polygon_mesh_processing/autorefinement.h>
#include <CGAL/Polygon_mesh_processing/triangulate_faces.h>
#include <CGAL/Polygon_mesh_processing/self_intersections.h>
#include <CGAL/IO/polygon_soup_io.h>
#include <boost/container/small_vector.hpp>
#include <iostream>
typedef CGAL::Exact_predicates_inexact_constructions_kernel Kernel;
typedef typename Kernel::Point_3 Point_3;
namespace PMP = CGAL::Polygon_mesh_processing;
enum EXIT_CODES { VALID_OUTPUT=0,
INVALID_INPUT=1,
ROUNDING_FAILED=2,
SELF_INTERSECTING_OUTPUT=3,
SIGSEGV=10,
SIGABRT=11,
SIGFPE=12,
TIMEOUT=13
};
int main(int argc, char** argv)
{
  if (argc < 4) {
    std::cerr << "Usage: " << argv[0] << " <input_file> <grid_size> <erase_all_duplicates: 0|1>" << std::endl;
    return INVALID_INPUT;
  }
  const std::string filename(argv[1]);
  const int grid_size = std::stoi(argv[2]);
  const bool erase_duplicate = std::stoi(argv[3]) == 1;

  std::vector<Point_3> points;
  std::vector<boost::container::small_vector<std::size_t, 3>> triangles;
  if (!CGAL::IO::read_polygon_soup(filename, points, triangles) || points.empty() || triangles.empty())
  {
    return INVALID_INPUT;
  }

  // Clean and triangulate the soup before snap rounding
  PMP::repair_polygon_soup(points, triangles);
  PMP::triangulate_polygons(points, triangles);

  const bool success =
    PMP::autorefine_triangle_soup(points, triangles,
                                  CGAL::parameters::apply_iterative_snap_rounding(true)
                                                   .erase_all_duplicates(erase_duplicate)
                                                   .concurrency_tag(CGAL::Parallel_if_available_tag())
                                                   .snap_grid_size(grid_size)
                                                   .number_of_iterations(15));
  if (!success)
    return ROUNDING_FAILED;

  if (PMP::does_triangle_soup_self_intersect<CGAL::Parallel_if_available_tag>(points, triangles))
    return SELF_INTERSECTING_OUTPUT;

  return VALID_OUTPUT;
}

@@ -0,0 +1,38 @@
#!/bin/bash
if [ "$#" -lt 4 ]; then
echo "Usage: $0 <input_file> <timeout> [component_params...]"
exit 1
fi
timeout_bis() {
timeout 5 sleep 10
}
INPUT_FILE=$1
TIMEOUT=$2
GRID_SIZE=$3
ERASE_ALL_DUPLICATE=$4
# Run with timeout, capture exit code
timeout "--foreground" "$TIMEOUT"s robustness_snap_polygon_soup "$INPUT_FILE" "$GRID_SIZE" "$ERASE_ALL_DUPLICATE"
EXIT_CODE=$?
# Interpret exit codes: 0-3 come from the program's EXIT_CODES enum,
# 124 is timeout(1)'s own code, and 128+N means the process was killed by signal N
declare -A TAGS
TAGS[0]="VALID_OUTPUT"
TAGS[1]="INVALID_INPUT"
TAGS[2]="ROUNDING_FAILED"
TAGS[3]="SELF_INTERSECTING_OUTPUT"
TAGS[124]="TIMEOUT"
TAGS[132]="SIGILL"
TAGS[134]="SIGABRT"
TAGS[136]="SIGFPE"
TAGS[139]="SIGSEGV"
TAG_NAME=${TAGS[$EXIT_CODE]:-UNKNOWN}
TAG_DESC=$([[ "$EXIT_CODE" -eq 0 ]] && echo "OK" || echo "Error")
# Output JSON
echo "{\"TAG_NAME\": \"$TAG_NAME\", \"TAG\": \"$TAG_DESC\"}"

@@ -0,0 +1,116 @@
#!/bin/bash
# Temp directory for individual result JSONs
TMP_RESULT_DIR=$(mktemp -d)
# Job control
JOBS=0
# Function to process a single file
process_file() {
  INPUT_PATH="$1"
  INPUT_ID=$(basename "$INPUT_PATH" | cut -d. -f1)
  COMPONENT_NAME="$2"
  PROJECT_DIR="$3"
  TIMEOUT="$4"
  OUTPUT_DIR="$5"
  TMP_RESULT_FILE="$6"
  GRID_SIZE="$7"
  ERASE_ALL_DUPLICATE="$8"
  {
    echo " \"$INPUT_ID\": {"
    echo " \"path\": \"$INPUT_PATH\","
    PERF_OUTPUT=$(bash "$PROJECT_DIR/Performance/run_performance.sh" "$INPUT_PATH" "$TIMEOUT" "$GRID_SIZE" "$ERASE_ALL_DUPLICATE" 2>> "$OUTPUT_DIR/Logs/$COMPONENT_NAME/Performance/$INPUT_ID.log")
    # Fall back to empty objects so the merged JSON stays valid when a sub-run produces no output
    [ -n "$PERF_OUTPUT" ] || PERF_OUTPUT="{}"
    echo " \"Performance\": $PERF_OUTPUT,"
    QUALITY_OUTPUT=$(bash "$PROJECT_DIR/Quality/run_quality.sh" "$INPUT_PATH" "$TIMEOUT" "$GRID_SIZE" "$ERASE_ALL_DUPLICATE" 2>> "$OUTPUT_DIR/Logs/$COMPONENT_NAME/Quality/$INPUT_ID.log")
    [ -n "$QUALITY_OUTPUT" ] || QUALITY_OUTPUT="{}"
    echo " \"Quality\": $QUALITY_OUTPUT,"
    ROBUST_OUTPUT=$(bash "$PROJECT_DIR/Robustness/run_robustness.sh" "$INPUT_PATH" "$TIMEOUT" "$GRID_SIZE" "$ERASE_ALL_DUPLICATE" 2>> "$OUTPUT_DIR/Logs/$COMPONENT_NAME/Robustness/$INPUT_ID.log")
    [ -n "$ROBUST_OUTPUT" ] || ROBUST_OUTPUT="{}"
    echo " \"Robustness\": $ROBUST_OUTPUT"
    echo " }"
  } > "$TMP_RESULT_FILE"
}
export -f process_file
# Usage function
usage() {
  echo "Usage: $0 <project_dir> <input_data_dir> <output_results_dir> <timeout> <num_threads> <grid_size> <erase_all_duplicates>"
  exit 1
}
# Check parameters
if [ "$#" -lt 7 ]; then
  usage
fi
# Arguments
PROJECT_DIR=$1
INPUT_DIR=$2
OUTPUT_DIR=$3
TIMEOUT=$4
NUM_THREADS=$5
GRID_SIZE=$6
ERASE_ALL_DUPLICATE=$7
# Get component name from the project directory name
COMPONENT_NAME=$(basename "$PROJECT_DIR")
DATE_TAG=$(date +"%Y-%m-%d")
TIMESTAMP=$(date +"%Y-%m-%d %H:%M:%S")
RESULT_JSON="$OUTPUT_DIR/${COMPONENT_NAME}_results_${DATE_TAG}.json"
# Compile (CGAL_DIR must be defined so CMake can find CGAL)
cmake "$PROJECT_DIR" "-DCMAKE_BUILD_TYPE=Release" "-DCMAKE_CXX_FLAGS=-O3"
make -j "$NUM_THREADS"
# Prepare log directories
mkdir -p "$OUTPUT_DIR/Logs/$COMPONENT_NAME/Performance"
mkdir -p "$OUTPUT_DIR/Logs/$COMPONENT_NAME/Quality"
mkdir -p "$OUTPUT_DIR/Logs/$COMPONENT_NAME/Robustness"
# Loop over the input files and spawn parallel jobs, at most NUM_THREADS at a time
for INPUT_FILE in "$INPUT_DIR"/*; do
  INPUT_ID=$(basename "$INPUT_FILE" | cut -d. -f1)
  TMP_RESULT_FILE="$TMP_RESULT_DIR/$INPUT_ID.json"
  process_file "$INPUT_FILE" "$COMPONENT_NAME" "$PROJECT_DIR" "$TIMEOUT" "$OUTPUT_DIR" "$TMP_RESULT_FILE" "$GRID_SIZE" "$ERASE_ALL_DUPLICATE" &
  ((JOBS+=1))
  if [ "$JOBS" -ge "$NUM_THREADS" ]; then
    wait
    JOBS=0
  fi
done
wait
# Merge all partial JSONs
echo "{" > "$RESULT_JSON"
echo " \"$COMPONENT_NAME\": {" >> "$RESULT_JSON"
echo " \"Thingi10K\": {" >> "$RESULT_JSON"
FIRST_ENTRY=true
for FILE in "$TMP_RESULT_DIR"/*.json; do
  if [ "$FIRST_ENTRY" = true ]; then
    FIRST_ENTRY=false
  else
    echo "," >> "$RESULT_JSON"
  fi
  cat "$FILE" >> "$RESULT_JSON"
done
echo "" >> "$RESULT_JSON"
echo " }," >> "$RESULT_JSON"
echo " \"finished_at\": \"$TIMESTAMP\"" >> "$RESULT_JSON"
echo " }" >> "$RESULT_JSON"
echo "}" >> "$RESULT_JSON"