diff --git a/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Performance/performance_snap_polygon_soup.cpp b/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Performance/performance_snap_polygon_soup.cpp
new file mode 100644
index 00000000000..d64cb27da9b
--- /dev/null
+++ b/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Performance/performance_snap_polygon_soup.cpp
@@ -0,0 +1,50 @@
+#include <CGAL/Polygon_mesh_processing/autorefinement.h>
+#include <CGAL/Polygon_mesh_processing/repair_polygon_soup.h>
+#include <CGAL/Polygon_mesh_processing/triangulate_faces.h>
+#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
+#include <CGAL/IO/polygon_soup_io.h>
+
+#include <CGAL/tags.h>
+
+#include <iostream>
+#include <string>
+#include <vector>
+
+typedef CGAL::Exact_predicates_inexact_constructions_kernel Kernel;
+typedef Kernel::Point_3 Point_3;
+namespace PMP = CGAL::Polygon_mesh_processing;
+
+enum EXIT_CODES { VALID_OUTPUT=0,
+                  INVALID_INPUT=1,
+                  ROUNDING_FAILED=2,
+                  SIGSEGV=10,
+                  SIGABRT=11,
+                  SIGFPE=12,
+                  TIMEOUT=13
+                };
+
+int main(int argc, char** argv)
+{
+  if(argc<4){
+    std::cerr << "Usage: " << argv[0] << " <input_file> <grid_size> <erase_all_duplicates (0|1)>" << std::endl;
+    return 1;
+  }
+
+  const std::string filename = std::string(argv[1]);
+  const int grid_size = std::stoi(std::string(argv[2]));
+  const bool erase_duplicate = std::stoi(argv[3])==1;
+
+  std::vector<Point_3> points;
+  std::vector<std::vector<std::size_t> > triangles;
+
+  if (!CGAL::IO::read_polygon_soup(filename, points, triangles))
+  {
+    std::cerr << "Cannot read " << filename << "\n";
+    return 1;
+  }
+
+  PMP::repair_polygon_soup(points, triangles);
+  PMP::triangulate_polygons(points, triangles);
+
+  PMP::autorefine_triangle_soup(points, triangles,
+                                CGAL::parameters::apply_iterative_snap_rounding(true)
+                                                 .erase_all_duplicates(erase_duplicate)
+                                                 .concurrency_tag(CGAL::Parallel_if_available_tag())
+                                                 .snap_grid_size(grid_size)
+                                                 .number_of_iterations(15));
+
+  return 0;
+}
diff --git a/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Performance/run_performance.sh b/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Performance/run_performance.sh
new file mode 100755
index 00000000000..aa6ea1a28ba
--- /dev/null
+++ b/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Performance/run_performance.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+
+set -e
+
+if [ "$#" -lt 4 ]; then
+  echo "Usage: $0 <input_file> <timeout> <grid_size> <erase_all_duplicate> [component_params...]"
+  exit 1
+fi
+
+INPUT_FILE=$1
+TIMEOUT=$2
+GRID_SIZE=$3
+ERASE_ALL_DUPLICATE=$4
+
+# Use /usr/bin/time for memory usage (maximum resident set size in KB)
+TMP_LOG=$(mktemp)
+
+# Run the benchmarked command
+/usr/bin/time -f "TIME:%e\nMEM:%M" timeout "$TIMEOUT"s performance_snap_polygon_soup "$INPUT_FILE" "$GRID_SIZE" "$ERASE_ALL_DUPLICATE" 2> "$TMP_LOG"
+
+# Parse time and memory (RUNTIME_SECONDS avoids clobbering bash's special SECONDS variable)
+RUNTIME_SECONDS=$(grep "TIME" "$TMP_LOG" | cut -d':' -f2)
+MEMORY=$(grep "MEM" "$TMP_LOG" | cut -d':' -f2)
+
+rm -f "$TMP_LOG"
+
+# Output JSON
+echo "{\"seconds\": \"$RUNTIME_SECONDS\", \"memory_peaks\": \"$MEMORY\"}"
diff --git a/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Quality/quality_snap_polygon_soup.cpp b/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Quality/quality_snap_polygon_soup.cpp
new file mode 100644
index 00000000000..8e4bb99c2ba
--- /dev/null
+++ b/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Quality/quality_snap_polygon_soup.cpp
@@ -0,0 +1,60 @@
+#include <CGAL/Polygon_mesh_processing/autorefinement.h>
+#include <CGAL/Polygon_mesh_processing/orient_polygon_soup.h>
+#include <CGAL/Polygon_mesh_processing/polygon_soup_to_polygon_mesh.h>
+#include <CGAL/Polygon_mesh_processing/distance.h>
+#include <CGAL/Polygon_mesh_processing/orientation.h>
+#include <CGAL/boost/graph/helpers.h>
+
+#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
+#include <CGAL/Surface_mesh.h>
+
+#include <CGAL/IO/polygon_soup_io.h>
+
+#include <iostream>
+#include <iomanip>
+#include <vector>
+#include <cmath>
+
+using Kernel = CGAL::Exact_predicates_inexact_constructions_kernel;
+typedef Kernel::Point_3 Point_3;
+namespace PMP = CGAL::Polygon_mesh_processing;
+
+int main(int argc, char** argv)
+{
+  if(argc<4){
+    std::cerr << "Usage: " << argv[0] << " <input_file> <grid_size> <erase_all_duplicates (0|1)>" << std::endl;
+    return 1;
+  }
+  const std::string filename = std::string(argv[1]);
+  const int grid_size = std::stoi(std::string(argv[2]));
+  const bool erase_duplicate = std::stoi(argv[3])==1;
+
+  std::vector<Point_3> points;
+  std::vector<std::vector<std::size_t> > triangles;
+
+  if (!CGAL::IO::read_polygon_soup(filename, points, triangles))
+  {
+    std::cerr << "Cannot read " << filename << "\n";
+    return 1;
+  }
+
+  // bounding box diagonal of the input, used below to normalize the Hausdorff distance
+  CGAL::Bbox_3 bb = CGAL::bbox_3(points.begin(), points.end());
+  double diag_length = std::sqrt((bb.xmax()-bb.xmin())*(bb.xmax()-bb.xmin()) + (bb.ymax()-bb.ymin())*(bb.ymax()-bb.ymin()) + (bb.zmax()-bb.zmin())*(bb.zmax()-bb.zmin()));
+
+  std::vector<Point_3> input_points(points.begin(), points.end());
+
+  PMP::autorefine_triangle_soup(points, triangles,
+                                CGAL::parameters::apply_iterative_snap_rounding(true)
+                                                 .erase_all_duplicates(erase_duplicate)
+                                                 .concurrency_tag(CGAL::Parallel_if_available_tag())
+                                                 .snap_grid_size(grid_size)
+                                                 .number_of_iterations(15));
+
+  std::cout << "{" <<
+    "\"Nb_output_points\": \"" << points.size() << "\",\n" <<
+    "\"Nb_output_triangles\": \"" << triangles.size() << "\",\n" <<
+    "\"Is_2_manifold\": \"" << (PMP::orient_polygon_soup(points, triangles)?"True":"False") << "\",\n";
+
+  CGAL::Surface_mesh<Point_3> sm;
+  PMP::polygon_soup_to_polygon_mesh(points, triangles, sm);
+
+  std::cout << std::setprecision(17) <<
+    "\"Hausdorff_distance_output_to_input_(divide_by_bbox_diag)\": \"" << PMP::max_distance_to_triangle_mesh<CGAL::Parallel_if_available_tag>(input_points, sm) / diag_length << "\",\n" <<
+    "\"Closed_output\": \"" << (CGAL::is_closed(sm)?"True":"False") << "\",\n" <<
+    "\"Output_bound_a_volume\": \"" << (PMP::does_bound_a_volume(sm)?"True":"False") << "\"\n}"
+    << std::endl;
+
+  return 0;
+}
\ No newline at end of file
diff --git a/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Quality/run_quality.sh b/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Quality/run_quality.sh
new file mode 100755
index 00000000000..c3a194483eb
--- /dev/null
+++ b/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Quality/run_quality.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+set -e
+
+if [ "$#" -lt 4 ]; then
+  echo "Usage: $0 <input_file> <timeout> <grid_size> <erase_all_duplicate> [component_params...]"
+  exit 1
+fi
+
+INPUT_FILE=$1
+TIMEOUT=$2
+GRID_SIZE=$3
+ERASE_ALL_DUPLICATE=$4
+
+TMP_LOG=$(mktemp)
+timeout "$TIMEOUT"s quality_snap_polygon_soup "$INPUT_FILE" "$GRID_SIZE" "$ERASE_ALL_DUPLICATE" > "$TMP_LOG"
+
+cat "$TMP_LOG"
+rm -f "$TMP_LOG"
\ No newline at end of file
diff --git a/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Robustness/robustness_snap_polygon_soup.cpp b/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Robustness/robustness_snap_polygon_soup.cpp
new file mode 100644
index 00000000000..da424eb6422
--- /dev/null
+++ b/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Robustness/robustness_snap_polygon_soup.cpp
@@ -0,0 +1,53 @@
+#include <CGAL/Polygon_mesh_processing/autorefinement.h>
+#include <CGAL/Polygon_mesh_processing/repair_polygon_soup.h>
+#include <CGAL/Polygon_mesh_processing/triangulate_faces.h>
+#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
+#include <CGAL/IO/polygon_soup_io.h>
+
+#include <iostream>
+#include <string>
+#include <vector>
+
+typedef CGAL::Exact_predicates_inexact_constructions_kernel Kernel;
+typedef Kernel::Point_3 Point_3;
+namespace PMP = CGAL::Polygon_mesh_processing;
+
+enum EXIT_CODES { VALID_OUTPUT=0,
+                  INVALID_INPUT=1,
+                  ROUNDING_FAILED=2,
+                  SELF_INTERSECTING_OUTPUT=3,
+                  SIGSEGV=10,
+                  SIGABRT=11,
+                  SIGFPE=12,
+                  TIMEOUT=13
+                };
+
+int main(int argc, char** argv)
+{
+  if(argc<4){
+    std::cerr << "Usage: " << argv[0] << " <input_file> <grid_size> <erase_all_duplicates (0|1)>" << std::endl;
+    return INVALID_INPUT;
+  }
+
+  const std::string filename = std::string(argv[1]);
+  const int grid_size = std::stoi(std::string(argv[2]));
+  const bool erase_duplicate = std::stoi(argv[3])==1;
+
+  std::vector<Point_3> points;
+  std::vector<std::vector<std::size_t> > triangles;
+
+  if (!CGAL::IO::read_polygon_soup(filename, points, triangles) || points.size()==0 || triangles.size()==0)
+  {
+    return INVALID_INPUT;
+  }
+
+  PMP::repair_polygon_soup(points, triangles);
+  PMP::triangulate_polygons(points, triangles);
+
+  bool success = PMP::autorefine_triangle_soup(points, triangles,
+                                               CGAL::parameters::apply_iterative_snap_rounding(true)
+                                                                .erase_all_duplicates(erase_duplicate)
+                                                                .concurrency_tag(CGAL::Parallel_if_available_tag())
+                                                                .snap_grid_size(grid_size)
+                                                                .number_of_iterations(15));
+
+  if(!success)
+    return ROUNDING_FAILED;
+  if( PMP::does_triangle_soup_self_intersect(points, triangles) )
+    return SELF_INTERSECTING_OUTPUT;
+
+  return VALID_OUTPUT;
+}
diff --git a/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Robustness/run_robustness.sh b/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Robustness/run_robustness.sh
new file mode 100755
index 00000000000..4e4e28dcdfd
--- /dev/null
+++ b/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/Robustness/run_robustness.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+
+if [ "$#" -lt 4 ]; then
+  echo "Usage: $0 <input_file> <timeout> <grid_size> <erase_all_duplicate> [component_params...]"
+  exit 1
+fi
+
+INPUT_FILE=$1
+TIMEOUT=$2
+GRID_SIZE=$3
+ERASE_ALL_DUPLICATE=$4
+
+# Run with timeout, capture exit code
+timeout --foreground "$TIMEOUT"s robustness_snap_polygon_soup "$INPUT_FILE" "$GRID_SIZE" "$ERASE_ALL_DUPLICATE"
+EXIT_CODE=$?
+
+# Interpret exit codes (128+N is reported when the process is killed by signal N)
+declare -A TAGS
+TAGS[0]="VALID_OUTPUT"
+TAGS[1]="INPUT_IS_INVALID"
+TAGS[2]="ROUNDING_FAILED"
+TAGS[3]="SELF_INTERSECTING_OUTPUT"
+TAGS[139]="SIGSEGV"
+TAGS[11]="SIGSEGV"
+TAGS[134]="SIGABRT"
+TAGS[6]="SIGABRT"
+TAGS[136]="SIGFPE"
+TAGS[8]="SIGFPE"
+TAGS[132]="SIGILL"
+TAGS[124]="TIMEOUT"
+
+TAG_NAME=${TAGS[$EXIT_CODE]:-UNKNOWN}
+TAG_DESC=$([[ "$EXIT_CODE" -eq 0 ]] && echo "OK" || echo "Error")
+
+# Output JSON
+echo "{\"TAG_NAME\": \"$TAG_NAME\", \"TAG\": \"$TAG_DESC\"}"
\ No newline at end of file
diff --git a/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/benchmarking_snap_polygon_soup.sh b/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/benchmarking_snap_polygon_soup.sh
new file mode 100755
index 00000000000..da3f363566a
--- /dev/null
+++ b/Polygon_mesh_processing/benchmark/Polygon_mesh_processing/benchmarking_snap_polygon_soup.sh
@@ -0,0 +1,116 @@
+#!/bin/bash
+
+# Temp directory for individual result JSONs
+TMP_RESULT_DIR=$(mktemp -d)
+
+# Job control (number of currently running background jobs)
+JOBS=0
+
+# Function to process a single file
+process_file() {
+  INPUT_PATH="$1"
+  INPUT_ID=$(basename "$INPUT_PATH" | cut -d. -f1)
+  COMPONENT_NAME="$2"
+  PROJECT_DIR="$3"
+  TIMEOUT="$4"
+  OUTPUT_DIR="$5"
+  TMP_RESULT_FILE="$6"
+  GRID_SIZE="$7"
+  ERASE_ALL_DUPLICATE="$8"
+  {
+    echo "      \"$INPUT_ID\": {"
+    echo "        \"path\": \"$INPUT_PATH\","
+
+    PERF_OUTPUT=$(bash "$PROJECT_DIR/Performance/run_performance.sh" "$INPUT_PATH" "$TIMEOUT" "$GRID_SIZE" "$ERASE_ALL_DUPLICATE" 2>> "$OUTPUT_DIR/Logs/$COMPONENT_NAME/Performance/$INPUT_ID.log")
+    echo "        \"Performance\": $PERF_OUTPUT,"
+
+    QUALITY_OUTPUT=$(bash "$PROJECT_DIR/Quality/run_quality.sh" "$INPUT_PATH" "$TIMEOUT" "$GRID_SIZE" "$ERASE_ALL_DUPLICATE" 2>> "$OUTPUT_DIR/Logs/$COMPONENT_NAME/Quality/$INPUT_ID.log")
+    echo "        \"Quality\": $QUALITY_OUTPUT,"
+
+    ROBUST_OUTPUT=$(bash "$PROJECT_DIR/Robustness/run_robustness.sh" "$INPUT_PATH" "$TIMEOUT" "$GRID_SIZE" "$ERASE_ALL_DUPLICATE" 2>> "$OUTPUT_DIR/Logs/$COMPONENT_NAME/Robustness/$INPUT_ID.log")
+    echo "        \"Robustness\": $ROBUST_OUTPUT"
+
+    echo "      }"
+  } > "$TMP_RESULT_FILE"
+}
+export -f process_file
+
+# Usage function
+usage() {
+  echo "Usage: $0 <project_dir> <input_dir> <output_dir> <timeout> <num_threads> <grid_size> <erase_all_duplicate> [component_params...]"
+  exit 1
+}
+
+# Check parameters
+if [ "$#" -lt 7 ]; then
+  usage
+fi
+
+# Arguments
+PROJECT_DIR=$1
+INPUT_DIR=$2
+OUTPUT_DIR=$3
+TIMEOUT=$4
+NUM_THREADS=$5
+GRID_SIZE=$6
+ERASE_ALL_DUPLICATE=$7
+
+# Get component name from the project directory name
+COMPONENT_NAME=$(basename "$PROJECT_DIR")
+DATE_TAG=$(date +"%Y-%m-%d")
+TIMESTAMP=$(date +"%Y-%m-%d %H:%M:%S")
+RESULT_JSON="$OUTPUT_DIR/${COMPONENT_NAME}_results_${DATE_TAG}.json"
+
+# Compile
+# Do not forget to define CGAL_DIR
+cmake "$PROJECT_DIR" "-DCMAKE_BUILD_TYPE=Release" "-DCMAKE_CXX_FLAGS=-O3"
+make -j "$NUM_THREADS"
+
+# Prepare log directories
+mkdir -p "$OUTPUT_DIR/Logs/$COMPONENT_NAME/Performance"
+mkdir -p "$OUTPUT_DIR/Logs/$COMPONENT_NAME/Quality"
+mkdir -p "$OUTPUT_DIR/Logs/$COMPONENT_NAME/Robustness"
+
+# Loop over input files and spawn parallel jobs, at most NUM_THREADS at a time
+for INPUT_FILE in "$INPUT_DIR"/*; do
+  INPUT_ID=$(basename "$INPUT_FILE" | cut -d. -f1)
+  TMP_RESULT_FILE="$TMP_RESULT_DIR/$INPUT_ID.json"
+
+  process_file "$INPUT_FILE" "$COMPONENT_NAME" "$PROJECT_DIR" "$TIMEOUT" "$OUTPUT_DIR" "$TMP_RESULT_FILE" "$GRID_SIZE" "$ERASE_ALL_DUPLICATE" &
+
+  ((JOBS+=1))
+  if [ "$JOBS" -ge "$NUM_THREADS" ]; then
+    wait
+    JOBS=0
+  fi
+done
+
+wait
+
+# Merge all partial JSONs into the final result file
+echo "{" > "$RESULT_JSON"
+echo "  \"$COMPONENT_NAME\": {" >> "$RESULT_JSON"
+echo "    \"Thingi10K\": {" >> "$RESULT_JSON"
+
+FIRST_ENTRY=true
+for FILE in "$TMP_RESULT_DIR"/*.json; do
+  if [ "$FIRST_ENTRY" = true ]; then
+    FIRST_ENTRY=false
+  else
+    echo "," >> "$RESULT_JSON"
+  fi
+  cat "$FILE" >> "$RESULT_JSON"
+done
+
+echo "" >> "$RESULT_JSON"
+echo "    }," >> "$RESULT_JSON"
+echo "    \"finished_at\": \"$TIMESTAMP\"" >> "$RESULT_JSON"
+echo "  }" >> "$RESULT_JSON"
+echo "}" >> "$RESULT_JSON"
+
+# Clean up the per-file temporary results
+rm -rf "$TMP_RESULT_DIR"
\ No newline at end of file
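
For reference, a possible invocation of the top-level driver is sketched below once the three benchmark programs are built. The CGAL checkout path, the Thingi10K input directory, the output directory, and the numeric values are illustrative placeholders rather than values prescribed by this patch; CGAL_DIR is set so that the cmake call inside the script can locate CGAL.

    # <project_dir> <input_dir> <output_dir> <timeout> <num_threads> <grid_size> <erase_all_duplicate>
    CGAL_DIR=/path/to/cgal bash benchmarking_snap_polygon_soup.sh \
      /path/to/Polygon_mesh_processing/benchmark/Polygon_mesh_processing \
      /path/to/Thingi10K/raw_meshes \
      /path/to/benchmark_output \
      600 8 23 1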