mirror of https://github.com/CGAL/cgal
Merge pull request #4448 from maxGimeno/Update_python_scripts-maxGimeno
Update python scripts
commit b0026b1f5e
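This change ports the documentation tooling to Python 3. The CMake configuration now locates the interpreter with find_package(Python3 COMPONENTS Interpreter) and runs the scripts through ${Python3_EXECUTABLE}; the doc scripts get python3 shebangs, drop the u"" string prefixes, replace a lazy filter() result with a list comprehension, and read HTML/XML files themselves (via codecs) before handing the text to pyquery; the shell drivers call python3 directly, stop exporting a Python 2.6 PYTHONPATH, and gain a guard for missing doc logs.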
@@ -27,7 +27,7 @@ else()
 endif()

 find_package(Doxygen)
-find_package(PythonInterp)
+find_package(Python3 COMPONENTS Interpreter)

 if(NOT DOXYGEN_FOUND)
   message(WARNING "Cannot build the documentation without Doxygen!")
@@ -322,9 +322,9 @@ endif()
 set(CGAL_DOC_VERSION ${CGAL_CREATED_VERSION_NUM})

 ## generate how_to_cite files
-if(PYTHONINTERP_FOUND)
+if(Python3_Interpreter_FOUND)
   execute_process(
-    COMMAND ${PYTHON_EXECUTABLE} ${CGAL_DOC_SCRIPT_DIR}/generate_how_to_cite.py
+    COMMAND ${Python3_EXECUTABLE} ${CGAL_DOC_SCRIPT_DIR}/generate_how_to_cite.py
     ${CGAL_ROOT} ${CMAKE_BINARY_DIR} "${CGAL_BRANCH_BUILD}"
     RESULT_VARIABLE GENERATE_HOW_TO_CITE_RESULT)
   if(NOT GENERATE_HOW_TO_CITE_RESULT EQUAL "0")
@@ -436,25 +436,25 @@ add_dependencies(Documentation_copy_doc_tags doc_pre)
 #total level doc dependencies
 add_dependencies(doc doc_post)

-if(PYTHONINTERP_FOUND)
+if(Python3_Interpreter_FOUND)
   set(CGAL_DOC_TESTSUITE_SCRIPT "${CGAL_DOC_SCRIPT_DIR}/testsuite.py")

   add_custom_target(
     doc_with_postprocessing
-    ${PYTHON_EXECUTABLE} ${CGAL_DOC_SCRIPT_DIR}/html_output_post_processing.py
+    ${Python3_EXECUTABLE} ${CGAL_DOC_SCRIPT_DIR}/html_output_post_processing.py
     --output ${CGAL_DOC_OUTPUT_DIR} --resources ${CGAL_DOC_RESOURCE_DIR})
   add_dependencies(doc_with_postprocessing doc)

   if(CGAL_DOC_CREATE_LOGS)
     add_custom_target(
       Documentation_test
-      ${PYTHON_EXECUTABLE} ${CGAL_DOC_TESTSUITE_SCRIPT} --output-dir
+      ${Python3_EXECUTABLE} ${CGAL_DOC_TESTSUITE_SCRIPT} --output-dir
       ${CGAL_DOC_OUTPUT_DIR} --doc-log-dir ${CGAL_DOC_LOG_DIR})
     add_dependencies(Documentation_test doc)

     add_custom_target(
       Documentation_test_publish
-      ${PYTHON_EXECUTABLE}
+      ${Python3_EXECUTABLE}
       ${CGAL_DOC_TESTSUITE_SCRIPT}
       --output-dir
       ${CGAL_DOC_OUTPUT_DIR}
@@ -467,7 +467,7 @@ if(PYTHONINTERP_FOUND)

     add_custom_target(
       doc_and_publish_testsuite
-      ${PYTHON_EXECUTABLE}
+      ${Python3_EXECUTABLE}
       ${CGAL_DOC_TESTSUITE_SCRIPT}
       --output-dir
       ${CGAL_DOC_OUTPUT_DIR}

@@ -4,6 +4,7 @@ from pyquery import PyQuery as pq
 from collections import defaultdict
 from sys import argv
 import os.path as op
+import codecs

 # if _in is part of args, return true.
 def check_type(_in, args):
@@ -33,7 +34,8 @@ for i in range(0,len(compounds)):
     filepath='class'+compound+'.xml'
     total_path=op.join(op.sep, root_path,filepath)
     if(op.isfile(total_path)):
-        e = pq(filename=total_path, parser="xml")
+        file_content = codecs.open(total_path, 'rb')
+        e = pq(file_content.read(), parser="xml")
         compoundnames=[p.text() for p in list(e('includes').items())]

         if(len(compoundnames) > 1 and compoundnames[0].find("Concept") != -1):

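The same read-then-parse pattern recurs in the scripts below: instead of letting pyquery open the file via filename=..., the file is read explicitly and the raw content is handed to pq(). A minimal stand-alone sketch of that pattern (not part of the patch; the input file name is made up):

    # Read an XML file ourselves, then let pyquery/lxml parse the content.
    import codecs
    from pyquery import PyQuery as pq

    path = "classExample.xml"             # hypothetical input file
    with codecs.open(path, 'rb') as f:    # raw bytes; lxml detects the encoding
        doc = pq(f.read(), parser="xml")
    names = [p.text() for p in doc('includes').items()]
    print(names)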
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # coding: utf8

 import re
@@ -157,7 +157,7 @@ def protect_upper_case(title):
     return title.replace("dD","{dD}").replace("2D","{2D}").replace("3D","{3D}").replace("CGAL","{CGAL}").replace("Qt","{Qt}").replace("Boost","{Boost}")

 def protect_accentuated_letters(authors):
-    res=authors.replace(u"é",r"{\'e}").replace(u"è",r"{\`e}").replace(u"É",r"{\'E}").replace(u"ä",r"{\"a}").replace(u"ö",r"{\"o}").replace(u"ñ",r"{\~n}").replace(u"ã",r"{\~a}").replace(u"ë",r"{\"e}").replace(u"ı",r"{\i}").replace(u"Ş",r"{\c{S}}").replace(u"ş",r"{\c{s}}").replace("%","")
+    res=authors.replace("é",r"{\'e}").replace("è",r"{\`e}").replace("É",r"{\'E}").replace("ä",r"{\"a}").replace("ö",r"{\"o}").replace("ñ",r"{\~n}").replace("ã",r"{\~a}").replace("ë",r"{\"e}").replace("ı",r"{\i}").replace("Ş",r"{\c{S}}").replace("ş",r"{\c{s}}").replace("%","")
     try:
         res.encode('ascii')
     except UnicodeEncodeError:

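The only change here is dropping the Python 2 u"" prefixes: in Python 3, every string literal is already Unicode. A small illustrative sketch of the idea (the author name is just an example, not from the patch):

    # Replace accented letters with their LaTeX escapes; the ASCII round-trip
    # is how a character that was not handled gets detected.
    def protect(authors):
        res = authors.replace("é", r"{\'e}").replace("è", r"{\`e}")
        try:
            res.encode('ascii')      # raises if some character was not translated
        except UnicodeEncodeError:
            print("warning: untranslated non-ASCII character in", repr(res))
        return res

    print(protect("Hervé"))          # -> Herv{\'e}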
@@ -56,13 +56,14 @@ def write_out_html(d, fn):
     # this is the normal doxygen doctype, which is thrown away by pyquery
     f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "https://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">\n')
     f.write('<html xmlns=\"http://www.w3.org/1999/xhtml\">')
-    f.write(d.html())
+    if d.html() is not None:
+        f.write(d.html())
     f.write('\n')
     f.write('</html>\n')
     f.close()

 def package_glob(target):
-    return filter(lambda x: not os.path.join(os.path.join('.','Manual'),'') in x, glob.glob(target))
+    return [x for x in glob.glob(target) if not os.path.join(os.path.join('.','Manual'),'') in x]

 # remove duplicate files
 def clean_doc():
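Two Python 3 details are handled in this hunk: d.html() can return None for an empty selection, hence the new guard before writing, and filter() now returns a lazy iterator, hence the list comprehension. A short sketch of the latter (the paths are made up):

    # Why the comprehension replaces filter(): code that indexes, re-iterates,
    # or calls len() on the result needs a real list in Python 3.
    paths = ['./Manual/index.html', './Kernel_23/index.html']   # hypothetical
    lazy  = filter(lambda x: 'Manual' not in x, paths)
    eager = [x for x in paths if 'Manual' not in x]
    print(list(lazy) == eager)   # True, but `lazy` is now exhausted
    print(len(eager))            # len(lazy) would raise a TypeError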
@@ -127,7 +128,8 @@ def re_replace_first_in_file(pat, s_after, fname):
 def is_concept_file(filename):
     if not path.exists(filename):
         return False;
-    d = pq(filename=filename, parser='html', encoding='utf-8')
+    file_content = codecs.open(filename, 'r', encoding='utf-8')
+    d = pq(file_content.read(),parser="html")
     ident = d('#CGALConcept')
     return ident.size() == 1

@@ -190,7 +192,8 @@ def automagically_number_figures():
         return


-    d = pq(filename="./Manual/packages.html", parser='html', encoding='utf-8')
+    file_content = codecs.open("./Manual/packages.html", 'r', encoding='utf-8')
+    d = pq(file_content.read(),parser="html")
     for el in d('a.elRef'):
         text = pq(el).attr('href')
         if text.find("index.html")!=-1:
@@ -210,14 +213,16 @@ def automagically_number_figures():
     all_pkg_files.remove(userman)
     for fname in [userman]+all_pkg_files:
         infos=figure_anchor_info(pkg_id, global_anchor_map)
-        d = pq(filename=fname, parser='html', encoding='utf-8')
+        file_content = codecs.open(fname, 'r', encoding='utf-8')
+        d = pq(file_content.read(), parser="html")
         d('a.anchor').each( lambda i: collect_figure_anchors(i,infos) )
         pkg_id+=1

     #Figure link dev Manual
     for fname in glob.glob("Manual/*.html"):
         infos=figure_anchor_info(0, global_anchor_map)
-        d = pq(filename=fname, parser='html', encoding='utf-8')
+        file_content = codecs.open(fname, 'r', encoding='utf-8')
+        d = pq(file_content.read(),parser="html")
         d('a.anchor').each( lambda i: collect_figure_anchors(i,infos) )

     #replace each link to a figure by its unique id
@@ -227,7 +232,8 @@ def automagically_number_figures():
         with codecs.open(fname, encoding='utf-8') as f:
             if not any(re.search("fig__", line) for line in f):
                 continue # pattern does not occur in file so we are done.
-        d = pq(filename=fname, parser='html', encoding='utf-8')
+        file_content = codecs.open(fname, 'r', encoding='utf-8')
+        d = pq(file_content.read(), parser="html")
         d('a.el').each( lambda i: update_figure_ref(i,global_anchor_map) )
         d('a.elRef').each( lambda i: update_figure_ref(i,global_anchor_map) )
         write_out_html(d, fname)
@@ -261,7 +267,8 @@ removes some unneeded files, and performs minor repair on some glitches.''')
     re_replace_in_file("<span class=\"icon\">N</span>", "<span class=\"icon-namespace\">N</span>", fn)
     re_replace_in_file("<span class=\"icon\">C</span>", "<span class=\"icon-class\">C</span>", fn)
     dir_name=path.dirname(fn)
-    d = pq(filename=fn, parser='html', encoding='utf-8')
+    file_content = codecs.open(fn, 'r', encoding='utf-8')
+    d = pq(file_content.read(), parser="html")
     tr_tags = d('table.directory tr img')
     tr_tags.each(lambda i: rearrange_img(i, dir_name))
     span_tags = d('table.directory tr span')
@@ -270,7 +277,8 @@ removes some unneeded files, and performs minor repair on some glitches.''')
 class_files=list(package_glob('./*/class*.html'))
 class_files.extend(package_glob('./*/struct*.html'))
 for fn in class_files:
-    d = pq(filename=fn, parser='html', encoding='utf-8')
+    file_content = codecs.open(fn, 'r', encoding='utf-8')
+    d = pq(file_content.read(), parser="html")
     ident = d('#CGALConcept')
     if ident.size() == 1:
         conceptify(d);
@@ -284,7 +292,8 @@ removes some unneeded files, and performs minor repair on some glitches.''')

 namespace_files=package_glob('./*/namespace*.html')
 for fn in namespace_files:
-    d = pq(filename=fn, parser='html', encoding='utf-8')
+    file_content = codecs.open(fn, 'r', encoding='utf-8')
+    d = pq(file_content.read(), parser="html")
     ident = d('#CGALConceptNS')
     if ident.size() == 1:
         conceptify_ns(d);
@@ -294,14 +303,16 @@ removes some unneeded files, and performs minor repair on some glitches.''')
 # in a group we only need to change the nested-classes
 group_files=package_glob('./*/group*Concepts*.html')
 for fn in group_files:
-    d = pq(filename=fn, parser='html',encoding='utf-8')
+    file_content = codecs.open(fn, 'r', encoding='utf-8')
+    d = pq(file_content.read(), parser="html")
     conceptify_nested_classes(d)
     write_out_html(d, fn)

 # fix up Files
 files_files=package_glob('./*/files.html')
 for fn in files_files:
-    d = pq(filename=fn, parser='html',encoding='utf-8')
+    file_content = codecs.open(fn, 'r', encoding='utf-8')
+    d = pq(file_content.read(), parser="html")
     table = d("table.directory")
     row_id=table("td.entry").filter(lambda i: pq(this).text() == 'Concepts').parent().attr('id')
     if row_id != None:
@@ -331,7 +342,8 @@ removes some unneeded files, and performs minor repair on some glitches.''')
 relationship_pages.extend(package_glob('./*/generalizes.html'))
 relationship_pages.extend(package_glob('./*/refines.html'))
 for fn in relationship_pages:
-    d = pq(filename=fn, parser='html',encoding='utf-8')
+    file_content = codecs.open(fn, 'r', encoding='utf-8')
+    d = pq(file_content.read(), parser="html")
     dts=d(".textblock .reflist dt")
     # no contents() on pyquery, do it the hard way
     # Note that in the following regular expression, the Struct did not appear in doxygen version 1.8.3
@@ -343,7 +355,8 @@ removes some unneeded files, and performs minor repair on some glitches.''')
 # throw out nav-sync
 all_pages=glob.glob('./*/*.html')
 for fn in all_pages:
-    d = pq(filename=fn, parser='html',encoding='utf-8')
+    file_content = codecs.open(fn, 'r', encoding='utf-8')
+    d = pq(file_content.read(), parser="html")
     d('#nav-sync').hide()
     # TODO count figures
     write_out_html(d, fn)
@@ -366,7 +379,8 @@ removes some unneeded files, and performs minor repair on some glitches.''')
 # remove class name in Definition section if there is no default template
 # parameter documented
 for fn in class_and_struct_files:
-    d = pq(filename=fn, parser='html',encoding='utf-8')
+    file_content = codecs.open(fn, 'r', encoding='utf-8')
+    d = pq(file_content.read(), parser="html")
     for el in d('h3'):
         text = pq(el).text()
         if text[0:9]=="template<" and text.find('=')==-1:

@@ -53,7 +53,11 @@ PATH_TO_MASTER="$PWD/doxygen_master/build/bin/doxygen"
 echo "done."

 echo "comparing versions 1.8.4 and 1.8.13"
 bash -$- test_doxygen_versions.sh $PATH_TO_1_8_4 $PATH_TO_1_8_13 $PWD/doc_1_8_4 $PWD/doc_1_8_13 $PUBLISH_DIR
+if [ ! -d $PWD/doc_1_8_13/doc_log ]; then
+  echo "NO DOC LOGS."
+  exit 1
+fi
 mv diff.txt diff1.txt

 echo "comparing versions 1.8.4 and master"
@@ -70,13 +74,13 @@ fi
 #update overview
 CGAL_NAME=$(cat cgal_version)
 if [ "$DO_COMPARE" = "TRUE" ]; then
-  python ${PWD}/testsuite.py --output-dir1 $PWD/doc_1_8_4/doc_output/ --output-dir2 $PWD/doc_1_8_13/doc_output/ --doc-log-dir1 $PWD/doc_1_8_4/doc_log/ \
+  python3 ${PWD}/testsuite.py --output-dir1 $PWD/doc_1_8_4/doc_output/ --output-dir2 $PWD/doc_1_8_13/doc_output/ --doc-log-dir1 $PWD/doc_1_8_4/doc_log/ \
   --doc-log-dir2 $PWD/doc_1_8_13/doc_log/ --doc-log-dir-master $PWD/doc_master/doc_log/ \
   --publish $PUBLISH_DIR --diff1 $PWD/diff1.txt --diff2 $PWD/diff2.txt --master-dir $PWD/doc_master/doc_output/ \
   --cgal-version "$CGAL_NAME" --do-copy-results --version-to-keep 10 --doxygen-version1 "$DOXYGEN_1" --doxygen-version2 "$DOXYGEN_2" --master-describe "$MASTER_DESCRIBE"
 else
   echo "NO MASTER"
-  python ${PWD}/testsuite.py --output-dir1 $PWD/doc_1_8_4/doc_output/ --output-dir2 $PWD/doc_1_8_13/doc_output/ --doc-log-dir1 $PWD/doc_1_8_4/doc_log/ \
+  python3 ${PWD}/testsuite.py --output-dir1 $PWD/doc_1_8_4/doc_output/ --output-dir2 $PWD/doc_1_8_13/doc_output/ --doc-log-dir1 $PWD/doc_1_8_4/doc_log/ \
   --doc-log-dir2 $PWD/doc_1_8_13/doc_log/ --doc-log-dir-master $PWD/doc_master/ \
   --publish $PUBLISH_DIR --diff1 $PWD/diff1.txt \
   --cgal-version "$CGAL_NAME" --do-copy-results --version-to-keep 10 --doxygen-version1 "$DOXYGEN_1" --doxygen-version2 "$DOXYGEN_2"

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # Copyright (c) 2012 GeometryFactory (France). All rights reserved.
 # All rights reserved.
 #

@@ -58,8 +58,6 @@ if [ ! -d "${CGAL_DOC_BUILD}/${CGAL_RELEASE_ID}" ]; then
 fi
 cd "${CGAL_RELEASE_ID}"

-PYTHONPATH=/home/cgal-testsuite/.local/lib/python2.6/site-packages
-export PYTHONPATH
 PATH=/home/cgal-testsuite/local/bin:$PATH
 export PATH
 cd "$PWD/doc/scripts"