WIP testing doc pipeline

This commit is contained in:
Maxime Gimeno 2020-01-03 16:45:35 +01:00
parent 76ff945bf4
commit 94dcdd65a8
7 changed files with 28 additions and 26 deletions

View File

@@ -25,7 +25,7 @@ else()
endif()
find_package(Doxygen)
find_package(PythonInterp 2.6.7)
find_package(PythonInterp 3.5)
if(NOT DOXYGEN_FOUND)
message(WARNING "Cannot build the documentation without Doxygen!")

View File

@@ -28,7 +28,7 @@ FAILURES=()
for dir in $PATH_TO_DOC/*
do
OUTPUT=$(basename $dir)
python ../documentation_parser.py $dir/xml > ./"$OUTPUT.txt"
python3 ../documentation_parser.py $dir/xml > ./"$OUTPUT.txt"
if [ $? -eq 0 ]; then
echo "$dir OK"
else

View File

@@ -2,6 +2,7 @@ from pyquery import PyQuery as pq
from collections import defaultdict
from sys import argv
import os.path as op
import codecs
# if _in is part of args, return true.
def check_type(_in, args):
@@ -12,17 +13,17 @@ def check_type(_in, args):
root_path=argv[1]
d = pq(filename=op.join(op.sep, root_path,'index.xml'), parser="xml")
compounds=[p.text() for p in d('compound').items()]
types=[p.attr('kind') for p in d('compound').items()]
compounds=[p.text() for p in list(d('compound').items())]
types=[p.attr('kind') for p in list(d('compound').items())]
type_map = defaultdict(list) #map <type, name>
dict_map = defaultdict(dict)#map <name, map<member type, member name>>
#FOREACH compounds : fill maps
for i in xrange(0,len(compounds)):
for i in range(0,len(compounds)):
if check_type(types[i], "typedef"):
types[i]="type"
name=d('compound').children("name").eq(i).text()
members=[p.text() for p in d('compound').eq(i).children("member").items()]
m_types=[p.attr('kind') for p in d('compound').eq(i).children("member").items()]
members=[p.text() for p in list(d('compound').eq(i).children("member").items())]
m_types=[p.attr('kind') for p in list(d('compound').eq(i).children("member").items())]
if (not check_type(types[i], ['example', 'file', 'dir', 'page', 'group']) and
not (types[i] == "namespace" and len(members) == 0) and
not (types[i] == "enum" and len(members) == 0) ):
@@ -31,8 +32,9 @@ for i in xrange(0,len(compounds)):
filepath='class'+compound+'.xml'
total_path=op.join(op.sep, root_path,filepath)
if(op.isfile(total_path)):
e = pq(filename=total_path, parser="xml")
compoundnames=[p.text() for p in e('includes').items()]
file_content = codecs.open(total_path, 'r', encoding='utf-8')
e = pq(file_content, parser="xml")
compoundnames=[p.text() for p in list(e('includes').items())]
if(len(compoundnames) > 1 and compoundnames[0].find("Concept") != -1):
types[i] = 'Concept '+types[i].lower()
@@ -41,7 +43,7 @@ for i in xrange(0,len(compounds)):
mtype_map = defaultdict(list)# map<member type, member name>
#FOREACH member :
for j in xrange(0,len(members)):
for j in range(0,len(members)):
if(check_type(types[i], ['class', 'Concept class'])
and m_types[j] == "function"):
m_types[j]="method"
@@ -62,7 +64,7 @@ for btype in type_map:
out=btype
if btype.endswith('s'):
out+='e'
print out.title()+'s'
print(out.title()+'s')
indent+=" "
#FOREACH name
for name in type_map[btype]:
@@ -74,7 +76,7 @@ for btype in type_map:
templates=[]
if op.isfile(op.join(op.sep, root_path,filepath)):
f=pq(filename=op.join(op.sep, root_path,filepath), parser="xml")
templateparams=f("compounddef").children("templateparamlist").eq(0).children("param").items()
templateparams=list(f("compounddef").children("templateparamlist").eq(0).children("param").items())
for param in templateparams:
template_type=""
template_name=""
@@ -91,7 +93,7 @@ for btype in type_map:
complete_template+=' = '+template_defval
templates.append(complete_template)
if templates==[]:#if no child was found, just take param.text()
templates=[t.text() for t in param.items()]
templates=[t.text() for t in list(param.items())]
suffix="<"
#as template got type, defname and declname, name is twice in template. keep only one of them.
to_remove=[""]
@@ -101,7 +103,7 @@ for btype in type_map:
suffix=""
if suffix.endswith(', '):
suffix = suffix[:-2]+'>'
print indent+name+suffix
print(indent+name+suffix)
indent+=" "
#FOREACH mtype
@@ -109,7 +111,7 @@ for btype in type_map:
out=mtype
if mtype.endswith('s'):
out+='e'
print indent+out.title()+'s'
print(indent+out.title()+'s')
indent+=" "
#FOREACH member
overload_map = defaultdict(int) #contains the number of times a member has appeared (to manage the overloads)
@@ -123,16 +125,16 @@ for btype in type_map:
if op.isfile(op.join(op.sep, root_path,filepath)):
f=pq(filename=op.join(op.sep, root_path,filepath), parser="xml")
index=0
memberdefs=[m.text() for m in f("memberdef").items()]
for i in xrange(0,len(memberdefs)):
member_names=[member_name.text() for member_name in f('memberdef').eq(i).children("name").items()]
memberdefs=[m.text() for m in list(f("memberdef").items())]
for i in range(0,len(memberdefs)):
member_names=[member_name.text() for member_name in list(f('memberdef').eq(i).children("name").items())]
if f('memberdef').eq(i).children("name").text() == member:
if (index < overload_map[member]):
index+=1
elif (index == overload_map[member]):
if check_type(mtype, ['function', 'method']):
args=[f('memberdef').eq(i).children("argsstring").text()]
templateparams=f('memberdef').eq(i).children("templateparamlist").children("param").items()
templateparams=list(f('memberdef').eq(i).children("templateparamlist").children("param").items())
if check_type(mtype, ['function', 'method', 'type', 'variable']):
return_type=[f('memberdef').eq(i).children("type").text()]
break;
@@ -158,7 +160,7 @@ for btype in type_map:
complete_template+=' = '+template_defval
templates.append(complete_template)
if templates==[]:#if no child was found, just take param.text()
templates=[t.text() for t in param.items()]
templates=[t.text() for t in list(param.items())]
prefix="template <"
for template in templates:
@@ -171,7 +173,7 @@ for btype in type_map:
prefix+=definition
if(prefix != ""):
prefix+=" "
print indent+prefix+member+arguments
print(indent+prefix+member+arguments)
overload_map[member]+=1
#END foreach member
indent=indent[:-2]

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python2
#!/usr/bin/env python3
# coding: utf8
import re
@@ -157,7 +157,7 @@ def protect_upper_case(title):
return title.replace("dD","{dD}").replace("2D","{2D}").replace("3D","{3D}").replace("CGAL","{CGAL}").replace("Qt","{Qt}").replace("Boost","{Boost}")
def protect_accentuated_letters(authors):
res=authors.replace(u"é",r"{\'e}").replace(u"è",r"{\`e}").replace(u"É",r"{\'E}").replace(u"ä",r"{\"a}").replace(u"ö",r"{\"o}").replace(u"ñ",r"{\~n}").replace(u"ã",r"{\~a}").replace(u"ë",r"{\"e}").replace(u"ı",r"{\i}").replace(u"Ş",r"{\c{S}}").replace(u"ş",r"{\c{s}}").replace("%","")
res=authors.replace("é",r"{\'e}").replace("è",r"{\`e}").replace("É",r"{\'E}").replace("ä",r"{\"a}").replace("ö",r"{\"o}").replace("ñ",r"{\~n}").replace("ã",r"{\~a}").replace("ë",r"{\"e}").replace("ı",r"{\i}").replace("Ş",r"{\c{S}}").replace("ş",r"{\c{s}}").replace("%","")
try:
res.encode('ascii')
except UnicodeEncodeError:

View File

@@ -62,7 +62,7 @@ def write_out_html(d, fn):
f.close()
def package_glob(target):
return filter(lambda x: not os.path.join(os.path.join('.','Manual'),'') in x, glob.glob(target))
return [x for x in glob.glob(target) if not os.path.join(os.path.join('.','Manual'),'') in x]
# remove duplicate files
def clean_doc():

View File

@@ -45,7 +45,7 @@ mv diff.txt diff2.txt
#update overview
CGAL_NAME=$(cat cgal_version)
python ${PWD}/testsuite.py --output-dir1 $PWD/doc_1_8_4/doc_output/ --output-dir2 $PWD/doc_1_8_13/doc_output/ --doc-log-dir1 $PWD/doc_1_8_4/doc_log/ \
python3 ${PWD}/testsuite.py --output-dir1 $PWD/doc_1_8_4/doc_output/ --output-dir2 $PWD/doc_1_8_13/doc_output/ --doc-log-dir1 $PWD/doc_1_8_4/doc_log/ \
--doc-log-dir2 $PWD/doc_1_8_13/doc_log/ --doc-log-dir-master $PWD/doc_master/doc_log/ \
--publish $PUBLISH_DIR --diff1 $PWD/diff1.txt --diff2 $PWD/diff2.txt --master-dir $PWD/doc_master/doc_output/ \
--cgal-version "$CGAL_NAME" --do-copy-results --version-to-keep 10 --doxygen-version1 "$DOXYGEN_1" --doxygen-version2 "$DOXYGEN_2" --master-describe "$MASTER_DESCRIBE"

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python2
#!/usr/bin/env python3
# Copyright (c) 2012 GeometryFactory (France). All rights reserved.
# All rights reserved.
#