Page Menu
Home
c4science
Search
Configure Global Search
Log In
Files
F72914554
generate_doc.py
No One
Temporary
Actions
Download File
Edit File
Delete File
View Transforms
Subscribe
Mute Notifications
Award Token
Subscribers
None
File Metadata
Details
File Info
Storage
Attached
Created
Wed, Jul 17, 14:48
Size
5 KB
Mime Type
text/x-python
Expires
Fri, Jul 19, 14:48 (1 d, 23 h)
Engine
blob
Format
Raw Data
Handle
19118501
Attached To
rLIBMULTISCALE LibMultiScale
generate_doc.py
View Options
#!/usr/bin/env python3
import
argparse
import
os
import
re
import
pylibmultiscale
as
lm
import
pyparsing
as
pp
from
libmultiscale_doc_parser
import
LibMultiScaleParser
from
generate_rst
import
generateRST
################################################################
def LMDocData():
    """Create a fresh, empty documentation record for one parsed source file.

    Every field starts out empty; the parsing loop in main() fills them in
    as the corresponding sections are found in the source file.
    """
    return dict(
        name=None,
        type=None,
        description=None,
        children=[],
        var_name=None,
        c_type=None,
        filename=None,
        heritance=[],
        example=None,
        defaults=None,
    )
def LMDataNode():
    """Create an empty node of the documentation tree.

    'children' maps sub-directory names to further LMDataNode dicts and
    'data' collects the documentation records attached at this level.
    """
    return dict(name=None, children={}, data=[])
################################################################
def readListOfSourceFiles(fname):
    """Return the source files listed in *fname* that define ``declareParams()``.

    Parameters
    ----------
    fname : str
        Path to a text file containing one source-file path per line.

    Returns
    -------
    list of str
        The listed paths whose content matches ``declareParams() {``,
        in their original order.
    """
    # Hoisted out of the loop: matches "declareParams()" followed by
    # whitespace and the opening brace of its body.
    pattern = re.compile(r'.*(declareParams\(\))\s+(\{).*')
    list_files = []
    # Bug fix: open files through context managers so they are always
    # closed (the original leaked both the list file and every source file).
    with open(fname) as list_file:
        for line in list_file:
            # Bug fix: the original tested the *raw* line against "" — it
            # still carries its newline, so blank lines were never skipped
            # and open("") crashed on them.  Test the stripped name instead.
            source_file = line.strip()
            if source_file == "":
                continue
            with open(source_file) as src:
                content = src.read()
            if pattern.search(content):
                list_files.append(source_file)
    return list_files
################################################################
def addDoc(root, doc, _file):
    """Attach *doc* to the tree *root* at the position mirroring the
    directory layout of its source file below the 'src' directory.

    Parameters
    ----------
    root : dict
        Tree node as produced by LMDataNode().
    doc : dict
        Documentation record (LMDocData-shaped) to attach.
    _file : str
        '/'-separated path of the parsed source file; it must contain a
        'src' component.

    Raises
    ------
    RuntimeError
        If the path contains no 'src' component.
    """
    dirs = _file.split('/')
    # Bug fix: the original scanned with a C-style "++i" (a no-op in
    # Python — two unary plus operators) and then compared i == len(dirs),
    # which a for-loop index can never reach, so a missing 'src' component
    # was never reported.  dirs.index() expresses the intent directly.
    try:
        i = dirs.index('src')
    except ValueError:
        raise RuntimeError("problem in the file structure")
    dirs = dirs[i + 1:]
    _file = os.path.join(*dirs)
    # print(_file)

    # Walk the tree along the remaining path components, creating the
    # intermediate nodes on first use.
    node = root
    for sublevel in dirs:
        if sublevel not in node['children']:
            node['children'][sublevel] = LMDataNode()
        node = node['children'][sublevel]
        node['name'] = sublevel
    node['data'].append(doc)
################################################################
def addDocByName(sorted_by_name, doc, _file):
    """Register *doc* in *sorted_by_name*, keyed by the base name of its
    source file without the final extension (e.g. 'a/src/Foo.cc' -> 'Foo')."""
    base = os.path.basename(_file)
    short_name, _ext = os.path.splitext(base)
    # print("record heritance ", short_name)
    sorted_by_name[short_name] = doc
################################################################
def generatorInternalKeyWordList():
    """Return the keywords internal to the generator machinery: these are
    accepted during parsing but need no user-facing documentation entry."""
    internal = (
        'DomainDD DomainAtomic DomainContinuum ActionInterface '
        'DomainInterface Dumper ComposedGeom CouplerManager '
        'DofAssociation Bridging Geometry ArlequinTemplate '
        'PointAssociation LAMMPS_BASE AKANTU_BASE AKANTU_DYNAMIC '
        'AKANTU_STATIC FILTER'
    )
    return internal.split()
################################################################
def checkKeywordExistance(key, key_list, internal_key_list):
    """Mark one documented occurrence of *key* as consumed.

    If *key* is known in *key_list* (a dict mapping keyword -> list of
    pending occurrences), one occurrence is popped; once none remain the
    keyword is removed from *key_list* entirely.  A key found in neither
    *key_list* nor *internal_key_list* only triggers a warning on stdout.
    """
    declared = key in key_list
    if not declared and key not in internal_key_list:
        known = ', '.join(key_list)
        print("Warning:Undeclared " + key + ": keywords are " + known)
        return
    if declared:
        occurrences = key_list[key]
        occurrences.pop()
        if not occurrences:
            # Fully documented: drop the keyword from the pending set.
            del key_list[key]
    # print('')
################################################################
def main():
    """Parse the listed LibMultiScale sources and write the keyword RST doc.

    Command-line interface:
      filelist  file containing the source files to parse (one per line)
      output    path of the generated rst file
    """
    # Renamed from 'parser' so the argparse object is no longer shadowed
    # by the LibMultiScaleParser instance created below.
    arg_parser = argparse.ArgumentParser(
        description='Python embedded in Latex tool')
    arg_parser.add_argument(
        'filelist', type=str,
        help='The file containing the files to parse')
    arg_parser.add_argument(
        'output', type=str,
        help='The output rst file')
    args = arg_parser.parse_args()

    files = readListOfSourceFiles(args.filelist)
    # key_list maps declared keywords to their pending occurrences; entries
    # are consumed by checkKeywordExistance() as documentation is found.
    key_list = lm.getKeyWordList()
    internal_key_list = generatorInternalKeyWordList()
    doc_parser = LibMultiScaleParser()

    root = LMDataNode()
    sorted_by_name = {}
    for fname in files:
        res_doc = LMDocData()
        res_doc['filename'] = fname
        res_doc['class_name'] = os.path.splitext(os.path.basename(fname))[0]
        print("parsing: " + res_doc['filename'])
        try:
            res = doc_parser.parse(fname, internal_key_list)
            # for k, v in res.items():
            #     print(f'{k}:{v}')
            res_doc['name'] = res['description'][0]
            res_doc['description'] = res['description'][1]
            if 'example' in res:
                res_doc['example'] = res['example']
            if 'heritance' in res:
                for h in res['heritance']:
                    res_doc['heritance'] += h.split()
            if 'keywords' in res:
                res_doc['children'] = [
                    {'name': k[0], 'value': k} for k in res['keywords']]
            checkKeywordExistance(res_doc['name'], key_list,
                                  internal_key_list)
            for child in res_doc['children']:
                child['filename'] = res_doc['filename']
                child['description'] = child['value'][1]
            addDoc(root, res_doc, fname)
            addDocByName(sorted_by_name, res_doc, fname)
        except (pp.ParseException, pp.ParseSyntaxException) as e:
            # The two original handlers were byte-identical: merged into a
            # single tuple handler.  Report the location and keep going
            # with the remaining files.
            print(f'{res_doc["filename"]}:{e.lineno}:{e.col}:{e}')

    # Anything left in key_list was declared in C++ but never documented.
    for k in key_list:
        print("Warning: keyword " + k + " was not documented ")
    # print(root)

    # Bug fix: write through a context manager so the output file is
    # closed (and flushed) even if generateRST raises.
    with open(args.output, 'w') as fout:
        fout.write("""
Keywords
========
------------
""")
        # print('AAAA', sorted_by_name.keys())
        generateRST(fout, root, sorted_by_name)
################################################################
# Script entry point: run the documentation generator only when this file
# is executed directly, not when it is imported as a module.
if __name__ == "__main__":
    main()
Event Timeline
Log In to Comment