#!/usr/bin/env python3
"""Parser for LibMultiScale documentation blocks embedded in C++ sources.

Recognizes ``/* LMDESC ... */``, ``/* LMEXAMPLE ... */``,
``/* LMHERITANCE ... */`` and ``/* LMKEYWORD NAME ... */`` comment blocks
and extracts their contents with pyparsing.
"""

import pyparsing as pp


class LibMultiScaleParser:
    """Extract LMDOC comment blocks from a LibMultiScale source file."""

    # Closing delimiter shared by every documentation block.
    etag = pp.Literal("*/")

    # Opening tags of the recognized block kinds.
    tag_desc = pp.Literal("/* LMDESC")
    tag_keyword = pp.Literal("/* LMKEYWORD")
    tag_heritance = pp.Literal("/* LMHERITANCE")
    tag_example = pp.Literal("/* LMEXAMPLE")

    def block(self, name, tag):
        """Grammar for an anonymous block: everything between *tag* and '*/'.

        The block body is stored under results name *name*.
        """
        _block = pp.SkipTo(tag) + tag
        _block += pp.SkipTo(self.etag).setResultsName(name)
        _block += self.etag
        return _block.leaveWhitespace()

    def named_block(self, name, tag):
        """Grammar for a named block: *tag*, an upper-case identifier, body.

        The identifier is stored under results name ``'name'`` and the
        body under results name *name*.
        """
        # NOTE(review): the original version carried a second, unreachable
        # grammar definition (``_key``) after this return — dead code,
        # removed here.
        _block = pp.SkipTo(tag) + tag
        _block += pp.ZeroOrMore(pp.White()) + \
            pp.Word(pp.alphas.upper()).setResultsName('name')
        _block += pp.ZeroOrMore(pp.LineEnd())
        _block += pp.SkipTo(self.etag).setResultsName(name)
        _block += self.etag
        return _block.leaveWhitespace()

    def parse(self, filename, internal_key_list):
        """Parse *filename* and return the pyparsing ``ParseResults``.

        The result holds a mandatory ``description``, optional ``example``
        and ``heritance`` sections, and a ``keywords`` group of
        ``(name, body)`` pairs.  Raises ``pp.ParseException`` when the
        mandatory LMDESC block is absent.  *internal_key_list* is unused
        here but kept for interface compatibility with callers.
        """
        # Context manager guarantees the handle is closed even on a
        # parse error (the original closed it manually).
        with open(filename, 'r') as fin:
            inp = fin.read()

        _content = self.block('description', self.tag_desc)
        _content += pp.Optional(self.block('example', self.tag_example))
        _content += pp.Optional(self.block('heritance', self.tag_heritance))
        _content += pp.Group(pp.ZeroOrMore(
            self.named_block('keyword', self.tag_keyword)).addParseAction(
                lambda toks: (toks.get('name'),
                              toks.get('keyword')))).setResultsName("keywords")

        _content.leaveWhitespace()
        return _content.parseString(inp)
#!/usr/bin/env python3
"""Driver: parse LibMultiScale sources for LMDOC blocks and emit a manual."""

import sys
import re

from libmultiscale_doc_parser import LibMultiScaleParser
import pyparsing as pp

################################################################


class LMDocData(object):
    """Plain record holding the documentation extracted for one keyword."""

    def __init__(self):
        self.name = None          # keyword name
        self.type = None          # declared type string
        self.description = None   # LMDESC body
        self.children = []        # nested LMDocData entries
        self.var_name = None      # C++ variable name
        self.c_type = None        # C++ type
        self.filename = None      # source file the doc came from
        self.heritance = None     # LMHERITANCE entries
        self.example = None       # LMEXAMPLE body
        self.defaults = None      # default value string


class LMDataNode(object):
    """Tree node used to sort documentation by source-tree structure."""

    def __init__(self):
        self.name = None
        self.children = []
        self.data = []

################################################################


def readListOfSourceFiles(fname):
    """Return the source files listed in *fname* that declare parameters.

    *fname* holds one path per line; only files whose content matches
    ``declareParams() {`` are kept.
    """
    list_files = []
    # Context managers replace the original's leaked file handles.
    with open(fname) as _f:
        for line in _f:
            source_file = line.strip()
            # BUG FIX: the original tested the un-stripped line against "",
            # so a blank line (still holding its newline) slipped through
            # and open("") crashed.
            if source_file == "":
                continue
            with open(source_file) as src:
                _str = src.read()
            m = re.search(r'.*(declareParams\(\))\s+(\{).*', _str)
            if not m:
                continue
            list_files.append(source_file)
    return list_files

################################################################


def addDoc(root, doc, _file):
    """Insert *doc* into the *root* tree at the position given by *_file*.

    TODO(port): C++ implementation (path iteration below ``src/``) not yet
    ported to Python.
    """
    pass

################################################################


def addDocByName(sorted_by_name, doc, _file):
    """Register *doc* in *sorted_by_name* under the file's stem.

    TODO(port): C++ implementation not yet ported to Python.
    """
    pass

################################################################


def protectUnderscoreInDoc(doc):
    """Escape underscores in every textual field of *doc*, recursively.

    TODO(port): C++ implementation (protectString over all fields and
    children) not yet ported to Python.
    """
    pass

################################################################


def generatorKeyWordList():
    """Return the map of declared keywords to their owning components.

    TODO(port): the C++ version built this from the BOOST_PP component
    lists (atom/dd/continuum models, couplers, geometries, stimulators,
    dumpers, computes, filters).
    """
    # BUG FIX: the stub returned None, which made ``key_list.items()``
    # in main() raise AttributeError.  Return an empty dict until ported.
    return {}

################################################################


def generatorInternalKeyWordList():
    """Return the set of internal (undocumented-by-design) keywords.

    TODO(port): the C++ version inserted DomainDD, DomainAtomic,
    DomainContinuum, ActionInterface, Dumper, Geometry, etc.
    """
    # BUG FIX: the stub returned None; callers expect a set-like object.
    return set()

################################################################


def checkKeywordExistance(key, key_list, internal_key_list):
    """Check that *key* was declared, and tick it off *key_list*.

    TODO(port): C++ implementation (error on undeclared keyword, pop the
    matched entry) not yet ported to Python.
    """
    pass

################################################################


def main():
    """Parse every listed source file and generate the LaTeX manual."""
    p = sys.argv[1]
    files = readListOfSourceFiles(p)
    key_list = generatorKeyWordList()
    internal_key_list = generatorInternalKeyWordList()
    parser = LibMultiScaleParser()
    root = LMDataNode()
    sorted_by_name = {}

    for fname in files:
        res = LMDocData()
        res.filename = fname
        print("parsing " + res.filename)
        try:
            r = parser.parse(fname, internal_key_list)
            if r:
                checkKeywordExistance(res.name, key_list, internal_key_list)
                protectUnderscoreInDoc(res)
                for child in res.children:
                    child.filename = res.filename
                protectUnderscoreInDoc(res)
                addDoc(root, res, fname)
                addDocByName(sorted_by_name, res, fname)
            else:
                print("parsing of " + res.filename + " failed")
        except pp.ParseException as e:
            # Removed the debug ``print(e, dir(e))`` from the original.
            print(f'{res.filename}:{e.lineno}:{e.col}:{e}')

    # Abort only when keywords remained undocumented; the original
    # exited unconditionally, making the LaTeX generation unreachable.
    if key_list:
        for k in key_list:
            print("Error: keyword " + k + " was not documented ")
        sys.exit(-1)

    fname = "manual-generated.tex"
    # BUG FIX: the original called ``open(fname.c_str())`` — a C++ism;
    # Python str has no c_str(), and the output needs mode 'w'.
    with open(fname, 'w') as fout:
        # NOTE(review): GenerateLatex is not imported anywhere visible in
        # this file — confirm which module provides it.
        ltx = GenerateLatex()
        ltx.generateLatex(fout, root, sorted_by_name)

################################################################


if __name__ == "__main__":
    main()