#include "parser_foundation.h" #include "token_impls.h" using namespace SyntaxX; using namespace Lex; using namespace Parser; #define Chain KeywordDef("^#脉络$", "chain-def") #define Node KeywordDef("^#节点$", "node-def") #define NodeRef SectionMatch("^@\\{", "target-ref") #define EOL KeywordDef("^\\n$", "line-break") #define SStart KeywordDef("^\\{$", "section-start") #define SEnd KeywordDef("^\\}$", "section-end") #define Texts SectionMatch("^([^@\\{\\}\\|/]+)", "desc-block") #define TypeSep KeywordDef("^\\|$", "type-def") #define LevSep KeywordDef("^/$", "type-def") KeywordDef::KeywordDef(const QString ®ex, const QString &type) : regex_store(regex), type_store(type) {} QString KeywordDef::typeName() const { return "<" + type_store + ">"; } QString KeywordDef::regexp() const { return regex_store; } Lex::Token *KeywordDef::analysis(const Lex::WordBase &word) { QRegExp regx(regexp()); if (regx.indexIn(word.content()) != -1) { return new TokenResult(this, word, word.content().length()); } return nullptr; } SectionMatch::SectionMatch(const QString ®ex, const QString &type) : regex_store(regex), type_store(type) {} QString SectionMatch::typeName() const { return "<" + type_store + ">"; } QString SectionMatch::regexp() const { return regex_store; } Token *SectionMatch::analysis(const Lex::WordBase &word) { QRegExp regx(regexp()); if (regx.indexIn(word.content()) != -1) { return new TokenResult(this, word, regx.matchedLength()); } return nullptr; } ChainParser::ChainParser() { node_refer.setRule(Seqs(T(NodeRef), T(Texts), T(TypeSep), T(Texts), Repeat(Seqs(T(LevSep), T(Texts))), T(SEnd))); text_section.setRule(T(Texts)); desc_pragraph.setRule(Seqs(Repeat(Any(&text_section, &node_refer), 1), T(EOL))); node_def.setRule(Seqs(T(Node), Repeat(T(Texts), 1), T(SStart), Repeat(&desc_pragraph), T(SEnd))); chain_def.setRule(Seqs(T(Node), Repeat(T(Texts)), T(SStart), Repeat(Any(&desc_pragraph, &node_def)), T(SEnd))); // chain_def.resetProcess([](Lex::TokenReader *port, Ast::ASTList *pnode) -> Ast::ASTList * { // }); }