暂停开发C++版解析器,开发状态暂存
This commit is contained in:
parent
4dd5a0e728
commit
d4d0af7ea7
|
|
@ -1,8 +1,8 @@
|
|||
#脉络 大乾国共受难主线 xcvkjzlvj
|
||||
{
|
||||
diyigeakldfj;dl来翻案;dlfj;sl分类风
|
||||
diyigeakldfj;dl来翻案;dlfj;sl分类风@{节点|脉络名/节点名}
|
||||
|
||||
#节点 初始登场{ 大乾国公府鸡飞狗跳,人字广场集训大练兵}
|
||||
#节点 初始登场{ 大乾国公府鸡飞狗跳,人字广场集训大练兵}
|
||||
#节点 高潮迭起
|
||||
{混乱大逃杀,初始情况混乱。人间清醒}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE QtCreatorProject>
|
||||
<!-- Written by QtCreator 4.15.0, 2023-03-24T01:07:04. -->
|
||||
<!-- Written by QtCreator 4.15.0, 2023-08-10T21:33:27. -->
|
||||
<qtcreator>
|
||||
<data>
|
||||
<variable>EnvironmentId</variable>
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ namespace Syntax
|
|||
void appendParser(Syntax::SyntaxParser *u);
|
||||
|
||||
private:
|
||||
Lex::TokensReader *const tokens_in;
|
||||
Lex::TokenReader *const tokens_in;
|
||||
|
||||
QString unknown_token;
|
||||
QList<Lex::TokenDef *> token_seqs;
|
||||
|
|
|
|||
|
|
@ -101,7 +101,7 @@ namespace Parse {
|
|||
void setMatchEnable(bool v);
|
||||
|
||||
private:
|
||||
Syntax::ParseRule *critical_rule;
|
||||
Syntax::ParserRule *critical_rule;
|
||||
};
|
||||
|
||||
class NodeStoryMixedDesGroupParser : public Syntax::XSyntaxBase
|
||||
|
|
|
|||
|
|
@ -7,18 +7,6 @@
|
|||
namespace Syntax {
|
||||
|
||||
|
||||
/**
|
||||
* 解析结果标志.
|
||||
*/
|
||||
enum class ParseResult
|
||||
{
|
||||
SelfManipulate = 1,
|
||||
EnterNext = 2,
|
||||
Completed = 3,
|
||||
Failed = 4,
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* 解析器接口.
|
||||
*/
|
||||
|
|
|
|||
|
|
@ -125,27 +125,27 @@ Expression::elementCheck(const QList<Token>& tokens, int offset) const
|
|||
return chain_store->linkCheck(tokens, offset);
|
||||
}
|
||||
|
||||
ParseRule::ParseRule(XSyntaxBase * host, const QString & rule_name, unsigned short level,
|
||||
ParserRule::ParserRule(XSyntaxBase * host, const QString & rule_name, unsigned short level,
|
||||
std::function<ParseResult(const QList<LexResult>&, int)> exc)
|
||||
: enable_state(true), host_ins(host), level_store(level),
|
||||
name_store(rule_name), exc_store(exc) {}
|
||||
|
||||
int ParseRule::level() const
|
||||
int ParserRule::level() const
|
||||
{
|
||||
return level_store;
|
||||
}
|
||||
|
||||
QString ParseRule::name() const
|
||||
QString ParserRule::name() const
|
||||
{
|
||||
return name_store;
|
||||
}
|
||||
|
||||
void ParseRule::setEnable(bool v)
|
||||
void ParserRule::setEnable(bool v)
|
||||
{
|
||||
this->enable_state = v;
|
||||
}
|
||||
|
||||
void Syntax::ParseRule::addExpression(const QString &name, const QList<Elm> &defines)
|
||||
void Syntax::ParserRule::addExpression(const QString &name, const QList<Elm> &defines)
|
||||
{
|
||||
// 生成表达式实例
|
||||
auto exp = host_ins->get_expression(Link::name(name));
|
||||
|
|
@ -188,7 +188,7 @@ void Syntax::ParseRule::addExpression(const QString &name, const QList<Elm> &def
|
|||
expression_list << exp;
|
||||
}
|
||||
|
||||
std::tuple<bool, int> ParseRule::tokensMatch(const QList<Token>& token) const
|
||||
std::tuple<bool, int> ParserRule::tokensMatch(const QList<Token>& token) const
|
||||
{
|
||||
if(enable_state)
|
||||
for (auto expx : expression_list) {
|
||||
|
|
@ -201,18 +201,18 @@ std::tuple<bool, int> ParseRule::tokensMatch(const QList<Token>& token) const
|
|||
return std::make_tuple(false, 0);
|
||||
}
|
||||
|
||||
ParseResult ParseRule::syntaxTrigger(const QList<Token>& srcs, int count) {
|
||||
ParseResult ParserRule::syntaxTrigger(const QList<Token>& srcs, int count) {
|
||||
return exc_store(srcs, count);
|
||||
}
|
||||
|
||||
XSyntaxBase::XSyntaxBase(const QString & section, MatchType type)
|
||||
: target_type(type), section_name(section), current_level(INT_MAX), current_node(nullptr) {}
|
||||
|
||||
ParseRule * XSyntaxBase::addRule(const QString & name, unsigned short level,
|
||||
ParserRule * XSyntaxBase::addRule(const QString & name, unsigned short level,
|
||||
std::function<ParseResult(const QList<LexResult>&, int)> exc)
|
||||
{
|
||||
if (!rule_collect.contains(name)) {
|
||||
auto rule = new ParseRule(this, name, level, exc);
|
||||
auto rule = new ParserRule(this, name, level, exc);
|
||||
rule_collect[name] = rule;
|
||||
}
|
||||
return rule_collect[name];
|
||||
|
|
@ -258,7 +258,7 @@ bool XSyntaxBase::applied(const QList<Token>& seqs)
|
|||
return false;
|
||||
|
||||
// 求取最小等级的parse-rule
|
||||
ParseRule* first_rule = *rule_collect.cbegin();
|
||||
ParserRule* first_rule = *rule_collect.cbegin();
|
||||
for (auto &rule : rule_collect) {
|
||||
if (rule->level() <= first_rule->level())
|
||||
first_rule = rule;
|
||||
|
|
@ -298,13 +298,13 @@ ParseResult XSyntaxBase::parse(QList<Token>& seqs)
|
|||
return ParseResult::Failed;
|
||||
|
||||
// 求取符合等级的parse-rule
|
||||
QList<ParseRule*> rules_set;
|
||||
QList<ParserRule*> rules_set;
|
||||
for (auto &rule : rule_collect) {
|
||||
if (rule->level() >= current_level)
|
||||
rules_set << rule;
|
||||
}
|
||||
|
||||
std::tuple<bool, int, ParseRule*> max_result = std::make_tuple(false, 0, nullptr);
|
||||
std::tuple<bool, int, ParserRule*> max_result = std::make_tuple(false, 0, nullptr);
|
||||
// 使用符合等级的解析规则解析
|
||||
for (auto &rule : rules_set) {
|
||||
auto result = rule->tokensMatch(seqs);
|
||||
|
|
|
|||
|
|
@ -175,12 +175,12 @@ namespace Syntax
|
|||
/**
|
||||
* @brief 定义解析规则,支持多范式表达式匹配
|
||||
*/
|
||||
class ParseRule
|
||||
class ParserRule
|
||||
{
|
||||
public:
|
||||
ParseRule(XSyntaxBase *host, const QString &rule_name, unsigned short level,
|
||||
ParserRule(XSyntaxBase *host, const QString &rule_name, unsigned short level,
|
||||
std::function<ParseResult(const QList<Lex::LexResult>&, int)>);
|
||||
virtual ~ParseRule() = default;
|
||||
virtual ~ParserRule() = default;
|
||||
|
||||
int level() const;
|
||||
QString name() const;
|
||||
|
|
@ -232,7 +232,7 @@ namespace Syntax
|
|||
virtual Parse::Result::DesNode * currNode() const override;
|
||||
|
||||
protected:
|
||||
ParseRule* addRule(const QString &name, unsigned short level, std::function<ParseResult(const QList<Lex::LexResult>&, int)> exc);
|
||||
ParserRule* addRule(const QString &name, unsigned short level, std::function<ParseResult(const QList<Lex::LexResult>&, int)> exc);
|
||||
virtual void addChild(QList<Syntax::SyntaxParser*> parsers) override;
|
||||
void refocusNode(Parse::Result::DesNode *ins);
|
||||
|
||||
|
|
@ -250,7 +250,7 @@ namespace Syntax
|
|||
QHash<QString, Expression*> expressions_store;
|
||||
QList<Syntax::SyntaxParser*> child_parsers;
|
||||
|
||||
QHash<QString, ParseRule*> rule_collect;
|
||||
QHash<QString, ParserRule*> rule_collect;
|
||||
};
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,69 @@
|
|||
#include "ast_foundation.h"
|
||||
|
||||
using namespace Ast;
|
||||
|
||||
ASTLeaf::ASTLeaf(ASTTree *parent, Lex::Token *inst) : _store(inst), parent_ins(parent) {}
|
||||
|
||||
ASTLeaf::~ASTLeaf() { delete _store; }
|
||||
|
||||
const ASTTree *ASTLeaf::parent() const { return parent_ins; }
|
||||
|
||||
uint ASTLeaf::depth() const {
|
||||
auto depth_value = 0;
|
||||
const ASTTree *temp_node = this;
|
||||
while (temp_node->parent()) {
|
||||
depth_value++;
|
||||
temp_node = temp_node->parent();
|
||||
}
|
||||
return depth_value;
|
||||
}
|
||||
|
||||
QList<ASTTree *> ASTLeaf::children() const { return QList<ASTTree *>(); }
|
||||
|
||||
uint ASTLeaf::count() const { return 0; }
|
||||
|
||||
ASTTree *ASTLeaf::child(uint) const { return nullptr; }
|
||||
|
||||
QString ASTLeaf::toString() const { return _store->content(); }
|
||||
|
||||
Lex::Token *ASTLeaf::tokenIns() const { return _store; }
|
||||
|
||||
ASTList::ASTList(ASTTree *parent) : parent_ins(parent) {}
|
||||
|
||||
ASTList::~ASTList() {
|
||||
for (auto &it : _store)
|
||||
delete it;
|
||||
}
|
||||
|
||||
void ASTList::resetChildren(const QList<ASTTree *> &tokens) { _store = tokens; }
|
||||
|
||||
const ASTTree *ASTList::parent() const { return parent_ins; }
|
||||
|
||||
uint ASTList::depth() const {
|
||||
auto depth_value = 0;
|
||||
const ASTTree *temp_node = this;
|
||||
while (temp_node->parent()) {
|
||||
depth_value++;
|
||||
temp_node = temp_node->parent();
|
||||
}
|
||||
return depth_value;
|
||||
}
|
||||
|
||||
QList<ASTTree *> ASTList::children() const { return _store; }
|
||||
|
||||
uint ASTList::count() const { return _store.count(); }
|
||||
|
||||
ASTTree *ASTList::child(uint index) const {
|
||||
if (index >= count())
|
||||
return nullptr;
|
||||
return _store[index];
|
||||
}
|
||||
|
||||
void ASTList::insert(uint idx, ASTTree *child) { this->_store.insert(idx, child); }
|
||||
|
||||
QString ASTList::toString() const {
|
||||
QString content;
|
||||
for (auto &it : _store)
|
||||
content += it->toString() + " ";
|
||||
return content;
|
||||
}
|
||||
|
|
@ -0,0 +1,77 @@
|
|||
#ifndef AST_FOUNDATION_H
|
||||
#define AST_FOUNDATION_H
|
||||
|
||||
#include "lex_foundation.h"
|
||||
#include <QList>
|
||||
|
||||
namespace Ast {
|
||||
|
||||
class ASTTree {
|
||||
public:
|
||||
virtual ~ASTTree() = default;
|
||||
|
||||
virtual const ASTTree *parent() const = 0;
|
||||
virtual uint depth() const = 0;
|
||||
|
||||
virtual QList<ASTTree *> children() const = 0;
|
||||
|
||||
virtual uint count() const = 0;
|
||||
virtual ASTTree *child(uint index) const = 0;
|
||||
|
||||
virtual QString toString() const = 0;
|
||||
};
|
||||
|
||||
class ASTLeaf : public ASTTree {
|
||||
public:
|
||||
explicit ASTLeaf(ASTTree *parent, Lex::Token *inst);
|
||||
virtual ~ASTLeaf();
|
||||
|
||||
virtual const ASTTree *parent() const override;
|
||||
virtual uint depth() const override;
|
||||
|
||||
virtual QList<ASTTree *> children() const final;
|
||||
virtual uint count() const final;
|
||||
virtual ASTTree *child(uint) const final;
|
||||
|
||||
virtual QString toString() const override;
|
||||
|
||||
protected:
|
||||
Lex::Token *tokenIns() const;
|
||||
|
||||
private:
|
||||
Lex::Token *const _store;
|
||||
ASTTree *const parent_ins;
|
||||
};
|
||||
|
||||
class ASTList : public ASTTree {
|
||||
public:
|
||||
explicit ASTList(ASTTree *parent);
|
||||
virtual ~ASTList();
|
||||
|
||||
virtual void resetChildren(const QList<ASTTree *> &tokens);
|
||||
|
||||
// ASTTree interface
|
||||
public:
|
||||
virtual const ASTTree *parent() const override;
|
||||
virtual uint depth() const override;
|
||||
|
||||
virtual QList<ASTTree *> children() const override final;
|
||||
virtual uint count() const override final;
|
||||
virtual ASTTree *child(uint index) const override final;
|
||||
|
||||
/**
|
||||
* @brief 插入子节点
|
||||
* @param child
|
||||
*/
|
||||
virtual void insert(uint idx, ASTTree *child);
|
||||
|
||||
virtual QString toString() const override;
|
||||
|
||||
private:
|
||||
QList<ASTTree *> _store;
|
||||
ASTTree *const parent_ins;
|
||||
};
|
||||
|
||||
} // namespace Ast
|
||||
|
||||
#endif // AST_FOUNDATION_H
|
||||
|
|
@ -1,93 +1,112 @@
|
|||
#include "lex_foundation.h"
|
||||
#include "tokeniimpl.h"
|
||||
#include "token_impls.h"
|
||||
#include <QTextStream>
|
||||
#include <tuple>
|
||||
|
||||
using namespace Lex;
|
||||
|
||||
TokensReader::TokensReader(QList<TokenDef *> sequence) { analysis_sequences = sequence; }
|
||||
TokenReader::TokenReader(QList<TokenDef *> sequence, const QFileInfo &target)
|
||||
: line_number(0), target_info(target), bin_source(nullptr), content_stack(""), line_source(nullptr), analysis_sequences(sequence) {
|
||||
this->bin_source = new QFile(target.absoluteFilePath());
|
||||
if (!this->bin_source->open(QIODevice::ReadOnly | QIODevice::Text))
|
||||
throw new WsBaseException("指定源文件无法打开!");
|
||||
|
||||
QList<Token *> TokensReader::getTokensOfDocument(const QFileInfo &file) {
|
||||
auto batch_row = 0;
|
||||
QList<Token *> list;
|
||||
|
||||
QFile byte_input(file.canonicalFilePath());
|
||||
if (!byte_input.open(QIODevice::Text | QIODevice::ReadOnly))
|
||||
throw new LexException("指定文件无法打开:" + file.canonicalFilePath());
|
||||
|
||||
QTextStream source(&byte_input);
|
||||
source.setCodec("UTF-8");
|
||||
|
||||
while (!source.atEnd()) {
|
||||
auto line = source.readLine();
|
||||
list.append(get_tokens_of_line(file, line, batch_row));
|
||||
batch_row++;
|
||||
}
|
||||
|
||||
return list;
|
||||
this->line_source = new QTextStream(this->bin_source);
|
||||
}
|
||||
|
||||
QList<Token *> TokensReader::getTokensOfContents(const QByteArray &buff, const QFileInfo &_file) {
|
||||
auto batch_row = 0;
|
||||
QList<Token *> list;
|
||||
|
||||
QTextStream source(buff, QIODevice::ReadOnly);
|
||||
source.setCodec("UTF-8");
|
||||
|
||||
while (!source.atEnd()) {
|
||||
auto line = source.readLine();
|
||||
list.append(get_tokens_of_line(_file, line, batch_row));
|
||||
batch_row++;
|
||||
}
|
||||
|
||||
return list;
|
||||
TokenReader::TokenReader(QList<TokenDef *> sequence, const QFileInfo &target, const QString &content)
|
||||
: line_number(0), target_info(target), bin_source(nullptr), content_stack(content), line_source(nullptr), analysis_sequences(sequence) {
|
||||
this->line_source = new QTextStream(&content_stack, QIODevice::ReadOnly);
|
||||
}
|
||||
|
||||
QList<Token *> TokensReader::get_tokens_of_line(const QFileInfo &associate, const QString &line, int row) {
|
||||
auto split_seqs = line.split(" ", QString::SplitBehavior::SkipEmptyParts);
|
||||
TokenReader::~TokenReader() {
|
||||
for (auto &ins : tokens_buffer)
|
||||
delete ins;
|
||||
for (auto &ins : analysis_sequences)
|
||||
delete ins;
|
||||
|
||||
delete line_source;
|
||||
delete bin_source;
|
||||
}
|
||||
|
||||
Token *TokenReader::read() {
|
||||
if (this->tokens_buffer.count() < 1)
|
||||
this->tokens_buffer.append(get_tokens_of_line(this->target_info, *this->line_source));
|
||||
|
||||
if (this->tokens_buffer.count() < 1)
|
||||
return nullptr;
|
||||
|
||||
return this->tokens_buffer.takeAt(0);
|
||||
}
|
||||
|
||||
void TokenReader::tokenRemove(uint index) {
|
||||
if (this->tokens_buffer.count() <= index)
|
||||
throw new WsBaseException("索引超出当前序列最大容量");
|
||||
|
||||
delete this->tokens_buffer.takeAt(index);
|
||||
}
|
||||
|
||||
Token *TokenReader::tokenPeak(uint index) {
|
||||
if (this->tokens_buffer.count() <= index)
|
||||
this->tokens_buffer.append(get_tokens_of_line(this->target_info, *this->line_source));
|
||||
|
||||
if (this->tokens_buffer.count() <= index)
|
||||
throw new WsBaseException("目标获取数量超出最大能力");
|
||||
|
||||
return this->tokens_buffer.at(index);
|
||||
}
|
||||
|
||||
QList<Token *> TokenReader::get_tokens_of_line(const QFileInfo &associate, QTextStream &lines_source) {
|
||||
const QString line = line_source->readLine();
|
||||
auto split_seqs = line.split(QRegExp("\\s\\t"), QString::SplitBehavior::SkipEmptyParts);
|
||||
split_seqs.append("\n");
|
||||
auto batch_column = 0;
|
||||
|
||||
// 转换单行的内容为源列表
|
||||
QList<WordBase *> source_sequences;
|
||||
// 转换单行的内容为词组列表
|
||||
QList<WordBase *> word_source;
|
||||
for (auto &it : split_seqs) {
|
||||
auto inst = new TokenWord(associate.canonicalFilePath());
|
||||
source_sequences.append(inst);
|
||||
word_source.append(inst);
|
||||
|
||||
auto start_index = line.indexOf(it, batch_column);
|
||||
inst->reset(it, row, start_index);
|
||||
inst->reset(it, line_number, start_index);
|
||||
batch_column = start_index + it.length();
|
||||
}
|
||||
|
||||
// 对单行的所有的内容进行解析
|
||||
QList<Token *> results;
|
||||
for (auto idx = 0; idx < source_sequences.size(); ++idx) {
|
||||
QList<Token *> token_results;
|
||||
for (auto idx = 0; idx < word_source.size(); ++idx) {
|
||||
// 对单个词语进行解析
|
||||
auto inst = source_sequences[idx];
|
||||
auto inst = word_source[idx];
|
||||
|
||||
auto retv = get_token(*inst);
|
||||
results.append(retv);
|
||||
token_results.append(retv);
|
||||
|
||||
// 如果存在未解析的剩余的内容
|
||||
if (retv->remains())
|
||||
source_sequences.insert(idx + 1, retv->remains());
|
||||
|
||||
delete inst;
|
||||
word_source.insert(idx + 1, retv->remains());
|
||||
}
|
||||
|
||||
return results;
|
||||
// 删除所有的词元列表
|
||||
for (auto &token_ins : word_source)
|
||||
delete token_ins;
|
||||
|
||||
line_number++;
|
||||
return token_results;
|
||||
}
|
||||
|
||||
Token *TokensReader::get_token(const WordBase &word) {
|
||||
Token *TokenReader::get_token(const WordBase &word) {
|
||||
for (auto &it : analysis_sequences) {
|
||||
auto lex_result = it->analysis(word);
|
||||
if (lex_result)
|
||||
return lex_result;
|
||||
}
|
||||
|
||||
throw new LexException(QString("指定的词语无法解析:%1 <row:%2,col:%3>").arg(word.content()).arg(word.row()).arg(word.column()));
|
||||
throw new WsBaseException(QString("指定的词语无法解析:%1 <row:%2,col:%3>").arg(word.content()).arg(word.row()).arg(word.column()));
|
||||
}
|
||||
|
||||
LexException::LexException(const QString &msg) { this->msg_store = msg; }
|
||||
WsBaseException::WsBaseException(const QString &msg) { this->msg_store = msg; }
|
||||
|
||||
QString LexException::message() { return msg_store; }
|
||||
QString WsBaseException::message() const { return msg_store; }
|
||||
|
||||
const char *LexException::what() const { return msg_store.toLocal8Bit(); }
|
||||
const char *WsBaseException::what() const { return msg_store.toLocal8Bit(); }
|
||||
|
|
|
|||
|
|
@ -11,11 +11,11 @@ namespace Lex {
|
|||
/**
|
||||
* @brief 此法解析过程中出现的异常
|
||||
*/
|
||||
class LexException : std::exception {
|
||||
class WsBaseException : std::exception {
|
||||
public:
|
||||
explicit LexException(const QString &msg);
|
||||
explicit WsBaseException(const QString &msg);
|
||||
|
||||
virtual QString message();
|
||||
virtual QString message() const;
|
||||
|
||||
private:
|
||||
QString msg_store;
|
||||
|
|
@ -65,12 +65,12 @@ namespace Lex {
|
|||
* \brief 获取Token类型
|
||||
* \return
|
||||
*/
|
||||
virtual QString typeName() = 0;
|
||||
virtual QString typeName() const = 0;
|
||||
/**
|
||||
* \brief 基准定义单元的正则表达式定义
|
||||
* \return
|
||||
*/
|
||||
virtual QString regexp() = 0;
|
||||
virtual QString regexp() const = 0;
|
||||
/**
|
||||
* \brief 对指定的文字段落进行解析,生成Token实例并移交实例所有权
|
||||
* @param word 文字段落
|
||||
|
|
@ -84,6 +84,7 @@ namespace Lex {
|
|||
*/
|
||||
class Token : public WordBase {
|
||||
public:
|
||||
virtual ~Token() = default;
|
||||
/**
|
||||
* @brief 获取此Token关联的(生成源)解析单元
|
||||
* @return
|
||||
|
|
@ -99,38 +100,57 @@ namespace Lex {
|
|||
/**
|
||||
* \brief Token读取数据源定义类型
|
||||
*/
|
||||
class TokensReader {
|
||||
class TokenReader {
|
||||
public:
|
||||
/**
|
||||
* @brief 构建Token数据源
|
||||
* @param file
|
||||
* @param sequence
|
||||
* @brief 拖过文件构建Token数据源
|
||||
* @param sequence 词法解析器定义
|
||||
* @param target 指定文件info
|
||||
*/
|
||||
TokensReader(QList<TokenDef *> sequence);
|
||||
virtual ~TokensReader() = default;
|
||||
TokenReader(QList<TokenDef *> sequence, const QFileInfo &target);
|
||||
/**
|
||||
* @brief 通过文件内容构建Token数据源
|
||||
* @param sequence 词法解析器定义
|
||||
* @param target 指定文件info
|
||||
* @param content 指定文件的内容文本
|
||||
*/
|
||||
TokenReader(QList<TokenDef *> sequence, const QFileInfo &target, const QString &content);
|
||||
virtual ~TokenReader();
|
||||
|
||||
/**
|
||||
* \brief 获取此文件的所有Tokens,转移所有权
|
||||
* \return
|
||||
* @brief 提取下一个Token,交付控制权
|
||||
* @return 返回Token*实例
|
||||
*/
|
||||
QList<Token *> getTokensOfDocument(const QFileInfo &file);
|
||||
Token *read();
|
||||
|
||||
/**
|
||||
* @brief 获取指定缓冲区内的文本代表的所有Tokens,转移所有权
|
||||
* @param buff 缓冲区
|
||||
* @param file 提供文件符号
|
||||
* @return
|
||||
* @brief 删除指定索引的token实例
|
||||
* @param index
|
||||
*/
|
||||
QList<Token *> getTokensOfContents(const QByteArray &buff, const QFileInfo &file);
|
||||
void tokenRemove(uint index);
|
||||
|
||||
/**
|
||||
* @brief 当前的Token进度下,预读指定数量的Token
|
||||
* @param count 指定预读数量
|
||||
* @return 预读的Token数量列表
|
||||
*/
|
||||
Token *tokenPeak(uint index);
|
||||
|
||||
private:
|
||||
int line_number;
|
||||
QFileInfo target_info;
|
||||
QFile *bin_source;
|
||||
QString content_stack;
|
||||
QTextStream *line_source;
|
||||
QList<TokenDef *> analysis_sequences;
|
||||
QList<Token *> tokens_buffer;
|
||||
|
||||
/**
|
||||
* \brief 获取Token序列集合,移交所有权
|
||||
* \param source 获取一列内容包含的
|
||||
* \return
|
||||
*/
|
||||
QList<Token *> get_tokens_of_line(const QFileInfo &associate, const QString &line, int row);
|
||||
QList<Token *> get_tokens_of_line(const QFileInfo &associate, QTextStream &lines_source);
|
||||
|
||||
/**
|
||||
* \brief 分析单个单词的类型,产生Token结果实例,移交所有权
|
||||
|
|
|
|||
|
|
@ -16,35 +16,43 @@ msvc{
|
|||
}
|
||||
|
||||
SOURCES += \
|
||||
ParseFrame.cpp \
|
||||
StoryBoardDocumentParser.cpp \
|
||||
StoryChainDocumentParser.cpp \
|
||||
StoryOutlineDocumentParser.cpp \
|
||||
StoryTool.cpp \
|
||||
StoryUnitDocumentParser.cpp \
|
||||
XSyntaxBase.cpp \
|
||||
# ParseFrame.cpp \
|
||||
# StoryBoardDocumentParser.cpp \
|
||||
# StoryChainDocumentParser.cpp \
|
||||
# StoryOutlineDocumentParser.cpp \
|
||||
# StoryTool.cpp \
|
||||
# StoryUnitDocumentParser.cpp \
|
||||
# XSyntaxBase.cpp \
|
||||
ast_foundation.cpp \
|
||||
lex_foundation.cpp \
|
||||
libParse.cpp \
|
||||
parsechecks.cpp \
|
||||
storyconceptdocumentparser.cpp \
|
||||
tokeniimpl.cpp
|
||||
# libParse.cpp \
|
||||
# parsechecks.cpp \
|
||||
node_impls.cpp \
|
||||
parser_foundation.cpp \
|
||||
# storyconceptdocumentparser.cpp \
|
||||
syntax_foundation.cpp \
|
||||
token_impls.cpp
|
||||
|
||||
HEADERS += \
|
||||
ComnDef.h \
|
||||
ParseFrame.h \
|
||||
StoryBoardDocumentParser.h \
|
||||
StoryChainDocumentParser.h \
|
||||
StoryOutlineDocumentParser.h \
|
||||
StoryTool.h \
|
||||
StoryUnitDocumentParser.h \
|
||||
SyntaxBase.h \
|
||||
XSyntaxBase.h \
|
||||
# ParseFrame.h \
|
||||
# StoryBoardDocumentParser.h \
|
||||
# StoryChainDocumentParser.h \
|
||||
# StoryOutlineDocumentParser.h \
|
||||
# StoryTool.h \
|
||||
# StoryUnitDocumentParser.h \
|
||||
# SyntaxBase.h \
|
||||
# XSyntaxBase.h \
|
||||
ast_foundation.h \
|
||||
lex_foundation.h \
|
||||
libParse.h \
|
||||
# libParse.h \
|
||||
libParse_global.h \
|
||||
parsechecks.h \
|
||||
storyconceptdocumentparser.h \
|
||||
tokeniimpl.h
|
||||
# parsechecks.h \
|
||||
node_impls.h \
|
||||
parser_foundation.h \
|
||||
# storyconceptdocumentparser.h \
|
||||
syntax_foundation.h \
|
||||
token_impls.h
|
||||
|
||||
TRANSLATIONS += \
|
||||
libParse_zh_CN.ts
|
||||
|
|
|
|||
|
|
@ -0,0 +1,16 @@
|
|||
#include "node_impls.h"
|
||||
|
||||
using namespace SyntaxNode;
|
||||
using namespace Ast;
|
||||
|
||||
Document::Document(const QString &path) : ASTList(nullptr) {}
|
||||
|
||||
QString Document::filePath() const { return this->path_store; }
|
||||
|
||||
QString Document::toString() const {
|
||||
QString contents = "";
|
||||
for (auto &it : children()) {
|
||||
contents += it->toString();
|
||||
}
|
||||
return contents;
|
||||
}
|
||||
|
|
@ -0,0 +1,36 @@
|
|||
#ifndef NODE_IMPLS_H
|
||||
#define NODE_IMPLS_H
|
||||
|
||||
#include "ast_foundation.h"
|
||||
|
||||
namespace SyntaxNode {
|
||||
class Document : public Ast::ASTList {
|
||||
public:
|
||||
Document(const QString &path);
|
||||
virtual ~Document() = default;
|
||||
|
||||
virtual QString filePath() const;
|
||||
|
||||
// ASTTree interface
|
||||
public:
|
||||
virtual QString toString() const override;
|
||||
|
||||
private:
|
||||
QString path_store;
|
||||
};
|
||||
|
||||
class NodeChain : public Ast::ASTList {
|
||||
public:
|
||||
NodeChain(ASTList *parent, const QString &name);
|
||||
|
||||
// ASTTree interface
|
||||
public:
|
||||
virtual QString toString() const override;
|
||||
|
||||
private:
|
||||
QString name_store;
|
||||
};
|
||||
|
||||
} // namespace SyntaxNode
|
||||
|
||||
#endif // NODE_IMPLS_H
|
||||
|
|
@ -0,0 +1,60 @@
|
|||
#include "parser_foundation.h"
|
||||
#include "token_impls.h"
|
||||
|
||||
using namespace SyntaxX;
|
||||
using namespace Lex;
|
||||
using namespace Parser;
|
||||
|
||||
#define Chain KeywordDef("^#脉络$", "chain-def")
|
||||
#define Node KeywordDef("^#节点$", "node-def")
|
||||
#define NodeRef SectionMatch("^@\\{", "target-ref")
|
||||
#define EOL KeywordDef("^\\n$", "line-break")
|
||||
|
||||
#define SStart KeywordDef("^\\{$", "section-start")
|
||||
#define SEnd KeywordDef("^\\}$", "section-end")
|
||||
|
||||
#define Texts SectionMatch("^([^@\\{\\}\\|/]+)", "desc-block")
|
||||
#define TypeSep KeywordDef("^\\|$", "type-def")
|
||||
#define LevSep KeywordDef("^/$", "type-def")
|
||||
|
||||
KeywordDef::KeywordDef(const QString ®ex, const QString &type) : regex_store(regex), type_store(type) {}
|
||||
|
||||
QString KeywordDef::typeName() const { return "<" + type_store + ">"; }
|
||||
|
||||
QString KeywordDef::regexp() const { return regex_store; }
|
||||
|
||||
Lex::Token *KeywordDef::analysis(const Lex::WordBase &word) {
|
||||
QRegExp regx(regexp());
|
||||
if (regx.indexIn(word.content()) != -1) {
|
||||
return new TokenResult(this, word, word.content().length());
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
SectionMatch::SectionMatch(const QString ®ex, const QString &type) : regex_store(regex), type_store(type) {}
|
||||
|
||||
QString SectionMatch::typeName() const { return "<" + type_store + ">"; }
|
||||
|
||||
QString SectionMatch::regexp() const { return regex_store; }
|
||||
|
||||
Token *SectionMatch::analysis(const Lex::WordBase &word) {
|
||||
QRegExp regx(regexp());
|
||||
if (regx.indexIn(word.content()) != -1) {
|
||||
return new TokenResult(this, word, regx.matchedLength());
|
||||
}
|
||||
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
ChainParser::ChainParser() {
|
||||
node_refer.setRule(Seqs(T(NodeRef), T(Texts), T(TypeSep), T(Texts), Repeat(Seqs(T(LevSep), T(Texts))), T(SEnd)));
|
||||
text_section.setRule(T(Texts));
|
||||
desc_pragraph.setRule(Seqs(Repeat(Any(&text_section, &node_refer), 1), T(EOL)));
|
||||
|
||||
node_def.setRule(Seqs(T(Node), Repeat(T(Texts), 1), T(SStart), Repeat(&desc_pragraph), T(SEnd)));
|
||||
|
||||
chain_def.setRule(Seqs(T(Node), Repeat(T(Texts)), T(SStart), Repeat(Any(&desc_pragraph, &node_def)), T(SEnd)));
|
||||
// chain_def.resetProcess([](Lex::TokenReader *port, Ast::ASTList *pnode) -> Ast::ASTList * {
|
||||
|
||||
// });
|
||||
}
|
||||
|
|
@ -0,0 +1,58 @@
|
|||
#ifndef PARSER_FOUNDATION_H
|
||||
#define PARSER_FOUNDATION_H
|
||||
|
||||
#include "libParse_global.h"
|
||||
#include "syntax_foundation.h"
|
||||
|
||||
namespace Parser {
|
||||
/**
|
||||
* @brief 关键字定义,完全匹配
|
||||
*/
|
||||
class KeywordDef : public Lex::TokenDef {
|
||||
public:
|
||||
KeywordDef(const QString ®ex, const QString &type);
|
||||
|
||||
private:
|
||||
QString regex_store;
|
||||
QString type_store;
|
||||
|
||||
// TokenDef interface
|
||||
public:
|
||||
virtual QString typeName() const override;
|
||||
virtual QString regexp() const override;
|
||||
virtual Lex::Token *analysis(const Lex::WordBase &word) override;
|
||||
};
|
||||
|
||||
/**
|
||||
* @brief 部分匹配
|
||||
*/
|
||||
class SectionMatch : public Lex::TokenDef {
|
||||
public:
|
||||
SectionMatch(const QString ®ex, const QString &type);
|
||||
|
||||
private:
|
||||
QString regex_store;
|
||||
QString type_store;
|
||||
|
||||
// TokenDef interface
|
||||
public:
|
||||
virtual QString typeName() const override;
|
||||
virtual QString regexp() const override;
|
||||
virtual Lex::Token *analysis(const Lex::WordBase &word) override;
|
||||
};
|
||||
|
||||
class LIBPARSE_EXPORT ChainParser {
|
||||
public:
|
||||
ChainParser();
|
||||
|
||||
private:
|
||||
SyntaxX::ElmRule node_refer;
|
||||
SyntaxX::ElmRule text_section;
|
||||
SyntaxX::ElmRule desc_pragraph;
|
||||
SyntaxX::ElmRule node_def;
|
||||
SyntaxX::ElmRule chain_def;
|
||||
};
|
||||
|
||||
} // namespace Parser
|
||||
|
||||
#endif // PARSER_FOUNDATION_H
|
||||
|
|
@ -0,0 +1,242 @@
|
|||
#include "syntax_foundation.h"
|
||||
#include "lex_foundation.h"
|
||||
|
||||
using namespace SyntaxX;
|
||||
using namespace Ast;
|
||||
|
||||
void *TokenRule::operator new(size_t s) {
|
||||
auto ins = ::operator new(s);
|
||||
static_cast<TokenRule *>(ins)->build_within_heap = true;
|
||||
return ins;
|
||||
}
|
||||
|
||||
TokenRule::TokenRule(const Lex::TokenDef &def) : token_type(def) {}
|
||||
|
||||
BaseRule *TokenRule::toHeap() && {
|
||||
auto ins = new TokenRule(this->token_type);
|
||||
return ins;
|
||||
}
|
||||
|
||||
bool TokenRule::heapMark() const { return build_within_heap; }
|
||||
|
||||
QPair<uint, Result> TokenRule::match(Lex::TokenReader *port, uint start, uint count) const noexcept {
|
||||
auto target = port->tokenPeak(start);
|
||||
|
||||
if (target->def()->typeName() == this->token_type.typeName())
|
||||
return qMakePair(1, Result::All);
|
||||
|
||||
return qMakePair(0, Result::Fail);
|
||||
}
|
||||
|
||||
void TokenRule::parse(Lex::TokenReader *port, Ast::ASTList *parent) {
|
||||
auto token = port->read();
|
||||
auto leaf = new ASTLeaf(parent, token);
|
||||
parent->insert(parent->count(), leaf);
|
||||
}
|
||||
|
||||
Repeat::Repeat(BaseRule *item, uint min, uint max) : item_store(*item), match_max(max), match_min(min) {
|
||||
if (match_min < 1 || match_min >= match_max)
|
||||
throw new Lex::WsBaseException("匹配参数错误:匹配次数参数设置错误");
|
||||
}
|
||||
Repeat::Repeat(BaseRule &&item, uint min, uint max) : Repeat(&item, min, max) {}
|
||||
|
||||
void *Repeat::operator new(size_t s) {
|
||||
auto ins = ::operator new(s);
|
||||
static_cast<Repeat *>(ins)->build_within_heap = true;
|
||||
return ins;
|
||||
}
|
||||
|
||||
BaseRule *Repeat::toHeap() && { return new Repeat(&this->item_store, match_max); }
|
||||
|
||||
bool Repeat::heapMark() const { return this->build_within_heap; }
|
||||
|
||||
QPair<uint, Result> Repeat::match(Lex::TokenReader *port, uint start, uint count) const noexcept { return item_store.match(port, start, count); }
|
||||
|
||||
void Repeat::parse(Lex::TokenReader *port, Ast::ASTList *parent) {
|
||||
auto repeat_times = 0;
|
||||
|
||||
while (item_store.match(port, 0, UINT_MAX).second == Result::All) {
|
||||
item_store.parse(port, parent);
|
||||
repeat_times++;
|
||||
|
||||
if (repeat_times == match_max)
|
||||
break;
|
||||
}
|
||||
|
||||
if (repeat_times < match_min)
|
||||
throw new SyntaxException(*port->tokenPeak(0), "指定Token不符合Repeat语法定义");
|
||||
}
|
||||
|
||||
using namespace Lex;
|
||||
|
||||
ElmRule::ElmRule() : item_store(nullptr) {}
|
||||
|
||||
ElmRule::~ElmRule() { delete item_store; }
|
||||
|
||||
void ElmRule::resetProcess(std::function<ASTList *(Lex::TokenReader *, Ast::ASTList *)> exec) { this->exec_store = exec; }
|
||||
|
||||
void ElmRule::parse(Lex::TokenReader *port, Ast::ASTList *parent) {
|
||||
auto result = this->exec_store(port, parent);
|
||||
parent->insert(parent->count(), result);
|
||||
this->item_store->parse(port, result);
|
||||
}
|
||||
|
||||
BaseRule *ElmRule::toHeap() && { throw new WsBaseException("不允许ElmRule构建堆实例"); }
|
||||
|
||||
bool ElmRule::heapMark() const { return false; }
|
||||
|
||||
void ElmRule::setRule(BaseRule &&item) { this->item_store = std::move(item).toHeap(); }
|
||||
|
||||
QPair<uint, Result> ElmRule::match(Lex::TokenReader *port, uint start, uint count) const noexcept { return item_store->match(port, start, count); }
|
||||
|
||||
Seqs::Seqs() {}
|
||||
|
||||
Seqs::Seqs(Seqs &&other) {
|
||||
this->items_rule.append(other.items_rule);
|
||||
other.items_rule.clear();
|
||||
}
|
||||
|
||||
Seqs::~Seqs() {
|
||||
for (auto &it : items_rule)
|
||||
if (it->heapMark())
|
||||
delete it;
|
||||
items_rule.clear();
|
||||
}
|
||||
|
||||
void *Seqs::operator new(size_t s) {
|
||||
auto ins = ::operator new(s);
|
||||
static_cast<Seqs *>(ins)->build_within_heap = true;
|
||||
return ins;
|
||||
}
|
||||
|
||||
BaseRule *Seqs::toHeap() && {
|
||||
auto ins = new Seqs();
|
||||
ins->items_rule.append(this->items_rule);
|
||||
this->items_rule.clear();
|
||||
return ins;
|
||||
}
|
||||
|
||||
bool Seqs::heapMark() const { return this->build_within_heap; }
|
||||
|
||||
// Probe the token window [start, start+count) against the child rules in order.
// @param port  token stream (read-only look-ahead)
// @param start index of the first token to consider
// @param count maximum number of tokens available to this sequence
// @return matched length plus All (every child matched fully), Part (matched a
//         prefix, or the budget ran out), or Fail (nothing matched at all).
QPair<uint, Result> Seqs::match(Lex::TokenReader *port, uint start, uint count) const noexcept {
    // Fix: was `auto m_len = 0;` (int) mixed into unsigned arithmetic, and
    // `count - m_len <= 0` on an unsigned value is effectively `== 0` and
    // wraps to a huge budget if a child ever over-reports. Keep everything
    // unsigned and guard with a wrap-proof comparison.
    uint m_len = 0;

    for (auto &it : items_rule) {
        // Budget exhausted: report a partial match covering the whole window.
        if (m_len >= count)
            return qMakePair(count, Result::Part);

        const auto m_rst = it->match(port, start + m_len, count - m_len);
        if (m_rst.second == Result::All) {
            m_len += m_rst.first;
        } else {
            if (m_len + m_rst.first == 0)
                return qMakePair(0u, Result::Fail);
            return qMakePair(m_len + m_rst.first, Result::Part);
        }
    }
    return qMakePair(m_len, Result::All);
}
|
||||
|
||||
// Parse each child rule in order into `parent`.
// NOTE(review): a child that does not fully match is silently skipped (empty
// else branch) — no error is raised and the stream is left where it was;
// confirm this best-effort behavior is intended.
void Seqs::parse(Lex::TokenReader *port, Ast::ASTList *parent) {
    for (auto &it : items_rule) {
        if (it->match(port, 0, UINT32_MAX).second == Result::All) {
            it->parse(port, parent);
        } else {
        }
    }
}
|
||||
|
||||
Any::Any() {}

// Move: transfer the branch list to the new instance.
Any::Any(Any &&other) {
    items_rule.append(other.items_rule);
    other.items_rule.clear();
}

// Delete only heap-promoted branches; stack-built ones belong to their scope.
Any::~Any() {
    for (auto &it : items_rule)
        if (it->heapMark())
            delete it;
    items_rule.clear();
}
|
||||
|
||||
// Tag heap-allocated instances so owning containers know to delete them.
// NOTE(review): same flaw as Seqs::operator new — the constructor's default
// member initializer `build_within_heap = false` runs after this and wipes the
// tag, so heapMark() always reports false; confirm and fix.
void *Any::operator new(size_t s) {
    auto ins = ::operator new(s);
    static_cast<Any *>(ins)->build_within_heap = true;
    return ins;
}
|
||||
|
||||
// Promote this (rvalue) alternative to a heap instance, transferring ownership
// of the branch list to it.
// @return heap-allocated Any; heapMark() is true so owners delete it.
BaseRule *Any::toHeap() && {
    auto ins = new Any();
    // Fix: Any() re-runs the NSDMI `build_within_heap = false` after the
    // tagging operator new, losing the heap flag — re-assert it here so owners
    // that check heapMark() actually delete the instance.
    ins->build_within_heap = true;
    ins->items_rule.append(this->items_rule);
    this->items_rule.clear();
    return ins;
}
|
||||
|
||||
// True when this instance was created on the heap (see operator new / toHeap()).
bool Any::heapMark() const { return this->build_within_heap; }
|
||||
|
||||
// Try every branch against the same window; a fully matching branch wins
// immediately, otherwise report the longest partial match (or Fail).
QPair<uint, Result> Any::match(Lex::TokenReader *port, uint start, uint count) const noexcept {
    QPair<uint, Result> best(0u, Result::Fail);

    for (auto &branch : items_rule) {
        const auto candidate = branch->match(port, start, count);
        if (candidate.second == Result::All)
            return candidate;
        if (candidate.first > best.first)
            best = candidate;
    }

    return best;
}
|
||||
|
||||
// Dispatch parsing to the first fully matching branch; if no branch matches
// completely, fall back to the branch with the longest partial match (the
// first branch when nothing matches at all).
// @param port   input token stream
// @param parent parent AST node the chosen branch parses into
void Any::parse(Lex::TokenReader *port, Ast::ASTList *parent) {
    // Fix: guard the empty branch list — items_rule.first() on an empty QList
    // is undefined behavior.
    if (items_rule.isEmpty())
        return;

    // Fix: the original seeded the best length with `(uint)-1` (== UINT_MAX),
    // so `rst.first > match_rst.first` could never be true and the
    // longest-partial fallback (the same logic Any::match uses) was dead code.
    // Seed with 0 so partial matches are actually compared.
    auto best = qMakePair<uint, BaseRule *>(0, items_rule.first());

    for (auto &branch : items_rule) {
        const auto rst = branch->match(port, 0, UINT32_MAX);
        if (rst.second == Result::All) {
            best = qMakePair(rst.first, branch);
            break;
        }
        if (rst.first > best.first) {
            best = qMakePair(rst.first, branch);
        }
    }

    best.second->parse(port, parent);
}
|
||||
|
||||
// Syntax error anchored to the token where parsing failed.
// NOTE(review): stores a reference — the token must outlive the exception.
SyntaxException::SyntaxException(const Lex::Token &tins, const QString &simple) : Lex::WsBaseException(simple), target(tins) {}

// Token the error is anchored to.
const Token &SyntaxException::targetToken() const { return this->target; }
|
||||
|
||||
// Tag heap-allocated instances so owning containers know to delete them.
// NOTE(review): same flaw as Seqs/Any — the NSDMI `build_within_heap = false`
// runs after this and wipes the tag, so heapMark() always reports false;
// confirm and fix.
void *Optional::operator new(size_t s) {
    auto ins = ::operator new(s);
    static_cast<Optional *>(ins)->build_within_heap = true;
    return ins;
}
|
||||
|
||||
// Wrap an existing rule by pointer; the pointee must outlive this Optional
// (no null check — `item` is dereferenced immediately).
Optional::Optional(BaseRule *item) : item_store(*item) {}

// Wrap a rule by reference.
// NOTE(review): binds a reference member to the caller's argument — if a
// temporary is passed, item_store dangles after the full expression; confirm
// callers only pass named rules (other combinators heap-promote via toHeap()).
Optional::Optional(BaseRule &&item) : item_store(item) {}
|
||||
|
||||
// Promote to a heap instance that shares this Optional's inner rule.
// @return heap-allocated Optional; heapMark() is true so owners delete it.
BaseRule *Optional::toHeap() && {
    auto ins = new Optional(&item_store);
    // Fix: the ctor's NSDMI resets the flag set by the tagging operator new;
    // re-assert it or heapMark() reports false and the instance leaks.
    ins->build_within_heap = true;
    return ins;
}
|
||||
|
||||
// True when this instance was created on the heap (see operator new / toHeap()).
bool Optional::heapMark() const { return build_within_heap; }
|
||||
|
||||
// An optional element always succeeds: report the inner rule's full match, or
// a zero-length All when the inner rule matched only partially or not at all.
QPair<uint, Result> Optional::match(Lex::TokenReader *port, uint start, uint count) const noexcept {
    const auto inner = item_store.match(port, start, count);
    if (inner.second == Result::All)
        return inner;
    return qMakePair(0u, Result::All);
}
|
||||
|
||||
// Probe first, then delegate parsing to the inner rule.
// NOTE(review): Optional::match() reports All even when the inner rule fails
// (as a zero-length match), so this guard never blocks; confirm intended.
void Optional::parse(Lex::TokenReader *port, Ast::ASTList *parent) {
    const auto probe = this->match(port, 0, UINT32_MAX);
    if (probe.second == Result::All)
        item_store.parse(port, parent);
}
|
||||
|
|
@ -0,0 +1,235 @@
|
|||
#ifndef SYNTAX_FOUNDATION_H
|
||||
#define SYNTAX_FOUNDATION_H
|
||||
|
||||
#include "ast_foundation.h"
|
||||
#include <QString>
|
||||
|
||||
namespace Load {
|
||||
class DocIns;
|
||||
}
|
||||
|
||||
namespace SyntaxX {
|
||||
/**
 * @brief Generic syntax error, anchored to the token where parsing failed.
 */
class SyntaxException : public Lex::WsBaseException {
private:
    // NOTE(review): reference member — the token must outlive the exception.
    // (The scraped source shows a mangled `&target;` here; restored.)
    const Lex::Token &target;

public:
    SyntaxException(const Lex::Token &tins, const QString &simple);
    virtual ~SyntaxException() = default;

    /// Token at which the syntax error occurred.
    virtual const Lex::Token &targetToken() const;
};
|
||||
|
||||
// Match outcome: All = whole window matched, Part = only a prefix matched, Fail = no match.
enum class Result { All, Part, Fail };
|
||||
|
||||
/**
 * @brief Base interface for parse/match rules.
 */
class BaseRule {
public:
    virtual ~BaseRule() = default;

    /**
     * @brief Convert this (rvalue) rule into a heap instance with a
     *        persistent lifetime.
     * @return heap-allocated equivalent rule
     */
    virtual BaseRule *toHeap() && = 0;

    /**
     * @brief Whether this instance lives on the heap (and must be deleted by
     *        its owning container).
     * @return heap flag
     */
    virtual bool heapMark() const = 0;

    /**
     * @brief Probe the token stream by look-ahead and report how much matches.
     * @param port  token stream
     * @param start index of the first token to consider
     * @param count maximum number of tokens to match
     * @return matched length and match outcome
     */
    virtual QPair<uint, Result> match(Lex::TokenReader *port, uint start, uint count) const noexcept = 0;

    /**
     * @brief Consume tokens from the stream and build AST nodes.
     * @param port   input token stream
     * @param parent parent AST node to attach results to
     */
    virtual void parse(Lex::TokenReader *port, Ast::ASTList *parent) = 0;
};
|
||||
|
||||
/**
 * @brief Rule that matches a single token definition.
 */
class TokenRule : public BaseRule {
private:
    const Lex::TokenDef &token_type;
    bool build_within_heap = false;
    // Tagging allocator used by toHeap(); see heapMark().
    void *operator new(size_t s);

public:
    explicit TokenRule(const Lex::TokenDef &def);
    // Address-of is deleted: rules are composed by value/rvalue and
    // heap-promoted via toHeap().
    TokenRule *operator&() = delete;

    virtual BaseRule *toHeap() && override;
    virtual bool heapMark() const override;

    virtual QPair<uint, Result> match(Lex::TokenReader *port, uint start, uint count) const noexcept override;
    virtual void parse(Lex::TokenReader *port, Ast::ASTList *parent) override;
};
|
||||
|
||||
/**
 * @brief Carrier for a named parse rule: builds this element's AST node via a
 *        callback, then delegates to an inner rule. Stack-only (operator new
 *        is deleted); not copyable or movable.
 */
class ElmRule : public BaseRule {
private:
    // Callback that creates this element's AST node during parse().
    std::function<Ast::ASTList *(Lex::TokenReader *reader, Ast::ASTList *parent)> exec_store;
    // Inner rule installed by setRule() (heap-promoted, owned by this element).
    BaseRule *item_store;

public:
    explicit ElmRule();
    ElmRule(const ElmRule &) = delete;
    ElmRule(const ElmRule &&) = delete;
    virtual ~ElmRule();

    void *operator new(size_t s) = delete;
    ElmRule &operator=(const ElmRule &) = delete;
    ElmRule &operator=(const ElmRule &&) = delete;

    /**
     * @brief Set the node-building callback executed by parse().
     * @param exec callback producing the AST node for this element
     */
    virtual void resetProcess(std::function<Ast::ASTList *(Lex::TokenReader *reader, Ast::ASTList *parent)> exec);

    /**
     * @brief Replace the inner parse rule (ownership taken via toHeap()).
     * @param item rvalue rule to install
     */
    void setRule(BaseRule &&item);

    virtual QPair<uint, Result> match(Lex::TokenReader *port, uint start, uint count) const noexcept override;
    virtual void parse(Lex::TokenReader *port, Ast::ASTList *parent) override;

    // BaseRule interface
public:
    /// Always throws: ElmRule may not be heap-promoted.
    virtual BaseRule *toHeap() && override;
    virtual bool heapMark() const override;
};
|
||||
|
||||
/**
 * @brief Sequence rule: child rules must match one after another.
 */
class Seqs : public BaseRule {
private:
    QList<BaseRule *> items_rule;
    // NOTE(review): the tagging operator new sets this flag before
    // construction, but this default initializer runs afterwards and resets
    // it — verify heapMark() actually works for heap instances.
    bool build_within_heap = false;

    void *operator new(size_t s);

public:
    Seqs();
    // Variadic ctors build the child list back-to-front; rvalue children are
    // heap-promoted so the sequence owns them.
    template <class... Args> explicit Seqs(BaseRule *head, Args... args) : Seqs(std::forward<Args>(args)...) { items_rule.push_front(head); }
    template <class... Args> explicit Seqs(BaseRule &&head, Args... args) : Seqs(std::forward<Args>(args)...) {
        items_rule.push_front(std::move(head).toHeap());
    }
    Seqs(Seqs &&other);
    virtual ~Seqs();

    Seqs *operator&() = delete;

    // BaseRule interface
public:
    virtual BaseRule *toHeap() && override;
    virtual bool heapMark() const override;

    virtual QPair<uint, Result> match(Lex::TokenReader *port, uint start, uint count) const noexcept override;
    virtual void parse(Lex::TokenReader *port, Ast::ASTList *parent) override;
};
|
||||
|
||||
/**
 * @brief Alternative rule: any one of several branches may match.
 */
class Any : public BaseRule {
private:
    QList<BaseRule *> items_rule;
    // NOTE(review): same flag-reset caveat as Seqs — verify heapMark().
    bool build_within_heap = false;

    void *operator new(size_t s);

public:
    Any();
    // Variadic ctors build the branch list back-to-front; rvalue branches are
    // heap-promoted so the alternative owns them.
    template <class... Args> explicit Any(BaseRule *head, Args... args) : Any(std::forward<Args>(args)...) { items_rule.push_front(head); }
    template <class... Args> explicit Any(BaseRule &&head, Args... args) : Any(std::forward<Args>(args)...) {
        items_rule.push_front(std::move(head).toHeap());
    }
    Any(Any &&other);

    virtual ~Any();

    Any *operator&() = delete;

    // BaseRule interface
public:
    virtual BaseRule *toHeap() && override;
    virtual bool heapMark() const override;

    virtual QPair<uint, Result> match(Lex::TokenReader *port, uint start, uint count) const noexcept override;
    virtual void parse(Lex::TokenReader *port, Ast::ASTList *parent) override;
};
|
||||
|
||||
/**
 * @brief Repetition rule: the inner rule must match between min and max times.
 */
class Repeat : public BaseRule {
private:
    // NOTE(review): reference member — a Repeat constructed from an rvalue
    // dangles if the referenced temporary does not outlive it; verify callers.
    BaseRule &item_store;
    uint match_max, match_min;
    bool build_within_heap = false;

    void *operator new(size_t s);

public:
    Repeat(BaseRule &&item, uint min = 1, uint max = UINT_MAX);
    Repeat(BaseRule *item, uint min = 1, uint max = UINT_MAX);

    Repeat *operator&() = delete;

    // BaseRule interface
public:
    virtual BaseRule *toHeap() && override;
    virtual bool heapMark() const override;

    virtual QPair<uint, Result> match(Lex::TokenReader *port, uint start, uint count) const noexcept override;
    virtual void parse(Lex::TokenReader *port, Ast::ASTList *parent) override;
};
|
||||
|
||||
/**
 * @brief Optional rule: matches its inner rule zero or one time.
 */
class Optional : public BaseRule {
private:
    // NOTE(review): reference member — same lifetime caveat as Repeat.
    BaseRule &item_store;
    bool build_within_heap = false;

    void *operator new(size_t s);

public:
    Optional(BaseRule *item);
    Optional(BaseRule &&item);

    Optional *operator&() = delete;

    // BaseRule interface
public:
    virtual BaseRule *toHeap() && override;
    virtual bool heapMark() const override;

    virtual QPair<uint, Result> match(Lex::TokenReader *port, uint start, uint count) const noexcept override;
    virtual void parse(Lex::TokenReader *port, Ast::ASTList *parent) override;
};
|
||||
|
||||
// Shorthand so grammar definitions can write T(def) for a token rule.
// Modernized from `typedef` to a C++11 alias (the file already uses NSDMI,
// override, and variadic templates); identical meaning for all callers.
using T = TokenRule;
|
||||
} // namespace SyntaxX
|
||||
|
||||
#endif // SYNTAX_FOUNDATION_H
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
#include "tokeniimpl.h"
|
||||
#include "token_impls.h"
|
||||
|
||||
using namespace Lex;
|
||||
|
||||
|
|
@ -27,17 +27,17 @@ void TokenWord::reset(const QString &word, int row, int col) {
|
|||
column_val = col;
|
||||
}
|
||||
|
||||
TokenResult::TokenResult(TokenDef *def, WordBase *word, int length) : def_store(def), remains_store(nullptr) {
|
||||
TokenResult::TokenResult(TokenDef *def, const WordBase &word, int length) : def_store(def), remains_store(nullptr) {
|
||||
TokenWord *inst = nullptr;
|
||||
if (word->content().length() != length) {
|
||||
inst = new TokenWord(word->filePath());
|
||||
inst->reset(word->content().mid(length), word->row(), word->column() + length);
|
||||
if (word.content().length() != length) {
|
||||
inst = new TokenWord(word.filePath());
|
||||
inst->reset(word.content().mid(length), word.row(), word.column() + length);
|
||||
}
|
||||
remains_store = inst;
|
||||
content_store = word->content().mid(0, length);
|
||||
filepath_store = word->filePath();
|
||||
row_store = word->row();
|
||||
col_store = word->column();
|
||||
content_store = word.content().mid(0, length);
|
||||
filepath_store = word.filePath();
|
||||
row_store = word.row();
|
||||
col_store = word.column();
|
||||
}
|
||||
|
||||
TokenDef *TokenResult::def() const { return def_store; }
|
||||
|
|
@ -0,0 +1,104 @@
|
|||
#ifndef TOKEN_IMPLS_H
|
||||
#define TOKEN_IMPLS_H
|
||||
|
||||
#include "lex_foundation.h"
|
||||
|
||||
namespace Lex {
|
||||
|
||||
/**
 * \brief A plain-text word (lexeme) together with its source location.
 *        (Original comments were encoding-corrupted; rewritten in English.)
 */
class TokenWord : public WordBase {
public:
    virtual ~TokenWord() = default;

    /**
     * \brief Create a word instance.
     * \param file_path source file path; fixed for the lifetime of the word
     */
    explicit TokenWord(const QString &file_path);

    /// \brief Text content of the word.
    virtual QString content() const override;

    /// \brief Path of the source file the word came from.
    virtual QString filePath() const override;

    /// \brief Source row (line) where the word starts.
    virtual int row() const override;

    /// \brief Source column where the word starts.
    virtual int column() const override;

    // virtual WordBase &operator=(const WordBase &other) override;

    /**
     * \brief Reset the word's content and position.
     * \param word word text
     * \param row  source row
     * \param col  source column
     */
    void reset(const QString &word, int row, int col);

private:
    QString content_val;
    QString filepath_val;
    int row_val, column_val;
};
|
||||
|
||||
/**
 * @brief Result of matching a token definition against a word.
 *        (Original comments were encoding-corrupted; rewritten in English.)
 */
class TokenResult : public Token {
private:
    TokenDef *const def_store;
    WordBase *remains_store;
    QString content_store;
    QString filepath_store;
    int row_store, col_store;

public:
    /**
     * @brief Build a token match result.
     * @param def    token definition that matched (not owned)
     * @param word   source word the match was taken from
     * @param length number of leading characters of the word that matched
     */
    TokenResult(TokenDef *def, const WordBase &word, int length);
    virtual ~TokenResult() = default;

    /**
     * @brief Token definition of this match.
     * @return definition instance; ownership is not transferred.
     */
    virtual TokenDef *def() const override;

    /**
     * @brief Unconsumed remainder of the source word.
     * @return remaining word, or nullptr when the word was fully consumed.
     */
    virtual WordBase *remains() const override;

    /// \brief Matched text content of this token.
    virtual QString content() const override;

    /// \brief Path of the document this token is bound to.
    virtual QString filePath() const override;

    /// \brief Source row of this token.
    virtual int row() const override;

    /// \brief Source column of this token.
    virtual int column() const override;

    // virtual WordBase &operator=(const WordBase &other);
};
|
||||
|
||||
} // namespace Lex
|
||||
|
||||
#endif // TOKEN_IMPLS_H
|
||||
|
|
@ -1,126 +0,0 @@
|
|||
#ifndef TOKENIIMPL_H
|
||||
#define TOKENIIMPL_H
|
||||
|
||||
#include "lex_foundation.h"
|
||||
|
||||
namespace Lex {
|
||||
|
||||
/**
|
||||
* \brief 文本词语解析实现类
|
||||
*/
|
||||
class TokenWord : public WordBase {
|
||||
public:
|
||||
virtual ~TokenWord() = default;
|
||||
|
||||
/**
|
||||
* \brief 新建词语实例
|
||||
* \param file_path 文件路径不可变
|
||||
*/
|
||||
explicit TokenWord(const QString &file_path);
|
||||
|
||||
/**
|
||||
* \brief 获取内容
|
||||
*/
|
||||
virtual QString content() const override;
|
||||
|
||||
/**
|
||||
* \brief 获取文件路径
|
||||
*/
|
||||
virtual QString filePath() const override;
|
||||
|
||||
/**
|
||||
* \brief 获取代码行
|
||||
*/
|
||||
virtual int row() const override;
|
||||
|
||||
/**
|
||||
* \brief 获取代码列
|
||||
*/
|
||||
virtual int column() const override;
|
||||
|
||||
/**
|
||||
* @brief 赋值运算符,修改所有内容
|
||||
* @param other
|
||||
* @return
|
||||
*/
|
||||
// virtual WordBase &operator=(const WordBase &other) override;
|
||||
|
||||
/**
|
||||
* \brief 设置实例的内容定义
|
||||
/// <param name="word">词语文本</param>
|
||||
/// <param name="row">行定义</param>
|
||||
/// <param name="col">列定义</param>
|
||||
*/
|
||||
void reset(const QString &word, int row, int col);
|
||||
|
||||
private:
|
||||
QString content_val;
|
||||
QString filepath_val;
|
||||
int row_val, column_val;
|
||||
};
|
||||
|
||||
/**
|
||||
* @brief 当前解析内容
|
||||
*/
|
||||
class TokenResult : public Token {
|
||||
private:
|
||||
TokenDef *const def_store;
|
||||
WordBase *remains_store;
|
||||
QString content_store;
|
||||
QString filepath_store;
|
||||
int row_store, col_store;
|
||||
|
||||
public:
|
||||
/**
|
||||
* @brief 构建Token解析结果实例
|
||||
* @param def 解析机制定义
|
||||
* @param word 文本实例内容
|
||||
* @param length 结果适配长度
|
||||
*/
|
||||
TokenResult(TokenDef *def, WordBase *word, int length);
|
||||
virtual ~TokenResult() = default;
|
||||
|
||||
/**
|
||||
* @brief Token解析定义
|
||||
* @return 定义实例,管理权不移交
|
||||
*/
|
||||
virtual TokenDef *def() const override;
|
||||
/**
|
||||
* @brief 解析剩下的内容
|
||||
* @return 获取剩下的词语
|
||||
*/
|
||||
virtual WordBase *remains() const override;
|
||||
|
||||
/**
|
||||
* \brief 获取Token包含内容
|
||||
* \return 内容
|
||||
*/
|
||||
virtual QString content() const override;
|
||||
|
||||
/**
|
||||
* \brief 获取Token绑定的文档路径
|
||||
* \return 路径
|
||||
*/
|
||||
virtual QString filePath() const override;
|
||||
/**
|
||||
* \brief 本Token源代码行定义
|
||||
* \return 行号
|
||||
*/
|
||||
virtual int row() const override;
|
||||
/**
|
||||
* \brief 本Token源代码列定义
|
||||
* \return 列号
|
||||
*/
|
||||
virtual int column() const override;
|
||||
|
||||
/**
|
||||
* @brief 内容复制,不会修改定义类型绑定和剩余内容定义
|
||||
* @param other 其他内容
|
||||
* @return
|
||||
*/
|
||||
// virtual WordBase &operator=(const WordBase &other);
|
||||
};
|
||||
|
||||
} // namespace Lex
|
||||
|
||||
#endif // TOKENIIMPL_H
|
||||
Loading…
Reference in New Issue