WsParser_VS/libToken/libtoken.cpp

#include "libtoken.h"
#include <QFile>
#include <QTextStream>
using namespace lib_token;
TokenReader::TokenReader(const QList<std::shared_ptr<const TokenDefine>> rulers) : rules_store(rulers) {}
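
// Reads the file at `path` and returns the head of a forward-linked token
// chain (each node exposes the following token via nextToken()). The chain is
// assembled back to front: the last parsed token becomes the tail, then every
// earlier token is wrapped as a new head. Returns nullptr when the file yields
// no tokens.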
std::shared_ptr<const Token> lib_token::TokenReader::tokensWithin(const QString& path) const {
    auto content_list = extract_from(path);
    if (content_list.isEmpty())
        return nullptr;

    std::shared_ptr<const Token> prev_ptr = std::make_shared<const TokenImpl>(content_list.last(), nullptr);
    for (auto idx = content_list.size() - 2; idx >= 0; --idx) {
        auto content_ptr = content_list[idx];
        prev_ptr = std::make_shared<const TokenImpl>(content_ptr, prev_ptr);
    }
    return prev_ptr;
}
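
// Opens `path` as UTF-8 text and tokenizes it line by line, appending an
// explicit '\n' to each line before parsing. Throws a TokenException if the
// file cannot be opened.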
QList<std::shared_ptr<const Token>> TokenReader::extract_from(const QString& path) const {
    QFile file(path);
    if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) {
        throw new TokenException(u8"Lex[0x0000] the specified file could not be opened: " + path);
    }

    QTextStream tin(&file);
    tin.setCodec("UTF-8");

    QList<std::shared_ptr<const Token>> ret_list;
    int line_number = 1;
    while (!tin.atEnd()) {
        auto line = tin.readLine() + "\n";
        ret_list.append(this->parse_line(line_number++, line, path));
    }
    return ret_list;
}
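
// Splits one line on spaces into raw WordPeaks (1-based column positions),
// then runs each word through the token rules. When a rule leaves part of a
// word unconsumed, that remainder is inserted right after the current index
// so it is parsed on the next iteration.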
QList<std::shared_ptr<const Token>> TokenReader::parse_line(int row, const QString& line_text, const QString& path) const {
    auto words = line_text.split(" ", QString::SplitBehavior::SkipEmptyParts);

    QList<std::shared_ptr<const WordBase>> primary_words;
    int columns_offset = 0;
    for (auto& w : words) {
        auto column_start = line_text.indexOf(w, columns_offset);
        auto token = std::make_shared<WordPeaks>(row, column_start + 1, w, path);
        primary_words << token;
        columns_offset = column_start + w.length();
    }

    QList<std::shared_ptr<const Token>> rets_tokens;
    for (auto idx = 0; idx < primary_words.size(); ++idx) {
        auto word = primary_words[idx];
        auto result = parse_token(word);
        rets_tokens.append(std::get<0>(result));

        auto remains = std::get<1>(result);
        if (remains)
            primary_words.insert(idx + 1, remains);
    }
    return rets_tokens;
}
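
// Tries each TokenDefine rule in registration order; the first rule whose
// analysis() produces a token wins. The second tuple element carries any
// leftover word content the rule did not consume. Throws if no rule matches.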
std::tuple<std::shared_ptr<const Token>, std::shared_ptr<const WordBase>> TokenReader::parse_token(std::shared_ptr<const WordBase> word) const {
    for (auto& it : this->rules_store) {
        auto result = it->analysis(word);
        if (std::get<0>(result)) {
            return result;
        }
    }
    throw new TokenException(QString(u8"Lex[0x0001] the specified word could not be parsed: %1,<%2,%3>").arg(word->content()).arg(word->row()).arg(word->column()));
}
TokenException::TokenException(const QString& message) : msg_store(message) {}
QString TokenException::message() const { return msg_store; }
WordPeaks::WordPeaks(int r, int c, const QString& t, const QString& p) : row_n(r), col_n(c), text_n(t), path_p(p) {}
QString WordPeaks::file() const { return path_p; }
QString WordPeaks::content() const { return text_n; }
int WordPeaks::row() const { return row_n; }
int WordPeaks::column() const { return col_n; }
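
// TokenContent: a word that has been matched against a concrete TokenDefine.
// It carries no link of its own; chaining is handled by TokenImpl, so
// nextToken() always returns nullptr here.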
TokenContent::TokenContent(int r, int c, const QString& t, const QString& p, std::shared_ptr<const TokenDefine> type)
: row_n(r), col_n(c), text_n(t), path_p(p), type_def(type) {}
QString TokenContent::file() const { return path_p; }
QString TokenContent::content() const { return text_n; }
int TokenContent::row() const { return row_n; }
int TokenContent::column() const { return col_n; }
std::shared_ptr<const TokenDefine> TokenContent::define() const { return this->type_def; }
std::shared_ptr<const Token> TokenContent::nextToken() const { return nullptr; }
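
// TokenImpl: linked-list node that wraps an underlying token, forwards all
// position/content queries to it, and adds only the pointer to the next token.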
TokenImpl::TokenImpl(std::shared_ptr<const Token> content, std::shared_ptr<const Token> next)
: content_ptr(content), next_element(next) {}
QString TokenImpl::file() const { return content_ptr->file(); }
QString TokenImpl::content() const { return content_ptr->content(); }
int TokenImpl::row() const { return content_ptr->row(); }
int TokenImpl::column() const { return content_ptr->column(); }
std::shared_ptr<const TokenDefine> TokenImpl::define() const { return content_ptr->define(); }
std::shared_ptr<const Token> TokenImpl::nextToken() const { return next_element; }
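
// --- Usage sketch (editor's addition, not part of the original source) ---
// A minimal illustration of how the reader might be driven, assuming the
// caller supplies concrete TokenDefine rules; "keyword_rule" and "ident_rule"
// below are hypothetical placeholders, not names from this library.
//
//   QList<std::shared_ptr<const TokenDefine>> rules{ keyword_rule, ident_rule };
//   TokenReader reader(rules);
//   for (auto tk = reader.tokensWithin("input.txt"); tk; tk = tk->nextToken())
//       qDebug() << tk->row() << tk->column() << tk->content();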