#include "libtoken.h" #include #include using namespace lib_token; TokenReader::TokenReader(const QList> rulers) : rules_store(rulers) {} std::shared_ptr lib_token::TokenReader::tokensWithin(const QString& path) const { auto content_list = extract_from(path); if (!content_list.size()) return nullptr; std::shared_ptr prev_ptr = std::make_shared(content_list.last(), nullptr); for (auto idx = content_list.size() - 2; idx >=0; --idx) { auto content_ptr = content_list[idx]; prev_ptr = std::make_shared(content_ptr, prev_ptr); } return prev_ptr; } QList> TokenReader::extract_from(const QString& path) const { QFile file(path); if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) { throw new TokenException(u8"Lex[0x0000]指定文件无法打开:" + path); } QTextStream tin(&file); tin.setCodec("UTF-8"); QList> ret_list; int line_number = 1; while (!tin.atEnd()) { auto line = tin.readLine() + "\n"; ret_list.append(this->parse_line(line_number++, line, path)); } return ret_list; } QList> TokenReader::parse_line(int row, const QString& line_text, const QString& path) const { auto words = line_text.split(" ", QString::SplitBehavior::SkipEmptyParts); QList> primary_words; int columns_offset = 0; for (auto& w : words) { auto column_start = line_text.indexOf(w, columns_offset); auto token = std::make_shared(row, column_start + 1, w, path); primary_words << token; columns_offset = column_start + w.length(); } QList> rets_tokens; for (auto idx = 0; idx < primary_words.size(); ++idx) { auto word = primary_words[idx]; auto result = parse_token(word); rets_tokens.append(std::get<0>(result)); auto remains = std::get<1>(result); if (remains) primary_words.insert(idx + 1, remains); } return rets_tokens; } std::tuple, std::shared_ptr> TokenReader::parse_token(std::shared_ptr word) const { for (auto& it : this->rules_store) { auto result = it->analysis(word); if (std::get<0>(result)) { return result; } } throw new TokenException(QString(u8"Lex[0x0001]指定词语无法解析:%1,<%2,%3>").arg(word->content()).arg(word->row()).arg(word->column())); } TokenException::TokenException(const QString& message) : msg_store(message) {} QString TokenException::message() const { return msg_store; } WordPeaks::WordPeaks(int r, int c, const QString& t, const QString& p) : row_n(r), col_n(c), text_n(t), path_p(p) {} QString WordPeaks::file() const { return path_p; } QString WordPeaks::content() const { return text_n; } int WordPeaks::row() const { return row_n; } int WordPeaks::column() const { return col_n; } TokenContent::TokenContent(int r, int c, const QString& t, const QString& p, std::shared_ptr type) : row_n(r), col_n(c), text_n(t), path_p(p), type_def(type) {} QString TokenContent::file() const { return path_p; } QString TokenContent::content() const { return text_n; } int TokenContent::row() const { return row_n; } int TokenContent::column() const { return col_n; } std::shared_ptr TokenContent::define() const { return this->type_def; } std::shared_ptr TokenContent::nextToken() const { return nullptr; } TokenImpl::TokenImpl(std::shared_ptr content, std::shared_ptr next) : content_ptr(content), next_element(next) {} QString TokenImpl::file() const { return content_ptr->file(); } QString TokenImpl::content() const { return content_ptr->content(); } int TokenImpl::row() const { return content_ptr->row(); } int TokenImpl::column() const { return content_ptr->column(); } std::shared_ptr TokenImpl::define() const { return content_ptr->define(); } std::shared_ptr TokenImpl::nextToken() const { return next_element; }