#include "libtoken.h"
|
|
|
|
#include <QFile>
|
|
#include <QTextStream>
|
|
|
|
using namespace lib_token;
|
|
|
|
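// TokenReader keeps the supplied rules in the given order; parse_token() tries
// them in this order and uses the first rule that matches.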
TokenReader::TokenReader(const QList<std::shared_ptr<const TokenDefine>> rulers) : rules_store(rulers) {}

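// Reads the file at `path` as UTF-8 and tokenises it line by line.
// Throws a TokenException* when the file cannot be opened.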
QList<std::shared_ptr<const lib_token::Token>> lib_token::TokenReader::extractFrom(const QString& path) const {
    QFile file(path);
    if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) {
        throw new TokenException(u8"Lex[0x0000]The specified file could not be opened: " + path);
    }
    QTextStream tin(&file);
    tin.setCodec("UTF-8");

    QList<std::shared_ptr<const Token>> ret_list;
    int line_number = 1;
    while (!tin.atEnd()) {
        // readLine() strips the line terminator, so re-append it before tokenising.
        auto line = tin.readLine() + "\n";
        ret_list.append(this->parse_line(line_number++, line, path));
    }

    return ret_list;
}

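// Splits one line on spaces, recording the 1-based row/column of every word,
// then resolves each word to a token. A rule may consume only the leading part
// of a word; any remainder is inserted right after the current position and
// handled on the next loop iteration.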
QList<std::shared_ptr<const Token>> TokenReader::parse_line(int row, const QString& line_text, const QString& path) const {
    auto words = line_text.split(" ", QString::SplitBehavior::SkipEmptyParts);

    QList<std::shared_ptr<const WordBase>> primary_words;
    int columns_offset = 0;
    for (auto& w : words) {
        auto column_start = line_text.indexOf(w, columns_offset);
        auto token = std::make_shared<WordPeaks>(row, column_start + 1, w, path);
        primary_words << token;

        columns_offset = column_start + w.length();
    }

    QList<std::shared_ptr<const Token>> rets_tokens;
    for (auto idx = 0; idx < primary_words.size(); ++idx) {
        auto word = primary_words[idx];
        auto result = parse_token(word);
        rets_tokens.append(std::get<0>(result));

        auto remains = std::get<1>(result);
        if (remains)
            primary_words.insert(idx + 1, remains);
    }

    return rets_tokens;
}

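// Tries every registered rule in order and returns the first successful
// (token, remainder) pair. Throws a TokenException* when no rule accepts the word.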
std::tuple<std::shared_ptr<const Token>, std::shared_ptr<const WordBase>> TokenReader::parse_token(std::shared_ptr<const WordBase> word) const {
    for (auto& it : this->rules_store) {
        auto result = it->analysis(word);
        if (std::get<0>(result)) {
            return result;
        }
    }

    throw new TokenException(QString(u8"Lex[0x0001]The specified word could not be parsed: %1,<%2,%3>").arg(word->content()).arg(word->row()).arg(word->column()));
}

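// TokenException simply carries a human-readable diagnostic message.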
TokenException::TokenException(const QString& message) : msg_store(message) {}

QString TokenException::message() const { return msg_store; }

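// WordPeaks: a raw word produced by the whitespace split, together with its
// source position (row, 1-based column) and originating file.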
WordPeaks::WordPeaks(int r, int c, const QString& t, const QString& p) : row_n(r), col_n(c), text_n(t), path_p(p) {}

QString WordPeaks::file() const { return path_p; }

QString WordPeaks::content() const { return text_n; }

int WordPeaks::row() const { return row_n; }

int WordPeaks::column() const { return col_n; }

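// TokenInst: a recognised token, i.e. a word bound to the TokenDefine that describes its type.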
TokenInst::TokenInst(int r, int c, const QString& t, const QString& p, std::shared_ptr<const TokenDefine> type)
    : row_n(r), col_n(c), text_n(t), path_p(p), type_def(type) {}

QString TokenInst::file() const { return path_p; }

QString TokenInst::content() const { return text_n; }

int TokenInst::row() const { return row_n; }

int TokenInst::column() const { return col_n; }

std::shared_ptr<const TokenDefine> TokenInst::define() const { return this->type_def; }

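/*
 * A minimal usage sketch, not part of the library. NumberRule and NameRule are
 * hypothetical stand-ins for concrete TokenDefine implementations that are
 * assumed to be provided elsewhere:
 *
 *   QList<std::shared_ptr<const TokenDefine>> rules;
 *   rules << std::make_shared<NumberRule>() << std::make_shared<NameRule>();
 *   TokenReader reader(rules);
 *   try {
 *       auto tokens = reader.extractFrom("input.txt");
 *       qDebug() << "token count:" << tokens.size();
 *   } catch (TokenException* e) {   // exceptions are thrown with `new`, so catch by pointer
 *       qDebug() << e->message();
 *       delete e;
 *   }
 */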