2020-05-13 20:55:58 +02:00
|
|
|
#[cfg_attr(tarpaulin, skip)]
|
|
|
|
mod test_files {
|
|
|
|
use std::ops::Deref;
|
|
|
|
|
|
|
|
pub mod plain;
|
|
|
|
pub mod rust_lang;
|
|
|
|
|
|
|
|
#[cfg_attr(tarpaulin, skip)]
|
|
|
|
mod test_files {
|
|
|
|
/// The set of input languages the tokenizer can handle.
///
/// Each variant selects which lexer `parse` dispatches to.
#[derive(Debug, Clone)]
pub enum Language {
    // Tokenized by the `plain` lexer (see `parse`).
    PlainText,
}
|
2019-01-04 14:43:15 +01:00
|
|
|
|
2020-05-13 20:55:58 +02:00
|
|
|
/// A lexed token tagged with its syntactic category.
///
/// Every variant wraps the same `Token` payload; only the variant itself
/// carries the classification. Because the payload is uniform, the type
/// also implements `Deref<Target = Token>` elsewhere in this file.
#[derive(Debug, Clone)]
pub enum TokenType {
    // Spaces, tabs, newlines — see `is_new_line` / `is_space`.
    Whitespace { token: Token },
    Keyword { token: Token },
    // String literal text.
    String { token: Token },
    Number { token: Token },
    Identifier { token: Token },
    Literal { token: Token },
    Comment { token: Token },
    Operator { token: Token },
    // Punctuation such as delimiters — NOTE(review): exact set is decided
    // by the individual lexers; confirm against `plain`/`rust_lang`.
    Separator { token: Token },
}
|
2019-01-04 14:43:15 +01:00
|
|
|
|
2020-05-13 20:55:58 +02:00
|
|
|
impl TokenType {
|
|
|
|
pub fn move_to(&self, line: usize, character: usize, start: usize, end: usize) -> Self {
|
|
|
|
match self {
|
|
|
|
TokenType::Whitespace { token } => TokenType::Whitespace {
|
|
|
|
token: token.move_to(line, character, start, end),
|
|
|
|
},
|
|
|
|
TokenType::Keyword { token } => TokenType::Keyword {
|
|
|
|
token: token.move_to(line, character, start, end),
|
|
|
|
},
|
|
|
|
TokenType::String { token } => TokenType::String {
|
|
|
|
token: token.move_to(line, character, start, end),
|
|
|
|
},
|
|
|
|
TokenType::Number { token } => TokenType::Number {
|
|
|
|
token: token.move_to(line, character, start, end),
|
|
|
|
},
|
|
|
|
TokenType::Identifier { token } => TokenType::Identifier {
|
|
|
|
token: token.move_to(line, character, start, end),
|
|
|
|
},
|
|
|
|
TokenType::Literal { token } => TokenType::Literal {
|
|
|
|
token: token.move_to(line, character, start, end),
|
|
|
|
},
|
|
|
|
TokenType::Comment { token } => TokenType::Comment {
|
|
|
|
token: token.move_to(line, character, start, end),
|
|
|
|
},
|
|
|
|
TokenType::Operator { token } => TokenType::Operator {
|
|
|
|
token: token.move_to(line, character, start, end),
|
|
|
|
},
|
|
|
|
TokenType::Separator { token } => TokenType::Separator {
|
|
|
|
token: token.move_to(line, character, start, end),
|
|
|
|
},
|
|
|
|
}
|
2019-01-04 14:43:15 +01:00
|
|
|
}
|
|
|
|
|
2020-05-13 20:55:58 +02:00
|
|
|
pub fn is_new_line(&self) -> bool {
|
|
|
|
match self {
|
|
|
|
TokenType::Whitespace { token } => token.text().as_str() == "\n",
|
|
|
|
_ => false,
|
|
|
|
}
|
2019-01-04 14:43:15 +01:00
|
|
|
}
|
|
|
|
|
2020-05-13 20:55:58 +02:00
|
|
|
pub fn is_space(&self) -> bool {
|
|
|
|
match self {
|
|
|
|
TokenType::Whitespace { token } => token.text().as_str() == " ",
|
|
|
|
_ => false,
|
|
|
|
}
|
2019-01-04 14:43:15 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-05-13 20:55:58 +02:00
|
|
|
impl Deref for TokenType {
|
|
|
|
type Target = Token;
|
|
|
|
|
|
|
|
fn deref(&self) -> &<Self as Deref>::Target {
|
|
|
|
match self {
|
|
|
|
TokenType::Whitespace { token } => token,
|
|
|
|
TokenType::Keyword { token } => token,
|
|
|
|
TokenType::String { token } => token,
|
|
|
|
TokenType::Number { token } => token,
|
|
|
|
TokenType::Identifier { token } => token,
|
|
|
|
TokenType::Literal { token } => token,
|
|
|
|
TokenType::Comment { token } => token,
|
|
|
|
TokenType::Operator { token } => token,
|
|
|
|
TokenType::Separator { token } => token,
|
|
|
|
}
|
2019-01-04 14:43:15 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-05-13 20:55:58 +02:00
|
|
|
/// A single lexeme together with its position in the source text.
#[derive(Debug, Clone)]
pub struct Token {
    // Line the token starts on — NOTE(review): 0- vs 1-based is set by
    // the lexers; confirm against `plain::lexer`.
    line: usize,
    // Column within `line` where the token starts.
    character: usize,
    // Absolute start/end offsets into the whole input — presumably byte
    // offsets; TODO confirm unit (bytes vs chars) against the lexers.
    start: usize,
    end: usize,
    // The token's literal source text.
    text: String,
}
|
2019-01-04 14:43:15 +01:00
|
|
|
|
2020-05-13 20:55:58 +02:00
|
|
|
/// A range into the source text, from `lo` to `hi`.
/// NOTE(review): inclusivity of `hi` is decided by whichever lexer
/// produces the span — confirm before relying on it.
#[derive(Debug, Clone, Copy)]
pub struct Span {
    pub lo: usize,
    pub hi: usize,
}
|
2019-01-04 14:43:15 +01:00
|
|
|
|
2020-05-13 20:55:58 +02:00
|
|
|
impl Token {
|
|
|
|
pub fn new(text: String, line: usize, character: usize, start: usize, end: usize) -> Self {
|
|
|
|
Self {
|
|
|
|
text,
|
|
|
|
line,
|
|
|
|
character,
|
|
|
|
start,
|
|
|
|
end,
|
|
|
|
}
|
2019-01-04 14:43:15 +01:00
|
|
|
}
|
|
|
|
|
2020-05-13 20:55:58 +02:00
|
|
|
pub fn text(&self) -> &String {
|
|
|
|
&self.text
|
|
|
|
}
|
2019-01-04 14:43:15 +01:00
|
|
|
|
2020-05-13 20:55:58 +02:00
|
|
|
pub fn line(&self) -> usize {
|
|
|
|
self.line.clone()
|
|
|
|
}
|
2019-01-04 14:43:15 +01:00
|
|
|
|
2020-05-13 20:55:58 +02:00
|
|
|
pub fn character(&self) -> usize {
|
|
|
|
self.character.clone()
|
|
|
|
}
|
2019-01-04 14:43:15 +01:00
|
|
|
|
2020-05-13 20:55:58 +02:00
|
|
|
pub fn start(&self) -> usize {
|
|
|
|
self.start.clone()
|
|
|
|
}
|
2019-01-04 14:43:15 +01:00
|
|
|
|
2020-05-13 20:55:58 +02:00
|
|
|
pub fn end(&self) -> usize {
|
|
|
|
self.end.clone()
|
|
|
|
}
|
2019-01-04 14:43:15 +01:00
|
|
|
|
2020-05-13 20:55:58 +02:00
|
|
|
pub fn move_to(&self, line: usize, character: usize, start: usize, end: usize) -> Self {
|
|
|
|
Self {
|
|
|
|
text: self.text.clone(),
|
|
|
|
line,
|
|
|
|
character,
|
|
|
|
start,
|
|
|
|
end,
|
|
|
|
}
|
2019-01-04 14:43:15 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-05-13 20:55:58 +02:00
|
|
|
pub fn parse(text: String, language: Language) -> Vec<TokenType> {
|
|
|
|
match language {
|
|
|
|
Language::PlainText => plain::lexer::Lexer::new(text.as_str())
|
|
|
|
// .inspect(|tok| println!("tok: {:?}", tok))
|
|
|
|
.map(|t| t.0)
|
|
|
|
.collect(),
|
|
|
|
}
|
2019-01-04 14:43:15 +01:00
|
|
|
}
|
|
|
|
}
|