Some additional tests

parent 493699e56b
commit ac0f55e18e

.gitignore (vendored) | 1 +
@@ -6,3 +6,4 @@ log
 .codecov
 cobertura.xml
 cov
+tarpaulin-report.html
@@ -28,6 +28,7 @@ impl Config {
         extensions_mapping.insert(".".to_string(), Language::PlainText);
         extensions_mapping.insert("txt".to_string(), Language::PlainText);
         extensions_mapping.insert("rs".to_string(), Language::Rust);
+        extensions_mapping.insert("toml".to_string(), Language::Toml);

         Self {
             width: 1024,
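The hunk above registers the `toml` extension next to the existing ones, so buffers with a `toml` extension resolve to `Language::Toml`. A minimal, self-contained sketch of the lookup idea (plain HashMap and a hypothetical `language_for` helper, not the crate's actual API):

    use std::collections::HashMap;

    #[derive(Debug, Clone, Copy, PartialEq)]
    enum Language { PlainText, Rust, Toml }

    // Hypothetical helper: resolve a language from a file extension,
    // falling back to plain text for unknown extensions.
    fn language_for(mapping: &HashMap<String, Language>, ext: &str) -> Language {
        *mapping.get(ext).unwrap_or(&Language::PlainText)
    }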
@@ -130,16 +131,24 @@ mod tests {
        let mapping = config.extensions_mapping();
        {
            let mut keys: Vec<String> = mapping.keys().map(|s| s.to_string()).collect();
-           let mut expected: Vec<String> =
-               vec![".".to_string(), "txt".to_string(), "rs".to_string()];
+           let mut expected: Vec<String> = vec![
+               ".".to_string(),
+               "txt".to_string(),
+               "rs".to_string(),
+               "toml".to_string(),
+           ];
            keys.sort();
            expected.sort();
            assert_eq!(keys, expected);
        }
        {
            let mut keys: Vec<Language> = mapping.values().map(|s| s.clone()).collect();
-           let mut expected: Vec<Language> =
-               vec![Language::PlainText, Language::PlainText, Language::Rust];
+           let mut expected: Vec<Language> = vec![
+               Language::PlainText,
+               Language::PlainText,
+               Language::Rust,
+               Language::Toml,
+           ];
            keys.sort();
            expected.sort();
            assert_eq!(keys, expected);
@@ -234,7 +243,12 @@ mod test_getters {
            .map(|s| s.to_owned())
            .collect();
        result.sort();
-       let mut expected: Vec<String> = vec!["rs".to_string(), "txt".to_string(), ".".to_string()];
+       let mut expected: Vec<String> = vec![
+           "rs".to_string(),
+           "txt".to_string(),
+           ".".to_string(),
+           "toml".to_string(),
+       ];
        expected.sort();
        assert_eq!(result, expected);
    }
@@ -1,7 +1,6 @@
 use crate::app::application::Application;
 use crate::app::UpdateResult;
 use crate::renderer::renderer::Renderer;
-use crate::renderer::CanvasRenderer;
 use crate::ui::*;
 use rider_config::*;
 use sdl2::rect::Point;
@@ -35,21 +34,22 @@ impl AppState {
     }

     #[cfg_attr(tarpaulin, skip)]
-    pub fn open_file(&mut self, file_path: String, renderer: &mut CanvasRenderer) {
-        if let Ok(buffer) = read_to_string(&file_path) {
-            let mut file = EditorFile::new(file_path.clone(), buffer, self.config.clone());
-            file.prepare_ui(renderer);
-            match self.file_editor.open_file(file) {
-                Some(old) => self.files.push(old),
-                _ => (),
-            }
-        } else {
-            eprintln!("Failed to open file: {}", file_path);
-        };
+    pub fn open_file<R>(&mut self, file_path: String, renderer: &mut R) -> Result<(), String>
+    where
+        R: Renderer + CharacterSizeManager + ConfigHolder,
+    {
+        let buffer = read_to_string(&file_path)
+            .map_err(|file_path| format!("Failed to open file: {}", file_path))?;
+        let mut file = EditorFile::new(file_path.clone(), buffer, self.config.clone());
+        file.prepare_ui(renderer);
+        match self.file_editor.open_file(file) {
+            Some(old) => self.files.push(old),
+            _ => (),
+        }
+        Ok(())
     }

     pub fn save_file(&self) -> Result<(), String> {
-        println!("Saving file...");
         let editor_file = match self.file_editor.file() {
             Some(f) => f,
             _ => Err("No buffer found".to_string())?,
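open_file now propagates failure as Result<(), String> instead of printing to stderr and returning nothing. A standalone sketch of the read-then-map_err pattern used above (the `load_source` name is illustrative, not part of the crate):

    use std::fs::read_to_string;

    // Illustrative helper: read a file and convert the io::Error into the
    // String error type used throughout this diff, so `?` can propagate it.
    fn load_source(file_path: &str) -> Result<String, String> {
        read_to_string(file_path).map_err(|_| format!("Failed to open file: {}", file_path))
    }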
@@ -69,7 +69,6 @@ impl AppState {
     where
         R: Renderer + CharacterSizeManager,
     {
-        println!("Open Settings...");
         match self.modal {
             None => {
                 let mut settings = Settings::new(self.config.clone());
@@ -255,7 +254,6 @@ impl ConfigHolder for AppState {
 mod tests {
     use super::*;
     use crate::tests::support;
-    // use crate::ui::modal::open_file;
     use std::sync::Arc;

     #[test]
@@ -294,4 +292,94 @@ mod tests {
             old_scroll
         );
     }
+
+    #[test]
+    fn must_fail_save_file_when_none_is_open() {
+        let config = support::build_config();
+        let state = AppState::new(Arc::clone(&config));
+        let result = state.save_file();
+        assert_eq!(result, Err(format!("No buffer found")));
+    }
+
+    #[test]
+    fn must_succeed_save_file_when_file_is_open() {
+        assert_eq!(std::fs::create_dir_all("/tmp").is_ok(), true);
+        assert_eq!(
+            std::fs::write(
+                "/tmp/must_succeed_save_file_when_file_is_open.md",
+                "Foo bar"
+            )
+            .is_ok(),
+            true
+        );
+
+        let config = support::build_config();
+        let mut renderer = support::SimpleRendererMock::new(config.clone());
+        let mut state = AppState::new(Arc::clone(&config));
+        let result = state.open_file(
+            format!("/tmp/must_succeed_save_file_when_file_is_open.md"),
+            &mut renderer,
+        );
+        assert_eq!(result, Ok(()));
+        let result = state.save_file();
+        assert_eq!(result, Ok(()));
+    }
+
+    #[test]
+    fn must_succeed_save_file_when_file_does_not_exists() {
+        assert_eq!(std::fs::create_dir_all("/tmp").is_ok(), true);
+        assert_eq!(
+            std::fs::write(
+                "/tmp/must_succeed_save_file_when_file_does_not_exists.md",
+                "Foo bar"
+            )
+            .is_ok(),
+            true
+        );
+
+        let config = support::build_config();
+        let mut renderer = support::SimpleRendererMock::new(config.clone());
+        let mut state = AppState::new(Arc::clone(&config));
+        let result = state.open_file(
+            format!("/tmp/must_succeed_save_file_when_file_does_not_exists.md"),
+            &mut renderer,
+        );
+        assert_eq!(result, Ok(()));
+        let result = state.save_file();
+        assert_eq!(result, Ok(()));
+    }
+
+    #[test]
+    fn must_close_modal_when_no_modal_is_open() {
+        let config = support::build_config();
+        let mut state = AppState::new(Arc::clone(&config));
+        assert_eq!(state.close_modal(), Ok(()));
+    }
+
+    #[test]
+    fn must_close_modal_when_some_modal_is_open() {
+        let config = support::build_config();
+        let mut state = AppState::new(Arc::clone(&config));
+        let modal = OpenFile::new("/".to_owned(), 100, 100, Arc::clone(&config));
+        state.set_open_file_modal(Some(modal));
+        assert_eq!(state.close_modal(), Ok(()));
+    }
+
+    #[test]
+    fn open_settings_when_there_is_no_other_modal() {
+        let config = support::build_config();
+        let mut renderer = support::SimpleRendererMock::new(config.clone());
+        let mut state = AppState::new(Arc::clone(&config));
+        assert_eq!(state.open_settings(&mut renderer), Ok(()));
+    }
+
+    #[test]
+    fn open_settings_when_other_modal_is_open() {
+        let config = support::build_config();
+        let mut renderer = support::SimpleRendererMock::new(config.clone());
+        let mut state = AppState::new(Arc::clone(&config));
+        let modal = OpenFile::new("/".to_owned(), 100, 100, Arc::clone(&config));
+        state.set_open_file_modal(Some(modal));
+        assert_eq!(state.open_settings(&mut renderer), Ok(()));
+    }
 }
@@ -117,10 +117,10 @@ impl Application {
         self.clear();
     }

-    pub fn run(&mut self) {
-        let mut timer: TimerSubsystem = self.sdl_context.timer().unwrap();
-        let mut event_pump = self.sdl_context.event_pump().unwrap();
-        let font_context = sdl2::ttf::init().unwrap();
+    pub fn run(&mut self) -> Result<(), String> {
+        let mut timer: TimerSubsystem = self.sdl_context.timer()?;
+        let mut event_pump = self.sdl_context.event_pump()?;
+        let font_context = sdl2::ttf::init().map_err(|e| format!("{:?}", e))?;
         let texture_creator = self.canvas.texture_creator();
         let sleep_time = Duration::new(0, 1_000_000_000u32 / 60);
         let mut app_state = AppState::new(Arc::clone(&self.config));
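The `?` calls above type-check because run() now returns Result<(), String>: sdl2's timer() and event_pump() already yield Result values with String errors, while ttf::init() uses its own error type, hence the extra map_err into a String.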
@@ -159,14 +159,12 @@ impl Application {
                    UpdateResult::Input(text) => app_state
                        .file_editor_mut()
                        .insert_text(text.clone(), &mut renderer),
-                   UpdateResult::InsertNewLine => app_state
-                       .file_editor_mut()
-                       .insert_new_line(&mut renderer)
-                       .unwrap_or_else(|e| eprintln!("Failed to delete line {:?}", e)),
+                   UpdateResult::InsertNewLine => {
+                       app_state.file_editor_mut().insert_new_line(&mut renderer)?
+                   }
                    UpdateResult::DeleteLine => app_state
                        .file_editor_mut()
-                       .delete_current_line(&mut renderer)
-                       .unwrap_or_else(|e| eprintln!("Failed to delete line {:?}", e)),
+                       .delete_current_line(&mut renderer)?,
                    UpdateResult::MoveCaretLeft => {
                        app_state.file_editor_mut().move_caret(MoveDirection::Left);
                    }
@@ -193,10 +191,10 @@ impl Application {
                            c.set_height(*height as u32);
                        }
                    })
-                   .unwrap_or_else(|_| println!("Failed to update window size")),
+                   .map_err(|_| format!("Failed to update window size"))?,
                    UpdateResult::RefreshFsTree => unimplemented!(),
                    UpdateResult::OpenFile(file_path) => {
-                       app_state.open_file(file_path.clone(), &mut renderer);
+                       app_state.open_file(file_path.clone(), &mut renderer)?;
                    }
                    UpdateResult::OpenDirectory(dir_path) => {
                        app_state.open_directory(dir_path.clone(), &mut renderer);
@@ -212,15 +210,9 @@ impl Application {
                    UpdateResult::MouseDragStart(_point) => (),
                    UpdateResult::MouseDragStop(_point) => (),
                    UpdateResult::FileDropped(_path) => (),
-                   UpdateResult::SaveCurrentFile => app_state
-                       .save_file()
-                       .unwrap_or_else(|e| eprintln!("Failed to save {:?}", e)),
-                   UpdateResult::OpenSettings => app_state
-                       .open_settings(&mut renderer)
-                       .unwrap_or_else(|e| eprintln!("Failed to open settings {:?}", e)),
-                   UpdateResult::CloseModal => app_state
-                       .close_modal()
-                       .unwrap_or_else(|e| eprintln!("Failed to close modal {:?}", e)),
+                   UpdateResult::SaveCurrentFile => app_state.save_file()?,
+                   UpdateResult::OpenSettings => app_state.open_settings(&mut renderer)?,
+                   UpdateResult::CloseModal => app_state.close_modal()?,
                }
            }
            self.tasks = new_tasks;
@@ -236,6 +228,8 @@ impl Application {
                sleep(sleep_time);
            }
        }
+
+        Ok(())
    }

    pub fn open_file(&mut self, file_path: String) {
@@ -47,11 +47,11 @@ fn init_logger(directories: &Directories) {
 }

 #[cfg_attr(tarpaulin, skip)]
-fn main() {
+fn main() -> Result<(), String> {
     let directories = Directories::new(None, None);
     let mut app = Application::new();
     app.init();
     init_logger(&directories);
     app.open_file("./test_files/test.rs".to_string());
-    app.run();
+    app.run()
 }
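With main() returning Result<(), String>, the trailing `app.run()` expression becomes the program's exit status: an Err makes the process exit non-zero and print the error via Debug. A standalone sketch of the same pattern (the `fallible` helper is hypothetical):

    // Hypothetical helper standing in for app.run().
    fn fallible() -> Result<(), String> {
        Err("something went wrong".to_string())
    }

    // An Err return from main exits with a non-zero status and prints the
    // error through its Debug representation.
    fn main() -> Result<(), String> {
        fallible()?;
        Ok(())
    }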
@@ -1,15 +1,140 @@
 extern crate log;
+//#[macro_use]
+extern crate plex;
 extern crate simplelog;

 use std::ops::Deref;

 pub mod plain;
 pub mod rust_lang;
+pub mod toml;

+#[macro_export]
+macro_rules! lexer_whitespace {
+    ($provider: expr) => {{
+        let text = $provider.text();
+        let line = $provider.line();
+        let character = $provider.character();
+        let start = $provider.start();
+        let end = $provider.end(&text);
+
+        TokenType::Whitespace {
+            token: Token::new(text, line, character, start, end),
+        }
+    }};
+}
+#[macro_export]
+macro_rules! lexer_keyword {
+    ($provider: expr) => {{
+        let text = $provider.text();
+        let line = $provider.line();
+        let character = $provider.character();
+        let start = $provider.start();
+        let end = $provider.end(&text);
+
+        TokenType::Keyword {
+            token: Token::new(text, line, character, start, end),
+        }
+    }};
+}
+#[macro_export]
+macro_rules! lexer_string {
+    ($provider: expr) => {{
+        let text = $provider.text();
+        let line = $provider.line();
+        let character = $provider.character();
+        let start = $provider.start();
+        let end = $provider.end(&text);
+
+        TokenType::String {
+            token: Token::new(text, line, character, start, end),
+        }
+    }};
+}
+#[macro_export]
+macro_rules! lexer_identifier {
+    ($provider: expr) => {{
+        let text = $provider.text();
+        let line = $provider.line();
+        let character = $provider.character();
+        let start = $provider.start();
+        let end = $provider.end(&text);
+
+        TokenType::Identifier {
+            token: Token::new(text, line, character, start, end),
+        }
+    }};
+}
+#[macro_export]
+macro_rules! lexer_literal {
+    ($provider: expr) => {{
+        let text = $provider.text();
+        let line = $provider.line();
+        let character = $provider.character();
+        let start = $provider.start();
+        let end = $provider.end(&text);
+
+        TokenType::Literal {
+            token: Token::new(text, line, character, start, end),
+        }
+    }};
+}
+#[macro_export]
+macro_rules! lexer_comment {
+    ($provider: expr) => {{
+        let text = $provider.text();
+        let line = $provider.line();
+        let character = $provider.character();
+        let start = $provider.start();
+        let end = $provider.end(&text);
+
+        TokenType::Comment {
+            token: Token::new(text, line, character, start, end),
+        }
+    }};
+}
+#[macro_export]
+macro_rules! lexer_operator {
+    ($provider: expr) => {{
+        let text = $provider.text();
+        let line = $provider.line();
+        let character = $provider.character();
+        let start = $provider.start();
+        let end = $provider.end(&text);
+
+        TokenType::Operator {
+            token: Token::new(text, line, character, start, end),
+        }
+    }};
+}
+#[macro_export]
+macro_rules! lexer_separator {
+    ($provider: expr) => {{
+        let text = $provider.text();
+        let line = $provider.line();
+        let character = $provider.character();
+        let start = $provider.start();
+        let end = $provider.end(&text);
+
+        TokenType::Separator {
+            token: Token::new(text, line, character, start, end),
+        }
+    }};
+}
+
+pub trait TokenBuilder {
+    fn text(&self) -> String;
+    fn line(&self) -> usize;
+    fn character(&self) -> usize;
+    fn start(&self) -> usize;
+    fn end(&self, current_text: &String) -> usize;
+}
+
 #[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Ord, Eq)]
 pub enum Language {
     PlainText,
     Rust,
+    Toml,
 }

 #[derive(Debug, Clone, PartialEq)]
@@ -92,7 +217,7 @@ pub struct Span {
     pub hi: usize,
 }

-#[derive(Debug, Clone, PartialEq)]
+#[derive(Clone, PartialEq)]
 pub struct Token {
     line: usize,
     character: usize,
@@ -101,6 +226,22 @@ pub struct Token {
     text: String,
 }

+impl std::fmt::Debug for Token {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
+        self.text.fmt(f)?;
+        f.write_str(" { ")?;
+        f.write_str("line ")?;
+        self.line.fmt(f)?;
+        f.write_str(" character ")?;
+        self.character.fmt(f)?;
+        f.write_str(" start ")?;
+        self.start.fmt(f)?;
+        f.write_str(" end ")?;
+        self.end.fmt(f)?;
+        f.write_str(" }")
+    }
+}
+
 impl Token {
     pub fn new(text: String, line: usize, character: usize, start: usize, end: usize) -> Self {
         Self {
@@ -153,6 +294,7 @@ pub fn parse(text: String, language: Language) -> Vec<TokenType> {
            // .inspect(|tok| warn!("tok: {:?}", tok))
            .map(|t| t.0)
            .collect(),
+       Language::Toml => toml::lexer::Lexer::new(text).tokenize(),
    }
 }

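The new match arm routes TOML input through the lexer added below. A hedged usage sketch, assuming the library crate is importable as `rider_lexers` (matching the rider-lexers directory) and exposes parse, Language, and TokenType from its root as shown above:

    // Hypothetical call site; parse(), Language and TokenType come from the diff.
    fn tokenize_manifest() -> Vec<rider_lexers::TokenType> {
        rider_lexers::parse("name = \"rider\"".to_string(), rider_lexers::Language::Toml)
    }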
@@ -97,9 +97,10 @@ pub mod lexer {

    #[cfg(test)]
    mod tests {
-       use super::*;
        use crate::{Token, TokenType};
+
+       use super::*;

        #[test]
        fn must_parse_simple_text() {
            let code = "foo";
rider-lexers/src/toml.rs (new file, 284 lines)
@@ -0,0 +1,284 @@
+pub mod lexer {
+    use crate::{Token, TokenType};
+    use std::ops::{Deref, DerefMut};
+
+    use crate::*;
+
+    #[derive(Debug)]
+    pub struct Buffer(String);
+
+    impl Buffer {
+        pub fn new() -> Self {
+            Self(String::new())
+        }
+
+        pub fn is_string(&self) -> bool {
+            self.0.starts_with('\'') || self.0.starts_with('"')
+        }
+
+        pub fn is_escaped(&self) -> bool {
+            self.0.ends_with('\\')
+        }
+
+        pub fn is_string_beginning(&self, c: char) -> bool {
+            self.is_string() && self.0.starts_with(c)
+        }
+
+        pub fn is_white(&self) -> bool {
+            self.0.as_str() == " "
+                || self.0.as_str() == "\t"
+                || self.0.as_str() == "\n"
+                || self.0.as_str() == "\r"
+        }
+    }
+
+    impl Deref for Buffer {
+        type Target = String;
+
+        fn deref(&self) -> &Self::Target {
+            &self.0
+        }
+    }
+
+    impl DerefMut for Buffer {
+        fn deref_mut(&mut self) -> &mut Self::Target {
+            &mut self.0
+        }
+    }
+
+    pub struct Lexer {
+        content: String,
+        buffer: Buffer,
+        line: usize,
+        character: usize,
+        start: usize,
+    }
+
+    impl Lexer {
+        pub fn new(content: String) -> Self {
+            Self {
+                content,
+                line: 0,
+                character: 0,
+                start: 0,
+                buffer: Buffer::new(),
+            }
+        }
+
+        pub fn tokenize(&mut self) -> Vec<TokenType> {
+            let mut tokens = vec![];
+            let content = self.content.clone();
+            for c in content.chars() {
+                match c {
+                    ' ' | '\n' if self.non_special() => {
+                        self.push_non_empty(&mut tokens);
+                        self.append_and_push(c, &mut tokens, |b| lexer_whitespace!(b))
+                    }
+                    '[' | ']' | '{' | '}' if self.non_special() => {
+                        self.push_non_empty(&mut tokens);
+                        self.append_and_push(c, &mut tokens, |b| lexer_separator!(b))
+                    }
+                    '=' if self.non_special() => {
+                        self.push_non_empty(&mut tokens);
+                        self.append_and_push(c, &mut tokens, |b| lexer_operator!(b))
+                    }
+                    '"' | '\'' if self.is_string_beginning(c) => {
+                        self.append_and_push(c, &mut tokens, |b| lexer_string!(b))
+                    }
+                    _ => {
+                        self.buffer.push(c);
+                    }
+                }
+            }
+            if !self.is_empty() {
+                tokens.push(lexer_identifier!(self));
+                self.clear();
+            }
+            tokens
+        }
+
+        fn append_and_push<F>(&mut self, c: char, tokens: &mut Vec<TokenType>, builder: F)
+        where
+            F: Fn(&Lexer) -> TokenType,
+        {
+            self.buffer.push(c);
+            tokens.push(builder(&self));
+            self.clear();
+        }
+
+        fn push_non_empty(&mut self, tokens: &mut Vec<TokenType>) {
+            if self.is_empty() {
+                return;
+            }
+            tokens.push(self.match_token());
+        }
+
+        fn non_special(&self) -> bool {
+            !self.buffer.is_string()
+        }
+
+        fn match_token(&mut self) -> TokenType {
+            let token = if self.buffer.is_string() {
+                lexer_string!(self)
+            } else if self.buffer.is_white() {
+                lexer_whitespace!(self)
+            } else {
+                lexer_identifier!(self)
+            };
+            self.clear();
+            token
+        }
+
+        fn clear(&mut self) {
+            if self.buffer.contains('\n') {
+                self.line += self.buffer.lines().count();
+                self.character +=
+                    self.buffer.len() - self.buffer.rfind('\n').unwrap_or(self.buffer.len());
+                self.start += self.buffer.len();
+            } else {
+                self.character += self.buffer.len();
+                self.start += self.buffer.len();
+            }
+            self.buffer.clear();
+        }
+    }
+
+    impl Deref for Lexer {
+        type Target = Buffer;
+
+        fn deref(&self) -> &Self::Target {
+            &self.buffer
+        }
+    }
+
+    impl TokenBuilder for Lexer {
+        fn text(&self) -> String {
+            self.buffer.to_string()
+        }
+
+        fn line(&self) -> usize {
+            self.line
+        }
+
+        fn character(&self) -> usize {
+            self.character
+        }
+
+        fn start(&self) -> usize {
+            self.start
+        }
+
+        fn end(&self, text: &String) -> usize {
+            self.start + text.len()
+        }
+    }
+
+    #[cfg(test)]
+    mod tests {
+        use super::*;
+
+        struct BuilderMock {
+            line: usize,
+            character: usize,
+            start: usize,
+            text: String,
+        }
+
+        impl TokenBuilder for BuilderMock {
+            fn text(&self) -> String {
+                self.text.clone()
+            }
+
+            fn line(&self) -> usize {
+                self.line
+            }
+
+            fn character(&self) -> usize {
+                self.character
+            }
+
+            fn start(&self) -> usize {
+                self.start
+            }
+
+            fn end(&self, current_text: &String) -> usize {
+                self.start + current_text.len()
+            }
+        }
+
+        macro_rules! builder {
+            ($text: expr, $line: expr, $character: expr, $start: expr) => {
+                BuilderMock {
+                    line: $line,
+                    character: $character,
+                    start: $start,
+                    text: $text.to_owned(),
+                }
+            };
+        }
+
+        #[test]
+        fn parse_empty() {
+            let code = "".to_owned();
+            let mut lexer = Lexer::new(code);
+            let result = lexer.tokenize();
+            let expected = vec![];
+            assert_eq!(result, expected)
+        }
+
+        #[test]
+        fn parse_section() {
+            let code = "[package]".to_owned();
+            let mut lexer = Lexer::new(code);
+            let result = lexer.tokenize();
+            let expected = vec![
+                lexer_separator!(builder!("[", 0, 0, 0)),
+                lexer_identifier!(builder!("package", 0, 1, 1)),
+                lexer_separator!(builder!("]", 0, 8, 8)),
+            ];
+            assert_eq!(result, expected)
+        }
+
+        #[test]
+        fn parse_package() {
+            let code = "redis = \"*\"".to_owned();
+            let mut lexer = Lexer::new(code);
+            let result = lexer.tokenize();
+            let expected = vec![
+                lexer_identifier!(builder!("redis", 0, 0, 0)),
+                lexer_whitespace!(builder!(" ", 0, 5, 5)),
+                lexer_operator!(builder!("=", 0, 6, 6)),
+                lexer_whitespace!(builder!(" ", 0, 7, 7)),
+                lexer_string!(builder!("\"*\"", 0, 8, 8)),
+            ];
+            assert_eq!(result, expected)
+        }
+
+        #[test]
+        fn parse_complex_package() {
+            let code = "redis = { version = \"*\" }".to_owned();
+            let mut lexer = Lexer::new(code);
+            let result = lexer.tokenize();
+            let expected = vec![
+                lexer_identifier!(builder!("redis", 0, 0, 0)),
+                lexer_whitespace!(builder!(" ", 0, 5, 5)),
+                lexer_operator!(builder!("=", 0, 6, 6)),
+                lexer_whitespace!(builder!(" ", 0, 7, 7)),
+                lexer_separator!(builder!("{", 0, 8, 8)),
+                lexer_whitespace!(builder!(" ", 0, 9, 9)),
+                lexer_identifier!(builder!("version", 0, 10, 10)),
+                lexer_whitespace!(builder!(" ", 0, 17, 17)),
+                lexer_operator!(builder!("=", 0, 18, 18)),
+                lexer_whitespace!(builder!(" ", 0, 19, 19)),
+                lexer_string!(builder!("\"*\"", 0, 20, 20)),
+                lexer_whitespace!(builder!(" ", 0, 23, 23)),
+                lexer_separator!(builder!("}", 0, 24, 24)),
+            ];
+            assert_eq!(result, expected)
+        }
+    }
+}
+
+mod compiler {
+    // pub struct Compiler {}
+}