From 4a0db72d4e24cd886f96911c7e22dfe47f815cca Mon Sep 17 00:00:00 2001
From: Jordan Orelli
Date: Sun, 27 Aug 2023 11:24:27 -0500
Subject: [PATCH] logging is in a separate method now

---
 src/main.rs  | 27 ++-----------------
 src/parse.rs | 76 ++++++++++++++++++++++++++++++++++++----------------
 src/shell.rs | 16 +++++++++++
 3 files changed, 71 insertions(+), 48 deletions(-)

diff --git a/src/main.rs b/src/main.rs
index 65a9611..2a6beb1 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -19,21 +19,12 @@ use anyhow::Result;
 
 fn main() -> Result<()> {
     let mut shell = Shell::new()?;
-    let log_path = shell.expand_path("~/clyde.log");
-    match Log::file(log_path) {
-        Ok(f) => {
-            let target = Box::leak(Box::new(f));
-            _ = set_logger(target).map(|()| set_max_level(LevelFilter::Debug));
-        }
-        Err(e) => {
-            println!("did not open log file: {}", e);
-        }
-    }
+    shell.enable_logging("~/clyde.log");
 
     let prompt = Prompt::new();
     prompt.print(&mut shell.output)?;
-    info!("» enter");
+    info!("» shell session start --------");
 
     loop {
         match shell.input.next()? {
             input::Event::Key(event) => {
@@ -189,17 +180,3 @@ fn main() -> Result<()> {
         }
     }
 }
-
-/*
-
-> ls
-foo
-bar
-whatever
-
-> ls
-
-
-
-
-*/
diff --git a/src/parse.rs b/src/parse.rs
index 737deea..69d2968 100644
--- a/src/parse.rs
+++ b/src/parse.rs
@@ -7,6 +7,15 @@ pub enum Error {
     UnexpectedToken,
 }
 
+pub struct Tok {
+    start_line: usize,
+    start_col: usize,
+    end_line: usize,
+    end_col: usize,
+    text: String,
+    kind: Token,
+}
+
 #[allow(dead_code)]
 #[derive(Debug, PartialEq)]
 pub enum Token {
@@ -17,7 +26,7 @@ pub enum Token {
 
 #[allow(dead_code)]
 fn lex<S: AsRef<str>>(text: S) -> Result<Vec<Token>, Error> {
-    Lexer::new(text.as_ref()).lex()
+    Ok(Lexer::new(text.as_ref()).collect())
 }
 
 struct Lexer<'text> {
@@ -35,27 +44,6 @@ impl<'text> Lexer<'text> {
         }
     }
 
-    fn lex(&mut self) -> Result<Vec<Token>, Error> {
-        let mut tokens = vec![];
-        while self.peek().is_some() {
-            self.skip_whitespace();
-            match self.peek() {
-                None => return Ok(tokens),
-                Some('|') => {
-                    tokens.push(Token::Pipe);
-                    self.skip();
-                }
-                Some('*') => {
-                    tokens.push(self.glob());
-                }
-                Some(_) => {
-                    tokens.push(self.ident());
-                }
-            }
-        }
-        Ok(tokens)
-    }
-
     fn skip_whitespace(&mut self) {
         loop {
             match self.peek() {
@@ -112,7 +100,7 @@ impl<'text> Lexer<'text> {
                 Some('*') => {
                     self.keep();
                     self.glob();
-                },
+                }
                 Some(c) => {
                     if c.is_whitespace() {
                         break;
                     }
@@ -146,6 +134,24 @@ impl<'text> Lexer<'text> {
     }
 }
 
+impl <'text> Iterator for Lexer<'text> {
+    type Item = Token;
+
+    fn next(&mut self) -> Option<Token> {
+        self.skip_whitespace();
+        self.kept.clear();
+        match self.peek() {
+            Some('|') => {
+                self.skip();
+                Some(Token::Pipe)
+            }
+            Some('*') => Some(self.glob()),
+            Some(_) => Some(self.ident()),
+            None => None,
+        }
+    }
+}
+
 #[derive(PartialEq, Clone)]
 pub enum Element {
     Empty,
@@ -350,6 +356,10 @@ mod tests {
         Token::Ident(s.into())
     }
 
+    fn glob<S: Into<String>>(s: S) -> Token {
+        Token::Glob(s.into())
+    }
+
     macro_rules! lex {
         (
             $($name:ident: $line:literal $($token:expr)* ;)+
@@ -407,6 +417,26 @@ mod tests {
         pipeline_2:
             "one |two"
             ident("one") Pipe ident("two");
+
+        simple_glob:
+            "*"
+            glob("*");
+
+        ext_glob:
+            "*.rs"
+            glob("*.rs");
+
+        mixed:
+            "ls *.rs"
+            ident("ls") glob("*.rs");
+
+        globby_pipeline:
+            "ls *.rs | wc -l"
+            ident("ls") glob("*.rs") Pipe ident("wc") ident("-l");
+
+        mid_glob:
+            "a*b"
+            glob("a*b");
     }
 
     #[test]
diff --git a/src/shell.rs b/src/shell.rs
index 29b9cb0..0a78035 100644
--- a/src/shell.rs
+++ b/src/shell.rs
@@ -69,6 +69,22 @@ impl Shell {
         Ok(())
     }
 
+    pub fn enable_logging<P>(&self, path: P)
+    where
+        P: AsRef<Path>,
+    {
+        let log_path = self.expand_path(path);
+        match Log::file(log_path) {
+            Ok(f) => {
+                let target = Box::leak(Box::new(f));
+                _ = set_logger(target).map(|()| set_max_level(LevelFilter::Debug));
+            }
+            Err(e) => {
+                println!("did not open log file: {}", e);
+            }
+        }
+    }
+
     pub fn expand_path<P: AsRef<Path>>(&self, p: P) -> PathBuf {
         let p = p.as_ref();