diff --git a/Cargo.lock b/Cargo.lock
index 97e77bc..3e46e42 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -20,6 +20,18 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
+[[package]]
+name = "clyde"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "dirs",
+ "log",
+ "macros",
+ "thiserror",
+ "windows",
+]
+
 [[package]]
 name = "dirs"
 version = "4.0.0"
@@ -145,18 +157,6 @@ version = "1.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
 
-[[package]]
-name = "wash"
-version = "0.1.0"
-dependencies = [
- "anyhow",
- "dirs",
- "log",
- "macros",
- "thiserror",
- "windows",
-]
-
 [[package]]
 name = "wasi"
 version = "0.11.0+wasi-snapshot-preview1"
diff --git a/Cargo.toml b/Cargo.toml
index 520df50..bcc833c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,5 +1,5 @@
 [package]
-name = "wash"
+name = "clyde"
 version = "0.1.0"
 edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
diff --git a/src/main.rs b/src/main.rs
index ea56fc6..5762558 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -204,3 +204,17 @@ fn main() -> Result<()> {
         }
     }
 }
+
+/*
+
+> ls
+foo
+bar
+whatever
+
+> ls
+
+
+
+
+*/
diff --git a/src/parse.rs b/src/parse.rs
index 36426a7..2acc442 100644
--- a/src/parse.rs
+++ b/src/parse.rs
@@ -5,6 +5,7 @@ pub enum Error {}
 #[derive(Debug, PartialEq)]
 pub enum Token {
     Ident(String),
+    Pipe,
 }
 
 #[allow(dead_code)]
@@ -30,7 +31,11 @@ impl<'text> Lexer<'text> {
         while self.peek().is_some() {
             self.skip_whitespace();
             match self.peek() {
-                None => { return Ok(tokens) },
+                None => return Ok(tokens),
+                Some('|') => {
+                    tokens.push(Token::Pipe);
+                    self.skip();
+                }
                 Some(_) => {
                     tokens.push(self.ident());
                 }
@@ -76,7 +81,7 @@ impl<'text> Lexer<'text> {
     }
 
     fn ident(&mut self) -> Token {
-        let mut kept: Vec<char> = vec!();
+        let mut kept: Vec<char> = vec![];
         loop {
             match self.peek() {
                 None => break,
@@ -93,39 +98,97 @@
     }
 }
 
-// pub fn parse(line: String) -> Result<Tree, Error> {
-//     Ok(Tree::new())
-// }
+struct Parser {
+
+}
+
+struct Tree {
 
-// pub struct Tree {
-//
-// }
+}
 
-// impl Tree {
-//     pub fn new() -> Self { Self { } }
-// }
+fn parse<S: Into<String>>(text: S) -> Result<Tree, Error> {
+    Ok(Tree{})
+}
 
 #[cfg(test)]
 mod tests {
     use super::*;
+    use Token::Pipe;
 
-    #[test]
-    fn lex_empty() -> Result<(), Error> {
-        let tokens = lex("")?;
-        assert_eq!(tokens.len(), 0);
-        Ok(())
+    fn ident<S: Into<String>>(s: S) -> Token {
+        Token::Ident(s.into())
     }
 
-    #[test]
-    fn lex_ident() -> Result<(), Error> {
-        let tokens = lex("one")?;
-        assert_eq!(tokens.len(), 1);
-        assert_eq!(tokens[0], Token::Ident(String::from("one")));
-
-        let tokens = lex("one two")?;
-        assert_eq!(tokens.len(), 2);
-        assert_eq!(tokens[0], Token::Ident(String::from("one")));
-        assert_eq!(tokens[1], Token::Ident(String::from("two")));
-        Ok(())
+    macro_rules! lex {
+        (
+            $($name:ident: $line:literal $($token:expr)* ;)+
+        ) => {
+            $(
+                #[test]
+                fn $name() -> Result<(), Error> {
+                    let mut tokens = lex($line)?;
+
+                    $(
+                        assert!(tokens.len() > 0);
+                        let front = tokens.remove(0);
+                        assert_eq!($token, front);
+                    )*
+
+                    Ok(())
+                }
+            )*
+        };
+    }
+
+    lex! {
+        single_token:
+            "one"
+            ident("one");
+
+        two_tokens:
+            "one two"
+            ident("one") ident("two");
+
+        leading_whitespace:
+            " one"
+            ident("one");
+
+        trailing_whitespace:
+            "one "
+            ident("one");
+
+        surrounding_whitespace:
+            " one "
+            ident("one");
+
+        internal_hyphen:
+            "one-two"
+            ident("one-two");
+
+        pipe:
+            "|"
+            Pipe;
+
+        pipeline:
+            "one | two"
+            ident("one") Pipe ident("two");
+
+        pipeline_2:
+            "one |two"
+            ident("one") Pipe ident("two");
     }
 }
+
+/*
+
+
+
+
+
+*/
+
+
+
+
+
+