just messin around now

parse-tree
Jordan Orelli 1 year ago
parent d79a28b3b0
commit 5980bc59da

Cargo.lock (generated, 24 lines changed)

@@ -20,6 +20,18 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "clyde"
version = "0.1.0"
dependencies = [
"anyhow",
"dirs",
"log",
"macros",
"thiserror",
"windows",
]
[[package]]
name = "dirs"
version = "4.0.0"
@@ -145,18 +157,6 @@ version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
[[package]]
name = "wash"
version = "0.1.0"
dependencies = [
"anyhow",
"dirs",
"log",
"macros",
"thiserror",
"windows",
]
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"

@@ -1,5 +1,5 @@
[package]
name = "wash"
name = "clyde"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@@ -204,3 +204,17 @@ fn main() -> Result<()> {
}
}
}
/*
> ls
foo
bar
whatever
> ls
*/

@@ -5,6 +5,7 @@ pub enum Error {}
#[derive(Debug, PartialEq)]
pub enum Token {
Ident(String),
Pipe,
}
#[allow(dead_code)]
@@ -30,7 +31,11 @@ impl<'text> Lexer<'text> {
while self.peek().is_some() {
self.skip_whitespace();
match self.peek() {
None => { return Ok(tokens) },
None => return Ok(tokens),
Some('|') => {
tokens.push(Token::Pipe);
self.skip();
}
Some(_) => {
tokens.push(self.ident());
}
@@ -76,7 +81,7 @@ impl<'text> Lexer<'text> {
}
fn ident(&mut self) -> Token {
let mut kept: Vec<char> = vec!();
let mut kept: Vec<char> = vec![];
loop {
match self.peek() {
None => break,
@@ -93,39 +98,97 @@ impl<'text> Lexer<'text> {
}
}
// pub fn parse(line: String) -> Result<Tree, Error> {
// Ok(Tree::new())
// }
struct Parser {
}
struct Tree {
// pub struct Tree {
//
// }
}
// impl Tree {
// pub fn new() -> Self { Self { } }
// }
fn parse<S: AsRef<str>>(text: S) -> Result<Tree, Error> {
Ok(Tree{})
}
#[cfg(test)]
mod tests {
use super::*;
use Token::Pipe;
#[test]
fn lex_empty() -> Result<(), Error> {
let tokens = lex("")?;
assert_eq!(tokens.len(), 0);
Ok(())
fn ident<S: Into<String>>(s: S) -> Token {
Token::Ident(s.into())
}
#[test]
fn lex_ident() -> Result<(), Error> {
let tokens = lex("one")?;
assert_eq!(tokens.len(), 1);
assert_eq!(tokens[0], Token::Ident(String::from("one")));
let tokens = lex("one two")?;
assert_eq!(tokens.len(), 2);
assert_eq!(tokens[0], Token::Ident(String::from("one")));
assert_eq!(tokens[1], Token::Ident(String::from("two")));
Ok(())
macro_rules! lex {
(
$($name:ident: $line:literal $($token:expr)* ;)+
) => {
$(
#[test]
fn $name() -> Result<(), Error> {
let mut tokens = lex($line)?;
$(
assert!(tokens.len() > 0);
let front = tokens.remove(0);
assert_eq!($token, front);
)*
Ok(())
}
)*
};
}
lex! {
single_token:
"one"
ident("one");
two_tokens:
"one two"
ident("one") ident("two");
leading_whitespace:
" one"
ident("one");
trailing_whitespace:
"one "
ident("one");
surrounding_whitespace:
" one "
ident("one");
internal_hyphen:
"one-two"
ident("one-two");
pipe:
"|"
Pipe;
pipeline:
"one | two"
ident("one") Pipe ident("two");
pipeline_2:
"one |two"
ident("one") Pipe ident("two");
}
}
/*
*/
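For reference, each `name: input tokens ;` entry passed to the lex! invocation above expands into its own #[test] function inside mod tests. A rough sketch of what the first entry (single_token) expands to, assuming the lex, Error, and ident items defined in this file:

#[test]
fn single_token() -> Result<(), Error> {
    // lex the input line, then pop expected tokens off the front one by one
    let mut tokens = lex("one")?;
    assert!(tokens.len() > 0);
    let front = tokens.remove(0);
    assert_eq!(ident("one"), front);
    Ok(())
}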
