I'm redoing the lexing and parsing

main
Jordan Orelli 9 months ago
parent 4a0db72d4e
commit d152c4092a

@@ -1,3 +1,4 @@
use crate::lex::Topoglyph;
use std::io;
use thiserror::Error;
use windows::Win32::Foundation::{GetLastError, BOOL};
@@ -14,6 +15,39 @@ pub enum Error {
InputError(String),
}
#[derive(Debug, Error)]
pub enum LexError {
#[error("a word character was expected but none was encountered")]
ExpectedWordCharacter,
#[error("unexpected character: {0:?}")]
UnexpectedCharacter(Topoglyph),
#[error("unexpected eof")]
UnexpectedEOF,
#[error("invalid trailing carriage return character")]
IllegalTrailingCarriageReturn,
#[error("carriage return without newline is baffling")]
IllegalDanglingCarriageReturn,
#[error("not yet supported: {0}")]
NotYetSupported(String),
}
impl LexError {
pub fn not_yet(msg: &str) -> Self {
LexError::NotYetSupported(msg.to_string())
}
}
#[derive(Debug, Error)]
pub enum ParseError {
#[error("Unexpected Token")]
UnexpectedToken,
}
impl Error {
pub fn last_error() -> Self {
unsafe { Error::WindowsError(GetLastError().to_hresult().message().to_string()) }

@@ -0,0 +1,466 @@
use crate::error::LexError;
use std::{collections::VecDeque, fmt, ops::Range, str::Chars};
fn is_glob(c: char) -> bool {
match c {
'*' | '?' => true,
_ => false,
}
}
fn is_special(c: char) -> bool {
match c {
'?' => true,
_ => false,
}
}
fn is_keyword(s: &str) -> bool {
match s {
"for" => true,
_ => false,
}
}
/// The position of a specific glyph within a corpus of text
#[derive(PartialEq, Clone, Copy)]
pub struct Position {
/// The visual line in which this glyph appears in the source text
line: u64,
/// The visual column in which this glyph appears in the source text
column: u64,
}
impl Position {
fn start() -> Self {
Self { line: 0, column: 0 }
}
fn incr(&mut self) -> Position {
let p = *self;
self.column += 1;
p
}
fn incr_line(&mut self) -> Position {
let p = *self;
self.column = 0;
self.line += 1;
p
}
}
impl fmt::Debug for Position {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
write!(f, "{line}:{column}", line = self.line, column = self.column)
}
}
/// A [Topoglyph] is a wrapper around a basic Rust [char] that includes information about where that
/// char appears in the source text. Where the char only describes the
/// [glyph](https://en.wikipedia.org/wiki/Glyph) (i.e., the graphical symbol), a topoglyph
/// includes both the glyph and its position, to be used to describe the locations of parsed
/// elements within a source text. Two glyphs appearing at different locations within a source text
/// would correspond to two distinct topoglyphs.
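/// For example (illustrative values, following the zero-based lines and columns used by
/// [Position]): in the source text "ab\ncd", the 'a' glyph yields a topoglyph at position 0:0
/// covering bytes 0..1, while the 'c' glyph yields one at position 1:0 covering bytes 3..4.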
#[derive(PartialEq, Clone)]
pub struct Topoglyph {
/// the unicode code point of the glyph
glyph: char,
/// The visual position in which the glyph appears; i.e., the human-comprehensible location
/// of the glyph in the source text
position: Position,
/// The byte offsets corresponding to this topoglyph in the source data; i.e., the
/// machine-comprehensible location of the glyph in the source text
bytes: Range<u64>,
}
impl fmt::Debug for Topoglyph {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
write!(f, "{char}@{pos:?}", char = self.glyph, pos = self.position)
}
}
/// A topoglypher produces [topoglyphs](Topoglyph) for a source text; i.e., it is an iterator of
/// topoglyphs. The topoglypher is used to control reading from the source text and keeps a
/// lookahead buffer of topoglyphs that have not been processed. While a [Lexer] is responsible
/// for the creation and iteration of [tokens](Token), a topoglypher is responsible for the
/// creation and iteration of topoglyphs.
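///
/// A rough usage sketch (illustrative only; the type is private to this module, so the
/// example is marked `ignore` rather than run as a doctest):
///
/// ```ignore
/// let mut glyphs = Topoglypher::new("hi");
/// assert_eq!(glyphs.peek().map(|g| g.glyph), Some('h')); // buffered, not consumed
/// assert_eq!(glyphs.next().map(|g| g.glyph), Some('h')); // now consumed
/// assert_eq!(glyphs.next().map(|g| g.glyph), Some('i'));
/// assert!(glyphs.next().is_none());
/// ```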
struct Topoglypher<'text> {
source: Chars<'text>,
next_position: Position,
bytes_read: u64,
lookahead: VecDeque<Topoglyph>,
}
impl<'text> Topoglypher<'text> {
fn new(source: &'text str) -> Self {
Self {
source: source.chars(),
next_position: Position::start(),
bytes_read: 0,
lookahead: VecDeque::new(),
}
}
fn feed(&mut self, n: usize) -> bool {
while self.lookahead.len() < n {
let c = match self.source.next() {
Some(c) => c,
None => break,
};
let len = c.len_utf8();
let start = self.bytes_read;
self.bytes_read += len as u64;
let position = if c == '\n' {
self.next_position.incr_line()
} else {
self.next_position.incr()
};
self.lookahead.push_back(Topoglyph {
glyph: c,
position,
bytes: Range {
start,
end: self.bytes_read,
},
})
}
self.lookahead.len() >= n
}
fn peek(&mut self) -> Option<&Topoglyph> {
self.peek_at(0)
}
fn pop(&mut self) -> Result<Topoglyph, LexError> {
self.next().ok_or(LexError::UnexpectedEOF)
}
fn peek_at(&mut self, idx: usize) -> Option<&Topoglyph> {
self.feed(idx + 1);
self.lookahead.get(idx)
}
fn next_is<F>(&mut self, pred: F) -> bool
where
F: FnOnce(&Topoglyph) -> bool,
{
self.peek().map(pred).unwrap_or(false)
}
fn is_empty(&mut self) -> bool {
self.peek().is_none()
}
pub fn yeet_while<F>(&mut self, mut pred: F)
where
F: FnMut(&Topoglyph) -> bool,
{
while let Some(g) = self.peek() {
if pred(&g) {
self.next();
} else {
return;
}
}
}
fn yeet_whitespace(&mut self) {
self.yeet_while(|tg| tg.glyph.is_whitespace());
}
fn keep_word(&mut self) -> Result<Lexeme, LexError> {
let gs = self.keep_until(|g| g.glyph.is_whitespace());
if gs.is_empty() {
return Err(LexError::ExpectedWordCharacter);
}
Ok(Lexeme::from(gs))
}
fn keep_while<F>(&mut self, mut pred: F) -> Vec<Topoglyph>
where
F: FnMut(&Topoglyph) -> bool,
{
let mut keep = Vec::new();
while let Some(g) = self.peek() {
if pred(&g) {
keep.push(g.clone());
self.next();
} else {
break;
}
}
keep
}
fn keep_until<F>(&mut self, mut pred: F) -> Vec<Topoglyph>
where
F: FnMut(&Topoglyph) -> bool,
{
self.keep_while(|g| !pred(g))
}
}
impl<'text> Iterator for Topoglypher<'text> {
type Item = Topoglyph;
fn next(&mut self) -> Option<Self::Item> {
self.feed(1);
self.lookahead.pop_front()
}
}
/// A Lexeme is the text of a given Token, without respect to that Token's type, but with respect
/// to where the text appears relative to some source code. This is, simply, a string that contains
/// the addresses of each of its characters with respect to some source text.
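/// For instance (illustrative): lexing the bare word "ls" at the very start of a source text
/// yields a Lexeme whose `text()` is "ls" and whose `span()` runs from position 0:0 to 0:1.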
#[derive(PartialEq)]
pub struct Lexeme {
elems: Vec<Topoglyph>,
}
impl Lexeme {
fn span(&self) -> Option<Range<Position>> {
if self.elems.is_empty() {
return None;
}
Some(Range {
start: self.elems[0].position,
end: self.elems[self.elems.len() - 1].position,
})
}
fn text(&self) -> String {
self.elems.as_slice().iter().map(|tg| tg.glyph).collect()
}
}
impl fmt::Debug for Lexeme {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
let span = match self.span() {
Some(span) => span,
None => return write!(f, "<empty Lexeme>"),
};
write!(
f,
"<{text} @{start_line}:{start_column}-{end_line}:{end_column}>",
start_line = span.start.line,
start_column = span.start.column,
end_line = span.end.line,
end_column = span.end.column,
text = self.text(),
)
}
}
impl fmt::Display for Lexeme {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
write!(f, "{}", self.text())
}
}
impl From<Vec<Topoglyph>> for Lexeme {
fn from(v: Vec<Topoglyph>) -> Self {
Self { elems: v }
}
}
#[allow(dead_code)]
#[derive(Debug, PartialEq)]
pub enum Token {
BareString(Lexeme),
Glob(Lexeme),
}
struct Lexer<'text> {
source: Topoglypher<'text>,
}
impl<'text> Lexer<'text> {
fn new(text: &'text str) -> Self {
Self {
source: Topoglypher::new(text),
}
}
fn next_token(&mut self) -> Option<Result<Token, LexError>> {
self.source.yeet_whitespace();
let next = self.source.next()?;
match next.glyph {
_ if next.glyph.is_alphabetic() => Some(self.lex_bare_string(vec![next])),
'\\' => match self.source.pop() {
Ok(escaped) => Some(self.lex_bare_string(vec![escaped])),
Err(e) => Some(Err(e)),
},
'@' => Some(self.lex_var(vec![next])),
'\'' => Some(self.lex_raw_string(vec![next])),
'"' => Some(self.lex_interp_string(vec![next])),
_ => Some(Err(LexError::UnexpectedCharacter(next))),
}
}
fn lex_bare_string(&mut self, mut progress: Vec<Topoglyph>) -> Result<Token, LexError> {
while let Some(next) = self.source.peek() {
match next.glyph {
_ if next.glyph.is_whitespace() => break,
_ if next.glyph.is_alphanumeric() => progress.push(self.source.pop()?),
'\\' => {
self.source.pop()?;
progress.push(self.source.pop()?);
}
'*' | '?' => return self.lex_glob(progress),
_ => return Err(LexError::UnexpectedCharacter(self.source.pop()?)),
}
}
if progress.is_empty() {
Err(LexError::UnexpectedEOF)
} else {
Ok(Token::BareString(progress.into()))
}
}
fn lex_glob(&mut self, mut progress: Vec<Topoglyph>) -> Result<Token, LexError> {
while let Some(next) = self.source.peek() {
match next.glyph {
_ if next.glyph.is_whitespace() => break,
_ if next.glyph.is_alphanumeric() => progress.push(self.source.pop()?),
'*' | '?' => progress.push(self.source.pop()?),
'\\' => {
self.source.pop()?;
progress.push(self.source.pop()?);
}
_ => return Err(LexError::UnexpectedCharacter(self.source.pop()?)),
}
}
if progress.is_empty() {
Err(LexError::UnexpectedEOF)
} else {
Ok(Token::Glob(progress.into()))
}
}
fn lex_raw_string(&mut self, _progress: Vec<Topoglyph>) -> Result<Token, LexError> {
Err(LexError::not_yet("raw strings not done yet"))
}
fn lex_interp_string(&mut self, _progress: Vec<Topoglyph>) -> Result<Token, LexError> {
Err(LexError::not_yet("interpreted strings not done yet"))
}
fn lex_var(&mut self, _progress: Vec<Topoglyph>) -> Result<Token, LexError> {
Err(LexError::not_yet("variables are not done yet"))
}
}
impl<'text> Iterator for Lexer<'text> {
type Item = Result<Token, LexError>;
fn next(&mut self) -> Option<Self::Item> {
self.next_token()
}
}
#[cfg(test)]
mod tests {
use super::*;
/// this macro allows us to specify a set of inputs that we expect to lex successfully.
macro_rules! accept {
($($name:ident: $line:literal ;)+) => {$(
#[test]
fn $name() {
println!("testing that we can lex the following input text:\n\t{}", $line);
let lexer = Lexer::new($line);
let tokens: Result<Vec<Token>, LexError> = lexer.collect();
match tokens {
Ok(tokens) => {
println!("output tokens: {tokens:?}");
}
Err(e) => {
println!("output error: {e:?}");
panic!("Encounter an unexpected lex error");
}
}
}
)*};
}
/// this macro allows us to specify a set of inputs that we expect to fail to lex successfully.
macro_rules! reject {
($($name:ident: $line:literal ;)+) => {$(
#[test]
fn $name() {
println!("testing that we will fail to lex the following input text:\n\t{}", $line);
let lexer = Lexer::new($line);
let tokens: Result<Vec<Token>, LexError> = lexer.collect();
match tokens {
Ok(tokens) => {
println!("output tokens: {tokens:?}");
panic!("Did not encounter an expected lex error");
}
Err(e) => {
println!("output error: {e:?}");
}
}
}
)*};
}
reject! {
// A backslash on its own makes no sense
lonely_slash: r"\";
// A backslash is the escape character, so starting an escape sequence and then ending the
// input makes no sense
trailing_slash: r"one two three \";
// A token that starts with a glob character isn't handled yet
glob: "*";
// Vars aren't done yet
var: "@name";
// Single-quoted strings aren't done yet
strings: r"echo 'one' two";
// Double-quoted strings aren't done yet
double_quoted_strings: r#"echo "one" two"#;
}
accept! {
empty: "";
spaces: " ";
identifier: "a";
identifier_2: " a";
identifier_3: "a ";
identifier_4: " a ";
multi_idents: "one two three four ";
}
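// A sketch of a more targeted check than the accept!/reject! macros above: it verifies both
// the token variant and the text recovered from the lexeme for a single glob input. The test
// name and input are illustrative.
#[test]
fn glob_token_text() {
let tokens: Vec<Token> = Lexer::new("a*b")
.collect::<Result<Vec<Token>, LexError>>()
.expect("expected `a*b` to lex without error");
assert_eq!(tokens.len(), 1);
match &tokens[0] {
Token::Glob(lexeme) => assert_eq!(lexeme.to_string(), "a*b"),
other => panic!("expected a glob token, got {other:?}"),
}
}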
}
/*
Run a program or command named a, which is on the PATH
> a
Run a program or command named a, which is in the current directory
> ./a
*/

@@ -2,6 +2,7 @@ mod error;
mod ext;
mod input;
mod key;
mod lex;
mod line;
mod log;
mod output;

@@ -1,156 +1,5 @@
use crate::error::ParseError;
use std::{cell::RefCell, fmt, rc::Rc};
use thiserror::Error;
#[derive(Debug, Error)]
pub enum Error {
#[error("Unexpected Token")]
UnexpectedToken,
}
pub struct Tok {
start_line: usize,
start_col: usize,
end_line: usize,
end_col: usize,
text: String,
kind: Token,
}
#[allow(dead_code)]
#[derive(Debug, PartialEq)]
pub enum Token {
Ident(String),
Glob(String),
Pipe,
}
#[allow(dead_code)]
fn lex<S: AsRef<str>>(text: S) -> Result<Vec<Token>, Error> {
Ok(Lexer::new(text.as_ref()).collect())
}
struct Lexer<'text> {
chars: std::str::Chars<'text>,
peeked: Option<char>,
kept: Vec<char>,
}
impl<'text> Lexer<'text> {
fn new(text: &'text str) -> Self {
Self {
chars: text.chars(),
peeked: None,
kept: Vec::new(),
}
}
fn skip_whitespace(&mut self) {
loop {
match self.peek() {
None => break,
Some(c) => {
if c.is_whitespace() {
self.skip()
} else {
break;
}
}
}
}
}
fn peek(&mut self) -> Option<char> {
match self.peeked {
Some(c) => Some(c),
None => match self.chars.next() {
Some(c) => {
self.peeked = Some(c);
Some(c)
}
None => None,
},
}
}
fn skip(&mut self) {
if self.peeked.is_some() {
self.peeked = None;
} else {
self.chars.next();
}
}
fn keep(&mut self) {
match self.peeked {
Some(c) => {
self.kept.push(c);
self.peeked = None;
}
None => match self.chars.next() {
Some(c) => self.kept.push(c),
None => todo!(),
},
}
}
fn ident(&mut self) -> Token {
loop {
match self.peek() {
None => break,
Some('*') => {
self.keep();
self.glob();
}
Some(c) => {
if c.is_whitespace() {
break;
}
self.keep();
}
}
}
Token::Ident(self.pop())
}
fn glob(&mut self) -> Token {
loop {
match self.peek() {
None => break,
Some(c) => {
if c.is_whitespace() {
break;
}
self.keep();
}
}
}
Token::Glob(self.pop())
}
fn pop(&mut self) -> String {
let s: String = self.kept.iter().collect();
self.kept.clear();
s
}
}
impl <'text> Iterator for Lexer<'text> {
type Item = Token;
fn next(&mut self) -> Option<Token> {
self.skip_whitespace();
self.kept.clear();
match self.peek() {
Some('|') => {
self.skip();
Some(Token::Pipe)
}
Some('*') => Some(self.glob()),
Some(_) => Some(self.ident()),
None => None,
}
}
}
#[derive(PartialEq, Clone)]
pub enum Element {
@@ -190,6 +39,7 @@ impl Node {
Self::new(Element::Empty)
}
#[allow(dead_code)]
pub fn child_of(parent: Rc<RefCell<Self>>, elem: Element) -> Self {
Self {
elem,
@@ -198,9 +48,9 @@ impl Node {
}
}
pub fn visit(self) -> Tree {
self.into()
}
// pub fn visit(self) -> Tree {
// self.into()
// }
}
impl fmt::Debug for Node {
@@ -221,75 +71,75 @@ pub struct Tree {
target: Rc<RefCell<Node>>,
}
impl Tree {
fn new() -> Self {
Node::empty().into()
}
/// Adds an element as a child node of the current node, then descends the tree to select
/// that child node. This is similar to how pushing a value changes the top element of a stack.
fn push(self, elem: Element) -> Self {
if self.is_empty() {
self.target.replace(Node::new(elem));
self
} else {
let child = Node::child_of(Rc::clone(&self.target), elem);
Tree {
target: Rc::new(RefCell::new(child)),
}
}
}
/// Adds a child node with a given element value to the currently selected node without changing
/// our selection.
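///
/// A rough sketch of how this differs from push (illustrative usage; the element values are
/// hypothetical, and the example is marked `ignore` rather than run as a doctest):
///
/// ```ignore
/// let tree = Tree::new()
///     .push(Element::Command("ls".to_string()))      // becomes the selected node
///     .append(Element::Literal("one".to_string()));  // adds a child; "ls" stays selected
/// assert_eq!(tree.peek(), Element::Command("ls".to_string()));
/// ```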
fn append(self, elem: Element) -> Self {
if self.is_empty() {
self.target.replace(Node::new(elem));
} else {
let node = Node::child_of(Rc::clone(&self.target), elem);
let child = Rc::new(RefCell::new(node));
self.target.borrow_mut().children.push(child);
}
self
}
/// Tells us whether or not the currently selected node is an empty node
fn is_empty(&self) -> bool {
self.target.borrow().elem == Element::Empty
}
fn parent(&self) -> Option<Self> {
self.target.borrow().parent.as_ref().map(|parent| Self {
target: Rc::clone(parent),
})
}
fn is_root(&self) -> bool {
self.parent().is_none()
}
pub fn root(self) -> Self {
match self.parent() {
Some(parent) => parent.root(),
None => self,
}
}
fn peek(&self) -> Element {
self.target.borrow().elem.clone()
}
fn into_node(self) -> Node {
self.into()
}
fn children(&self) -> ChildIter {
ChildIter {
parent: Rc::clone(&self.target),
idx: 0,
}
}
}
// impl Tree {
// fn new() -> Self {
// Node::empty().into()
// }
//
// /// Adds an element as a child node of the current node, then descends the tree to select
// /// that child node. This is similar to how pushing a value changes the top element of a stack.
// fn push(self, elem: Element) -> Self {
// if self.is_empty() {
// self.target.replace(Node::new(elem));
// self
// } else {
// let child = Node::child_of(Rc::clone(&self.target), elem);
// Tree {
// target: Rc::new(RefCell::new(child)),
// }
// }
// }
//
// /// Adds a child node with a given element value to the currently selected node without changing
// /// our selection.
// fn append(self, elem: Element) -> Self {
// if self.is_empty() {
// self.target.replace(Node::new(elem));
// } else {
// let node = Node::child_of(Rc::clone(&self.target), elem);
// let child = Rc::new(RefCell::new(node));
// self.target.borrow_mut().children.push(child);
// }
// self
// }
//
// /// Tells us whether or not the currently selected node is an empty node
// fn is_empty(&self) -> bool {
// self.target.borrow().elem == Element::Empty
// }
//
// fn parent(&self) -> Option<Self> {
// self.target.borrow().parent.as_ref().map(|parent| Self {
// target: Rc::clone(parent),
// })
// }
//
// fn is_root(&self) -> bool {
// self.parent().is_none()
// }
//
// pub fn root(self) -> Self {
// match self.parent() {
// Some(parent) => parent.root(),
// None => self,
// }
// }
//
// fn peek(&self) -> Element {
// self.target.borrow().elem.clone()
// }
//
// fn into_node(self) -> Node {
// self.into()
// }
//
// fn children(&self) -> ChildIter {
// ChildIter {
// parent: Rc::clone(&self.target),
// idx: 0,
// }
// }
// }
impl fmt::Debug for Tree {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
@@ -331,165 +181,166 @@ impl From<Node> for Tree {
}
}
pub fn parse<S: AsRef<str>>(text: S) -> Result<Tree, Error> {
let mut tree = Tree::new();
for token in lex(text)? {
match (tree.peek(), token) {
(Element::Empty, Token::Ident(cmd)) => {
tree = tree.push(Element::Command(cmd));
}
(Element::Command(_), Token::Ident(arg)) => {
tree = tree.append(Element::Literal(arg));
}
_ => todo!(),
}
}
Ok(tree.root())
pub fn parse<S: AsRef<str>>(text: S) -> Result<Tree, ParseError> {
todo!()
// let mut tree = Tree::new();
// for token in lex(text)? {
// match (tree.peek(), token) {
// (Element::Empty, Token::Ident(cmd)) => {
// tree = tree.push(Element::Command(cmd));
// }
// (Element::Command(_), Token::Ident(arg)) => {
// tree = tree.append(Element::Literal(arg));
// }
// _ => todo!(),
// }
// }
// Ok(tree.root())
}
#[cfg(test)]
mod tests {
use super::*;
use Token::Pipe;
fn ident<S: Into<String>>(s: S) -> Token {
Token::Ident(s.into())
}
fn glob<S: Into<String>>(s: S) -> Token {
Token::Glob(s.into())
}
macro_rules! lex {
(
$($name:ident: $line:literal $($token:expr)* ;)+
) => {
$(
#[test]
fn $name() -> Result<(), Error> {
let mut tokens = lex($line)?;
$(
assert!(tokens.len() > 0);
let front = tokens.remove(0);
assert_eq!($token, front);
)*
Ok(())
}
)*
};
}
lex! {
single_token:
"one"
ident("one");
two_tokens:
"ls one two"
ident("ls") ident("one") ident("two");
leading_whitespace:
" one"
ident("one");
trailing_whitespace:
"one "
ident("one");
surrounding_whitespace:
" one "
ident("one");
internal_hyphen:
"one-two"
ident("one-two");
pipe:
"|"
Pipe;
pipeline:
"one | two"
ident("one") Pipe ident("two");
pipeline_2:
"one |two"
ident("one") Pipe ident("two");
simple_glob:
"*"
glob("*");
ext_glob:
"*.rs"
glob("*.rs");
mixed:
"ls *.rs"
ident("ls") glob("*.rs");
globby_pipeline:
"ls *.rs | wc -l"
ident("ls") glob("*.rs") Pipe ident("wc") ident("-l");
mid_glob:
"a*b"
glob("a*b");
}
#[test]
fn empty_tree() {
let root = Tree::new().root();
assert!(root.is_root());
assert_eq!(root.peek(), Element::Empty);
assert_eq!(root.children().count(), 0);
}
#[test]
fn tree_root() {
let root = Tree::new()
.push(Element::Command(String::from("ls")))
.root();
assert!(root.is_root());
assert_eq!(root.children().count(), 0);
assert_eq!(root.into_node().elem, Element::Command(String::from("ls")));
}
#[test]
fn tree_push() {
let tree = Tree::new().push(Element::Command(String::from("ls")));
assert_eq!(tree.peek(), Element::Command(String::from("ls")));
let tree = Tree::new()
.push(Element::Command(String::from("ls")))
.append(Element::Command(String::from("one")));
assert_eq!(tree.peek(), Element::Command(String::from("ls")));
}
#[test]
fn parse_args() {
let res = parse("ls one two three");
assert!(res.is_ok());
let tree = res.unwrap();
assert_eq!(tree.peek(), Element::Command(String::from("ls")));
assert_eq!(tree.children().count(), 3);
let mut args = tree.children();
assert_eq!(
args.next().unwrap().peek(),
Element::Literal(String::from("one"))
);
assert_eq!(
args.next().unwrap().peek(),
Element::Literal(String::from("two"))
);
assert_eq!(
args.next().unwrap().peek(),
Element::Literal(String::from("three"))
);
assert!(args.next().is_none());
}
}
// #[cfg(test)]
// mod tests {
// use super::*;
// use Token::Pipe;
//
// fn ident<S: Into<String>>(s: S) -> Token {
// Token::Ident(s.into())
// }
//
// fn glob<S: Into<String>>(s: S) -> Token {
// Token::Glob(s.into())
// }
//
// macro_rules! lex {
// (
// $($name:ident: $line:literal $($token:expr)* ;)+
// ) => {
// $(
// #[test]
// fn $name() -> Result<(), Error> {
// let mut tokens = lex($line)?;
//
// $(
// assert!(tokens.len() > 0);
// let front = tokens.remove(0);
// assert_eq!($token, front);
// )*
//
// Ok(())
// }
// )*
// };
// }
//
// lex! {
// single_token:
// "one"
// ident("one");
//
// two_tokens:
// "ls one two"
// ident("ls") ident("one") ident("two");
//
// leading_whitespace:
// " one"
// ident("one");
//
// trailing_whitespace:
// "one "
// ident("one");
//
// surrounding_whitespace:
// " one "
// ident("one");
//
// internal_hyphen:
// "one-two"
// ident("one-two");
//
// pipe:
// "|"
// Pipe;
//
// pipeline:
// "one | two"
// ident("one") Pipe ident("two");
//
// pipeline_2:
// "one |two"
// ident("one") Pipe ident("two");
//
// simple_glob:
// "*"
// glob("*");
//
// ext_glob:
// "*.rs"
// glob("*.rs");
//
// mixed:
// "ls *.rs"
// ident("ls") glob("*.rs");
//
// globby_pipeline:
// "ls *.rs | wc -l"
// ident("ls") glob("*.rs") Pipe ident("wc") ident("-l");
//
// mid_glob:
// "a*b"
// glob("a*b");
// }
//
// #[test]
// fn empty_tree() {
// let root = Tree::new().root();
// assert!(root.is_root());
// assert_eq!(root.peek(), Element::Empty);
// assert_eq!(root.children().count(), 0);
// }
//
// #[test]
// fn tree_root() {
// let root = Tree::new()
// .push(Element::Command(String::from("ls")))
// .root();
// assert!(root.is_root());
// assert_eq!(root.children().count(), 0);
// assert_eq!(root.into_node().elem, Element::Command(String::from("ls")));
// }
//
// #[test]
// fn tree_push() {
// let tree = Tree::new().push(Element::Command(String::from("ls")));
// assert_eq!(tree.peek(), Element::Command(String::from("ls")));
//
// let tree = Tree::new()
// .push(Element::Command(String::from("ls")))
// .append(Element::Command(String::from("one")));
// assert_eq!(tree.peek(), Element::Command(String::from("ls")));
// }
//
// #[test]
// fn parse_args() {
// let res = parse("ls one two three");
// assert!(res.is_ok());
//
// let tree = res.unwrap();
// assert_eq!(tree.peek(), Element::Command(String::from("ls")));
// assert_eq!(tree.children().count(), 3);
//
// let mut args = tree.children();
// assert_eq!(
// args.next().unwrap().peek(),
// Element::Literal(String::from("one"))
// );
// assert_eq!(
// args.next().unwrap().peek(),
// Element::Literal(String::from("two"))
// );
// assert_eq!(
// args.next().unwrap().peek(),
// Element::Literal(String::from("three"))
// );
// assert!(args.next().is_none());
// }
// }
