Actually finished lexer integration
A few lingering bugs in the original lexer implementation had to be fixed along the way, and some extraneous code removed. Luckily, that's all been taken care of!
parent 2ba09a17b6
commit 346c17c517
2 changed files with 35 additions and 25 deletions
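For context, here is a minimal, self-contained sketch of the bug the diff below removes from `analyze`: in the old hand-rolled `while` loop, `continue` jumps back to the loop condition without ever reaching the `char_iter.next()` call at the bottom, so the same character is re-examined forever once a `/` or `(` is seen. The `buggy_split`/`fixed_split` names and the simplified splitting logic are illustrative only, not the real lexer.

```rust
// Illustrative only: a stripped-down version of the old loop shape.
// `continue` skips the iterator advance at the bottom of the loop, so once a
// '/' is seen the same character is re-examined forever (infinite loop).
#[allow(dead_code)]
fn buggy_split(input: &str) -> (String, String) {
    let (mut left, mut right) = (String::new(), String::new());
    let mut in_right = false;
    let mut chars = input.chars();
    let mut current = chars.next();
    while current.is_some() {
        let c = current.unwrap();
        if c == '/' {
            in_right = true;
            continue; // BUG: never reaches `current = chars.next()` below
        }
        if in_right { right.push(c); } else { left.push(c); }
        current = chars.next();
    }
    (left, right)
}

// The fix mirrors the diff: let a `for` loop own the iteration so every
// branch, including the '/' case, advances to the next character.
fn fixed_split(input: &str) -> (String, String) {
    let (mut left, mut right) = (String::new(), String::new());
    let mut in_right = false;
    for c in input.chars() {
        match c {
            '/' => in_right = true,
            _ if in_right => right.push(c),
            _ => left.push(c),
        }
    }
    (left, right)
}

fn main() {
    assert_eq!(fixed_split("v/verbose"), ("v".to_string(), "verbose".to_string()));
    // buggy_split("v/verbose") would hang as soon as it reached the '/'.
    println!("fixed_split works: {:?}", fixed_split("v/verbose"));
}
```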
src/lexer.rs (47 lines changed)
@@ -31,7 +31,7 @@ pub fn collect(input: &[&str]) -> Result<Vec<Token>, Error> {
     Ok(vector)
 }

-fn analyze(input: &str) -> Result<Token, Error> {
+pub fn analyze(input: &str) -> Result<Token, Error> {
     let mut token = Token::new();

     token.is_arg = match &input[..1] {
@@ -48,33 +48,32 @@ fn analyze(input: &str) -> Result<Token, Error> {
         return Err(Error::new(ErrorKind::OptionFormat, String::from(input)));
     }

-    let option = &input[1..(input.len() - 1)];
+    let option = if token.is_arg {
+        &input[1..]
+    } else if token.has_arg {
+        &input[..(input.len() - 1)]
+    } else {
+        input
+    };

     let mut current_stage = AnalysisStage::ShortName;
-    let mut char_iter = option.chars();
-    let mut current_char = char_iter.next();
-    while current_char.is_some() {
-        let c = current_char.unwrap();
+    for c in option.chars() {
         match c {
-            '/' => {
-                current_stage = AnalysisStage::LongName;
-                continue;
-            },
-            '(' => {
-                current_stage = AnalysisStage::Description;
-                continue;
-            },
-            ')' => break,
-            _ => ()
-        }
-
-        match current_stage {
-            AnalysisStage::ShortName => token.short_name.push(c),
-            AnalysisStage::LongName => token.long_name.push(c),
-            AnalysisStage::Description => token.description.push(c)
+            '/' => current_stage = AnalysisStage::LongName,
+            '(' => current_stage = AnalysisStage::Description,
+            ')' => (),
+            _ => {
+                match current_stage {
+                    AnalysisStage::ShortName => token.short_name.push(c),
+                    AnalysisStage::LongName => token.long_name.push(c),
+                    AnalysisStage::Description => token.description.push(c)
+                }
+            }
         }
+    }

-        current_char = char_iter.next();
+    if token.short_name.is_empty() && token.long_name.is_empty() {
+        token.is_group = true;
     }

     Ok(token)
@@ -117,6 +116,16 @@ impl Token {
             ""
         }
     }
+
+    pub fn fmt_with_padding(&self, padding: usize) -> String {
+        let mut name = format!("-{}, --{}", self.short_name, self.long_name);
+
+        for _ in 0..padding {
+            name.push(' ');
+        }
+
+        name
+    }
 }

 impl Display for Token {
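To make the last hunk above concrete, here is a small self-contained sketch of what the new `fmt_with_padding` helper produces. The `Token` struct below is a two-field stand-in rather than the real type, and the "align flags in help output" use is an assumption on my part.

```rust
// Stand-in type with only the two fields `fmt_with_padding` reads.
struct Token {
    short_name: String,
    long_name: String,
}

impl Token {
    // Same body as the method added in the diff above.
    fn fmt_with_padding(&self, padding: usize) -> String {
        let mut name = format!("-{}, --{}", self.short_name, self.long_name);

        for _ in 0..padding {
            name.push(' ');
        }

        name
    }
}

fn main() {
    let token = Token {
        short_name: String::from("v"),
        long_name: String::from("verbose"),
    };

    // "-v, --verbose" followed by four trailing spaces, presumably so
    // descriptions can line up in a help listing.
    assert_eq!(token.fmt_with_padding(4), "-v, --verbose    ");
    println!("[{}]", token.fmt_with_padding(4));
}
```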
src/lib.rs
@@ -24,6 +24,7 @@ use std::env::Args;

 pub use errors::{Error, ErrorKind};
 pub use matches::Matches;
+pub use lexer::{analyze, collect, Token};
 use opts::Opts;

 pub fn parse(mut args: Args, options: &[&'static str]) -> Result<Matches, Error> {
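With `analyze` now public and re-exported from the crate root, callers outside the lexer module can tokenize an option spec directly. As a rough, hedged illustration of what the rewritten character loop yields: the stand-in `Stage` and `parse_spec` names and the spec syntax used in `main` are assumptions, the `is_arg`/`has_arg` prefix and suffix detection is not shown in this diff and so is omitted, and the real `analyze` also sets `is_arg`/`has_arg`/`is_group` on a `Token`.

```rust
// Stand-in for the lexer's AnalysisStage (variant names taken from the diff).
enum Stage {
    ShortName,
    LongName,
    Description,
}

// Illustrative re-creation of the character loop in the rewritten `analyze`:
// '/' switches from the short name to the long name, '(' switches to the
// description, and ')' carries no data of its own.
fn parse_spec(spec: &str) -> (String, String, String) {
    let mut short = String::new();
    let mut long = String::new();
    let mut description = String::new();

    let mut stage = Stage::ShortName;
    for c in spec.chars() {
        match c {
            '/' => stage = Stage::LongName,
            '(' => stage = Stage::Description,
            ')' => (),
            _ => match stage {
                Stage::ShortName => short.push(c),
                Stage::LongName => long.push(c),
                Stage::Description => description.push(c),
            },
        }
    }

    (short, long, description)
}

fn main() {
    // The exact option-spec syntax is assumed; only the '/', '(' and ')'
    // handling is taken from the diff.
    let (short, long, desc) = parse_spec("v/verbose(Enable verbose output)");
    assert_eq!(short, "v");
    assert_eq!(long, "verbose");
    assert_eq!(desc, "Enable verbose output");
    println!("-{short}, --{long}: {desc}");
}
```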