summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorNadrieril2019-02-27 15:35:04 +0100
committerNadrieril2019-02-27 15:35:58 +0100
commitd5bc8da87f3fed99a010d867a3e34d2768caf768 (patch)
treef6341f5790a7041c19eb2320edf0d029569543a5 /src
parent3118e96dfb9aebc1486886cc6659772a5e91b2e8 (diff)
Try to update to latest nom
Diffstat (limited to 'src')
-rw-r--r--src/grammar.lalrpop2
-rw-r--r--src/lexer.rs54
-rw-r--r--src/main.rs2
-rw-r--r--src/parser.rs2
4 files changed, 29 insertions, 31 deletions
diff --git a/src/grammar.lalrpop b/src/grammar.lalrpop
index 35716fc..1e4d323 100644
--- a/src/grammar.lalrpop
+++ b/src/grammar.lalrpop
@@ -13,7 +13,7 @@ grammar<'input>;
extern {
type Location = usize;
- type Error = LexicalError;
+ type Error = LexicalError<'input>;
enum Tok<'input> {
Pi => Tok::Pi,
diff --git a/src/lexer.rs b/src/lexer.rs
index 0b58293..8617f4b 100644
--- a/src/lexer.rs
+++ b/src/lexer.rs
@@ -64,10 +64,7 @@ pub enum Tok<'i> {
}
#[derive(Debug)]
-pub enum LexicalError {
- Error(usize, nom::simple_errors::Err<u32>),
- Incomplete(nom::Needed),
-}
+pub struct LexicalError<'a>(pub usize, pub nom::Err<&'a str>);
pub type Spanned<Tok, Loc, Error> = Result<(Loc, Tok, Loc), Error>;
@@ -109,11 +106,11 @@ named!(identifier<&str, &str>, recognize!(preceded!(
macro_rules! ident_tag {
($i:expr, $tag:expr) => {
match identifier($i) {
- nom::IResult::Done(i, s) => {
+ Ok((i, s)) => {
if s == $tag {
- nom::IResult::Done(i, s)
+ Ok((i, s))
} else {
- nom::IResult::Error(error_position!(nom::ErrorKind::Tag, $i))
+ Err(nom::Err::Error(error_position!($i, nom::ErrorKind::Tag)))
}
}
r => r,
@@ -146,20 +143,20 @@ named!(string_escape_numeric<&str, char>, map_opt!(alt!(
), ::std::char::from_u32));
fn string_lit_inner(input: &str) -> nom::IResult<&str, String> {
- use nom::IResult::*;;
+ use nom::Err;
use nom::ErrorKind;
let mut s = String::new();
let mut cs = input.char_indices().peekable();
while let Some((i, c)) = cs.next() {
match c {
- '"' => return nom::IResult::Done(&input[i..], s),
+ '"' => return Ok((&input[i..], s)),
'\\' => match cs.next() {
Some((_, s)) if s.is_whitespace() => {
while cs.peek().map(|&(_, s)| s.is_whitespace()) == Some(true) {
let _ = cs.next();
}
if cs.next().map(|p| p.1) != Some('\\') {
- return Error(error_position!(ErrorKind::Custom(4 /* FIXME */), input));
+ return Err(Err::Error(error_position!(input, ErrorKind::Custom(4 /* FIXME */))));
}
}
Some((j, ec)) => {
@@ -168,24 +165,23 @@ fn string_lit_inner(input: &str) -> nom::IResult<&str, String> {
// FIXME Named ASCII escapes and control character escapes
} else {
match string_escape_numeric(&input[j..]) {
- Done(rest, esc) => {
+ Ok((rest, esc)) => {
let &(k, _) = cs.peek().unwrap();
// digits are always single byte ASCII characters
let consumed = input[k..].len() - rest.len();
for _ in 0..consumed { let _ = cs.next(); }
s.push(esc);
}
- Incomplete(s) => return Incomplete(s),
- Error(e) => return Error(e),
+ Err(e) => return Err(e),
}
}
},
- _ => return Error(error_position!(ErrorKind::Custom(5 /* FIXME */), input)),
+ _ => return Err(Err::Error(error_position!(input, ErrorKind::Custom(5 /* FIXME */)))),
},
_ => s.push(c),
}
}
- Error(error_position!(ErrorKind::Custom(3 /* FIXME */), input))
+ Err(Err::Error(error_position!(input, ErrorKind::Custom(3 /* FIXME */))))
}
named!(string_lit<&str, String>, delimited!(tag!("\""), string_lit_inner, tag!("\"")));
@@ -322,30 +318,26 @@ impl<'input> Lexer<'input> {
}
impl<'input> Iterator for Lexer<'input> {
- type Item = Spanned<Tok<'input>, usize, LexicalError>;
+ type Item = Spanned<Tok<'input>, usize, LexicalError<'input>>;
fn next(&mut self) -> Option<Self::Item> {
- use nom::IResult::*;
self.skip_comments_and_whitespace();
let input = self.current_input();
if input.is_empty() {
return None;
}
match token(input) {
- Done(rest, t) => {
+ Ok((rest, t)) => {
let parsed_len = input.len() - rest.len();
//println!("parsed {} bytes => {:?}", parsed_len, t);
let start = self.offset;
self.offset += parsed_len;
Some(Ok((start, t, self.offset)))
}
- Error(e) => {
+ Err(e) => {
let offset = self.offset;
- self.offset = self.input.len();
- Some(Err(LexicalError::Error(offset, e)))
- }
- Incomplete(needed) => {
- Some(Err(LexicalError::Incomplete(needed)))
+ // self.offset = self.input.len();
+ Some(Err(LexicalError(offset, e)))
}
}
}
@@ -354,6 +346,12 @@ impl<'input> Iterator for Lexer<'input> {
#[test]
fn test_lex() {
use self::Tok::*;
+ let s = "22";
+ let expected = [Lambda, Bool(false)];
+ let lexer = Lexer::new(s);
+ let tokens = lexer.map(|r| r.unwrap().1).collect::<Vec<_>>();
+ assert_eq!(&tokens, &expected);
+
let s = "λ(b : Bool) → b == False";
let expected = [Lambda,
ParenL,
@@ -369,9 +367,9 @@ fn test_lex() {
let tokens = lexer.map(|r| r.unwrap().1).collect::<Vec<_>>();
assert_eq!(&tokens, &expected);
- assert_eq!(string_lit(r#""a\&b""#).to_result(), Ok("ab".to_owned()));
- assert_eq!(string_lit(r#""a\ \b""#).to_result(), Ok("ab".to_owned()));
+ assert_eq!(string_lit(r#""a\&b""#), Ok(("", "ab".to_owned())));
+ assert_eq!(string_lit(r#""a\ \b""#), Ok(("", "ab".to_owned())));
assert!(string_lit(r#""a\ b""#).is_err());
- assert_eq!(string_lit(r#""a\nb""#).to_result(), Ok("a\nb".to_owned()));
- assert_eq!(string_lit(r#""\o141\x62\99""#).to_result(), Ok("abc".to_owned()));
+ assert_eq!(string_lit(r#""a\nb""#), Ok(("", "a\nb".to_owned())));
+ assert_eq!(string_lit(r#""\o141\x62\99""#), Ok(("", "abc".to_owned())));
}
diff --git a/src/main.rs b/src/main.rs
index fde3978..2a1eb11 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -71,7 +71,7 @@ fn main() {
io::stdin().read_to_string(&mut buffer).unwrap();
let expr = match parser::parse_expr(&buffer) {
Ok(e) => e,
- Err(lalrpop_util::ParseError::User { error: lexer::LexicalError::Error(pos, e) }) => {
+ Err(lalrpop_util::ParseError::User { error: lexer::LexicalError(pos, e) }) => {
print_error(&format!("Unexpected token {:?}", e), &buffer, pos, pos);
return;
}
diff --git a/src/parser.rs b/src/parser.rs
index 3ad7ffc..1127d86 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -4,7 +4,7 @@ use grammar;
use grammar_util::BoxExpr;
use lexer::{Lexer, LexicalError, Tok};
-pub type ParseError<'i> = lalrpop_util::ParseError<usize, Tok<'i>, LexicalError>;
+pub type ParseError<'i> = lalrpop_util::ParseError<usize, Tok<'i>, LexicalError<'i>>;
pub fn parse_expr(s: &str) -> Result<BoxExpr, ParseError> {
grammar::ExprParser::new().parse(Lexer::new(s))