author     Fintan Halpenny    2019-09-06 12:04:28 +0200
committer  Fintan Halpenny    2019-09-06 12:04:28 +0200
commit     52c91e08db68e05f760ebfd465b84fe4107731df (patch)
tree       9f17524482d5861f7bcda66d2c2de363ecf5fdc5
parent     4edaf0814868e604eed5cfd594ea3f448ca90678 (diff)
parent     7d84b5eb6fdd82fe24139452e2427bfb8128f123 (diff)
Merge remote-tracking branch 'origin/fintan/canonicalize' into fintan/canonicalize
Diffstat
-rw-r--r--  .rustfmt.toml                                2
-rw-r--r--  Cargo.lock                                  49
-rw-r--r--  dhall/src/phase/binary.rs                   28
-rw-r--r--  dhall/src/phase/normalize.rs                 7
-rw-r--r--  dhall/src/phase/resolve.rs                   1
-rw-r--r--  dhall/src/phase/typecheck.rs                 8
-rw-r--r--  dhall_proc_macros/Cargo.toml                 6
-rw-r--r--  dhall_proc_macros/src/lib.rs                19
-rw-r--r--  dhall_proc_macros/src/make_parser.rs       111
-rw-r--r--  dhall_proc_macros/src/parse_children.rs    212
-rw-r--r--  dhall_syntax/Cargo.toml                      2
-rw-r--r--  dhall_syntax/src/core/expr.rs                9
-rw-r--r--  dhall_syntax/src/lib.rs                      3
-rw-r--r--  dhall_syntax/src/parser.rs                1881
-rw-r--r--  improved_slice_patterns/src/lib.rs          25
-rw-r--r--  rust-toolchain                               2

16 files changed, 1368 insertions, 997 deletions
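
A recurring change throughout this diff is the slice-pattern syntax update: match arms such as [U64(0), f, args..] in dhall/src/phase/binary.rs and [_, l, _, cons, nil, r..] in dhall/src/phase/normalize.rs become [U64(0), f, args @ ..] and [_, l, _, cons, nil, r @ ..], following the newer subslice-pattern syntax. The sketch below is not part of the repository; it is a minimal, self-contained illustration of the `binder @ ..` form the commit migrates to, with a made-up function and values:

    // Minimal sketch (not from the repository) of the subslice-pattern
    // syntax this commit adopts: `rest @ ..` binds the remaining elements,
    // where the older nightly syntax was `rest..`.
    fn describe(values: &[u64]) -> String {
        match values {
            [] => "empty".to_string(),
            [only] => format!("only {}", only),
            // `rest @ ..` binds the tail of the slice as a subslice.
            [first, rest @ ..] => {
                format!("{} followed by {} more", first, rest.len())
            }
        }
    }

    fn main() {
        assert_eq!(describe(&[]), "empty");
        assert_eq!(describe(&[7]), "only 7");
        assert_eq!(describe(&[1, 2, 3]), "1 followed by 2 more");
    }

The same shape is what the new parse_children! macro in dhall_proc_macros/src/parse_children.rs generates: a variable-length branch such as [double_quote_chunk(chunks)..] expands to a slice pattern with a `@ ..` subslice binder plus an if-guard that checks every bound rule matches.
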
diff --git a/.rustfmt.toml b/.rustfmt.toml index 0713c39..ee91dbd 100644 --- a/.rustfmt.toml +++ b/.rustfmt.toml @@ -1,3 +1,3 @@ edition = "2018" max_width = 80 -error_on_line_overflow = true +# error_on_line_overflow = true @@ -95,9 +95,9 @@ name = "dhall_proc_macros" version = "0.1.0" dependencies = [ "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.31 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -105,9 +105,11 @@ name = "dhall_syntax" version = "0.1.0" dependencies = [ "dhall_generated_parser 0.1.0", + "dhall_proc_macros 0.1.0", "either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "take_mut 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -181,6 +183,11 @@ dependencies = [ ] [[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] name = "maplit" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -270,6 +277,14 @@ dependencies = [ ] [[package]] +name = "proc-macro2" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] name = "quote" version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -278,6 +293,14 @@ dependencies = [ ] [[package]] +name = "quote" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] name = "same-file" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -345,6 +368,16 @@ dependencies = [ ] [[package]] +name = "syn" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] name = "take_mut" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -387,6 +420,11 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] +name = "unicode-xid" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] name = "version_check" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -457,6 +495,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum indexmap 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = 
"7e81a7c05f79578dbc15793d8b619db9ba32b4577003ef3af1a91c416798c58d" "checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" +"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" "checksum maplit 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "08cbb6b4fef96b6d77bfc40ec491b1690c779e77b05cd9f07f787ed376fd4c43" "checksum memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2efc7bc57c883d4a4d6e3246905283d8dae951bb3bd32f49d6ef297f546e1c39" "checksum nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6" @@ -468,13 +507,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum pretty 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f60c0d9f6fc88ecdd245d90c1920ff76a430ab34303fc778d33b1d0a4c3bf6d3" "checksum pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427" "checksum proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)" = "4d317f9caece796be1980837fd5cb3dfec5613ebdb04ad0956deea83ce168915" +"checksum proc-macro2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "175a40b9cf564ce9bf050654633dbf339978706b8ead1a907bb970b63185dd95" "checksum quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "faf4799c5d274f3868a4aae320a0a182cbd2baee377b378f080e16a23e9d80db" +"checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe" "checksum same-file 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8f20c4be53a8a1ff4c1f1b2bd14570d2f634628709752f0702ecdd2b3f9a5267" "checksum serde 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)" = "aa5f7c20820475babd2c077c3ab5f8c77a31c15e16ea38687b4c02d3e48680f4" "checksum serde_cbor 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "45cd6d95391b16cd57e88b68be41d504183b7faae22030c0cc3b3f73dd57b2fd" "checksum serde_derive 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)" = "58fc82bec244f168b23d1963b45c8bf5726e9a15a9d146a067f9081aeed2de79" "checksum sha-1 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "51b9d1f3b5de8a167ab06834a7c883bd197f2191e1dda1a22d9ccfeedbf9aded" "checksum syn 0.15.31 (registry+https://github.com/rust-lang/crates.io-index)" = "d2b4cfac95805274c6afdb12d8f770fa2d27c045953e7b630a81801953699a9a" +"checksum syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf" "checksum take_mut 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" "checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1" "checksum term-painter 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "dcaa948f0e3e38470cd8dc8dcfe561a75c9e43f28075bb183845be2b9b3c08cf" @@ -482,6 +524,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" "checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169" "checksum ucd-trie 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "71a9c5b1fe77426cf144cc30e49e955270f5086e31a6441dfa8b32efc09b9d77" "checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" +"checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c" "checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" "checksum walkdir 2.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "9d9d7ed3431229a144296213105a390676cc49c9b6a72bd19f3176c98e129fa1" "checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" diff --git a/dhall/src/phase/binary.rs b/dhall/src/phase/binary.rs index 3c45e81..4831c7e 100644 --- a/dhall/src/phase/binary.rs +++ b/dhall/src/phase/binary.rs @@ -53,7 +53,7 @@ fn cbor_value_to_dhall(data: &cbor::Value) -> Result<DecodedExpr, DecodeError> { let l = Label::from(l.as_str()); Var(V(l, *n as usize)) } - [U64(0), f, args..] => { + [U64(0), f, args @ ..] => { if args.is_empty() { Err(DecodeError::WrongFormatError( "Function application must have at least one argument" @@ -127,7 +127,7 @@ fn cbor_value_to_dhall(data: &cbor::Value) -> Result<DecodedExpr, DecodeError> { let t = cbor_value_to_dhall(&t)?; EmptyListLit(rc(App(rc(ExprF::Builtin(Builtin::List)), t))) } - [U64(4), Null, rest..] => { + [U64(4), Null, rest @ ..] => { let rest = rest .iter() .map(cbor_value_to_dhall) @@ -175,7 +175,7 @@ fn cbor_value_to_dhall(data: &cbor::Value) -> Result<DecodedExpr, DecodeError> { let l = Label::from(l.as_str()); Field(x, l) } - [U64(10), x, rest..] => { + [U64(10), x, rest @ ..] => { let x = cbor_value_to_dhall(&x)?; let labels = rest .iter() @@ -204,7 +204,7 @@ fn cbor_value_to_dhall(data: &cbor::Value) -> Result<DecodedExpr, DecodeError> { [U64(15), U64(x)] => NaturalLit(*x as Natural), [U64(16), U64(x)] => IntegerLit(*x as Integer), [U64(16), I64(x)] => IntegerLit(*x as Integer), - [U64(18), String(first), rest..] => { + [U64(18), String(first), rest @ ..] => { TextLit(InterpolatedText::from(( first.clone(), rest.iter() @@ -226,7 +226,7 @@ fn cbor_value_to_dhall(data: &cbor::Value) -> Result<DecodedExpr, DecodeError> { let t = cbor_value_to_dhall(&t)?; Assert(t) } - [U64(24), hash, U64(mode), U64(scheme), rest..] => { + [U64(24), hash, U64(mode), U64(scheme), rest @ ..] => { let mode = match mode { 0 => ImportMode::Code, 1 => ImportMode::RawText, @@ -239,7 +239,9 @@ fn cbor_value_to_dhall(data: &cbor::Value) -> Result<DecodedExpr, DecodeError> { let hash = match hash { Null => None, Bytes(bytes) => match bytes.as_slice() { - [18, 32, rest..] => Some(Hash::SHA256(rest.to_vec())), + [18, 32, rest @ ..] => { + Some(Hash::SHA256(rest.to_vec())) + } _ => Err(DecodeError::WrongFormatError(format!( "import/hash/unknown_multihash: {:?}", bytes @@ -337,7 +339,7 @@ fn cbor_value_to_dhall(data: &cbor::Value) -> Result<DecodedExpr, DecodeError> { location, }) } - [U64(25), bindings..] => { + [U64(25), bindings @ ..] 
=> { let mut tuples = bindings.iter().tuples(); let bindings = (&mut tuples) .map(|(x, t, v)| { @@ -481,13 +483,9 @@ where BoolIf(x, y, z) => ser_seq!(ser; tag(14), expr(x), expr(y), expr(z)), Var(V(l, n)) if l == &"_".into() => ser.serialize_u64(*n as u64), Var(V(l, n)) => ser_seq!(ser; label(l), U64(*n as u64)), - Lam(l, x, y) if l == &"_".into() => { - ser_seq!(ser; tag(1), expr(x), expr(y)) - } + Lam(l, x, y) if l == &"_".into() => ser_seq!(ser; tag(1), expr(x), expr(y)), Lam(l, x, y) => ser_seq!(ser; tag(1), label(l), expr(x), expr(y)), - Pi(l, x, y) if l == &"_".into() => { - ser_seq!(ser; tag(2), expr(x), expr(y)) - } + Pi(l, x, y) if l == &"_".into() => ser_seq!(ser; tag(2), expr(x), expr(y)), Pi(l, x, y) => ser_seq!(ser; tag(2), label(l), expr(x), expr(y)), Let(_, _, _, _) => { let (bound_e, bindings) = collect_nested_lets(e); @@ -559,9 +557,7 @@ where ser_seq!(ser; tag(3), U64(op), expr(x), expr(y)) } Merge(x, y, None) => ser_seq!(ser; tag(6), expr(x), expr(y)), - Merge(x, y, Some(z)) => { - ser_seq!(ser; tag(6), expr(x), expr(y), expr(z)) - } + Merge(x, y, Some(z)) => ser_seq!(ser; tag(6), expr(x), expr(y), expr(z)), ToMap(x, None) => ser_seq!(ser; tag(27), expr(x)), ToMap(x, Some(y)) => ser_seq!(ser; tag(27), expr(x), expr(y)), Projection(x, ls) => ser.collect_seq( diff --git a/dhall/src/phase/normalize.rs b/dhall/src/phase/normalize.rs index 3f6e99c..0992f74 100644 --- a/dhall/src/phase/normalize.rs +++ b/dhall/src/phase/normalize.rs @@ -290,7 +290,7 @@ pub(crate) fn apply_builtin( ) } }, - (ListFold, [_, l, _, cons, nil, r..]) => match &*l.as_whnf() { + (ListFold, [_, l, _, cons, nil, r @ ..]) => match &*l.as_whnf() { EmptyListLit(_) => Ret::ValueWithRemainingArgs(r, nil.clone()), NEListLit(xs) => { let mut v = nil.clone(); @@ -326,7 +326,8 @@ pub(crate) fn apply_builtin( ) } }, - (OptionalFold, [_, v, _, just, nothing, r..]) => match &*v.as_whnf() { + (OptionalFold, [_, v, _, just, nothing, r @ ..]) => match &*v.as_whnf() + { EmptyOptionalLit(_) => { Ret::ValueWithRemainingArgs(r, nothing.clone()) } @@ -356,7 +357,7 @@ pub(crate) fn apply_builtin( ), ), }, - (NaturalFold, [n, t, succ, zero, r..]) => match &*n.as_whnf() { + (NaturalFold, [n, t, succ, zero, r @ ..]) => match &*n.as_whnf() { NaturalLit(0) => Ret::ValueWithRemainingArgs(r, zero.clone()), NaturalLit(n) => { let fold = Value::from_builtin(NaturalFold) diff --git a/dhall/src/phase/resolve.rs b/dhall/src/phase/resolve.rs index 54a0f27..4034a5c 100644 --- a/dhall/src/phase/resolve.rs +++ b/dhall/src/phase/resolve.rs @@ -17,7 +17,6 @@ type ImportCache = HashMap<Import, Normalized>; pub(crate) type ImportStack = Vec<Import>; - fn resolve_import( import: &Import, root: &ImportRoot, diff --git a/dhall/src/phase/typecheck.rs b/dhall/src/phase/typecheck.rs index 9013c1f..2e61fbc 100644 --- a/dhall/src/phase/typecheck.rs +++ b/dhall/src/phase/typecheck.rs @@ -249,9 +249,7 @@ fn type_of_builtin<E>(b: Builtin) -> Expr<E> { list ), ListLength => make_type!(forall (a: Type) -> (List a) -> Natural), - ListHead | ListLast => { - make_type!(forall (a: Type) -> (List a) -> Optional a) - } + ListHead | ListLast => make_type!(forall (a: Type) -> (List a) -> Optional a), ListIndexed => make_type!( forall (a: Type) -> (List a) -> @@ -375,9 +373,7 @@ fn type_last_layer( Import(_) => unreachable!( "There should remain no imports in a resolved expression" ), - Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => { - unreachable!() - } + Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachable!(), 
App(f, a) => { let tf = f.get_type()?; let tf_borrow = tf.as_whnf(); diff --git a/dhall_proc_macros/Cargo.toml b/dhall_proc_macros/Cargo.toml index df1eda8..b641a39 100644 --- a/dhall_proc_macros/Cargo.toml +++ b/dhall_proc_macros/Cargo.toml @@ -11,6 +11,6 @@ doctest = false [dependencies] itertools = "0.8.0" -quote = "0.6.11" -proc-macro2 = "0.4.27" -syn = "0.15.29" +quote = "1.0.2" +proc-macro2 = "1.0.2" +syn = { version = "1.0.5", features = ["full", "extra-traits"] } diff --git a/dhall_proc_macros/src/lib.rs b/dhall_proc_macros/src/lib.rs index 5304429..37e8f9f 100644 --- a/dhall_proc_macros/src/lib.rs +++ b/dhall_proc_macros/src/lib.rs @@ -1,3 +1,4 @@ +#![feature(drain_filter)] //! This crate contains the code-generation primitives for the [dhall-rust][dhall-rust] crate. //! This is highly unstable and breaks regularly; use at your own risk. //! @@ -6,6 +7,8 @@ extern crate proc_macro; mod derive; +mod make_parser; +mod parse_children; use proc_macro::TokenStream; @@ -13,3 +16,19 @@ use proc_macro::TokenStream; pub fn derive_static_type(input: TokenStream) -> TokenStream { derive::derive_static_type(input) } + +#[proc_macro_attribute] +pub fn make_parser(attrs: TokenStream, input: TokenStream) -> TokenStream { + TokenStream::from(match make_parser::make_parser(attrs, input) { + Ok(tokens) => tokens, + Err(err) => err.to_compile_error(), + }) +} + +#[proc_macro] +pub fn parse_children(input: TokenStream) -> TokenStream { + TokenStream::from(match parse_children::parse_children(input) { + Ok(tokens) => tokens, + Err(err) => err.to_compile_error(), + }) +} diff --git a/dhall_proc_macros/src/make_parser.rs b/dhall_proc_macros/src/make_parser.rs new file mode 100644 index 0000000..268a639 --- /dev/null +++ b/dhall_proc_macros/src/make_parser.rs @@ -0,0 +1,111 @@ +use quote::quote; +use syn::parse::{ParseStream, Result}; +use syn::spanned::Spanned; +use syn::{ + parse_quote, Error, Expr, FnArg, Ident, ImplItem, ImplItemMethod, ItemImpl, + Pat, Token, +}; + +fn apply_special_attrs(function: &mut ImplItemMethod) -> Result<()> { + *function = parse_quote!( + #[allow(non_snake_case, dead_code)] + #function + ); + + let recognized_attrs: Vec<_> = function + .attrs + .drain_filter(|attr| attr.path.is_ident("prec_climb")) + .collect(); + + let name = function.sig.ident.clone(); + + if recognized_attrs.is_empty() { + // do nothing + } else if recognized_attrs.len() > 1 { + return Err(Error::new( + recognized_attrs[1].span(), + "expected a single prec_climb attribute", + )); + } else { + let attr = recognized_attrs.into_iter().next().unwrap(); + let (child_rule, climber) = + attr.parse_args_with(|input: ParseStream| { + let child_rule: Ident = input.parse()?; + let _: Token![,] = input.parse()?; + let climber: Expr = input.parse()?; + Ok((child_rule, climber)) + })?; + + // Get the name of the first (`input`) function argument + let first_arg = function.sig.inputs.first().ok_or_else(|| { + Error::new( + function.sig.inputs.span(), + "a prec_climb function needs 4 arguments", + ) + })?; + let first_arg = match &first_arg { + FnArg::Receiver(_) => return Err(Error::new( + first_arg.span(), + "a prec_climb function should not have a `self` argument", + )), + FnArg::Typed(first_arg) => match &*first_arg.pat{ + Pat::Ident(ident) => &ident.ident, + _ => return Err(Error::new( + first_arg.span(), + "this argument should be a plain identifier instead of a pattern", + )), + } + }; + + function.block = parse_quote!({ + #function + + #climber.climb( + #first_arg.pair.clone().into_inner(), + |p| 
Self::#child_rule(#first_arg.with_pair(p)), + |l, op, r| { + #name(#first_arg.clone(), l?, op, r?) + }, + ) + }); + // Remove the 3 last arguments to keep only the `input` one + function.sig.inputs.pop(); + function.sig.inputs.pop(); + function.sig.inputs.pop(); + // Check that an argument remains + function.sig.inputs.first().ok_or_else(|| { + Error::new( + function.sig.inputs.span(), + "a prec_climb function needs 4 arguments", + ) + })?; + } + + Ok(()) +} + +pub fn make_parser( + attrs: proc_macro::TokenStream, + input: proc_macro::TokenStream, +) -> Result<proc_macro2::TokenStream> { + let rule_enum: Ident = syn::parse(attrs)?; + + let mut imp: ItemImpl = syn::parse(input)?; + imp.items + .iter_mut() + .map(|item| match item { + ImplItem::Method(m) => apply_special_attrs(m), + _ => Ok(()), + }) + .collect::<Result<()>>()?; + + let ty = &imp.self_ty; + let (impl_generics, _, where_clause) = imp.generics.split_for_impl(); + Ok(quote!( + impl #impl_generics PestConsumer for #ty #where_clause { + type RuleEnum = #rule_enum; + } + + #imp + )) +} diff --git a/dhall_proc_macros/src/parse_children.rs b/dhall_proc_macros/src/parse_children.rs new file mode 100644 index 0000000..b1d43fc --- /dev/null +++ b/dhall_proc_macros/src/parse_children.rs @@ -0,0 +1,212 @@ +use proc_macro2::{Span, TokenStream}; +use quote::quote; +use syn::parse::{Parse, ParseStream, Result}; +use syn::punctuated::Punctuated; +use syn::spanned::Spanned; +use syn::{bracketed, parenthesized, token, Error, Expr, Ident, Pat, Token}; + +#[derive(Debug, Clone)] +struct ChildrenBranch { + pattern_span: Span, + pattern: Punctuated<ChildrenBranchPatternItem, Token![,]>, + body: Expr, +} + +#[derive(Debug, Clone)] +enum ChildrenBranchPatternItem { + Single { rule_name: Ident, binder: Pat }, + Multiple { rule_name: Ident, binder: Ident }, +} + +#[derive(Debug, Clone)] +struct ParseChildrenInput { + input_expr: Expr, + branches: Punctuated<ChildrenBranch, Token![,]>, +} + +impl Parse for ChildrenBranch { + fn parse(input: ParseStream) -> Result<Self> { + let contents; + let _: token::Bracket = bracketed!(contents in input); + let pattern_unparsed: TokenStream = contents.fork().parse()?; + let pattern_span = pattern_unparsed.span(); + let pattern = Punctuated::parse_terminated(&contents)?; + let _: Token![=>] = input.parse()?; + let body = input.parse()?; + + Ok(ChildrenBranch { + pattern_span, + pattern, + body, + }) + } +} + +impl Parse for ChildrenBranchPatternItem { + fn parse(input: ParseStream) -> Result<Self> { + let contents; + let rule_name = input.parse()?; + parenthesized!(contents in input); + if input.peek(Token![..]) { + let binder = contents.parse()?; + let _: Token![..] = input.parse()?; + Ok(ChildrenBranchPatternItem::Multiple { rule_name, binder }) + } else if input.is_empty() || input.peek(Token![,]) { + let binder = contents.parse()?; + Ok(ChildrenBranchPatternItem::Single { rule_name, binder }) + } else { + Err(input.error("expected `..` or nothing")) + } + } +} + +impl Parse for ParseChildrenInput { + fn parse(input: ParseStream) -> Result<Self> { + let input_expr = input.parse()?; + let _: Token![;] = input.parse()?; + let branches = Punctuated::parse_terminated(input)?; + + Ok(ParseChildrenInput { + input_expr, + branches, + }) + } +} + +fn make_parser_branch( + branch: &ChildrenBranch, + i_inputs: &Ident, +) -> Result<TokenStream> { + use ChildrenBranchPatternItem::{Multiple, Single}; + + let body = &branch.body; + + // Convert the input pattern into a pattern-match on the Rules of the children. 
This uses + // slice_patterns. + // A single pattern just checks that the rule matches; a variable-length pattern binds the + // subslice and checks, in the if-guard, that its elements all match the chosen Rule. + let i_variable_pattern = + Ident::new("___variable_pattern", Span::call_site()); + let match_pat = branch.pattern.iter().map(|item| match item { + Single { rule_name, .. } => { + quote!(<<Self as PestConsumer>::RuleEnum>::#rule_name) + } + Multiple { .. } => quote!(#i_variable_pattern @ ..), + }); + let match_filter = branch.pattern.iter().map(|item| match item { + Single { .. } => quote!(), + Multiple { rule_name, .. } => quote!( + { + // We can't use .all() directly in the pattern guard; see + // https://github.com/rust-lang/rust/issues/59803. + let all_match = |slice: &[_]| { + slice.iter().all(|r| + r == &<<Self as PestConsumer>::RuleEnum>::#rule_name + ) + }; + all_match(#i_variable_pattern) + } && + ), + }); + + // Once we have found a branch that matches, we need to parse the children. + let mut singles_before_multiple = Vec::new(); + let mut multiple = None; + let mut singles_after_multiple = Vec::new(); + for item in &branch.pattern { + match item { + Single { + rule_name, binder, .. + } => { + if multiple.is_none() { + singles_before_multiple.push((rule_name, binder)) + } else { + singles_after_multiple.push((rule_name, binder)) + } + } + Multiple { + rule_name, binder, .. + } => { + if multiple.is_none() { + multiple = Some((rule_name, binder)) + } else { + return Err(Error::new( + branch.pattern_span.clone(), + "multiple variable-length patterns are not allowed", + )); + } + } + } + } + let mut parses = Vec::new(); + for (rule_name, binder) in singles_before_multiple.into_iter() { + parses.push(quote!( + let #binder = Self::#rule_name( + #i_inputs.next().unwrap() + )?; + )) + } + // Note the `rev()`: we are taking inputs from the end of the iterator in reverse order, so that + // only the unmatched inputs are left for the variable-length pattern, if any. + for (rule_name, binder) in singles_after_multiple.into_iter().rev() { + parses.push(quote!( + let #binder = Self::#rule_name( + #i_inputs.next_back().unwrap() + )?; + )) + } + if let Some((rule_name, binder)) = multiple { + parses.push(quote!( + let #binder = #i_inputs + .map(|i| Self::#rule_name(i)) + .collect::<Result<Vec<_>, _>>()? + .into_iter(); + )) + } + + Ok(quote!( + [#(#match_pat),*] if #(#match_filter)* true => { + #(#parses)* + #body + } + )) +} + +pub fn parse_children( + input: proc_macro::TokenStream, +) -> Result<proc_macro2::TokenStream> { + let input: ParseChildrenInput = syn::parse(input)?; + + let i_children_rules = Ident::new("___children_rules", Span::call_site()); + let i_inputs = Ident::new("___inputs", Span::call_site()); + + let input_expr = &input.input_expr; + let branches = input + .branches + .iter() + .map(|br| make_parser_branch(br, &i_inputs)) + .collect::<Result<Vec<_>>>()?; + + Ok(quote!({ + let #i_children_rules: Vec<_> = #input_expr.pair + .clone() + .into_inner() + .map(|p| p.as_rule()) + .collect(); + + #[allow(unused_mut)] + let mut #i_inputs = #input_expr + .pair + .clone() + .into_inner() + .map(|p| #input_expr.with_pair(p)); + + #[allow(unreachable_code)] + match #i_children_rules.as_slice() { + #(#branches,)* + [..] 
=> return Err(#input_expr.error( + format!("Unexpected children: {:?}", #i_children_rules) + )), + } + })) +} diff --git a/dhall_syntax/Cargo.toml b/dhall_syntax/Cargo.toml index 1da10c7..eb492d0 100644 --- a/dhall_syntax/Cargo.toml +++ b/dhall_syntax/Cargo.toml @@ -15,4 +15,6 @@ pest = "2.1" either = "1.5.2" take_mut = "0.2.2" hex = "0.3.2" +lazy_static = "1.4.0" dhall_generated_parser = { path = "../dhall_generated_parser" } +dhall_proc_macros = { path = "../dhall_proc_macros" } diff --git a/dhall_syntax/src/core/expr.rs b/dhall_syntax/src/core/expr.rs index 51b6c47..eeee4d8 100644 --- a/dhall_syntax/src/core/expr.rs +++ b/dhall_syntax/src/core/expr.rs @@ -381,15 +381,10 @@ pub fn rc<E>(x: RawExpr<E>) -> Expr<E> { Expr::from_expr_no_span(x) } -pub(crate) fn spanned( - span: Span, - x: crate::parser::ParsedRawExpr, -) -> crate::parser::ParsedExpr { +pub(crate) fn spanned<E>(span: Span, x: RawExpr<E>) -> Expr<E> { Expr::new(x, span) } -pub(crate) fn unspanned( - x: crate::parser::ParsedRawExpr, -) -> crate::parser::ParsedExpr { +pub(crate) fn unspanned<E>(x: RawExpr<E>) -> Expr<E> { Expr::from_expr_no_span(x) } diff --git a/dhall_syntax/src/lib.rs b/dhall_syntax/src/lib.rs index e4a6077..95f40c2 100644 --- a/dhall_syntax/src/lib.rs +++ b/dhall_syntax/src/lib.rs @@ -1,8 +1,7 @@ #![feature(trace_macros)] #![feature(slice_patterns)] -#![feature(try_blocks)] #![feature(never_type)] -#![feature(bind_by_move_pattern_guards)] +#![feature(proc_macro_hygiene)] #![allow( clippy::many_single_char_names, clippy::should_implement_trait, diff --git a/dhall_syntax/src/parser.rs b/dhall_syntax/src/parser.rs index 52b3760..1262774 100644 --- a/dhall_syntax/src/parser.rs +++ b/dhall_syntax/src/parser.rs @@ -4,10 +4,10 @@ use pest::prec_climber as pcl; use pest::prec_climber::PrecClimber; use pest::Parser; use std::borrow::Cow; -use std::collections::HashMap; use std::rc::Rc; use dhall_generated_parser::{DhallParser, Rule}; +use dhall_proc_macros::{make_parser, parse_children}; use crate::map::{DupTreeMap, DupTreeSet}; use crate::ExprF::*; @@ -18,64 +18,59 @@ use crate::*; // their own crate because they are quite general and useful. For now they // are here and hopefully you can figure out how they work. 
-pub(crate) type ParsedRawExpr = RawExpr<!>; -pub(crate) type ParsedExpr = Expr<!>; -type ParsedText = InterpolatedText<ParsedExpr>; -type ParsedTextContents = InterpolatedTextContents<ParsedExpr>; +type ParsedText<E> = InterpolatedText<Expr<E>>; +type ParsedTextContents<E> = InterpolatedTextContents<Expr<E>>; pub type ParseError = pest::error::Error<Rule>; pub type ParseResult<T> = Result<T, ParseError>; -#[derive(Debug)] -enum Either<A, B> { - Left(A), - Right(B), +#[derive(Debug, Clone)] +struct ParseInput<'input, Rule> +where + Rule: pest::RuleType, +{ + pair: Pair<'input, Rule>, + original_input_str: Rc<str>, } -impl crate::Builtin { - pub fn parse(s: &str) -> Option<Self> { - use crate::Builtin::*; - match s { - "Bool" => Some(Bool), - "Natural" => Some(Natural), - "Integer" => Some(Integer), - "Double" => Some(Double), - "Text" => Some(Text), - "List" => Some(List), - "Optional" => Some(Optional), - "None" => Some(OptionalNone), - "Natural/build" => Some(NaturalBuild), - "Natural/fold" => Some(NaturalFold), - "Natural/isZero" => Some(NaturalIsZero), - "Natural/even" => Some(NaturalEven), - "Natural/odd" => Some(NaturalOdd), - "Natural/toInteger" => Some(NaturalToInteger), - "Natural/show" => Some(NaturalShow), - "Natural/subtract" => Some(NaturalSubtract), - "Integer/toDouble" => Some(IntegerToDouble), - "Integer/show" => Some(IntegerShow), - "Double/show" => Some(DoubleShow), - "List/build" => Some(ListBuild), - "List/fold" => Some(ListFold), - "List/length" => Some(ListLength), - "List/head" => Some(ListHead), - "List/last" => Some(ListLast), - "List/indexed" => Some(ListIndexed), - "List/reverse" => Some(ListReverse), - "Optional/fold" => Some(OptionalFold), - "Optional/build" => Some(OptionalBuild), - "Text/show" => Some(TextShow), - _ => None, +impl<'input> ParseInput<'input, Rule> { + fn error(&self, message: String) -> ParseError { + let message = format!( + "{} while matching on:\n{}", + message, + debug_pair(self.pair.clone()) + ); + let e = pest::error::ErrorVariant::CustomError { message }; + pest::error::Error::new_from_span(e, self.pair.as_span()) + } + fn parse(input_str: &'input str, rule: Rule) -> ParseResult<Self> { + let mut pairs = DhallParser::parse(rule, input_str)?; + // TODO: proper errors + let pair = pairs.next().unwrap(); + assert_eq!(pairs.next(), None); + Ok(ParseInput { + original_input_str: input_str.to_string().into(), + pair, + }) + } + fn with_pair(&self, new_pair: Pair<'input, Rule>) -> Self { + ParseInput { + pair: new_pair, + original_input_str: self.original_input_str.clone(), } } + fn as_span(&self) -> Span { + Span::make(self.original_input_str.clone(), self.pair.as_span()) + } + fn as_str(&self) -> &'input str { + self.pair.as_str() + } } -pub fn custom_parse_error(pair: &Pair<Rule>, msg: String) -> ParseError { - let msg = - format!("{} while matching on:\n{}", msg, debug_pair(pair.clone())); - let e = pest::error::ErrorVariant::CustomError { message: msg }; - pest::error::Error::new_from_span(e, pair.as_span()) +// Used to retrieve the `Rule` enum associated with the `Self` type in `parse_children`. +trait PestConsumer { + type RuleEnum: pest::RuleType; } fn debug_pair(pair: Pair<Rule>) -> String { @@ -119,276 +114,53 @@ fn debug_pair(pair: Pair<Rule>) -> String { s } -macro_rules! 
parse_children { - // Variable length pattern with a common unary variant - (@match_forwards, - $parse_args:expr, - $iter:expr, - ($body:expr), - $variant:ident ($x:ident).., - $($rest:tt)* - ) => { - parse_children!(@match_backwards, - $parse_args, $iter, - ({ - let $x = $iter - .map(|x| Parsers::$variant($parse_args, x)) - .collect::<Result<Vec<_>, _>>()? - .into_iter(); - $body - }), - $($rest)* - ) - }; - // Single item pattern - (@match_forwards, - $parse_args:expr, - $iter:expr, - ($body:expr), - $variant:ident ($x:pat), - $($rest:tt)* - ) => {{ - let p = $iter.next().unwrap(); - let $x = Parsers::$variant($parse_args, p)?; - parse_children!(@match_forwards, - $parse_args, $iter, - ($body), - $($rest)* - ) - }}; - // Single item pattern after a variable length one: declare reversed and take from the end - (@match_backwards, - $parse_args:expr, - $iter:expr, - ($body:expr), - $variant:ident ($x:pat), - $($rest:tt)* - ) => { - parse_children!(@match_backwards, $parse_args, $iter, ({ - let p = $iter.next_back().unwrap(); - let $x = Parsers::$variant($parse_args, p)?; - $body - }), $($rest)*) - }; - - // Check no elements remain - (@match_forwards, $parse_args:expr, $iter:expr, ($body:expr) $(,)*) => { - $body - }; - // After a variable length pattern, everything has already been consumed - (@match_backwards, $parse_args:expr, $iter:expr, ($body:expr) $(,)*) => { - $body - }; - - ($parse_args:expr, $iter:expr; [$($args:tt)*] => $body:expr) => { - parse_children!(@match_forwards, - $parse_args, $iter, - ($body), - $($args)*, - ) - }; +#[derive(Debug)] +enum Either<A, B> { + Left(A), + Right(B), } -macro_rules! make_parser { - (@children_pattern, - $varpat:ident, - ($($acc:tt)*), - [$variant:ident ($x:pat), $($rest:tt)*] - ) => ( - make_parser!(@children_pattern, - $varpat, - ($($acc)* , Rule::$variant), - [$($rest)*] - ) - ); - (@children_pattern, - $varpat:ident, - ($($acc:tt)*), - [$variant:ident ($x:ident).., $($rest:tt)*] - ) => ( - make_parser!(@children_pattern, - $varpat, - ($($acc)* , $varpat..), - [$($rest)*] - ) - ); - (@children_pattern, - $varpat:ident, - (, $($acc:tt)*), [$(,)*] - ) => ([$($acc)*]); - (@children_pattern, - $varpat:ident, - ($($acc:tt)*), [$(,)*] - ) => ([$($acc)*]); - - (@children_filter, - $varpat:ident, - [$variant:ident ($x:pat), $($rest:tt)*] - ) => ( - make_parser!(@children_filter, $varpat, [$($rest)*]) - ); - (@children_filter, - $varpat:ident, - [$variant:ident ($x:ident).., $($rest:tt)*] - ) => ( - $varpat.iter().all(|r| r == &Rule::$variant) && - make_parser!(@children_filter, $varpat, [$($rest)*]) - ); - (@children_filter, $varpat:ident, [$(,)*]) => (true); - - (@body, - ($climbers:expr, $input:expr, $pair:expr), - rule!( - $name:ident<$o:ty>; - $span:ident; - captured_str!($x:pat) => $body:expr - ) - ) => ({ - let $span = Span::make($input.clone(), $pair.as_span()); - let $x = $pair.as_str(); - let res: Result<_, String> = try { $body }; - res.map_err(|msg| custom_parse_error(&$pair, msg)) - }); - (@body, - ($climbers:expr, $input:expr, $pair:expr), - rule!( - $name:ident<$o:ty>; - $span:ident; - children!( $( [$($args:tt)*] => $body:expr ),* $(,)* ) - ) - ) => ({ - let children_rules: Vec<Rule> = $pair - .clone() - .into_inner() - .map(|p| p.as_rule()) - .collect(); - - let $span = Span::make($input.clone(), $pair.as_span()); - #[allow(unused_mut)] - let mut iter = $pair.clone().into_inner(); - - #[allow(unreachable_code)] - match children_rules.as_slice() { - $( - make_parser!(@children_pattern, x, (), [$($args)*,]) - if 
make_parser!(@children_filter, x, [$($args)*,]) - => { - parse_children!(($climbers, $input.clone()), iter; - [$($args)*] => { - let res: Result<_, String> = try { $body }; - res.map_err(|msg| custom_parse_error(&$pair, msg)) - } - ) - } - , - )* - [..] => Err(custom_parse_error( - &$pair, - format!("Unexpected children: {:?}", children_rules) - )), - } - }); - (@body, - ($climbers:expr, $input:expr, $pair:expr), - rule!( - $name:ident<$o:ty>; - prec_climb!( - $other_rule:ident, - $_climber:expr, - $args:pat => $body:expr $(,)* - ) - ) - ) => ({ - let climber = $climbers.get(&Rule::$name).unwrap(); - climber.climb( - $pair.clone().into_inner(), - |p| Parsers::$other_rule(($climbers, $input.clone()), p), - |l, op, r| { - let $args = (l?, op, r?); - let res: Result<_, String> = try { $body }; - res.map_err(|msg| custom_parse_error(&$pair, msg)) - }, - ) - }); - (@body, - ($($things:tt)*), - rule!( - $name:ident<$o:ty>; - $($args:tt)* - ) - ) => ({ - make_parser!(@body, - ($($things)*), - rule!( - $name<$o>; - _span; - $($args)* - ) - ) - }); - (@body, - ($($things:tt)*), - rule!($name:ident<$o:ty>) - ) => ({ - Ok(()) - }); - - (@construct_climber, - ($map:expr), - rule!( - $name:ident<$o:ty>; - prec_climb!($other_rule:ident, $climber:expr, $($_rest:tt)* ) - ) - ) => ({ - $map.insert(Rule::$name, $climber) - }); - (@construct_climber, ($($things:tt)*), $($args:tt)*) => (()); - - ($( $submac:ident!( $name:ident<$o:ty> $($args:tt)* ); )*) => ( - struct Parsers; - - impl Parsers { - $( - #[allow(non_snake_case, unused_variables, clippy::let_unit_value)] - fn $name<'a>( - (climbers, input): (&HashMap<Rule, PrecClimber<Rule>>, Rc<str>), - pair: Pair<'a, Rule>, - ) -> ParseResult<$o> { - make_parser!(@body, (climbers, input, pair), - $submac!( $name<$o> $($args)* )) - } - )* - } - - fn construct_precclimbers() -> HashMap<Rule, PrecClimber<Rule>> { - let mut map = HashMap::new(); - $( - make_parser!(@construct_climber, (map), - $submac!( $name<$o> $($args)* )); - )* - map - } - - struct EntryPoint; - - impl EntryPoint { - $( - #[allow(non_snake_case, dead_code)] - fn $name<'a>( - input: Rc<str>, - pair: Pair<'a, Rule>, - ) -> ParseResult<$o> { - let climbers = construct_precclimbers(); - Parsers::$name((&climbers, input), pair) - } - )* +impl crate::Builtin { + pub fn parse(s: &str) -> Option<Self> { + use crate::Builtin::*; + match s { + "Bool" => Some(Bool), + "Natural" => Some(Natural), + "Integer" => Some(Integer), + "Double" => Some(Double), + "Text" => Some(Text), + "List" => Some(List), + "Optional" => Some(Optional), + "None" => Some(OptionalNone), + "Natural/build" => Some(NaturalBuild), + "Natural/fold" => Some(NaturalFold), + "Natural/isZero" => Some(NaturalIsZero), + "Natural/even" => Some(NaturalEven), + "Natural/odd" => Some(NaturalOdd), + "Natural/toInteger" => Some(NaturalToInteger), + "Natural/show" => Some(NaturalShow), + "Natural/subtract" => Some(NaturalSubtract), + "Integer/toDouble" => Some(IntegerToDouble), + "Integer/show" => Some(IntegerShow), + "Double/show" => Some(DoubleShow), + "List/build" => Some(ListBuild), + "List/fold" => Some(ListFold), + "List/length" => Some(ListLength), + "List/head" => Some(ListHead), + "List/last" => Some(ListLast), + "List/indexed" => Some(ListIndexed), + "List/reverse" => Some(ListReverse), + "Optional/fold" => Some(OptionalFold), + "Optional/build" => Some(OptionalBuild), + "Text/show" => Some(TextShow), + _ => None, } - ); + } } // Trim the shared indent off of a vec of lines, as defined by the Dhall semantics of multiline // literals. 
-fn trim_indent(lines: &mut Vec<ParsedText>) { +fn trim_indent<E: Clone>(lines: &mut Vec<ParsedText<E>>) { let is_indent = |c: char| c == ' ' || c == '\t'; // There is at least one line so this is safe @@ -427,676 +199,901 @@ fn trim_indent(lines: &mut Vec<ParsedText>) { } } -make_parser! { - rule!(EOI<()>); - - rule!(simple_label<Label>; - captured_str!(s) => Label::from(s.trim().to_owned()) - ); - rule!(quoted_label<Label>; - captured_str!(s) => Label::from(s.trim().to_owned()) - ); - rule!(label<Label>; children!( - [simple_label(l)] => l, - [quoted_label(l)] => l, - )); - - rule!(double_quote_literal<ParsedText>; children!( - [double_quote_chunk(chunks)..] => { - chunks.collect() - } - )); - - rule!(double_quote_chunk<ParsedTextContents>; children!( - [interpolation(e)] => { - InterpolatedTextContents::Expr(e) - }, - [double_quote_escaped(s)] => { - InterpolatedTextContents::Text(s) - }, - [double_quote_char(s)] => { - InterpolatedTextContents::Text(s.to_owned()) - }, - )); - rule!(double_quote_escaped<String>; - captured_str!(s) => { - match s { - "\"" => "\"".to_owned(), - "$" => "$".to_owned(), - "\\" => "\\".to_owned(), - "/" => "/".to_owned(), - "b" => "\u{0008}".to_owned(), - "f" => "\u{000C}".to_owned(), - "n" => "\n".to_owned(), - "r" => "\r".to_owned(), - "t" => "\t".to_owned(), - // "uXXXX" or "u{XXXXX}" - _ => { - use std::convert::{TryFrom, TryInto}; - - let s = &s[1..]; - let s = if &s[0..1] == "{" { - &s[1..s.len()-1] - } else { - &s[0..s.len()] - }; - - if s.len() > 8 { - Err(format!("Escape sequences can't have more than 8 chars: \"{}\"", s))? - } +lazy_static::lazy_static! { + static ref PRECCLIMBER: PrecClimber<Rule> = { + use Rule::*; + // In order of precedence + let operators = vec![ + import_alt, + bool_or, + natural_plus, + text_append, + list_append, + bool_and, + combine, + prefer, + combine_types, + natural_times, + bool_eq, + bool_ne, + equivalent, + ]; + PrecClimber::new( + operators + .into_iter() + .map(|op| pcl::Operator::new(op, pcl::Assoc::Left)) + .collect(), + ) + }; +} + +struct Parsers; + +#[make_parser(Rule)] +impl Parsers { + fn EOI(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + + fn simple_label(input: ParseInput<Rule>) -> ParseResult<Label> { + Ok(Label::from(input.as_str().trim().to_owned())) + } + fn quoted_label(input: ParseInput<Rule>) -> ParseResult<Label> { + Ok(Label::from(input.as_str().trim().to_owned())) + } + fn label(input: ParseInput<Rule>) -> ParseResult<Label> { + Ok(parse_children!(input; + [simple_label(l)] => l, + [quoted_label(l)] => l, + )) + } - // pad with zeroes - let s: String = std::iter::repeat('0') - .take(8 - s.len()) - .chain(s.chars()) - .collect(); - - // `s` has length 8, so `bytes` has length 4 - let bytes: &[u8] = &hex::decode(s).unwrap(); - let i = u32::from_be_bytes(bytes.try_into().unwrap()); - let c = char::try_from(i).unwrap(); - match i { - 0xD800..=0xDFFF => { - let c_ecapsed = c.escape_unicode(); - Err(format!("Escape sequences can't contain surrogate pairs: \"{}\"", c_ecapsed))? - }, - 0x0FFFE..=0x0FFFF | 0x1FFFE..=0x1FFFF | - 0x2FFFE..=0x2FFFF | 0x3FFFE..=0x3FFFF | - 0x4FFFE..=0x4FFFF | 0x5FFFE..=0x5FFFF | - 0x6FFFE..=0x6FFFF | 0x7FFFE..=0x7FFFF | - 0x8FFFE..=0x8FFFF | 0x9FFFE..=0x9FFFF | - 0xAFFFE..=0xAFFFF | 0xBFFFE..=0xBFFFF | - 0xCFFFE..=0xCFFFF | 0xDFFFE..=0xDFFFF | - 0xEFFFE..=0xEFFFF | 0xFFFFE..=0xFFFFF | - 0x10_FFFE..=0x10_FFFF => { - let c_ecapsed = c.escape_unicode(); - Err(format!("Escape sequences can't contain non-characters: \"{}\"", c_ecapsed))? 
- }, - _ => {} + fn double_quote_literal<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<ParsedText<E>> { + Ok(parse_children!(input; + [double_quote_chunk(chunks)..] => { + chunks.collect() + } + )) + } + + fn double_quote_chunk<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<ParsedTextContents<E>> { + Ok(parse_children!(input; + [interpolation(e)] => { + InterpolatedTextContents::Expr(e) + }, + [double_quote_escaped(s)] => { + InterpolatedTextContents::Text(s) + }, + [double_quote_char(s)] => { + InterpolatedTextContents::Text(s.to_owned()) + }, + )) + } + fn double_quote_escaped(input: ParseInput<Rule>) -> ParseResult<String> { + Ok(match input.as_str() { + "\"" => "\"".to_owned(), + "$" => "$".to_owned(), + "\\" => "\\".to_owned(), + "/" => "/".to_owned(), + "b" => "\u{0008}".to_owned(), + "f" => "\u{000C}".to_owned(), + "n" => "\n".to_owned(), + "r" => "\r".to_owned(), + "t" => "\t".to_owned(), + // "uXXXX" or "u{XXXXX}" + s => { + use std::convert::{TryFrom, TryInto}; + + let s = &s[1..]; + let s = if &s[0..1] == "{" { + &s[1..s.len() - 1] + } else { + &s[0..s.len()] + }; + + if s.len() > 8 { + Err(input.error(format!( + "Escape sequences can't have more than 8 chars: \"{}\"", + s + )))? + } + + // pad with zeroes + let s: String = std::iter::repeat('0') + .take(8 - s.len()) + .chain(s.chars()) + .collect(); + + // `s` has length 8, so `bytes` has length 4 + let bytes: &[u8] = &hex::decode(s).unwrap(); + let i = u32::from_be_bytes(bytes.try_into().unwrap()); + let c = char::try_from(i).unwrap(); + match i { + 0xD800..=0xDFFF => { + let c_ecapsed = c.escape_unicode(); + Err(input.error(format!("Escape sequences can't contain surrogate pairs: \"{}\"", c_ecapsed)))? + } + 0x0FFFE..=0x0FFFF + | 0x1FFFE..=0x1FFFF + | 0x2FFFE..=0x2FFFF + | 0x3FFFE..=0x3FFFF + | 0x4FFFE..=0x4FFFF + | 0x5FFFE..=0x5FFFF + | 0x6FFFE..=0x6FFFF + | 0x7FFFE..=0x7FFFF + | 0x8FFFE..=0x8FFFF + | 0x9FFFE..=0x9FFFF + | 0xAFFFE..=0xAFFFF + | 0xBFFFE..=0xBFFFF + | 0xCFFFE..=0xCFFFF + | 0xDFFFE..=0xDFFFF + | 0xEFFFE..=0xEFFFF + | 0xFFFFE..=0xFFFFF + | 0x10_FFFE..=0x10_FFFF => { + let c_ecapsed = c.escape_unicode(); + Err(input.error(format!("Escape sequences can't contain non-characters: \"{}\"", c_ecapsed)))? 
} - std::iter::once(c).collect() + _ => {} } + std::iter::once(c).collect() } - } - ); - rule!(double_quote_char<&'a str>; - captured_str!(s) => s - ); + }) + } + fn double_quote_char<'a>( + input: ParseInput<'a, Rule>, + ) -> ParseResult<&'a str> { + Ok(input.as_str()) + } - rule!(single_quote_literal<ParsedText>; children!( - [single_quote_continue(lines)] => { - let newline: ParsedText = "\n".to_string().into(); + fn single_quote_literal<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<ParsedText<E>> { + Ok(parse_children!(input; + [single_quote_continue(lines)] => { + let newline: ParsedText<E> = "\n".to_string().into(); - let mut lines: Vec<ParsedText> = lines - .into_iter() - .rev() - .map(|l| l.into_iter().rev().collect::<ParsedText>()) - .collect(); + let mut lines: Vec<ParsedText<E>> = lines + .into_iter() + .rev() + .map(|l| l.into_iter().rev().collect::<ParsedText<E>>()) + .collect(); - trim_indent(&mut lines); + trim_indent(&mut lines); - lines - .into_iter() - .intersperse(newline) - .flat_map(InterpolatedText::into_iter) - .collect::<ParsedText>() - } - )); - rule!(single_quote_char<&'a str>; - captured_str!(s) => s - ); - rule!(escaped_quote_pair<&'a str>; - captured_str!(_) => "''" - ); - rule!(escaped_interpolation<&'a str>; - captured_str!(_) => "${" - ); - rule!(interpolation<ParsedExpr>; children!( - [expression(e)] => e - )); + lines + .into_iter() + .intersperse(newline) + .flat_map(InterpolatedText::into_iter) + .collect::<ParsedText<E>>() + } + )) + } + fn single_quote_char<'a>( + input: ParseInput<'a, Rule>, + ) -> ParseResult<&'a str> { + Ok(input.as_str()) + } + fn escaped_quote_pair<'a>(_: ParseInput<'a, Rule>) -> ParseResult<&'a str> { + Ok("''") + } + fn escaped_interpolation<'a>( + _: ParseInput<'a, Rule>, + ) -> ParseResult<&'a str> { + Ok("${") + } + fn interpolation<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<Expr<E>> { + Ok(parse_children!(input; + [expression(e)] => e + )) + } // Returns a vec of lines in reversed order, where each line is also in reversed order. 
- rule!(single_quote_continue<Vec<Vec<ParsedTextContents>>>; children!( - [interpolation(c), single_quote_continue(lines)] => { - let c = InterpolatedTextContents::Expr(c); - let mut lines = lines; - lines.last_mut().unwrap().push(c); - lines - }, - [escaped_quote_pair(c), single_quote_continue(lines)] => { - let mut lines = lines; - // TODO: don't allocate for every char - let c = InterpolatedTextContents::Text(c.to_owned()); - lines.last_mut().unwrap().push(c); - lines - }, - [escaped_interpolation(c), single_quote_continue(lines)] => { - let mut lines = lines; - // TODO: don't allocate for every char - let c = InterpolatedTextContents::Text(c.to_owned()); - lines.last_mut().unwrap().push(c); - lines - }, - [single_quote_char(c), single_quote_continue(lines)] => { - let mut lines = lines; - if c == "\n" || c == "\r\n" { - lines.push(vec![]); - } else { + fn single_quote_continue<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<Vec<Vec<ParsedTextContents<E>>>> { + Ok(parse_children!(input; + [interpolation(c), single_quote_continue(lines)] => { + let c = InterpolatedTextContents::Expr(c); + let mut lines = lines; + lines.last_mut().unwrap().push(c); + lines + }, + [escaped_quote_pair(c), single_quote_continue(lines)] => { + let mut lines = lines; // TODO: don't allocate for every char let c = InterpolatedTextContents::Text(c.to_owned()); lines.last_mut().unwrap().push(c); - } - lines - }, - [] => { - vec![vec![]] - }, - )); - - rule!(builtin<ParsedExpr>; span; - captured_str!(s) => { - spanned(span, match crate::Builtin::parse(s) { + lines + }, + [escaped_interpolation(c), single_quote_continue(lines)] => { + let mut lines = lines; + // TODO: don't allocate for every char + let c = InterpolatedTextContents::Text(c.to_owned()); + lines.last_mut().unwrap().push(c); + lines + }, + [single_quote_char(c), single_quote_continue(lines)] => { + let mut lines = lines; + if c == "\n" || c == "\r\n" { + lines.push(vec![]); + } else { + // TODO: don't allocate for every char + let c = InterpolatedTextContents::Text(c.to_owned()); + lines.last_mut().unwrap().push(c); + } + lines + }, + [] => { + vec![vec![]] + }, + )) + } + + fn builtin<E: Clone>(input: ParseInput<Rule>) -> ParseResult<Expr<E>> { + let s = input.as_str(); + let span = input.as_span(); + Ok(spanned( + span, + match crate::Builtin::parse(s) { Some(b) => Builtin(b), - None => match s { - "True" => BoolLit(true), - "False" => BoolLit(false), - "Type" => Const(crate::Const::Type), - "Kind" => Const(crate::Const::Kind), - "Sort" => Const(crate::Const::Sort), - _ => Err( - format!("Unrecognized builtin: '{}'", s) - )?, + None => { + match s { + "True" => BoolLit(true), + "False" => BoolLit(false), + "Type" => Const(crate::Const::Type), + "Kind" => Const(crate::Const::Kind), + "Sort" => Const(crate::Const::Sort), + _ => Err(input + .error(format!("Unrecognized builtin: '{}'", s)))?, + } } - }) + }, + )) + } + + fn NaN(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + fn minus_infinity_literal(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + fn plus_infinity_literal(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + + fn numeric_double_literal( + input: ParseInput<Rule>, + ) -> ParseResult<core::Double> { + let s = input.as_str().trim(); + match s.parse::<f64>() { + Ok(x) if x.is_infinite() => Err(input.error(format!( + "Overflow while parsing double literal '{}'", + s + ))), + Ok(x) => Ok(NaiveDouble::from(x)), + Err(e) => Err(input.error(format!("{}", e))), } - ); - - rule!(NaN<()>); - 
rule!(minus_infinity_literal<()>); - rule!(plus_infinity_literal<()>); - - rule!(numeric_double_literal<core::Double>; - captured_str!(s) => { - let s = s.trim(); - match s.parse::<f64>() { - Ok(x) if x.is_infinite() => - Err(format!("Overflow while parsing double literal '{}'", s))?, - Ok(x) => NaiveDouble::from(x), - Err(e) => Err(format!("{}", e))?, + } + + fn double_literal(input: ParseInput<Rule>) -> ParseResult<core::Double> { + Ok(parse_children!(input; + [numeric_double_literal(n)] => n, + [minus_infinity_literal(_)] => std::f64::NEG_INFINITY.into(), + [plus_infinity_literal(_)] => std::f64::INFINITY.into(), + [NaN(_)] => std::f64::NAN.into(), + )) + } + + fn natural_literal(input: ParseInput<Rule>) -> ParseResult<core::Natural> { + input + .as_str() + .trim() + .parse() + .map_err(|e| input.error(format!("{}", e))) + } + + fn integer_literal(input: ParseInput<Rule>) -> ParseResult<core::Integer> { + input + .as_str() + .trim() + .parse() + .map_err(|e| input.error(format!("{}", e))) + } + + fn identifier<E: Clone>(input: ParseInput<Rule>) -> ParseResult<Expr<E>> { + let span = input.as_span(); + Ok(parse_children!(input; + [variable(v)] => { + spanned(span, Var(v)) + }, + [builtin(e)] => e, + )) + } + + fn variable(input: ParseInput<Rule>) -> ParseResult<V<Label>> { + Ok(parse_children!(input; + [label(l), natural_literal(idx)] => { + V(l, idx) + }, + [label(l)] => { + V(l, 0) + }, + )) + } + + fn unquoted_path_component<'a>( + input: ParseInput<'a, Rule>, + ) -> ParseResult<&'a str> { + Ok(input.as_str()) + } + fn quoted_path_component<'a>( + input: ParseInput<'a, Rule>, + ) -> ParseResult<&'a str> { + Ok(input.as_str()) + } + fn path_component(input: ParseInput<Rule>) -> ParseResult<String> { + Ok(parse_children!(input; + [unquoted_path_component(s)] => s.to_string(), + [quoted_path_component(s)] => { + const RESERVED: &percent_encoding::AsciiSet = + &percent_encoding::CONTROLS + .add(b'=').add(b':').add(b'/').add(b'?') + .add(b'#').add(b'[').add(b']').add(b'@') + .add(b'!').add(b'$').add(b'&').add(b'\'') + .add(b'(').add(b')').add(b'*').add(b'+') + .add(b',').add(b';'); + s.chars() + .map(|c| { + // Percent-encode ascii chars + if c.is_ascii() { + percent_encoding::utf8_percent_encode( + &c.to_string(), + RESERVED, + ).to_string() + } else { + c.to_string() + } + }) + .collect() + }, + )) + } + fn path(input: ParseInput<Rule>) -> ParseResult<FilePath> { + Ok(parse_children!(input; + [path_component(components)..] => { + FilePath { file_path: components.collect() } } - } - ); - - rule!(double_literal<core::Double>; children!( - [numeric_double_literal(n)] => n, - [minus_infinity_literal(n)] => std::f64::NEG_INFINITY.into(), - [plus_infinity_literal(n)] => std::f64::INFINITY.into(), - [NaN(n)] => std::f64::NAN.into(), - )); - - rule!(natural_literal<core::Natural>; - captured_str!(s) => { - s.trim() - .parse() - .map_err(|e| format!("{}", e))? - } - ); + )) + } - rule!(integer_literal<core::Integer>; - captured_str!(s) => { - s.trim() - .parse() - .map_err(|e| format!("{}", e))? 
- } - ); - - rule!(identifier<ParsedExpr>; span; children!( - [variable(v)] => { - spanned(span, Var(v)) - }, - [builtin(e)] => e, - )); - - rule!(variable<V<Label>>; children!( - [label(l), natural_literal(idx)] => { - V(l, idx) - }, - [label(l)] => { - V(l, 0) - }, - )); - - rule!(unquoted_path_component<&'a str>; captured_str!(s) => s); - rule!(quoted_path_component<&'a str>; captured_str!(s) => s); - rule!(path_component<String>; children!( - [unquoted_path_component(s)] => s.to_string(), - [quoted_path_component(s)] => { - const RESERVED: &percent_encoding::AsciiSet = - &percent_encoding::CONTROLS - .add(b'=').add(b':').add(b'/').add(b'?') - .add(b'#').add(b'[').add(b']').add(b'@') - .add(b'!').add(b'$').add(b'&').add(b'\'') - .add(b'(').add(b')').add(b'*').add(b'+') - .add(b',').add(b';'); - s.chars() - .map(|c| { - // Percent-encode ascii chars - if c.is_ascii() { - percent_encoding::utf8_percent_encode( - &c.to_string(), - RESERVED, - ).to_string() - } else { - c.to_string() - } - }) - .collect() - }, - )); - rule!(path<Vec<String>>; children!( - [path_component(components)..] => { - components.collect() - } - )); - - rule!(local<(FilePrefix, Vec<String>)>; children!( - [parent_path(l)] => l, - [here_path(l)] => l, - [home_path(l)] => l, - [absolute_path(l)] => l, - )); - - rule!(parent_path<(FilePrefix, Vec<String>)>; children!( - [path(p)] => (FilePrefix::Parent, p) - )); - rule!(here_path<(FilePrefix, Vec<String>)>; children!( - [path(p)] => (FilePrefix::Here, p) - )); - rule!(home_path<(FilePrefix, Vec<String>)>; children!( - [path(p)] => (FilePrefix::Home, p) - )); - rule!(absolute_path<(FilePrefix, Vec<String>)>; children!( - [path(p)] => (FilePrefix::Absolute, p) - )); - - rule!(scheme<Scheme>; captured_str!(s) => match s { - "http" => Scheme::HTTP, - "https" => Scheme::HTTPS, - _ => unreachable!(), - }); - - rule!(http_raw<URL<ParsedExpr>>; children!( - [scheme(sch), authority(auth), path(file_path)] => URL { - scheme: sch, - authority: auth, - path: FilePath { file_path }, - query: None, - headers: None, - }, - [scheme(sch), authority(auth), path(file_path), query(q)] => { - URL { + fn local(input: ParseInput<Rule>) -> ParseResult<(FilePrefix, FilePath)> { + Ok(parse_children!(input; + [parent_path(l)] => l, + [here_path(l)] => l, + [home_path(l)] => l, + [absolute_path(l)] => l, + )) + } + + fn parent_path(input: ParseInput<Rule>) -> ParseResult<(FilePrefix, FilePath)> { + Ok(parse_children!(input; + [path(p)] => (FilePrefix::Parent, p) + )) + } + fn here_path(input: ParseInput<Rule>) -> ParseResult<(FilePrefix, FilePath)> { + Ok(parse_children!(input; + [path(p)] => (FilePrefix::Here, p) + )) + } + fn home_path(input: ParseInput<Rule>) -> ParseResult<(FilePrefix, FilePath)> { + Ok(parse_children!(input; + [path(p)] => (FilePrefix::Home, p) + )) + } + fn absolute_path( + input: ParseInput<Rule>, + ) -> ParseResult<(FilePrefix, FilePath)> { + Ok(parse_children!(input; + [path(p)] => (FilePrefix::Absolute, p) + )) + } + + fn scheme(input: ParseInput<Rule>) -> ParseResult<Scheme> { + Ok(match input.as_str() { + "http" => Scheme::HTTP, + "https" => Scheme::HTTPS, + _ => unreachable!(), + }) + } + + fn http_raw<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<URL<Expr<E>>> { + Ok(parse_children!(input; + [scheme(sch), authority(auth), path(p)] => URL { scheme: sch, authority: auth, - path: FilePath { file_path }, + path: p, + query: None, + headers: None, + }, + [scheme(sch), authority(auth), path(p), query(q)] => URL { + scheme: sch, + authority: auth, + path: p, 
query: Some(q), headers: None, - } - }, - )); + }, + )) + } - rule!(authority<String>; captured_str!(s) => s.to_owned()); + fn authority(input: ParseInput<Rule>) -> ParseResult<String> { + Ok(input.as_str().to_owned()) + } - rule!(query<String>; captured_str!(s) => s.to_owned()); + fn query(input: ParseInput<Rule>) -> ParseResult<String> { + Ok(input.as_str().to_owned()) + } - rule!(http<URL<ParsedExpr>>; children!( + fn http<E: Clone>(input: ParseInput<Rule>) -> ParseResult<URL<Expr<E>>> { + Ok(parse_children!(input; [http_raw(url)] => url, [http_raw(url), import_expression(e)] => URL { headers: Some(e), ..url }, - )); - - rule!(env<String>; children!( - [bash_environment_variable(s)] => s, - [posix_environment_variable(s)] => s, - )); - rule!(bash_environment_variable<String>; captured_str!(s) => s.to_owned()); - rule!(posix_environment_variable<String>; children!( - [posix_environment_variable_character(chars)..] => { - chars.collect() - }, - )); - rule!(posix_environment_variable_character<Cow<'a, str>>; - captured_str!(s) => { - match s { - "\\\"" => Cow::Owned("\"".to_owned()), - "\\\\" => Cow::Owned("\\".to_owned()), - "\\a" => Cow::Owned("\u{0007}".to_owned()), - "\\b" => Cow::Owned("\u{0008}".to_owned()), - "\\f" => Cow::Owned("\u{000C}".to_owned()), - "\\n" => Cow::Owned("\n".to_owned()), - "\\r" => Cow::Owned("\r".to_owned()), - "\\t" => Cow::Owned("\t".to_owned()), - "\\v" => Cow::Owned("\u{000B}".to_owned()), - _ => Cow::Borrowed(s) - } - } - ); - - rule!(missing<()>); - - rule!(import_type<ImportLocation<ParsedExpr>>; children!( - [missing(_)] => { - ImportLocation::Missing - }, - [env(e)] => { - ImportLocation::Env(e) - }, - [http(url)] => { - ImportLocation::Remote(url) - }, - [local((prefix, file_path))] => { - ImportLocation::Local(prefix, FilePath { file_path }) - }, - )); - - rule!(hash<Hash>; captured_str!(s) => { - let s = s.trim(); + )) + } + + fn env(input: ParseInput<Rule>) -> ParseResult<String> { + Ok(parse_children!(input; + [bash_environment_variable(s)] => s, + [posix_environment_variable(s)] => s, + )) + } + fn bash_environment_variable( + input: ParseInput<Rule>, + ) -> ParseResult<String> { + Ok(input.as_str().to_owned()) + } + fn posix_environment_variable( + input: ParseInput<Rule>, + ) -> ParseResult<String> { + Ok(parse_children!(input; + [posix_environment_variable_character(chars)..] 
=> { + chars.collect() + }, + )) + } + fn posix_environment_variable_character<'a>( + input: ParseInput<'a, Rule>, + ) -> ParseResult<Cow<'a, str>> { + Ok(match input.as_str() { + "\\\"" => Cow::Owned("\"".to_owned()), + "\\\\" => Cow::Owned("\\".to_owned()), + "\\a" => Cow::Owned("\u{0007}".to_owned()), + "\\b" => Cow::Owned("\u{0008}".to_owned()), + "\\f" => Cow::Owned("\u{000C}".to_owned()), + "\\n" => Cow::Owned("\n".to_owned()), + "\\r" => Cow::Owned("\r".to_owned()), + "\\t" => Cow::Owned("\t".to_owned()), + "\\v" => Cow::Owned("\u{000B}".to_owned()), + s => Cow::Borrowed(s), + }) + } + + fn missing(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + + fn import_type<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<ImportLocation<Expr<E>>> { + Ok(parse_children!(input; + [missing(_)] => { + ImportLocation::Missing + }, + [env(e)] => { + ImportLocation::Env(e) + }, + [http(url)] => { + ImportLocation::Remote(url) + }, + [local((prefix, p))] => { + ImportLocation::Local(prefix, p) + }, + )) + } + + fn hash(input: ParseInput<Rule>) -> ParseResult<Hash> { + let s = input.as_str().trim(); let protocol = &s[..6]; let hash = &s[7..]; if protocol != "sha256" { - Err(format!("Unknown hashing protocol '{}'", protocol))? + Err(input.error(format!("Unknown hashing protocol '{}'", protocol)))? } - Hash::SHA256(hex::decode(hash).unwrap()) - }); + Ok(Hash::SHA256(hex::decode(hash).unwrap())) + } - rule!(import_hashed<crate::Import<ParsedExpr>>; children!( + fn import_hashed<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<crate::Import<Expr<E>>> { + Ok(parse_children!(input; [import_type(location)] => crate::Import {mode: ImportMode::Code, location, hash: None }, [import_type(location), hash(h)] => - crate::Import {mode: ImportMode::Code, location, hash: Some(h) }, - )); - - rule!(Text<()>); - rule!(Location<()>); - - rule!(import<ParsedExpr>; span; children!( - [import_hashed(imp)] => { - spanned(span, Import(crate::Import { - mode: ImportMode::Code, - ..imp - })) - }, - [import_hashed(imp), Text(_)] => { - spanned(span, Import(crate::Import { - mode: ImportMode::RawText, - ..imp - })) - }, - [import_hashed(imp), Location(_)] => { - spanned(span, Import(crate::Import { - mode: ImportMode::Location, - ..imp - })) - }, - )); - - rule!(lambda<()>); - rule!(forall<()>); - rule!(arrow<()>); - rule!(merge<()>); - rule!(assert<()>); - rule!(if_<()>); - rule!(in_<()>); - rule!(toMap<()>); - - rule!(empty_list_literal<ParsedExpr>; span; children!( - [application_expression(e)] => { - spanned(span, EmptyListLit(e)) - }, - )); - - rule!(expression<ParsedExpr>; span; children!( - [lambda(()), label(l), expression(typ), - arrow(()), expression(body)] => { - spanned(span, Lam(l, typ, body)) - }, - [if_(()), expression(cond), expression(left), expression(right)] => { - spanned(span, BoolIf(cond, left, right)) - }, - [let_binding(bindings).., in_(()), expression(final_expr)] => { - bindings.rev().fold( - final_expr, - |acc, x| unspanned(Let(x.0, x.1, x.2, acc)) - ) - }, - [forall(()), label(l), expression(typ), - arrow(()), expression(body)] => { - spanned(span, Pi(l, typ, body)) - }, - [operator_expression(typ), arrow(()), expression(body)] => { - spanned(span, Pi("_".into(), typ, body)) - }, - [merge(()), import_expression(x), import_expression(y), - application_expression(z)] => { - spanned(span, Merge(x, y, Some(z))) - }, - [empty_list_literal(e)] => e, - [assert(()), expression(x)] => { - spanned(span, Assert(x)) - }, - [toMap(()), import_expression(x), application_expression(y)] => { - 
spanned(span, ToMap(x, Some(y))) - }, - [operator_expression(e)] => e, - [operator_expression(e), expression(annot)] => { - spanned(span, Annot(e, annot)) - }, - )); - - rule!(let_binding<(Label, Option<ParsedExpr>, ParsedExpr)>; - children!( - [label(name), expression(annot), expression(expr)] => - (name, Some(annot), expr), - [label(name), expression(expr)] => - (name, None, expr), - )); - - rule!(List<()>); - rule!(Optional<()>); - - rule!(operator_expression<ParsedExpr>; prec_climb!( - application_expression, - { - use Rule::*; - // In order of precedence - let operators = vec![ - import_alt, - bool_or, - natural_plus, - text_append, - list_append, - bool_and, - combine, - prefer, - combine_types, - natural_times, - bool_eq, - bool_ne, - equivalent, - ]; - PrecClimber::new( - operators - .into_iter() - .map(|op| pcl::Operator::new(op, pcl::Assoc::Left)) - .collect(), - ) - }, - (l, op, r) => { - use crate::BinOp::*; - use Rule::*; - let op = match op.as_rule() { - import_alt => ImportAlt, - bool_or => BoolOr, - natural_plus => NaturalPlus, - text_append => TextAppend, - list_append => ListAppend, - bool_and => BoolAnd, - combine => RecursiveRecordMerge, - prefer => RightBiasedRecordMerge, - combine_types => RecursiveRecordTypeMerge, - natural_times => NaturalTimes, - bool_eq => BoolEQ, - bool_ne => BoolNE, - equivalent => Equivalence, - r => Err( - format!("Rule {:?} isn't an operator", r), - )?, - }; - - unspanned(BinOp(op, l, r)) - } - )); - - rule!(Some_<()>); - - rule!(application_expression<ParsedExpr>; children!( - [first_application_expression(e)] => e, - [first_application_expression(first), import_expression(rest)..] => { - rest.fold(first, |acc, e| unspanned(App(acc, e))) - }, - )); - - rule!(first_application_expression<ParsedExpr>; span; - children!( - [Some_(()), import_expression(e)] => { - spanned(span, SomeLit(e)) - }, - [merge(()), import_expression(x), import_expression(y)] => { - spanned(span, Merge(x, y, None)) - }, - [toMap(()), import_expression(x)] => { - spanned(span, ToMap(x, None)) - }, - [import_expression(e)] => e, - )); - - rule!(import_expression<ParsedExpr>; span; - children!( - [selector_expression(e)] => e, - [import(e)] => e, - )); - - rule!(selector_expression<ParsedExpr>; children!( - [primitive_expression(e)] => e, - [primitive_expression(first), selector(rest)..] => { - rest.fold(first, |acc, e| unspanned(match e { - Either::Left(l) => Field(acc, l), - Either::Right(ls) => Projection(acc, ls), - })) - }, - )); - - rule!(selector<Either<Label, DupTreeSet<Label>>>; children!( - [label(l)] => Either::Left(l), - [labels(ls)] => Either::Right(ls), - [expression(e)] => unimplemented!("selection by expression"), // TODO - )); - - rule!(labels<DupTreeSet<Label>>; children!( - [label(ls)..] 
=> ls.collect(), - )); - - rule!(primitive_expression<ParsedExpr>; span; children!( - [double_literal(n)] => spanned(span, DoubleLit(n)), - [natural_literal(n)] => spanned(span, NaturalLit(n)), - [integer_literal(n)] => spanned(span, IntegerLit(n)), - [double_quote_literal(s)] => spanned(span, TextLit(s)), - [single_quote_literal(s)] => spanned(span, TextLit(s)), - [empty_record_type(e)] => e, - [empty_record_literal(e)] => e, - [non_empty_record_type_or_literal(e)] => e, - [union_type(e)] => e, - [non_empty_list_literal(e)] => e, - [identifier(e)] => e, - [expression(e)] => e, - )); - - rule!(empty_record_literal<ParsedExpr>; span; - captured_str!(_) => spanned(span, RecordLit(Default::default())) - ); - - rule!(empty_record_type<ParsedExpr>; span; - captured_str!(_) => spanned(span, RecordType(Default::default())) - ); - - rule!(non_empty_record_type_or_literal<ParsedExpr>; span; - children!( - [label(first_label), non_empty_record_type(rest)] => { - let (first_expr, mut map) = rest; - map.insert(first_label, first_expr); - spanned(span, RecordType(map)) - }, - [label(first_label), non_empty_record_literal(rest)] => { - let (first_expr, mut map) = rest; - map.insert(first_label, first_expr); - spanned(span, RecordLit(map)) - }, - )); - - rule!(non_empty_record_type - <(ParsedExpr, DupTreeMap<Label, ParsedExpr>)>; children!( - [expression(expr), record_type_entry(entries)..] => { - (expr, entries.collect()) - } - )); + crate::Import {mode: ImportMode::Code, location, hash: Some(h) }, + )) + } - rule!(record_type_entry<(Label, ParsedExpr)>; children!( - [label(name), expression(expr)] => (name, expr) - )); + fn Text(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + fn Location(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } - rule!(non_empty_record_literal - <(ParsedExpr, DupTreeMap<Label, ParsedExpr>)>; children!( - [expression(expr), record_literal_entry(entries)..] => { - (expr, entries.collect()) - } - )); - - rule!(record_literal_entry<(Label, ParsedExpr)>; children!( - [label(name), expression(expr)] => (name, expr) - )); - - rule!(union_type<ParsedExpr>; span; children!( - [empty_union_type(_)] => { - spanned(span, UnionType(Default::default())) - }, - [union_type_entry(entries)..] => { - spanned(span, UnionType(entries.collect())) - }, - )); - - rule!(empty_union_type<()>); - - rule!(union_type_entry<(Label, Option<ParsedExpr>)>; children!( - [label(name), expression(expr)] => (name, Some(expr)), - [label(name)] => (name, None), - )); - - rule!(non_empty_list_literal<ParsedExpr>; span; - children!( - [expression(items)..] 
=> spanned( - span, - NEListLit(items.collect()) - ) - )); + fn import<E: Clone>(input: ParseInput<Rule>) -> ParseResult<Expr<E>> { + let span = input.as_span(); + Ok(parse_children!(input; + [import_hashed(imp)] => { + spanned(span, Import(crate::Import { + mode: ImportMode::Code, + ..imp + })) + }, + [import_hashed(imp), Text(_)] => { + spanned(span, Import(crate::Import { + mode: ImportMode::RawText, + ..imp + })) + }, + [import_hashed(imp), Location(_)] => { + spanned(span, Import(crate::Import { + mode: ImportMode::Location, + ..imp + })) + }, + )) + } + + fn lambda(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + fn forall(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + fn arrow(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + fn merge(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + fn assert(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + fn if_(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + fn in_(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + fn toMap(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } - rule!(final_expression<ParsedExpr>; children!( - [expression(e), EOI(_)] => e - )); + fn empty_list_literal<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<Expr<E>> { + let span = input.as_span(); + Ok(parse_children!(input; + [application_expression(e)] => { + spanned(span, EmptyListLit(e)) + }, + )) + } + + fn expression<E: Clone>(input: ParseInput<Rule>) -> ParseResult<Expr<E>> { + let span = input.as_span(); + Ok(parse_children!(input; + [lambda(()), label(l), expression(typ), + arrow(()), expression(body)] => { + spanned(span, Lam(l, typ, body)) + }, + [if_(()), expression(cond), expression(left), + expression(right)] => { + spanned(span, BoolIf(cond, left, right)) + }, + [let_binding(bindings).., in_(()), expression(final_expr)] => { + bindings.rev().fold( + final_expr, + |acc, x| unspanned(Let(x.0, x.1, x.2, acc)) + ) + }, + [forall(()), label(l), expression(typ), + arrow(()), expression(body)] => { + spanned(span, Pi(l, typ, body)) + }, + [operator_expression(typ), arrow(()), expression(body)] => { + spanned(span, Pi("_".into(), typ, body)) + }, + [merge(()), import_expression(x), import_expression(y), + application_expression(z)] => { + spanned(span, Merge(x, y, Some(z))) + }, + [empty_list_literal(e)] => e, + [assert(()), expression(x)] => { + spanned(span, Assert(x)) + }, + [toMap(()), import_expression(x), application_expression(y)] => { + spanned(span, ToMap(x, Some(y))) + }, + [operator_expression(e)] => e, + [operator_expression(e), expression(annot)] => { + spanned(span, Annot(e, annot)) + }, + )) + } + + fn let_binding<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<(Label, Option<Expr<E>>, Expr<E>)> { + Ok(parse_children!(input; + [label(name), expression(annot), expression(expr)] => + (name, Some(annot), expr), + [label(name), expression(expr)] => + (name, None, expr), + )) + } + + fn List(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + fn Optional(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + + #[prec_climb(application_expression, PRECCLIMBER)] + fn operator_expression<E: Clone>( + input: ParseInput<Rule>, + l: Expr<E>, + op: Pair<Rule>, + r: Expr<E>, + ) -> ParseResult<Expr<E>> { + use crate::BinOp::*; + use Rule::*; + let op = match op.as_rule() { + import_alt => ImportAlt, + bool_or => BoolOr, + natural_plus => NaturalPlus, + text_append => TextAppend, + list_append => ListAppend, + bool_and => BoolAnd, + combine => RecursiveRecordMerge, + prefer => 
RightBiasedRecordMerge, + combine_types => RecursiveRecordTypeMerge, + natural_times => NaturalTimes, + bool_eq => BoolEQ, + bool_ne => BoolNE, + equivalent => Equivalence, + r => Err(input.error(format!("Rule {:?} isn't an operator", r)))?, + }; + + Ok(unspanned(BinOp(op, l, r))) + } + + fn Some_(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + + fn application_expression<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<Expr<E>> { + Ok(parse_children!(input; + [first_application_expression(e)] => e, + [first_application_expression(first), + import_expression(rest)..] => { + rest.fold(first, |acc, e| unspanned(App(acc, e))) + }, + )) + } + + fn first_application_expression<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<Expr<E>> { + let span = input.as_span(); + Ok(parse_children!(input; + [Some_(()), import_expression(e)] => { + spanned(span, SomeLit(e)) + }, + [merge(()), import_expression(x), import_expression(y)] => { + spanned(span, Merge(x, y, None)) + }, + [toMap(()), import_expression(x)] => { + spanned(span, ToMap(x, None)) + }, + [import_expression(e)] => e, + )) + } + + fn import_expression<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<Expr<E>> { + Ok(parse_children!(input; + [selector_expression(e)] => e, + [import(e)] => e, + )) + } + + fn selector_expression<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<Expr<E>> { + Ok(parse_children!(input; + [primitive_expression(e)] => e, + [primitive_expression(first), selector(rest)..] => { + rest.fold(first, |acc, e| unspanned(match e { + Either::Left(l) => Field(acc, l), + Either::Right(ls) => Projection(acc, ls), + })) + }, + )) + } + + fn selector( + input: ParseInput<Rule>, + ) -> ParseResult<Either<Label, DupTreeSet<Label>>> { + Ok(parse_children!(input; + [label(l)] => Either::Left(l), + [labels(ls)] => Either::Right(ls), + // [expression(_e)] => unimplemented!("selection by expression"), // TODO + )) + } + + fn labels(input: ParseInput<Rule>) -> ParseResult<DupTreeSet<Label>> { + Ok(parse_children!(input; + [label(ls)..] 
=> ls.collect(), + )) + } + + fn primitive_expression<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<Expr<E>> { + let span = input.as_span(); + Ok(parse_children!(input; + [double_literal(n)] => spanned(span, DoubleLit(n)), + [natural_literal(n)] => spanned(span, NaturalLit(n)), + [integer_literal(n)] => spanned(span, IntegerLit(n)), + [double_quote_literal(s)] => spanned(span, TextLit(s)), + [single_quote_literal(s)] => spanned(span, TextLit(s)), + [empty_record_type(e)] => e, + [empty_record_literal(e)] => e, + [non_empty_record_type_or_literal(e)] => e, + [union_type(e)] => e, + [non_empty_list_literal(e)] => e, + [identifier(e)] => e, + [expression(e)] => e, + )) + } + + fn empty_record_literal<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<Expr<E>> { + let span = input.as_span(); + Ok(spanned(span, RecordLit(Default::default()))) + } + + fn empty_record_type<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<Expr<E>> { + let span = input.as_span(); + Ok(spanned(span, RecordType(Default::default()))) + } + + fn non_empty_record_type_or_literal<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<Expr<E>> { + let span = input.as_span(); + Ok(parse_children!(input; + [label(first_label), non_empty_record_type(rest)] => { + let (first_expr, mut map) = rest; + map.insert(first_label, first_expr); + spanned(span, RecordType(map)) + }, + [label(first_label), non_empty_record_literal(rest)] => { + let (first_expr, mut map) = rest; + map.insert(first_label, first_expr); + spanned(span, RecordLit(map)) + }, + )) + } + + fn non_empty_record_type<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<(Expr<E>, DupTreeMap<Label, Expr<E>>)> { + Ok(parse_children!(input; + [expression(expr), record_type_entry(entries)..] => { + (expr, entries.collect()) + } + )) + } + + fn record_type_entry<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<(Label, Expr<E>)> { + Ok(parse_children!(input; + [label(name), expression(expr)] => (name, expr) + )) + } + + fn non_empty_record_literal<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<(Expr<E>, DupTreeMap<Label, Expr<E>>)> { + Ok(parse_children!(input; + [expression(expr), record_literal_entry(entries)..] => { + (expr, entries.collect()) + } + )) + } + + fn record_literal_entry<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<(Label, Expr<E>)> { + Ok(parse_children!(input; + [label(name), expression(expr)] => (name, expr) + )) + } + + fn union_type<E: Clone>(input: ParseInput<Rule>) -> ParseResult<Expr<E>> { + let span = input.as_span(); + Ok(parse_children!(input; + [empty_union_type(_)] => { + spanned(span, UnionType(Default::default())) + }, + [union_type_entry(entries)..] => { + spanned(span, UnionType(entries.collect())) + }, + )) + } + + fn empty_union_type(_: ParseInput<Rule>) -> ParseResult<()> { + Ok(()) + } + + fn union_type_entry<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<(Label, Option<Expr<E>>)> { + Ok(parse_children!(input; + [label(name), expression(expr)] => (name, Some(expr)), + [label(name)] => (name, None), + )) + } + + fn non_empty_list_literal<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<Expr<E>> { + let span = input.as_span(); + Ok(parse_children!(input; + [expression(items)..] 
=> spanned( + span, + NEListLit(items.collect()) + ) + )) + } + + fn final_expression<E: Clone>( + input: ParseInput<Rule>, + ) -> ParseResult<Expr<E>> { + Ok(parse_children!(input; + [expression(e), EOI(_)] => e + )) + } } -pub fn parse_expr(s: &str) -> ParseResult<ParsedExpr> { - let mut pairs = DhallParser::parse(Rule::final_expression, s)?; - let rc_input = s.to_string().into(); - let expr = EntryPoint::final_expression(rc_input, pairs.next().unwrap())?; - assert_eq!(pairs.next(), None); - Ok(expr) +pub fn parse_expr<E: Clone>(s: &str) -> ParseResult<Expr<E>> { + let input = ParseInput::parse(s, Rule::final_expression)?; + Parsers::final_expression(input) } diff --git a/improved_slice_patterns/src/lib.rs b/improved_slice_patterns/src/lib.rs index 5478c1b..1669207 100644 --- a/improved_slice_patterns/src/lib.rs +++ b/improved_slice_patterns/src/lib.rs @@ -15,7 +15,7 @@ /// Contrary to slice_patterns, this allows moving out /// of the iterator. /// -/// A variable length pattern (`x..`) is only allowed as the last +/// A variable length pattern (`x @ ..`) is only allowed as the last /// pattern, unless the iterator is double-ended. /// /// Example: @@ -25,7 +25,7 @@ /// let vec = vec![Some(1), Some(2), Some(3), None]; /// /// let res = destructure_iter!(vec.into_iter(); -/// [Some(x), y.., z] => { +/// [Some(x), y @ .., z] => { /// // x: usize /// // y: impl Iterator<Option<usize>> /// // z: Option<usize> @@ -42,7 +42,8 @@ #[macro_export] macro_rules! destructure_iter { // Variable length pattern - (@match_forwards, $iter:expr, ($body:expr), $x:ident.., $($rest:tt)*) => { + (@match_forwards, $iter:expr, ($body:expr), + $x:ident @ .., $($rest:tt)*) => { $crate::destructure_iter!(@match_backwards, $iter, ({ @@ -133,7 +134,7 @@ macro_rules! destructure_iter { /// Contrary to slice_patterns, this allows moving out /// of the `Vec`. /// -/// A variable length pattern (`x..`) returns an iterator. +/// A variable length pattern (`x @ ..`) returns an iterator. /// /// Example: /// ```edition2018 @@ -143,7 +144,7 @@ macro_rules! destructure_iter { /// let vec = vec![Some(1), Some(2), Some(3), None]; /// /// let res = match_vec!(vec; -/// [Some(_), y.., None] => { +/// [Some(_), y @ .., None] => { /// y.collect::<Vec<_>>() /// }, /// [None, None] => { @@ -158,7 +159,7 @@ macro_rules! destructure_iter { /// let vec = vec![Some(1), Some(2), Some(3), None]; /// /// let res = match_vec!(vec; -/// [Some(_), y.., Some(_)] => { +/// [Some(_), y @ .., Some(_)] => { /// y.collect::<Vec<_>>() /// }, /// [None, None] => { @@ -175,16 +176,16 @@ macro_rules! destructure_iter { #[macro_export] macro_rules! match_vec { // Variable length pattern - (@make_pat; ($($acc:tt)*), $x:ident.., $($rest:tt)*) => { + (@make_pat; ($($acc:tt)*), $x:ident @ .., $($rest:tt)*) => { $crate::match_vec!(@make_pat; - ($($acc)*, $x..), + ($($acc)*, $x @ ..), $($rest)* ) }; // Special variable length pattern with a common unary variant (@make_pat; ($($acc:tt)*), $variant:ident ($x:ident).., $($rest:tt)*) => { $crate::match_vec!(@make_pat; - ($($acc)*, $x..), + ($($acc)*, $x @ ..), $($rest)* ) }; @@ -209,7 +210,7 @@ macro_rules! match_vec { [$($acc)*] }; - (@make_filter; $x:ident.., $($rest:tt)*) => { + (@make_filter; $x:ident @ .., $($rest:tt)*) => { $crate::match_vec!(@make_filter; $($rest)* ) @@ -282,8 +283,8 @@ fn test() { [Some(_x), None, None] => 4, [Some(_x), None] => 2, [None, Some(y)] => 1, - [None, _y..] => 3, - [_x.., Some(y), Some(z), None] => y - z, + [None, _y @ ..] 
=> 3, + [_x @ .., Some(y), Some(z), None] => y - z, [Some(ys)..] => ys.sum(), [] => 0, [..] => -1, diff --git a/rust-toolchain b/rust-toolchain index 50ef20a..1d00bee 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1 +1 @@ -nightly-2019-03-22 +nightly-2019-09-03 |
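
For context, a minimal usage sketch (not taken from this patch) of the improved_slice_patterns macros after the syntax change above: variable-length patterns are now spelled `x @ ..` instead of `x..`, in line with the nightly-2019-09-03 toolchain this commit pins. The expected `Result` shape mirrors the crate's own doc-tests; the `main` wrapper and sample values are illustrative assumptions only.

    use improved_slice_patterns::match_vec;

    fn main() {
        let vec = vec![Some(1), Some(2), Some(3), None];

        // `y @ ..` binds the middle elements (still wrapped in `Option`)
        // as an iterator, moving them out of the `Vec`.
        let res = match_vec!(vec;
            [Some(_), y @ .., None] => {
                y.collect::<Vec<_>>()
            },
            [..] => vec![]
        );

        // Assumed result shape, as in the crate's doc-tests: the macro
        // yields `Result`, with the unmatched `Vec` returned in `Err`.
        assert_eq!(res, Ok(vec![Some(2), Some(3)]));
    }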