summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--Cargo.lock49
-rw-r--r--dhall_generated_parser/Cargo.toml3
-rw-r--r--dhall_generated_parser/build.rs17
-rw-r--r--dhall_generated_parser/src/lib.rs7
-rw-r--r--dhall_proc_macros/src/lib.rs19
-rw-r--r--dhall_proc_macros/src/make_parser.rs334
-rw-r--r--dhall_proc_macros/src/parse_children.rs215
-rw-r--r--dhall_syntax/Cargo.toml1
-rw-r--r--dhall_syntax/src/core/expr.rs22
-rw-r--r--dhall_syntax/src/lib.rs2
-rw-r--r--dhall_syntax/src/parser.rs542
11 files changed, 288 insertions, 923 deletions
diff --git a/Cargo.lock b/Cargo.lock
index d5ce864..5e81f7f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -91,8 +91,7 @@ version = "0.1.0"
dependencies = [
"abnf_to_pest 0.1.1",
"pest 2.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest_generator 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -117,6 +116,7 @@ dependencies = [
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"pest 2.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_consume 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"take_mut 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -238,6 +238,37 @@ dependencies = [
]
[[package]]
+name = "pest_consume"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "pest 2.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_consume_macros 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro-hack 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "pest_consume_macros"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro-hack 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "pest_derive"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "pest 2.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_generator 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "pest_generator"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -279,6 +310,16 @@ dependencies = [
]
[[package]]
+name = "proc-macro-hack"
+version = "0.5.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "proc-macro2"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -482,10 +523,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum output_vt100 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9"
"checksum percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
"checksum pest 2.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7e4fb201c5c22a55d8b24fef95f78be52738e5e1361129be1b5e862ecdb6894a"
+"checksum pest_consume 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "db19e2b6df75694d2a73accd716c3e2b28d6241ad88ec140a5588eb4486eeb40"
+"checksum pest_consume_macros 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "36dc3a65f772c034446335f2a09fa4ea7a3cc471f130acdb06e96225f0ee6da0"
+"checksum pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0"
"checksum pest_generator 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7b9fcf299b5712d06ee128a556c94709aaa04512c4dffb8ead07c5c998447fc0"
"checksum pest_meta 2.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "df43fd99896fd72c485fe47542c7b500e4ac1e8700bf995544d1317a60ded547"
"checksum pretty 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f60c0d9f6fc88ecdd245d90c1920ff76a430ab34303fc778d33b1d0a4c3bf6d3"
"checksum pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427"
+"checksum proc-macro-hack 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e688f31d92ffd7c1ddc57a1b4e6d773c0f2a14ee437a4b0a4f5a69c80eb221c8"
"checksum proc-macro2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "175a40b9cf564ce9bf050654633dbf339978706b8ead1a907bb970b63185dd95"
"checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
"checksum same-file 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "585e8ddcedc187886a30fa705c47985c3fa88d06624095856b36ca0b82ff4421"
diff --git a/dhall_generated_parser/Cargo.toml b/dhall_generated_parser/Cargo.toml
index 0730f60..b1f0d02 100644
--- a/dhall_generated_parser/Cargo.toml
+++ b/dhall_generated_parser/Cargo.toml
@@ -12,8 +12,7 @@ doctest = false
[build-dependencies]
abnf_to_pest = { version = "0.1.1", path = "../abnf_to_pest" }
-pest_generator = "2.1"
-quote = "1.0.2"
[dependencies]
pest = "2.1"
+pest_derive = "2.1"
diff --git a/dhall_generated_parser/build.rs b/dhall_generated_parser/build.rs
index c562fad..68895dd 100644
--- a/dhall_generated_parser/build.rs
+++ b/dhall_generated_parser/build.rs
@@ -1,7 +1,5 @@
-use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader, Read, Write};
-use std::path::Path;
use abnf_to_pest::render_rules_to_pest;
@@ -90,20 +88,5 @@ fn main() -> std::io::Result<()> {
writeln!(&mut file)?;
writeln!(&mut file, "{}", render_rules_to_pest(rules).pretty(80))?;
- // Generate pest parser manually to avoid spurious recompilations
- let derived = {
- let pest_path = "dhall.pest";
- let pest = quote::quote! {
- #[grammar = #pest_path]
- pub struct DhallParser;
- };
- pest_generator::derive_parser(pest, false)
- };
-
- let out_dir = env::var("OUT_DIR").unwrap();
- let grammar_path = Path::new(&out_dir).join("grammar.rs");
- let mut file = File::create(grammar_path)?;
- writeln!(file, "pub struct DhallParser;\n{}", derived,)?;
-
Ok(())
}
diff --git a/dhall_generated_parser/src/lib.rs b/dhall_generated_parser/src/lib.rs
index 280b75e..fbb9ccd 100644
--- a/dhall_generated_parser/src/lib.rs
+++ b/dhall_generated_parser/src/lib.rs
@@ -14,4 +14,9 @@
// additional overrides are done in ../build.rs.
// The lines that are commented out in ./dhall.pest.visibility are marked as
// silent (see pest docs for what that means) in the generated pest file.
-include!(concat!(env!("OUT_DIR"), "/grammar.rs"));
+
+use pest_derive::Parser;
+
+#[derive(Parser)]
+#[grammar = "dhall.pest"]
+pub struct DhallParser;
diff --git a/dhall_proc_macros/src/lib.rs b/dhall_proc_macros/src/lib.rs
index 37e8f9f..5304429 100644
--- a/dhall_proc_macros/src/lib.rs
+++ b/dhall_proc_macros/src/lib.rs
@@ -1,4 +1,3 @@
-#![feature(drain_filter)]
//! This crate contains the code-generation primitives for the [dhall-rust][dhall-rust] crate.
//! This is highly unstable and breaks regularly; use at your own risk.
//!
@@ -7,8 +6,6 @@
extern crate proc_macro;
mod derive;
-mod make_parser;
-mod parse_children;
use proc_macro::TokenStream;
@@ -16,19 +13,3 @@ use proc_macro::TokenStream;
pub fn derive_static_type(input: TokenStream) -> TokenStream {
derive::derive_static_type(input)
}
-
-#[proc_macro_attribute]
-pub fn make_parser(attrs: TokenStream, input: TokenStream) -> TokenStream {
- TokenStream::from(match make_parser::make_parser(attrs, input) {
- Ok(tokens) => tokens,
- Err(err) => err.to_compile_error(),
- })
-}
-
-#[proc_macro]
-pub fn parse_children(input: TokenStream) -> TokenStream {
- TokenStream::from(match parse_children::parse_children(input) {
- Ok(tokens) => tokens,
- Err(err) => err.to_compile_error(),
- })
-}
diff --git a/dhall_proc_macros/src/make_parser.rs b/dhall_proc_macros/src/make_parser.rs
deleted file mode 100644
index a17ab61..0000000
--- a/dhall_proc_macros/src/make_parser.rs
+++ /dev/null
@@ -1,334 +0,0 @@
-use std::collections::HashMap;
-use std::iter;
-
-use quote::quote;
-use syn::parse::{Parse, ParseStream, Result};
-use syn::spanned::Spanned;
-use syn::{
- parse_quote, Error, Expr, FnArg, Ident, ImplItem, ImplItemMethod, ItemImpl,
- LitBool, Pat, Token,
-};
-
-mod kw {
- syn::custom_keyword!(shortcut);
-}
-
-struct AliasArgs {
- target: Ident,
- is_shortcut: bool,
-}
-
-struct PrecClimbArgs {
- child_rule: Ident,
- climber: Expr,
-}
-
-struct AliasSrc {
- ident: Ident,
- is_shortcut: bool,
-}
-
-struct ParsedFn<'a> {
- // Body of the function
- function: &'a mut ImplItemMethod,
- // Name of the function.
- fn_name: Ident,
- // Name of the first argument of the function, which should be of type `ParseInput`.
- input_arg: Ident,
- // List of aliases pointing to this function
- alias_srcs: Vec<AliasSrc>,
-}
-
-impl Parse for AliasArgs {
- fn parse(input: ParseStream) -> Result<Self> {
- let target = input.parse()?;
- let is_shortcut = if input.peek(Token![,]) {
- // #[alias(rule, shortcut = true)]
- let _: Token![,] = input.parse()?;
- let _: kw::shortcut = input.parse()?;
- let _: Token![=] = input.parse()?;
- let b: LitBool = input.parse()?;
- b.value
- } else {
- // #[alias(rule)]
- false
- };
- Ok(AliasArgs {
- target,
- is_shortcut,
- })
- }
-}
-
-impl Parse for PrecClimbArgs {
- fn parse(input: ParseStream) -> Result<Self> {
- let child_rule = input.parse()?;
- let _: Token![,] = input.parse()?;
- let climber = input.parse()?;
- Ok(PrecClimbArgs {
- child_rule,
- climber,
- })
- }
-}
-
-fn collect_aliases(
- imp: &mut ItemImpl,
-) -> Result<HashMap<Ident, Vec<AliasSrc>>> {
- let functions = imp.items.iter_mut().flat_map(|item| match item {
- ImplItem::Method(m) => Some(m),
- _ => None,
- });
-
- let mut alias_map = HashMap::new();
- for function in functions {
- let fn_name = function.sig.ident.clone();
- let mut alias_attrs = function
- .attrs
- .drain_filter(|attr| attr.path.is_ident("alias"))
- .collect::<Vec<_>>()
- .into_iter();
-
- if let Some(attr) = alias_attrs.next() {
- let args: AliasArgs = attr.parse_args()?;
- alias_map.entry(args.target).or_insert_with(Vec::new).push(
- AliasSrc {
- ident: fn_name,
- is_shortcut: args.is_shortcut,
- },
- );
- }
- if let Some(attr) = alias_attrs.next() {
- return Err(Error::new(
- attr.span(),
- "expected at most one alias attribute",
- ));
- }
- }
-
- Ok(alias_map)
-}
-
-fn parse_fn<'a>(
- function: &'a mut ImplItemMethod,
- alias_map: &mut HashMap<Ident, Vec<AliasSrc>>,
-) -> Result<ParsedFn<'a>> {
- let fn_name = function.sig.ident.clone();
- // Get the name of the first (`input`) function argument
- let input_arg = function.sig.inputs.first().ok_or_else(|| {
- Error::new(
- function.sig.inputs.span(),
- "a rule function needs an `input` argument",
- )
- })?;
- let input_arg = match &input_arg {
- FnArg::Receiver(_) => return Err(Error::new(
- input_arg.span(),
- "a rule function should not have a `self` argument",
- )),
- FnArg::Typed(input_arg) => match &*input_arg.pat{
- Pat::Ident(ident) => ident.ident.clone(),
- _ => return Err(Error::new(
- input_arg.span(),
- "this argument should be a plain identifier instead of a pattern",
- )),
- }
- };
-
- let alias_srcs = alias_map.remove(&fn_name).unwrap_or_else(Vec::new);
-
- Ok(ParsedFn {
- function,
- fn_name,
- input_arg,
- alias_srcs,
- })
-}
-
-fn apply_special_attrs(f: &mut ParsedFn, rule_enum: &Ident) -> Result<()> {
- let function = &mut *f.function;
- let fn_name = &f.fn_name;
- let input_arg = &f.input_arg;
-
- *function = parse_quote!(
- #[allow(non_snake_case)]
- #function
- );
-
- // `prec_climb` attr
- let prec_climb_attrs: Vec<_> = function
- .attrs
- .drain_filter(|attr| attr.path.is_ident("prec_climb"))
- .collect();
-
- if prec_climb_attrs.len() > 1 {
- return Err(Error::new(
- prec_climb_attrs[1].span(),
- "expected at most one prec_climb attribute",
- ));
- } else if prec_climb_attrs.is_empty() {
- // do nothing
- } else {
- let attr = prec_climb_attrs.into_iter().next().unwrap();
- let PrecClimbArgs {
- child_rule,
- climber,
- } = attr.parse_args()?;
-
- function.block = parse_quote!({
- #function
-
- #climber.climb(
- #input_arg.pair.clone().into_inner(),
- |p| Self::#child_rule(#input_arg.with_pair(p)),
- |l, op, r| {
- #fn_name(#input_arg.clone(), l?, op, r?)
- },
- )
- });
- // Remove the 3 last arguments to keep only the `input` one
- function.sig.inputs.pop();
- function.sig.inputs.pop();
- function.sig.inputs.pop();
- // Check that an argument remains
- function.sig.inputs.first().ok_or_else(|| {
- Error::new(
- function.sig.inputs.span(),
- "a prec_climb function needs 4 arguments",
- )
- })?;
- }
-
- // `alias` attr
- if !f.alias_srcs.is_empty() {
- let aliases = f.alias_srcs.iter().map(|src| &src.ident);
- let block = &function.block;
- function.block = parse_quote!({
- let mut #input_arg = #input_arg;
- // While the current rule allows shortcutting, and there is a single child, and the
- // child can still be parsed by the current function, then skip to that child.
- while <Self as PestConsumer>::allows_shortcut(#input_arg.as_rule()) {
- if let Some(child) = #input_arg.single_child() {
- if &<Self as PestConsumer>::rule_alias(child.as_rule())
- == stringify!(#fn_name) {
- #input_arg = child;
- continue;
- }
- }
- break
- }
-
- match #input_arg.as_rule() {
- #(#rule_enum::#aliases => Self::#aliases(#input_arg),)*
- #rule_enum::#fn_name => #block,
- r => unreachable!(
- "make_parser: called {} on {:?}",
- stringify!(#fn_name),
- r
- )
- }
- });
- }
-
- Ok(())
-}
-
-pub fn make_parser(
- attrs: proc_macro::TokenStream,
- input: proc_macro::TokenStream,
-) -> Result<proc_macro2::TokenStream> {
- let rule_enum: Ident = syn::parse(attrs)?;
- let mut imp: ItemImpl = syn::parse(input)?;
-
- let mut alias_map = collect_aliases(&mut imp)?;
- let rule_alias_branches: Vec<_> = alias_map
- .iter()
- .flat_map(|(tgt, srcs)| iter::repeat(tgt).zip(srcs))
- .map(|(tgt, src)| {
- let ident = &src.ident;
- quote!(
- #rule_enum::#ident => stringify!(#tgt).to_string(),
- )
- })
- .collect();
- let shortcut_branches: Vec<_> = alias_map
- .iter()
- .flat_map(|(_tgt, srcs)| srcs)
- .map(|AliasSrc { ident, is_shortcut }| {
- quote!(
- #rule_enum::#ident => #is_shortcut,
- )
- })
- .collect();
-
- let fn_map: HashMap<Ident, ParsedFn> = imp
- .items
- .iter_mut()
- .flat_map(|item| match item {
- ImplItem::Method(m) => Some(m),
- _ => None,
- })
- .map(|method| {
- let mut f = parse_fn(method, &mut alias_map)?;
- apply_special_attrs(&mut f, &rule_enum)?;
- Ok((f.fn_name.clone(), f))
- })
- .collect::<Result<_>>()?;
-
- // Entries that remain in the alias map don't have a matching method, so we create one.
- let extra_fns: Vec<_> = alias_map
- .iter()
- .map(|(tgt, srcs)| {
- // Get the signature of one of the functions that has this alias. They should all have
- // essentially the same signature anyways.
- let f = fn_map.get(&srcs.first().unwrap().ident).unwrap();
- let input_arg = f.input_arg.clone();
- let mut sig = f.function.sig.clone();
- sig.ident = tgt.clone();
- let srcs = srcs.iter().map(|src| &src.ident);
-
- Ok(parse_quote!(
- #sig {
- match #input_arg.as_rule() {
- #(#rule_enum::#srcs => Self::#srcs(#input_arg),)*
- // We can't match on #rule_enum::#tgt since `tgt` might be an arbitrary
- // identifier.
- r if &format!("{:?}", r) == stringify!(#tgt) =>
- return Err(#input_arg.error(format!(
- "make_parser: missing method for rule {}",
- stringify!(#tgt),
- ))),
- r => unreachable!(
- "make_parser: called {} on {:?}",
- stringify!(#tgt),
- r
- )
- }
- }
- ))
- })
- .collect::<Result<_>>()?;
- imp.items.extend(extra_fns);
-
- let ty = &imp.self_ty;
- let (impl_generics, _, where_clause) = imp.generics.split_for_impl();
- Ok(quote!(
- impl #impl_generics PestConsumer for #ty #where_clause {
- type Rule = #rule_enum;
- fn rule_alias(rule: Self::Rule) -> String {
- match rule {
- #(#rule_alias_branches)*
- r => format!("{:?}", r),
- }
- }
- fn allows_shortcut(rule: Self::Rule) -> bool {
- match rule {
- #(#shortcut_branches)*
- _ => false,
- }
- }
- }
-
- #imp
- ))
-}
diff --git a/dhall_proc_macros/src/parse_children.rs b/dhall_proc_macros/src/parse_children.rs
deleted file mode 100644
index a35c03f..0000000
--- a/dhall_proc_macros/src/parse_children.rs
+++ /dev/null
@@ -1,215 +0,0 @@
-use proc_macro2::{Span, TokenStream};
-use quote::quote;
-use syn::parse::{Parse, ParseStream, Result};
-use syn::punctuated::Punctuated;
-use syn::spanned::Spanned;
-use syn::{bracketed, parenthesized, token, Error, Expr, Ident, Pat, Token};
-
-#[derive(Debug, Clone)]
-struct ChildrenBranch {
- pattern_span: Span,
- pattern: Punctuated<ChildrenBranchPatternItem, Token![,]>,
- body: Expr,
-}
-
-#[derive(Debug, Clone)]
-enum ChildrenBranchPatternItem {
- Single { rule_name: Ident, binder: Pat },
- Multiple { rule_name: Ident, binder: Ident },
-}
-
-#[derive(Debug, Clone)]
-struct ParseChildrenInput {
- input_expr: Expr,
- branches: Punctuated<ChildrenBranch, Token![,]>,
-}
-
-impl Parse for ChildrenBranch {
- fn parse(input: ParseStream) -> Result<Self> {
- let contents;
- let _: token::Bracket = bracketed!(contents in input);
- let pattern_unparsed: TokenStream = contents.fork().parse()?;
- let pattern_span = pattern_unparsed.span();
- let pattern = Punctuated::parse_terminated(&contents)?;
- let _: Token![=>] = input.parse()?;
- let body = input.parse()?;
-
- Ok(ChildrenBranch {
- pattern_span,
- pattern,
- body,
- })
- }
-}
-
-impl Parse for ChildrenBranchPatternItem {
- fn parse(input: ParseStream) -> Result<Self> {
- let contents;
- let rule_name = input.parse()?;
- parenthesized!(contents in input);
- if input.peek(Token![..]) {
- let binder = contents.parse()?;
- let _: Token![..] = input.parse()?;
- Ok(ChildrenBranchPatternItem::Multiple { rule_name, binder })
- } else if input.is_empty() || input.peek(Token![,]) {
- let binder = contents.parse()?;
- Ok(ChildrenBranchPatternItem::Single { rule_name, binder })
- } else {
- Err(input.error("expected `..` or nothing"))
- }
- }
-}
-
-impl Parse for ParseChildrenInput {
- fn parse(input: ParseStream) -> Result<Self> {
- let input_expr = input.parse()?;
- let _: Token![;] = input.parse()?;
- let branches = Punctuated::parse_terminated(input)?;
-
- Ok(ParseChildrenInput {
- input_expr,
- branches,
- })
- }
-}
-
-fn make_parser_branch(
- branch: &ChildrenBranch,
- i_inputs: &Ident,
-) -> Result<TokenStream> {
- use ChildrenBranchPatternItem::{Multiple, Single};
-
- let body = &branch.body;
-
- // Convert the input pattern into a pattern-match on the Rules of the children. This uses
- // slice_patterns.
- // A single pattern just checks that the rule matches; a variable-length pattern binds the
- // subslice and checks, in the if-guard, that its elements all match the chosen Rule.
- let i_variable_pattern =
- Ident::new("___variable_pattern", Span::call_site());
- let match_pat = branch.pattern.iter().map(|item| match item {
- Single { rule_name, .. } => quote!(stringify!(#rule_name)),
- Multiple { .. } => quote!(#i_variable_pattern @ ..),
- });
- let match_filter = branch.pattern.iter().map(|item| match item {
- Single { .. } => quote!(),
- Multiple { rule_name, .. } => quote!(
- {
- // We can't use .all() directly in the pattern guard; see
- // https://github.com/rust-lang/rust/issues/59803.
- let all_match = |slice: &[_]| {
- slice.iter().all(|r|
- *r == stringify!(#rule_name)
- )
- };
- all_match(#i_variable_pattern)
- } &&
- ),
- });
-
- // Once we have found a branch that matches, we need to parse the children.
- let mut singles_before_multiple = Vec::new();
- let mut multiple = None;
- let mut singles_after_multiple = Vec::new();
- for item in &branch.pattern {
- match item {
- Single {
- rule_name, binder, ..
- } => {
- if multiple.is_none() {
- singles_before_multiple.push((rule_name, binder))
- } else {
- singles_after_multiple.push((rule_name, binder))
- }
- }
- Multiple {
- rule_name, binder, ..
- } => {
- if multiple.is_none() {
- multiple = Some((rule_name, binder))
- } else {
- return Err(Error::new(
- branch.pattern_span.clone(),
- "multiple variable-length patterns are not allowed",
- ));
- }
- }
- }
- }
- let mut parses = Vec::new();
- for (rule_name, binder) in singles_before_multiple.into_iter() {
- parses.push(quote!(
- let #binder = Self::#rule_name(
- #i_inputs.next().unwrap()
- )?;
- ))
- }
- // Note the `rev()`: we are taking inputs from the end of the iterator in reverse order, so that
- // only the unmatched inputs are left for the variable-length pattern, if any.
- for (rule_name, binder) in singles_after_multiple.into_iter().rev() {
- parses.push(quote!(
- let #binder = Self::#rule_name(
- #i_inputs.next_back().unwrap()
- )?;
- ))
- }
- if let Some((rule_name, binder)) = multiple {
- parses.push(quote!(
- let #binder = #i_inputs
- .map(|i| Self::#rule_name(i))
- .collect::<Result<Vec<_>, _>>()?
- .into_iter();
- ))
- }
-
- Ok(quote!(
- [#(#match_pat),*] if #(#match_filter)* true => {
- #(#parses)*
- #body
- }
- ))
-}
-
-pub fn parse_children(
- input: proc_macro::TokenStream,
-) -> Result<proc_macro2::TokenStream> {
- let input: ParseChildrenInput = syn::parse(input)?;
-
- let i_children_rules = Ident::new("___children_rules", Span::call_site());
- let i_inputs = Ident::new("___inputs", Span::call_site());
-
- let input_expr = &input.input_expr;
- let branches = input
- .branches
- .iter()
- .map(|br| make_parser_branch(br, &i_inputs))
- .collect::<Result<Vec<_>>>()?;
-
- Ok(quote!({
- let #i_children_rules: Vec<_> = #input_expr.pair
- .clone()
- .into_inner()
- .map(|p| p.as_rule())
- .map(<Self as PestConsumer>::rule_alias)
- .collect();
- let #i_children_rules: Vec<&str> = #i_children_rules
- .iter()
- .map(String::as_str)
- .collect();
-
- #[allow(unused_mut)]
- let mut #i_inputs = #input_expr
- .pair
- .clone()
- .into_inner()
- .map(|p| #input_expr.with_pair(p));
-
- #[allow(unreachable_code)]
- match #i_children_rules.as_slice() {
- #(#branches,)*
- [..] => return Err(#input_expr.error(
- format!("Unexpected children: {:?}", #i_children_rules)
- )),
- }
- }))
-}
diff --git a/dhall_syntax/Cargo.toml b/dhall_syntax/Cargo.toml
index eb492d0..b98c4a4 100644
--- a/dhall_syntax/Cargo.toml
+++ b/dhall_syntax/Cargo.toml
@@ -18,3 +18,4 @@ hex = "0.3.2"
lazy_static = "1.4.0"
dhall_generated_parser = { path = "../dhall_generated_parser" }
dhall_proc_macros = { path = "../dhall_proc_macros" }
+pest_consume = "1.0"
diff --git a/dhall_syntax/src/core/expr.rs b/dhall_syntax/src/core/expr.rs
index 2cb23c9..74b481f 100644
--- a/dhall_syntax/src/core/expr.rs
+++ b/dhall_syntax/src/core/expr.rs
@@ -37,6 +37,16 @@ impl Span {
end: sp.end(),
}
}
+ /// Takes the union of the two spans. Assumes that the spans come from the same input.
+ /// This will also capture any input between the spans.
+ pub fn union(&self, other: &Span) -> Self {
+ use std::cmp::{max, min};
+ Span {
+ input: self.input.clone(),
+ start: min(self.start, other.start),
end: max(self.end, other.end),
+ }
+ }
}
/// Double with bitwise equality
@@ -324,8 +334,11 @@ impl<E> Expr<E> {
pub fn as_mut(&mut self) -> &mut RawExpr<E> {
&mut self.0.as_mut().0
}
+ pub fn span(&self) -> Option<Span> {
+ self.0.as_ref().1.clone()
+ }
- pub fn new(x: RawExpr<E>, n: Span) -> Self {
+ pub(crate) fn new(x: RawExpr<E>, n: Span) -> Self {
Expr(Box::new((x, Some(n))))
}
@@ -384,13 +397,6 @@ pub fn rc<E>(x: RawExpr<E>) -> Expr<E> {
Expr::from_expr_no_span(x)
}
-pub(crate) fn spanned<E>(span: Span, x: RawExpr<E>) -> Expr<E> {
- Expr::new(x, span)
-}
-pub(crate) fn unspanned<E>(x: RawExpr<E>) -> Expr<E> {
- Expr::from_expr_no_span(x)
-}
-
/// Add an isize to an usize
/// Panics on over/underflow
fn add_ui(u: usize, i: isize) -> Option<usize> {
diff --git a/dhall_syntax/src/lib.rs b/dhall_syntax/src/lib.rs
index 95f40c2..b8fa19f 100644
--- a/dhall_syntax/src/lib.rs
+++ b/dhall_syntax/src/lib.rs
@@ -1,7 +1,5 @@
#![feature(trace_macros)]
-#![feature(slice_patterns)]
#![feature(never_type)]
-#![feature(proc_macro_hygiene)]
#![allow(
clippy::many_single_char_names,
clippy::should_implement_trait,
diff --git a/dhall_syntax/src/parser.rs b/dhall_syntax/src/parser.rs
index 41d6d04..f2dea53 100644
--- a/dhall_syntax/src/parser.rs
+++ b/dhall_syntax/src/parser.rs
@@ -1,13 +1,11 @@
use itertools::Itertools;
-use pest::iterators::Pair;
use pest::prec_climber as pcl;
use pest::prec_climber::PrecClimber;
-use pest::Parser;
-use std::borrow::Cow;
use std::rc::Rc;
-use dhall_generated_parser::{DhallParser, Rule};
-use dhall_proc_macros::{make_parser, parse_children};
+use dgp::Rule;
+use dhall_generated_parser as dgp;
+use pest_consume::{match_nodes, Parser};
use crate::map::{DupTreeMap, DupTreeSet};
use crate::ExprF::*;
@@ -20,115 +18,11 @@ use crate::*;
type ParsedText<E> = InterpolatedText<Expr<E>>;
type ParsedTextContents<E> = InterpolatedTextContents<Expr<E>>;
+type ParseInput<'input> = pest_consume::Node<'input, Rule, Rc<str>>;
pub type ParseError = pest::error::Error<Rule>;
-
pub type ParseResult<T> = Result<T, ParseError>;
-#[derive(Debug, Clone)]
-struct ParseInput<'input, Rule>
-where
- Rule: pest::RuleType,
-{
- pair: Pair<'input, Rule>,
- original_input_str: Rc<str>,
-}
-
-impl<'input> ParseInput<'input, Rule> {
- fn error(&self, message: String) -> ParseError {
- let message = format!(
- "{} while matching on:\n{}",
- message,
- debug_pair(self.pair.clone())
- );
- let e = pest::error::ErrorVariant::CustomError { message };
- pest::error::Error::new_from_span(e, self.pair.as_span())
- }
- fn parse(input_str: &'input str, rule: Rule) -> ParseResult<Self> {
- let mut pairs = DhallParser::parse(rule, input_str)?;
- // TODO: proper errors
- let pair = pairs.next().unwrap();
- assert_eq!(pairs.next(), None);
- Ok(ParseInput {
- original_input_str: input_str.to_string().into(),
- pair,
- })
- }
- fn with_pair(&self, new_pair: Pair<'input, Rule>) -> Self {
- ParseInput {
- pair: new_pair,
- original_input_str: self.original_input_str.clone(),
- }
- }
- /// If the contained pair has exactly one child, return a new Self containing it.
- fn single_child(&self) -> Option<Self> {
- let mut children = self.pair.clone().into_inner();
- if let Some(child) = children.next() {
- if children.next().is_none() {
- return Some(self.with_pair(child));
- }
- }
- None
- }
- fn as_span(&self) -> Span {
- Span::make(self.original_input_str.clone(), self.pair.as_span())
- }
- fn as_str(&self) -> &'input str {
- self.pair.as_str()
- }
- fn as_rule(&self) -> Rule {
- self.pair.as_rule()
- }
-}
-
-// Used by the macros.
-trait PestConsumer {
- type Rule: pest::RuleType;
- fn rule_alias(rule: Self::Rule) -> String;
- fn allows_shortcut(rule: Self::Rule) -> bool;
-}
-
-fn debug_pair(pair: Pair<Rule>) -> String {
- use std::fmt::Write;
- let mut s = String::new();
- fn aux(s: &mut String, indent: usize, prefix: String, pair: Pair<Rule>) {
- let indent_str = "| ".repeat(indent);
- let rule = pair.as_rule();
- let contents = pair.as_str();
- let mut inner = pair.into_inner();
- let mut first = true;
- while let Some(p) = inner.next() {
- if first {
- first = false;
- let last = inner.peek().is_none();
- if last && p.as_str() == contents {
- let prefix = format!("{}{:?} > ", prefix, rule);
- aux(s, indent, prefix, p);
- continue;
- } else {
- writeln!(
- s,
- r#"{}{}{:?}: "{}""#,
- indent_str, prefix, rule, contents
- )
- .unwrap();
- }
- }
- aux(s, indent + 1, "".into(), p);
- }
- if first {
- writeln!(
- s,
- r#"{}{}{:?}: "{}""#,
- indent_str, prefix, rule, contents
- )
- .unwrap();
- }
- }
- aux(&mut s, 0, "".into(), pair);
- s
-}
-
#[derive(Debug)]
enum Either<A, B> {
Left(A),
@@ -173,6 +67,16 @@ impl crate::Builtin {
}
}
+fn input_to_span(input: ParseInput) -> Span {
+ Span::make(input.user_data().clone(), input.as_pair().as_span())
+}
+fn spanned<E>(input: ParseInput, x: RawExpr<E>) -> Expr<E> {
+ Expr::new(x, input_to_span(input))
+}
+fn spanned_union<E>(span1: Span, span2: Span, x: RawExpr<E>) -> Expr<E> {
+ Expr::new(x, span1.union(&span2))
+}
+
// Trim the shared indent off of a vec of lines, as defined by the Dhall semantics of multiline
// literals.
fn trim_indent<E: Clone>(lines: &mut Vec<ParsedText<E>>) {
@@ -242,27 +146,27 @@ lazy_static::lazy_static! {
};
}
-struct Parsers;
+struct DhallParser;
-#[make_parser(Rule)]
-impl Parsers {
- fn EOI(_input: ParseInput<Rule>) -> ParseResult<()> {
+#[pest_consume::parser(parser = dgp::DhallParser, rule = dgp::Rule)]
+impl DhallParser {
+ fn EOI(_input: ParseInput) -> ParseResult<()> {
Ok(())
}
#[alias(label)]
- fn simple_label(input: ParseInput<Rule>) -> ParseResult<Label> {
+ fn simple_label(input: ParseInput) -> ParseResult<Label> {
Ok(Label::from(input.as_str()))
}
#[alias(label)]
- fn quoted_label(input: ParseInput<Rule>) -> ParseResult<Label> {
+ fn quoted_label(input: ParseInput) -> ParseResult<Label> {
Ok(Label::from(input.as_str()))
}
fn double_quote_literal<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<ParsedText<E>> {
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.into_children();
[double_quote_chunk(chunks)..] => {
chunks.collect()
}
@@ -270,9 +174,9 @@ impl Parsers {
}
fn double_quote_chunk<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<ParsedTextContents<E>> {
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.into_children();
[expression(e)] => {
InterpolatedTextContents::Expr(e)
},
@@ -282,7 +186,7 @@ impl Parsers {
))
}
#[alias(double_quote_char)]
- fn double_quote_escaped(input: ParseInput<Rule>) -> ParseResult<String> {
+ fn double_quote_escaped(input: ParseInput) -> ParseResult<String> {
Ok(match input.as_str() {
"\"" => "\"".to_owned(),
"$" => "$".to_owned(),
@@ -352,16 +256,14 @@ impl Parsers {
}
})
}
- fn double_quote_char<'a>(
- input: ParseInput<'a, Rule>,
- ) -> ParseResult<String> {
+ fn double_quote_char(input: ParseInput) -> ParseResult<String> {
Ok(input.as_str().to_owned())
}
fn single_quote_literal<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<ParsedText<E>> {
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.into_children();
[single_quote_continue(lines)] => {
let newline: ParsedText<E> = "\n".to_string().into();
@@ -382,29 +284,23 @@ impl Parsers {
}
))
}
- fn single_quote_char<'a>(
- input: ParseInput<'a, Rule>,
- ) -> ParseResult<&'a str> {
+ fn single_quote_char(input: ParseInput) -> ParseResult<&str> {
Ok(input.as_str())
}
#[alias(single_quote_char)]
- fn escaped_quote_pair<'a>(
- _input: ParseInput<'a, Rule>,
- ) -> ParseResult<&'a str> {
+ fn escaped_quote_pair(_input: ParseInput) -> ParseResult<&str> {
Ok("''")
}
#[alias(single_quote_char)]
- fn escaped_interpolation<'a>(
- _input: ParseInput<'a, Rule>,
- ) -> ParseResult<&'a str> {
+ fn escaped_interpolation(_input: ParseInput) -> ParseResult<&str> {
Ok("${")
}
// Returns a vec of lines in reversed order, where each line is also in reversed order.
fn single_quote_continue<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<Vec<Vec<ParsedTextContents<E>>>> {
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.into_children();
[expression(e), single_quote_continue(lines)] => {
let c = InterpolatedTextContents::Expr(e);
let mut lines = lines;
@@ -429,7 +325,7 @@ impl Parsers {
}
#[alias(expression)]
- fn builtin<E: Clone>(input: ParseInput<Rule>) -> ParseResult<Expr<E>> {
+ fn builtin<E: Clone>(input: ParseInput) -> ParseResult<Expr<E>> {
let s = input.as_str();
let e = match crate::Builtin::parse(s) {
Some(b) => Builtin(b),
@@ -442,30 +338,24 @@ impl Parsers {
_ => Err(input.error(format!("Unrecognized builtin: '{}'", s)))?,
},
};
- Ok(spanned(input.as_span(), e))
+ Ok(spanned(input, e))
}
#[alias(double_literal)]
- fn NaN(_input: ParseInput<Rule>) -> ParseResult<core::Double> {
+ fn NaN(_input: ParseInput) -> ParseResult<core::Double> {
Ok(std::f64::NAN.into())
}
#[alias(double_literal)]
- fn minus_infinity_literal(
- _input: ParseInput<Rule>,
- ) -> ParseResult<core::Double> {
+ fn minus_infinity_literal(_input: ParseInput) -> ParseResult<core::Double> {
Ok(std::f64::NEG_INFINITY.into())
}
#[alias(double_literal)]
- fn plus_infinity_literal(
- _input: ParseInput<Rule>,
- ) -> ParseResult<core::Double> {
+ fn plus_infinity_literal(_input: ParseInput) -> ParseResult<core::Double> {
Ok(std::f64::INFINITY.into())
}
#[alias(double_literal)]
- fn numeric_double_literal(
- input: ParseInput<Rule>,
- ) -> ParseResult<core::Double> {
+ fn numeric_double_literal(input: ParseInput) -> ParseResult<core::Double> {
let s = input.as_str().trim();
match s.parse::<f64>() {
Ok(x) if x.is_infinite() => Err(input.error(format!(
@@ -477,7 +367,7 @@ impl Parsers {
}
}
- fn natural_literal(input: ParseInput<Rule>) -> ParseResult<core::Natural> {
+ fn natural_literal(input: ParseInput) -> ParseResult<core::Natural> {
input
.as_str()
.trim()
@@ -485,7 +375,7 @@ impl Parsers {
.map_err(|e| input.error(format!("{}", e)))
}
- fn integer_literal(input: ParseInput<Rule>) -> ParseResult<core::Integer> {
+ fn integer_literal(input: ParseInput) -> ParseResult<core::Integer> {
input
.as_str()
.trim()
@@ -494,36 +384,26 @@ impl Parsers {
}
#[alias(expression, shortcut = true)]
- fn identifier<E: Clone>(input: ParseInput<Rule>) -> ParseResult<Expr<E>> {
- Ok(parse_children!(input;
- [variable(v)] => {
- spanned(input.as_span(), Var(v))
- },
+ fn identifier<E: Clone>(input: ParseInput) -> ParseResult<Expr<E>> {
+ Ok(match_nodes!(input.children();
+ [variable(v)] => spanned(input, Var(v)),
[expression(e)] => e,
))
}
- fn variable(input: ParseInput<Rule>) -> ParseResult<V<Label>> {
- Ok(parse_children!(input;
- [label(l), natural_literal(idx)] => {
- V(l, idx)
- },
- [label(l)] => {
- V(l, 0)
- },
+ fn variable(input: ParseInput) -> ParseResult<V<Label>> {
+ Ok(match_nodes!(input.into_children();
+ [label(l), natural_literal(idx)] => V(l, idx),
+ [label(l)] => V(l, 0),
))
}
#[alias(path_component)]
- fn unquoted_path_component<'a>(
- input: ParseInput<'a, Rule>,
- ) -> ParseResult<String> {
+ fn unquoted_path_component(input: ParseInput) -> ParseResult<String> {
Ok(input.as_str().to_string())
}
#[alias(path_component)]
- fn quoted_path_component<'a>(
- input: ParseInput<'a, Rule>,
- ) -> ParseResult<String> {
+ fn quoted_path_component(input: ParseInput) -> ParseResult<String> {
#[rustfmt::skip]
const RESERVED: &percent_encoding::AsciiSet =
&percent_encoding::CONTROLS
@@ -549,8 +429,8 @@ impl Parsers {
})
.collect())
}
- fn path(input: ParseInput<Rule>) -> ParseResult<FilePath> {
- Ok(parse_children!(input;
+ fn path(input: ParseInput) -> ParseResult<FilePath> {
+ Ok(match_nodes!(input.into_children();
[path_component(components)..] => {
FilePath { file_path: components.collect() }
}
@@ -559,47 +439,43 @@ impl Parsers {
#[alias(import_type)]
fn local<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<ImportLocation<Expr<E>>> {
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.into_children();
[local_path((prefix, p))] => ImportLocation::Local(prefix, p),
))
}
#[alias(local_path)]
fn parent_path(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<(FilePrefix, FilePath)> {
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.into_children();
[path(p)] => (FilePrefix::Parent, p)
))
}
#[alias(local_path)]
- fn here_path(
- input: ParseInput<Rule>,
- ) -> ParseResult<(FilePrefix, FilePath)> {
- Ok(parse_children!(input;
+ fn here_path(input: ParseInput) -> ParseResult<(FilePrefix, FilePath)> {
+ Ok(match_nodes!(input.into_children();
[path(p)] => (FilePrefix::Here, p)
))
}
#[alias(local_path)]
- fn home_path(
- input: ParseInput<Rule>,
- ) -> ParseResult<(FilePrefix, FilePath)> {
- Ok(parse_children!(input;
+ fn home_path(input: ParseInput) -> ParseResult<(FilePrefix, FilePath)> {
+ Ok(match_nodes!(input.into_children();
[path(p)] => (FilePrefix::Home, p)
))
}
#[alias(local_path)]
fn absolute_path(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<(FilePrefix, FilePath)> {
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.into_children();
[path(p)] => (FilePrefix::Absolute, p)
))
}
- fn scheme(input: ParseInput<Rule>) -> ParseResult<Scheme> {
+ fn scheme(input: ParseInput) -> ParseResult<Scheme> {
Ok(match input.as_str() {
"http" => Scheme::HTTP,
"https" => Scheme::HTTPS,
@@ -607,10 +483,8 @@ impl Parsers {
})
}
- fn http_raw<E: Clone>(
- input: ParseInput<Rule>,
- ) -> ParseResult<URL<Expr<E>>> {
- Ok(parse_children!(input;
+ fn http_raw<E: Clone>(input: ParseInput) -> ParseResult<URL<Expr<E>>> {
+ Ok(match_nodes!(input.into_children();
[scheme(sch), authority(auth), path(p)] => URL {
scheme: sch,
authority: auth,
@@ -628,19 +502,19 @@ impl Parsers {
))
}
- fn authority(input: ParseInput<Rule>) -> ParseResult<String> {
+ fn authority(input: ParseInput) -> ParseResult<String> {
Ok(input.as_str().to_owned())
}
- fn query(input: ParseInput<Rule>) -> ParseResult<String> {
+ fn query(input: ParseInput) -> ParseResult<String> {
Ok(input.as_str().to_owned())
}
#[alias(import_type)]
fn http<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<ImportLocation<Expr<E>>> {
- Ok(ImportLocation::Remote(parse_children!(input;
+ Ok(ImportLocation::Remote(match_nodes!(input.into_children();
[http_raw(url)] => url,
[http_raw(url), expression(e)] => URL { headers: Some(e), ..url },
)))
@@ -648,53 +522,49 @@ impl Parsers {
#[alias(import_type)]
fn env<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<ImportLocation<Expr<E>>> {
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.into_children();
[environment_variable(v)] => ImportLocation::Env(v),
))
}
#[alias(environment_variable)]
- fn bash_environment_variable(
- input: ParseInput<Rule>,
- ) -> ParseResult<String> {
+ fn bash_environment_variable(input: ParseInput) -> ParseResult<String> {
Ok(input.as_str().to_owned())
}
#[alias(environment_variable)]
- fn posix_environment_variable(
- input: ParseInput<Rule>,
- ) -> ParseResult<String> {
- Ok(parse_children!(input;
+ fn posix_environment_variable(input: ParseInput) -> ParseResult<String> {
+ Ok(match_nodes!(input.into_children();
[posix_environment_variable_character(chars)..] => {
chars.collect()
},
))
}
- fn posix_environment_variable_character<'a>(
- input: ParseInput<'a, Rule>,
- ) -> ParseResult<Cow<'a, str>> {
+ fn posix_environment_variable_character(
+ input: ParseInput,
+ ) -> ParseResult<&str> {
Ok(match input.as_str() {
- "\\\"" => Cow::Owned("\"".to_owned()),
- "\\\\" => Cow::Owned("\\".to_owned()),
- "\\a" => Cow::Owned("\u{0007}".to_owned()),
- "\\b" => Cow::Owned("\u{0008}".to_owned()),
- "\\f" => Cow::Owned("\u{000C}".to_owned()),
- "\\n" => Cow::Owned("\n".to_owned()),
- "\\r" => Cow::Owned("\r".to_owned()),
- "\\t" => Cow::Owned("\t".to_owned()),
- "\\v" => Cow::Owned("\u{000B}".to_owned()),
- s => Cow::Borrowed(s),
+ "\\\"" => "\"",
+ "\\\\" => "\\",
+ "\\a" => "\u{0007}",
+ "\\b" => "\u{0008}",
+ "\\f" => "\u{000C}",
+ "\\n" => "\n",
+ "\\r" => "\r",
+ "\\t" => "\t",
+ "\\v" => "\u{000B}",
+ s => s,
})
}
#[alias(import_type)]
fn missing<E: Clone>(
- _input: ParseInput<Rule>,
+ _input: ParseInput,
) -> ParseResult<ImportLocation<Expr<E>>> {
Ok(ImportLocation::Missing)
}
- fn hash(input: ParseInput<Rule>) -> ParseResult<Hash> {
+ fn hash(input: ParseInput) -> ParseResult<Hash> {
let s = input.as_str().trim();
let protocol = &s[..6];
let hash = &s[7..];
@@ -705,29 +575,29 @@ impl Parsers {
}
fn import_hashed<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<crate::Import<Expr<E>>> {
use crate::Import;
let mode = ImportMode::Code;
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.into_children();
[import_type(location)] => Import { mode, location, hash: None },
[import_type(location), hash(h)] => Import { mode, location, hash: Some(h) },
))
}
#[alias(import_mode)]
- fn Text(_input: ParseInput<Rule>) -> ParseResult<ImportMode> {
+ fn Text(_input: ParseInput) -> ParseResult<ImportMode> {
Ok(ImportMode::RawText)
}
#[alias(import_mode)]
- fn Location(_input: ParseInput<Rule>) -> ParseResult<ImportMode> {
+ fn Location(_input: ParseInput) -> ParseResult<ImportMode> {
Ok(ImportMode::Location)
}
#[alias(expression)]
- fn import<E: Clone>(input: ParseInput<Rule>) -> ParseResult<Expr<E>> {
+ fn import<E: Clone>(input: ParseInput) -> ParseResult<Expr<E>> {
use crate::Import;
- let import = parse_children!(input;
+ let import = match_nodes!(input.children();
[import_hashed(imp)] => {
Import { mode: ImportMode::Code, ..imp }
},
@@ -735,97 +605,99 @@ impl Parsers {
Import { mode, ..imp }
},
);
- Ok(spanned(input.as_span(), Import(import)))
+ Ok(spanned(input, Import(import)))
}
- fn lambda(_input: ParseInput<Rule>) -> ParseResult<()> {
+ fn lambda(_input: ParseInput) -> ParseResult<()> {
Ok(())
}
- fn forall(_input: ParseInput<Rule>) -> ParseResult<()> {
+ fn forall(_input: ParseInput) -> ParseResult<()> {
Ok(())
}
- fn arrow(_input: ParseInput<Rule>) -> ParseResult<()> {
+ fn arrow(_input: ParseInput) -> ParseResult<()> {
Ok(())
}
- fn merge(_input: ParseInput<Rule>) -> ParseResult<()> {
+ fn merge(_input: ParseInput) -> ParseResult<()> {
Ok(())
}
- fn assert(_input: ParseInput<Rule>) -> ParseResult<()> {
+ fn assert(_input: ParseInput) -> ParseResult<()> {
Ok(())
}
- fn if_(_input: ParseInput<Rule>) -> ParseResult<()> {
+ fn if_(_input: ParseInput) -> ParseResult<()> {
Ok(())
}
- fn toMap(_input: ParseInput<Rule>) -> ParseResult<()> {
+ fn toMap(_input: ParseInput) -> ParseResult<()> {
Ok(())
}
#[alias(expression)]
- fn empty_list_literal<E: Clone>(
- input: ParseInput<Rule>,
- ) -> ParseResult<Expr<E>> {
- Ok(parse_children!(input;
- [expression(e)] => spanned(input.as_span(), EmptyListLit(e)),
+ fn empty_list_literal<E: Clone>(input: ParseInput) -> ParseResult<Expr<E>> {
+ Ok(match_nodes!(input.children();
+ [expression(e)] => spanned(input, EmptyListLit(e)),
))
}
- fn expression<E: Clone>(input: ParseInput<Rule>) -> ParseResult<Expr<E>> {
- let span = input.as_span();
- Ok(parse_children!(input;
+ fn expression<E: Clone>(input: ParseInput) -> ParseResult<Expr<E>> {
+ Ok(match_nodes!(input.children();
[lambda(()), label(l), expression(typ),
arrow(()), expression(body)] => {
- spanned(span, Lam(l, typ, body))
+ spanned(input, Lam(l, typ, body))
},
[if_(()), expression(cond), expression(left),
expression(right)] => {
- spanned(span, BoolIf(cond, left, right))
+ spanned(input, BoolIf(cond, left, right))
},
[let_binding(bindings).., expression(final_expr)] => {
bindings.rev().fold(
final_expr,
- |acc, x| unspanned(Let(x.0, x.1, x.2, acc))
+ |acc, x| {
+ spanned_union(
+ acc.span().unwrap(),
+ x.3,
+ Let(x.0, x.1, x.2, acc)
+ )
+ }
)
},
[forall(()), label(l), expression(typ),
arrow(()), expression(body)] => {
- spanned(span, Pi(l, typ, body))
+ spanned(input, Pi(l, typ, body))
},
[expression(typ), arrow(()), expression(body)] => {
- spanned(span, Pi("_".into(), typ, body))
+ spanned(input, Pi("_".into(), typ, body))
},
[merge(()), expression(x), expression(y), expression(z)] => {
- spanned(span, Merge(x, y, Some(z)))
+ spanned(input, Merge(x, y, Some(z)))
},
[assert(()), expression(x)] => {
- spanned(span, Assert(x))
+ spanned(input, Assert(x))
},
[toMap(()), expression(x), expression(y)] => {
- spanned(span, ToMap(x, Some(y)))
+ spanned(input, ToMap(x, Some(y)))
},
[expression(e), expression(annot)] => {
- spanned(span, Annot(e, annot))
+ spanned(input, Annot(e, annot))
},
[expression(e)] => e,
))
}
fn let_binding<E: Clone>(
- input: ParseInput<Rule>,
- ) -> ParseResult<(Label, Option<Expr<E>>, Expr<E>)> {
- Ok(parse_children!(input;
+ input: ParseInput,
+ ) -> ParseResult<(Label, Option<Expr<E>>, Expr<E>, Span)> {
+ Ok(match_nodes!(input.children();
[label(name), expression(annot), expression(expr)] =>
- (name, Some(annot), expr),
+ (name, Some(annot), expr, input_to_span(input)),
[label(name), expression(expr)] =>
- (name, None, expr),
+ (name, None, expr, input_to_span(input)),
))
}
#[alias(expression, shortcut = true)]
#[prec_climb(expression, PRECCLIMBER)]
fn operator_expression<E: Clone>(
- input: ParseInput<Rule>,
l: Expr<E>,
- op: Pair<Rule>,
+ op: ParseInput,
r: Expr<E>,
) -> ParseResult<Expr<E>> {
use crate::BinOp::*;
@@ -844,42 +716,54 @@ impl Parsers {
bool_eq => BoolEQ,
bool_ne => BoolNE,
equivalent => Equivalence,
- r => Err(input.error(format!("Rule {:?} isn't an operator", r)))?,
+ r => Err(op.error(format!("Rule {:?} isn't an operator", r)))?,
};
- Ok(unspanned(BinOp(op, l, r)))
+ Ok(spanned_union(
+ l.span().unwrap(),
+ r.span().unwrap(),
+ BinOp(op, l, r),
+ ))
}
- fn Some_(_input: ParseInput<Rule>) -> ParseResult<()> {
+ fn Some_(_input: ParseInput) -> ParseResult<()> {
Ok(())
}
#[alias(expression, shortcut = true)]
fn application_expression<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<Expr<E>> {
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.children();
[expression(e)] => e,
[expression(first), expression(rest)..] => {
- rest.fold(first, |acc, e| unspanned(App(acc, e)))
+ rest.fold(
+ first,
+ |acc, e| {
+ spanned_union(
+ acc.span().unwrap(),
+ e.span().unwrap(),
+ App(acc, e)
+ )
+ }
+ )
},
))
}
#[alias(expression, shortcut = true)]
fn first_application_expression<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<Expr<E>> {
- let span = input.as_span();
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.children();
[Some_(()), expression(e)] => {
- spanned(span, SomeLit(e))
+ spanned(input, SomeLit(e))
},
[merge(()), expression(x), expression(y)] => {
- spanned(span, Merge(x, y, None))
+ spanned(input, Merge(x, y, None))
},
[toMap(()), expression(x)] => {
- spanned(span, ToMap(x, None))
+ spanned(input, ToMap(x, None))
},
[expression(e)] => e,
))
@@ -887,69 +771,75 @@ impl Parsers {
#[alias(expression, shortcut = true)]
fn selector_expression<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<Expr<E>> {
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.children();
[expression(e)] => e,
[expression(first), selector(rest)..] => {
- rest.fold(first, |acc, e| unspanned(match e {
- Either::Left(l) => Field(acc, l),
- Either::Right(ls) => Projection(acc, ls),
- }))
+ rest.fold(
+ first,
+ |acc, e| {
+ spanned_union(
+ acc.span().unwrap(),
+ e.1,
+ match e.0 {
+ Either::Left(l) => Field(acc, l),
+ Either::Right(ls) => Projection(acc, ls),
+ }
+ )
+ }
+ )
},
))
}
fn selector(
- input: ParseInput<Rule>,
- ) -> ParseResult<Either<Label, DupTreeSet<Label>>> {
- Ok(parse_children!(input;
- [label(l)] => Either::Left(l),
- [labels(ls)] => Either::Right(ls),
+ input: ParseInput,
+ ) -> ParseResult<(Either<Label, DupTreeSet<Label>>, Span)> {
+ Ok(match_nodes!(input.children();
+ [label(l)] => (Either::Left(l), input_to_span(input)),
+ [labels(ls)] => (Either::Right(ls), input_to_span(input)),
// [expression(_e)] => unimplemented!("selection by expression"), // TODO
))
}
- fn labels(input: ParseInput<Rule>) -> ParseResult<DupTreeSet<Label>> {
- Ok(parse_children!(input;
+ fn labels(input: ParseInput) -> ParseResult<DupTreeSet<Label>> {
+ Ok(match_nodes!(input.into_children();
[label(ls)..] => ls.collect(),
))
}
#[alias(expression, shortcut = true)]
fn primitive_expression<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<Expr<E>> {
- let span = input.as_span();
- Ok(parse_children!(input;
- [double_literal(n)] => spanned(span, DoubleLit(n)),
- [natural_literal(n)] => spanned(span, NaturalLit(n)),
- [integer_literal(n)] => spanned(span, IntegerLit(n)),
- [double_quote_literal(s)] => spanned(span, TextLit(s)),
- [single_quote_literal(s)] => spanned(span, TextLit(s)),
+ Ok(match_nodes!(input.children();
+ [double_literal(n)] => spanned(input, DoubleLit(n)),
+ [natural_literal(n)] => spanned(input, NaturalLit(n)),
+ [integer_literal(n)] => spanned(input, IntegerLit(n)),
+ [double_quote_literal(s)] => spanned(input, TextLit(s)),
+ [single_quote_literal(s)] => spanned(input, TextLit(s)),
[expression(e)] => e,
))
}
#[alias(expression)]
fn empty_record_literal<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<Expr<E>> {
- Ok(spanned(input.as_span(), RecordLit(Default::default())))
+ Ok(spanned(input, RecordLit(Default::default())))
}
#[alias(expression)]
- fn empty_record_type<E: Clone>(
- input: ParseInput<Rule>,
- ) -> ParseResult<Expr<E>> {
- Ok(spanned(input.as_span(), RecordType(Default::default())))
+ fn empty_record_type<E: Clone>(input: ParseInput) -> ParseResult<Expr<E>> {
+ Ok(spanned(input, RecordType(Default::default())))
}
#[alias(expression)]
fn non_empty_record_type_or_literal<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<Expr<E>> {
- let e = parse_children!(input;
+ let e = match_nodes!(input.children();
[label(first_label), non_empty_record_type(rest)] => {
let (first_expr, mut map) = rest;
map.insert(first_label, first_expr);
@@ -961,13 +851,13 @@ impl Parsers {
RecordLit(map)
},
);
- Ok(spanned(input.as_span(), e))
+ Ok(spanned(input, e))
}
fn non_empty_record_type<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<(Expr<E>, DupTreeMap<Label, Expr<E>>)> {
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.into_children();
[expression(expr), record_type_entry(entries)..] => {
(expr, entries.collect())
}
@@ -975,17 +865,17 @@ impl Parsers {
}
fn record_type_entry<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<(Label, Expr<E>)> {
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.into_children();
[label(name), expression(expr)] => (name, expr)
))
}
fn non_empty_record_literal<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<(Expr<E>, DupTreeMap<Label, Expr<E>>)> {
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.into_children();
[expression(expr), record_literal_entry(entries)..] => {
(expr, entries.collect())
}
@@ -993,30 +883,30 @@ impl Parsers {
}
fn record_literal_entry<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<(Label, Expr<E>)> {
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.into_children();
[label(name), expression(expr)] => (name, expr)
))
}
#[alias(expression)]
- fn union_type<E: Clone>(input: ParseInput<Rule>) -> ParseResult<Expr<E>> {
- let map = parse_children!(input;
+ fn union_type<E: Clone>(input: ParseInput) -> ParseResult<Expr<E>> {
+ let map = match_nodes!(input.children();
[empty_union_type(_)] => Default::default(),
[union_type_entry(entries)..] => entries.collect(),
);
- Ok(spanned(input.as_span(), UnionType(map)))
+ Ok(spanned(input, UnionType(map)))
}
- fn empty_union_type(_input: ParseInput<Rule>) -> ParseResult<()> {
+ fn empty_union_type(_input: ParseInput) -> ParseResult<()> {
Ok(())
}
fn union_type_entry<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<(Label, Option<Expr<E>>)> {
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.children();
[label(name), expression(expr)] => (name, Some(expr)),
[label(name)] => (name, None),
))
@@ -1024,26 +914,32 @@ impl Parsers {
#[alias(expression)]
fn non_empty_list_literal<E: Clone>(
- input: ParseInput<Rule>,
+ input: ParseInput,
) -> ParseResult<Expr<E>> {
- Ok(parse_children!(input;
+ Ok(match_nodes!(input.children();
[expression(items)..] => spanned(
- input.as_span(),
+ input,
NEListLit(items.collect())
)
))
}
- fn final_expression<E: Clone>(
- input: ParseInput<Rule>,
- ) -> ParseResult<Expr<E>> {
- Ok(parse_children!(input;
+ #[alias(expression)]
+ fn final_expression<E: Clone>(input: ParseInput) -> ParseResult<Expr<E>> {
+ Ok(match_nodes!(input.into_children();
[expression(e), EOI(_)] => e
))
}
}
-pub fn parse_expr<E: Clone>(s: &str) -> ParseResult<Expr<E>> {
- let input = ParseInput::parse(s, Rule::final_expression)?;
- Parsers::final_expression(input)
+pub fn parse_expr<E: Clone>(input_str: &str) -> ParseResult<Expr<E>> {
+ let rc_input_str = input_str.to_string().into();
+ let inputs = DhallParser::parse_with_userdata(
+ Rule::final_expression,
+ input_str,
+ rc_input_str,
+ )?;
+ Ok(match_nodes!(<DhallParser>; inputs;
+ [expression(e)] => e,
+ ))
}