author     Nadrieril  2019-09-01 13:51:12 +0200
committer  Nadrieril  2019-09-01 22:31:24 +0200
commit     be51899f7d5f1f9ede689ca0a9707a0aca3d31c4 (patch)
tree       bebbed4e36f938fb52b23b5d2f03165b685c86bf
parent     737abd9be6d35bbce784d9cf249edf7ad14677d6 (diff)
Rewrite the make_parser macro as a proc_macro
-rw-r--r--  Cargo.lock                       |  42
-rw-r--r--  dhall_proc_macros/Cargo.toml     |   6
-rw-r--r--  dhall_proc_macros/src/lib.rs     |   9
-rw-r--r--  dhall_proc_macros/src/parser.rs  | 398
-rw-r--r--  dhall_syntax/Cargo.toml          |   1
-rw-r--r--  dhall_syntax/src/parser.rs       | 209
6 files changed, 456 insertions(+), 209 deletions(-)
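
For reference, the new proc macro accepts the same rule-definition DSL that the old macro_rules! version did. A minimal sketch of an invocation is shown below; the rule names are taken from the grammar further down in this diff, but the simple_label body is illustrative (it is not shown on this page):

dhall_proc_macros::make_parser! {
    // A rule with no body parses to ().
    rule!(EOI<()>);

    // A rule that captures the matched string slice (illustrative body).
    rule!(simple_label<Label>;
        captured_str!(s) => Label::from(s)
    );

    // A rule that dispatches on the rules of the children of the pest Pair.
    rule!(double_literal<core::Double>; children!(
        [numeric_double_literal(n)] => n,
        [NaN(_)] => std::f64::NAN.into(),
    ));
}
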
diff --git a/Cargo.lock b/Cargo.lock
index 9bd7adc..360168a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -95,9 +95,9 @@ name = "dhall_proc_macros"
version = "0.1.0"
dependencies = [
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.31 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -105,6 +105,7 @@ name = "dhall_syntax"
version = "0.1.0"
dependencies = [
"dhall_generated_parser 0.1.0",
+ "dhall_proc_macros 0.1.0",
"either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
"hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -270,6 +271,14 @@ dependencies = [
]
[[package]]
+name = "proc-macro2"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "quote"
version = "0.6.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -278,6 +287,14 @@ dependencies = [
]
[[package]]
+name = "quote"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "same-file"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -345,6 +362,16 @@ dependencies = [
]
[[package]]
+name = "syn"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "take_mut"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -387,6 +414,11 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
+name = "unicode-xid"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
name = "version_check"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -468,13 +500,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum pretty 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f60c0d9f6fc88ecdd245d90c1920ff76a430ab34303fc778d33b1d0a4c3bf6d3"
"checksum pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427"
"checksum proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)" = "4d317f9caece796be1980837fd5cb3dfec5613ebdb04ad0956deea83ce168915"
+"checksum proc-macro2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "175a40b9cf564ce9bf050654633dbf339978706b8ead1a907bb970b63185dd95"
"checksum quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "faf4799c5d274f3868a4aae320a0a182cbd2baee377b378f080e16a23e9d80db"
+"checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
"checksum same-file 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8f20c4be53a8a1ff4c1f1b2bd14570d2f634628709752f0702ecdd2b3f9a5267"
"checksum serde 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)" = "aa5f7c20820475babd2c077c3ab5f8c77a31c15e16ea38687b4c02d3e48680f4"
"checksum serde_cbor 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "45cd6d95391b16cd57e88b68be41d504183b7faae22030c0cc3b3f73dd57b2fd"
"checksum serde_derive 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)" = "58fc82bec244f168b23d1963b45c8bf5726e9a15a9d146a067f9081aeed2de79"
"checksum sha-1 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "51b9d1f3b5de8a167ab06834a7c883bd197f2191e1dda1a22d9ccfeedbf9aded"
"checksum syn 0.15.31 (registry+https://github.com/rust-lang/crates.io-index)" = "d2b4cfac95805274c6afdb12d8f770fa2d27c045953e7b630a81801953699a9a"
+"checksum syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"
"checksum take_mut 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60"
"checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1"
"checksum term-painter 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "dcaa948f0e3e38470cd8dc8dcfe561a75c9e43f28075bb183845be2b9b3c08cf"
@@ -482,6 +517,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169"
"checksum ucd-trie 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "71a9c5b1fe77426cf144cc30e49e955270f5086e31a6441dfa8b32efc09b9d77"
"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
+"checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
"checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"
"checksum walkdir 2.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "9d9d7ed3431229a144296213105a390676cc49c9b6a72bd19f3176c98e129fa1"
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
diff --git a/dhall_proc_macros/Cargo.toml b/dhall_proc_macros/Cargo.toml
index df1eda8..b641a39 100644
--- a/dhall_proc_macros/Cargo.toml
+++ b/dhall_proc_macros/Cargo.toml
@@ -11,6 +11,6 @@ doctest = false
[dependencies]
itertools = "0.8.0"
-quote = "0.6.11"
-proc-macro2 = "0.4.27"
-syn = "0.15.29"
+quote = "1.0.2"
+proc-macro2 = "1.0.2"
+syn = { version = "1.0.5", features = ["full", "extra-traits"] }
diff --git a/dhall_proc_macros/src/lib.rs b/dhall_proc_macros/src/lib.rs
index 5304429..37c9985 100644
--- a/dhall_proc_macros/src/lib.rs
+++ b/dhall_proc_macros/src/lib.rs
@@ -6,6 +6,7 @@
extern crate proc_macro;
mod derive;
+mod parser;
use proc_macro::TokenStream;
@@ -13,3 +14,11 @@ use proc_macro::TokenStream;
pub fn derive_static_type(input: TokenStream) -> TokenStream {
derive::derive_static_type(input)
}
+
+#[proc_macro]
+pub fn make_parser(input: TokenStream) -> TokenStream {
+ TokenStream::from(match parser::make_parser(input) {
+ Ok(tokens) => tokens,
+ Err(err) => err.to_compile_error(),
+ })
+}
diff --git a/dhall_proc_macros/src/parser.rs b/dhall_proc_macros/src/parser.rs
new file mode 100644
index 0000000..bb4e894
--- /dev/null
+++ b/dhall_proc_macros/src/parser.rs
@@ -0,0 +1,398 @@
+use proc_macro2::{Span, TokenStream};
+use quote::quote;
+use syn::parse::{Parse, ParseStream, Result};
+use syn::punctuated::Punctuated;
+use syn::{bracketed, parenthesized, token, Expr, Ident, Pat, Token, Type};
+
+mod rule_kw {
+ syn::custom_keyword!(rule);
+ syn::custom_keyword!(captured_str);
+ syn::custom_keyword!(children);
+ syn::custom_keyword!(prec_climb);
+}
+
+#[derive(Debug, Clone)]
+struct Rules(Vec<Rule>);
+
+#[derive(Debug, Clone)]
+struct Rule {
+ rule_token: rule_kw::rule,
+ bang_token: Token![!],
+ paren_token: token::Paren,
+ name: Ident,
+ lt_token: token::Lt,
+ output_type: Type,
+ gt_token: token::Gt,
+ contents: RuleContents,
+ semi_token: Token![;],
+}
+
+#[derive(Debug, Clone)]
+enum RuleContents {
+ Empty,
+ CapturedString {
+ span: Option<Ident>,
+ captured_str_token: rule_kw::captured_str,
+ bang_token: Token![!],
+ paren_token: token::Paren,
+ pattern: Pat,
+ fat_arrow_token: Token![=>],
+ body: Expr,
+ },
+ Children {
+ span: Option<Ident>,
+ children_token: rule_kw::children,
+ bang_token: Token![!],
+ paren_token: token::Paren,
+ branches: Punctuated<ChildrenBranch, Token![,]>,
+ },
+ PrecClimb {
+ span: Option<Ident>,
+ prec_climb_token: rule_kw::prec_climb,
+ bang_token: Token![!],
+ paren_token: token::Paren,
+ child_rule: Ident,
+ comma_token: Token![,],
+ climber: Expr,
+ comma_token2: Token![,],
+ pattern: Pat,
+ fat_arrow_token: Token![=>],
+ body: Expr,
+ },
+}
+
+#[derive(Debug, Clone)]
+struct ChildrenBranch {
+ bracket_token: token::Bracket,
+ pattern_unparsed: TokenStream,
+ pattern: Punctuated<ChildrenBranchPatternItem, Token![,]>,
+ fat_arrow_token: Token![=>],
+ body: Expr,
+}
+
+#[derive(Debug, Clone)]
+enum ChildrenBranchPatternItem {
+ Single {
+ rule_name: Ident,
+ paren_token: token::Paren,
+ binder: Pat,
+ },
+ Multiple {
+ rule_name: Ident,
+ paren_token: token::Paren,
+ binder: Ident,
+ slice_token: Token![..],
+ },
+}
+
+impl Parse for Rules {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let mut rules = Vec::new();
+ while !input.is_empty() {
+ rules.push(input.parse()?)
+ }
+ Ok(Rules(rules))
+ }
+}
+
+impl Parse for Rule {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let contents;
+ Ok(Rule {
+ rule_token: input.parse()?,
+ bang_token: input.parse()?,
+ paren_token: parenthesized!(contents in input),
+ name: contents.parse()?,
+ lt_token: contents.parse()?,
+ output_type: contents.parse()?,
+ gt_token: contents.parse()?,
+ contents: contents.parse()?,
+ semi_token: input.parse()?,
+ })
+ }
+}
+
+impl Parse for RuleContents {
+ fn parse(input: ParseStream) -> Result<Self> {
+ if input.is_empty() {
+ return Ok(RuleContents::Empty);
+ }
+ let _: Token![;] = input.parse()?;
+ let span = if input.peek(Ident) && input.peek2(Token![;]) {
+ let span: Ident = input.parse()?;
+ let _: Token![;] = input.parse()?;
+ Some(span)
+ } else {
+ None
+ };
+
+ let lookahead = input.lookahead1();
+ if lookahead.peek(rule_kw::captured_str) {
+ let contents;
+ Ok(RuleContents::CapturedString {
+ span,
+ captured_str_token: input.parse()?,
+ bang_token: input.parse()?,
+ paren_token: parenthesized!(contents in input),
+ pattern: contents.parse()?,
+ fat_arrow_token: input.parse()?,
+ body: input.parse()?,
+ })
+ } else if lookahead.peek(rule_kw::children) {
+ let contents;
+ Ok(RuleContents::Children {
+ span,
+ children_token: input.parse()?,
+ bang_token: input.parse()?,
+ paren_token: parenthesized!(contents in input),
+ branches: Punctuated::parse_terminated(&contents)?,
+ })
+ } else if lookahead.peek(rule_kw::prec_climb) {
+ let contents;
+ Ok(RuleContents::PrecClimb {
+ span,
+ prec_climb_token: input.parse()?,
+ bang_token: input.parse()?,
+ paren_token: parenthesized!(contents in input),
+ child_rule: contents.parse()?,
+ comma_token: contents.parse()?,
+ climber: contents.parse()?,
+ comma_token2: contents.parse()?,
+ pattern: contents.parse()?,
+ fat_arrow_token: contents.parse()?,
+ body: contents.parse()?,
+ })
+ } else {
+ Err(lookahead.error())
+ }
+ }
+}
+
+impl Parse for ChildrenBranch {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let contents;
+ Ok(ChildrenBranch {
+ bracket_token: bracketed!(contents in input),
+ pattern_unparsed: contents.fork().parse()?,
+ pattern: Punctuated::parse_terminated(&contents)?,
+ fat_arrow_token: input.parse()?,
+ body: input.parse()?,
+ })
+ }
+}
+
+impl Parse for ChildrenBranchPatternItem {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let rule_name = input.parse()?;
+ let contents;
+ let paren_token = parenthesized!(contents in input);
+ if input.peek(Token![..]) {
+ Ok(ChildrenBranchPatternItem::Multiple {
+ rule_name,
+ paren_token,
+ binder: contents.parse()?,
+ slice_token: input.parse()?,
+ })
+ } else if input.is_empty() || input.peek(Token![,]) {
+ Ok(ChildrenBranchPatternItem::Single {
+ rule_name,
+ paren_token,
+ binder: contents.parse()?,
+ })
+ } else {
+ Err(input.error("expected `..` or nothing"))
+ }
+ }
+}
+
+fn make_construct_precclimbers(rules: &Rules) -> Result<TokenStream> {
+ let mut entries: Vec<TokenStream> = Vec::new();
+ for rule in &rules.0 {
+ if let RuleContents::PrecClimb { climber, .. } = &rule.contents {
+ let name = &rule.name;
+ entries.push(quote!(
+ map.insert(Rule::#name, #climber);
+ ))
+ }
+ }
+
+ Ok(quote!(
+ fn construct_precclimbers() -> HashMap<Rule, PrecClimber<Rule>> {
+ let mut map = HashMap::new();
+ #(#entries)*
+ map
+ }
+ ))
+}
+
+fn make_entrypoints(rules: &Rules) -> Result<TokenStream> {
+ let mut entries: Vec<TokenStream> = Vec::new();
+ for rule in &rules.0 {
+ let name = &rule.name;
+ let output_type = &rule.output_type;
+ entries.push(quote!(
+ #[allow(non_snake_case, dead_code)]
+ fn #name<'a>(
+ input: Rc<str>,
+ pair: Pair<'a, Rule>,
+ ) -> ParseResult<#output_type> {
+ let climbers = construct_precclimbers();
+ Parsers::#name((&climbers, input), pair)
+ }
+ ))
+ }
+
+ Ok(quote!(
+ struct EntryPoint;
+ impl EntryPoint {
+ #(#entries)*
+ }
+ ))
+}
+
+fn make_parser_branch(branch: &ChildrenBranch) -> TokenStream {
+ let ChildrenBranch {
+ pattern,
+ body,
+ pattern_unparsed,
+ ..
+ } = branch;
+ let variable_pattern = Ident::new("variable_pattern", Span::call_site());
+ let match_pat = pattern.iter().map(|item| match item {
+ ChildrenBranchPatternItem::Single { rule_name, .. } => {
+ quote!(Rule::#rule_name)
+ }
+ ChildrenBranchPatternItem::Multiple { .. } => {
+ quote!(#variable_pattern..)
+ }
+ });
+ let match_filter = pattern.iter().map(|item| match item {
+ ChildrenBranchPatternItem::Single { .. } => quote!(true &&),
+ ChildrenBranchPatternItem::Multiple { rule_name, .. } => {
+ quote!(#variable_pattern.iter().all(|r| r == &Rule::#rule_name) &&)
+ }
+ });
+ quote!(
+ [#(#match_pat),*] if #(#match_filter)* true => {
+ parse_children!((climbers, input.clone()), iter;
+ [#pattern_unparsed] => {
+ #[allow(unused_variables)]
+ let res: Result<_, String> = try { #body };
+ res.map_err(|msg|
+ custom_parse_error(&pair, msg)
+ )
+ }
+ )
+ }
+ )
+}
+
+fn make_parser_expr(rule: &Rule) -> Result<TokenStream> {
+ let name = &rule.name;
+ let expr = match &rule.contents {
+ RuleContents::Empty => quote!(Ok(())),
+ RuleContents::CapturedString { pattern, body, .. } => quote!(
+ let #pattern = pair.as_str();
+ let res: Result<_, String> = try { #body };
+ res.map_err(|msg| custom_parse_error(&pair, msg))
+ ),
+ RuleContents::PrecClimb {
+ child_rule,
+ pattern,
+ body,
+ ..
+ } => quote!(
+ let climber = climbers.get(&Rule::#name).unwrap();
+ climber.climb(
+ pair.clone().into_inner(),
+ |p| Parsers::#child_rule((climbers, input.clone()), p),
+ |l, op, r| {
+ let #pattern = (l?, op, r?);
+ let res: Result<_, String> = try { #body };
+ res.map_err(|msg| custom_parse_error(&pair, msg))
+ },
+ )
+ ),
+ RuleContents::Children { branches, .. } => {
+ let branches = branches.iter().map(make_parser_branch);
+ quote!(
+ let children_rules: Vec<Rule> = pair
+ .clone()
+ .into_inner()
+ .map(|p| p.as_rule())
+ .collect();
+
+ #[allow(unused_mut)]
+ let mut iter = pair.clone().into_inner();
+
+ #[allow(unreachable_code)]
+ match children_rules.as_slice() {
+ #(#branches,)*
+ [..] => Err(custom_parse_error(
+ &pair,
+ format!("Unexpected children: {:?}", children_rules)
+ )),
+ }
+ )
+ }
+ };
+ Ok(expr)
+}
+
+fn make_parsers(rules: &Rules) -> Result<TokenStream> {
+ let mut entries: Vec<TokenStream> = Vec::new();
+ for rule in &rules.0 {
+ let span_def = match &rule.contents {
+ RuleContents::CapturedString {
+ span: Some(span), ..
+ }
+ | RuleContents::Children {
+ span: Some(span), ..
+ }
+ | RuleContents::PrecClimb {
+ span: Some(span), ..
+ } => Some(quote!(
+ let #span = Span::make(input.clone(), pair.as_span());
+ )),
+ _ => None,
+ };
+
+ let name = &rule.name;
+ let output_type = &rule.output_type;
+ let expr = make_parser_expr(rule)?;
+
+ entries.push(quote!(
+ #[allow(non_snake_case, dead_code)]
+ fn #name<'a>(
+ (climbers, input): (&HashMap<Rule, PrecClimber<Rule>>, Rc<str>),
+ pair: Pair<'a, Rule>,
+ ) -> ParseResult<#output_type> {
+ #span_def
+ #expr
+ }
+ ))
+ }
+
+ Ok(quote!(
+ struct Parsers;
+ impl Parsers {
+ #(#entries)*
+ }
+ ))
+}
+
+pub fn make_parser(
+ input: proc_macro::TokenStream,
+) -> Result<proc_macro2::TokenStream> {
+ let rules: Rules = syn::parse_macro_input::parse(input.clone())?;
+
+ let construct_precclimbers = make_construct_precclimbers(&rules)?;
+ let entrypoints = make_entrypoints(&rules)?;
+ let parsers = make_parsers(&rules)?;
+
+ Ok(quote!(
+ #construct_precclimbers
+ #entrypoints
+ #parsers
+ ))
+}
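
The new parser.rs above follows the usual syn 1.0 pattern for a function-like proc macro: custom keywords, one Parse impl per syntactic form, and quote! to emit the generated items. As a self-contained illustration of that pattern only (the mini_rule macro and its single-rule syntax are hypothetical and not part of this commit), a stripped-down version might look like this:

// Hypothetical `mini_rule!` proc macro: a stripped-down sketch of the
// syn-1.0 pattern used in dhall_proc_macros/src/parser.rs above.
// Assumes a proc-macro crate depending on syn 1 (default features),
// quote 1 and proc-macro2 1.
extern crate proc_macro;

use proc_macro::TokenStream;
use quote::quote;
use syn::parse::{Parse, ParseStream, Result};
use syn::{parenthesized, parse_macro_input, token, Ident, Token, Type};

mod kw {
    // `rule` is not a Rust keyword, so declare it as a custom one.
    syn::custom_keyword!(rule);
}

// Parses `rule!(name<OutputType>);`
struct MiniRule {
    _rule_token: kw::rule,
    _bang_token: Token![!],
    _paren_token: token::Paren,
    name: Ident,
    _lt_token: Token![<],
    output_type: Type,
    _gt_token: Token![>],
    _semi_token: Token![;],
}

impl Parse for MiniRule {
    fn parse(input: ParseStream) -> Result<Self> {
        let contents;
        Ok(MiniRule {
            _rule_token: input.parse()?,
            _bang_token: input.parse()?,
            _paren_token: parenthesized!(contents in input),
            name: contents.parse()?,
            _lt_token: contents.parse()?,
            output_type: contents.parse()?,
            _gt_token: contents.parse()?,
            _semi_token: input.parse()?,
        })
    }
}

#[proc_macro]
pub fn mini_rule(input: TokenStream) -> TokenStream {
    let MiniRule { name, output_type, .. } =
        parse_macro_input!(input as MiniRule);
    // Emit a stub function for the rule; the real macro instead generates
    // the `Parsers`, `EntryPoint` and `construct_precclimbers` items above.
    TokenStream::from(quote! {
        #[allow(dead_code)]
        fn #name() -> Option<#output_type> {
            None
        }
    })
}

The real macro extends this skeleton with the captured_str!/children!/prec_climb! rule bodies parsed by RuleContents, and emits one parser function per rule as shown in make_parsers.
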
diff --git a/dhall_syntax/Cargo.toml b/dhall_syntax/Cargo.toml
index 1da10c7..62ecced 100644
--- a/dhall_syntax/Cargo.toml
+++ b/dhall_syntax/Cargo.toml
@@ -16,3 +16,4 @@ either = "1.5.2"
take_mut = "0.2.2"
hex = "0.3.2"
dhall_generated_parser = { path = "../dhall_generated_parser" }
+dhall_proc_macros = { path = "../dhall_proc_macros" }
diff --git a/dhall_syntax/src/parser.rs b/dhall_syntax/src/parser.rs
index 53fd68a..4fd6f57 100644
--- a/dhall_syntax/src/parser.rs
+++ b/dhall_syntax/src/parser.rs
@@ -189,203 +189,6 @@ macro_rules! parse_children {
};
}
-macro_rules! make_parser {
- (@children_pattern,
- $varpat:ident,
- ($($acc:tt)*),
- [$variant:ident ($x:pat), $($rest:tt)*]
- ) => (
- make_parser!(@children_pattern,
- $varpat,
- ($($acc)* , Rule::$variant),
- [$($rest)*]
- )
- );
- (@children_pattern,
- $varpat:ident,
- ($($acc:tt)*),
- [$variant:ident ($x:ident).., $($rest:tt)*]
- ) => (
- make_parser!(@children_pattern,
- $varpat,
- ($($acc)* , $varpat..),
- [$($rest)*]
- )
- );
- (@children_pattern,
- $varpat:ident,
- (, $($acc:tt)*), [$(,)*]
- ) => ([$($acc)*]);
- (@children_pattern,
- $varpat:ident,
- ($($acc:tt)*), [$(,)*]
- ) => ([$($acc)*]);
-
- (@children_filter,
- $varpat:ident,
- [$variant:ident ($x:pat), $($rest:tt)*]
- ) => (
- make_parser!(@children_filter, $varpat, [$($rest)*])
- );
- (@children_filter,
- $varpat:ident,
- [$variant:ident ($x:ident).., $($rest:tt)*]
- ) => (
- $varpat.iter().all(|r| r == &Rule::$variant) &&
- make_parser!(@children_filter, $varpat, [$($rest)*])
- );
- (@children_filter, $varpat:ident, [$(,)*]) => (true);
-
- (@body,
- ($climbers:expr, $input:expr, $pair:expr),
- rule!(
- $name:ident<$o:ty>;
- $span:ident;
- captured_str!($x:pat) => $body:expr
- )
- ) => ({
- let $span = Span::make($input.clone(), $pair.as_span());
- let $x = $pair.as_str();
- let res: Result<_, String> = try { $body };
- res.map_err(|msg| custom_parse_error(&$pair, msg))
- });
- (@body,
- ($climbers:expr, $input:expr, $pair:expr),
- rule!(
- $name:ident<$o:ty>;
- $span:ident;
- children!( $( [$($args:tt)*] => $body:expr ),* $(,)* )
- )
- ) => ({
- let children_rules: Vec<Rule> = $pair
- .clone()
- .into_inner()
- .map(|p| p.as_rule())
- .collect();
-
- let $span = Span::make($input.clone(), $pair.as_span());
- #[allow(unused_mut)]
- let mut iter = $pair.clone().into_inner();
-
- #[allow(unreachable_code)]
- match children_rules.as_slice() {
- $(
- make_parser!(@children_pattern, x, (), [$($args)*,])
- if make_parser!(@children_filter, x, [$($args)*,])
- => {
- parse_children!(($climbers, $input.clone()), iter;
- [$($args)*] => {
- let res: Result<_, String> = try { $body };
- res.map_err(|msg| custom_parse_error(&$pair, msg))
- }
- )
- }
- ,
- )*
- [..] => Err(custom_parse_error(
- &$pair,
- format!("Unexpected children: {:?}", children_rules)
- )),
- }
- });
- (@body,
- ($climbers:expr, $input:expr, $pair:expr),
- rule!(
- $name:ident<$o:ty>;
- prec_climb!(
- $other_rule:ident,
- $_climber:expr,
- $args:pat => $body:expr $(,)*
- )
- )
- ) => ({
- let climber = $climbers.get(&Rule::$name).unwrap();
- climber.climb(
- $pair.clone().into_inner(),
- |p| Parsers::$other_rule(($climbers, $input.clone()), p),
- |l, op, r| {
- let $args = (l?, op, r?);
- let res: Result<_, String> = try { $body };
- res.map_err(|msg| custom_parse_error(&$pair, msg))
- },
- )
- });
- (@body,
- ($($things:tt)*),
- rule!(
- $name:ident<$o:ty>;
- $($args:tt)*
- )
- ) => ({
- make_parser!(@body,
- ($($things)*),
- rule!(
- $name<$o>;
- _span;
- $($args)*
- )
- )
- });
- (@body,
- ($($things:tt)*),
- rule!($name:ident<$o:ty>)
- ) => ({
- Ok(())
- });
-
- (@construct_climber,
- ($map:expr),
- rule!(
- $name:ident<$o:ty>;
- prec_climb!($other_rule:ident, $climber:expr, $($_rest:tt)* )
- )
- ) => ({
- $map.insert(Rule::$name, $climber)
- });
- (@construct_climber, ($($things:tt)*), $($args:tt)*) => (());
-
- ($( $submac:ident!( $name:ident<$o:ty> $($args:tt)* ); )*) => (
- struct Parsers;
-
- impl Parsers {
- $(
- #[allow(non_snake_case, unused_variables, clippy::let_unit_value)]
- fn $name<'a>(
- (climbers, input): (&HashMap<Rule, PrecClimber<Rule>>, Rc<str>),
- pair: Pair<'a, Rule>,
- ) -> ParseResult<$o> {
- make_parser!(@body, (climbers, input, pair),
- $submac!( $name<$o> $($args)* ))
- }
- )*
- }
-
- fn construct_precclimbers() -> HashMap<Rule, PrecClimber<Rule>> {
- let mut map = HashMap::new();
- $(
- make_parser!(@construct_climber, (map),
- $submac!( $name<$o> $($args)* ));
- )*
- map
- }
-
- struct EntryPoint;
-
- impl EntryPoint {
- $(
- #[allow(non_snake_case, dead_code)]
- fn $name<'a>(
- input: Rc<str>,
- pair: Pair<'a, Rule>,
- ) -> ParseResult<$o> {
- let climbers = construct_precclimbers();
- Parsers::$name((&climbers, input), pair)
- }
- )*
- }
- );
-}
-
// Trim the shared indent off of a vec of lines, as defined by the Dhall semantics of multiline
// literals.
fn trim_indent(lines: &mut Vec<ParsedText>) {
@@ -427,7 +230,7 @@ fn trim_indent(lines: &mut Vec<ParsedText>) {
}
}
-make_parser! {
+dhall_proc_macros::make_parser! {
rule!(EOI<()>);
rule!(simple_label<Label>;
@@ -629,9 +432,9 @@ make_parser! {
rule!(double_literal<core::Double>; children!(
[numeric_double_literal(n)] => n,
- [minus_infinity_literal(n)] => std::f64::NEG_INFINITY.into(),
- [plus_infinity_literal(n)] => std::f64::INFINITY.into(),
- [NaN(n)] => std::f64::NAN.into(),
+ [minus_infinity_literal(_)] => std::f64::NEG_INFINITY.into(),
+ [plus_infinity_literal(_)] => std::f64::INFINITY.into(),
+ [NaN(_)] => std::f64::NAN.into(),
));
rule!(natural_literal<core::Natural>;
@@ -977,7 +780,7 @@ make_parser! {
[import_expression(e)] => e,
));
- rule!(import_expression<ParsedExpr>; span;
+ rule!(import_expression<ParsedExpr>;
children!(
[selector_expression(e)] => e,
[import(e)] => e,
@@ -996,7 +799,7 @@ make_parser! {
rule!(selector<Either<Label, DupTreeSet<Label>>>; children!(
[label(l)] => Either::Left(l),
[labels(ls)] => Either::Right(ls),
- [expression(e)] => unimplemented!("selection by expression"), // TODO
+ [expression(_e)] => unimplemented!("selection by expression"), // TODO
));
rule!(labels<DupTreeSet<Label>>; children!(