From be51899f7d5f1f9ede689ca0a9707a0aca3d31c4 Mon Sep 17 00:00:00 2001 From: Nadrieril Date: Sun, 1 Sep 2019 13:51:12 +0200 Subject: Rewrite the make_parser macro as a proc_macro --- dhall_proc_macros/Cargo.toml | 6 +- dhall_proc_macros/src/lib.rs | 9 + dhall_proc_macros/src/parser.rs | 398 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 410 insertions(+), 3 deletions(-) create mode 100644 dhall_proc_macros/src/parser.rs (limited to 'dhall_proc_macros') diff --git a/dhall_proc_macros/Cargo.toml b/dhall_proc_macros/Cargo.toml index df1eda8..b641a39 100644 --- a/dhall_proc_macros/Cargo.toml +++ b/dhall_proc_macros/Cargo.toml @@ -11,6 +11,6 @@ doctest = false [dependencies] itertools = "0.8.0" -quote = "0.6.11" -proc-macro2 = "0.4.27" -syn = "0.15.29" +quote = "1.0.2" +proc-macro2 = "1.0.2" +syn = { version = "1.0.5", features = ["full", "extra-traits"] } diff --git a/dhall_proc_macros/src/lib.rs b/dhall_proc_macros/src/lib.rs index 5304429..37c9985 100644 --- a/dhall_proc_macros/src/lib.rs +++ b/dhall_proc_macros/src/lib.rs @@ -6,6 +6,7 @@ extern crate proc_macro; mod derive; +mod parser; use proc_macro::TokenStream; @@ -13,3 +14,11 @@ use proc_macro::TokenStream; pub fn derive_static_type(input: TokenStream) -> TokenStream { derive::derive_static_type(input) } + +#[proc_macro] +pub fn make_parser(input: TokenStream) -> TokenStream { + TokenStream::from(match parser::make_parser(input) { + Ok(tokens) => tokens, + Err(err) => err.to_compile_error(), + }) +} diff --git a/dhall_proc_macros/src/parser.rs b/dhall_proc_macros/src/parser.rs new file mode 100644 index 0000000..bb4e894 --- /dev/null +++ b/dhall_proc_macros/src/parser.rs @@ -0,0 +1,398 @@ +use proc_macro2::{Span, TokenStream}; +use quote::quote; +use syn::parse::{Parse, ParseStream, Result}; +use syn::punctuated::Punctuated; +use syn::{bracketed, parenthesized, token, Expr, Ident, Pat, Token, Type}; + +mod rule_kw { + syn::custom_keyword!(rule); + syn::custom_keyword!(captured_str); + syn::custom_keyword!(children); + syn::custom_keyword!(prec_climb); +} + +#[derive(Debug, Clone)] +struct Rules(Vec); + +#[derive(Debug, Clone)] +struct Rule { + rule_token: rule_kw::rule, + bang_token: Token![!], + paren_token: token::Paren, + name: Ident, + lt_token: token::Lt, + output_type: Type, + gt_token: token::Gt, + contents: RuleContents, + semi_token: Token![;], +} + +#[derive(Debug, Clone)] +enum RuleContents { + Empty, + CapturedString { + span: Option, + captured_str_token: rule_kw::captured_str, + bang_token: Token![!], + paren_token: token::Paren, + pattern: Pat, + fat_arrow_token: Token![=>], + body: Expr, + }, + Children { + span: Option, + children_token: rule_kw::children, + bang_token: Token![!], + paren_token: token::Paren, + branches: Punctuated, + }, + PrecClimb { + span: Option, + prec_climb_token: rule_kw::prec_climb, + bang_token: Token![!], + paren_token: token::Paren, + child_rule: Ident, + comma_token: Token![,], + climber: Expr, + comma_token2: Token![,], + pattern: Pat, + fat_arrow_token: Token![=>], + body: Expr, + }, +} + +#[derive(Debug, Clone)] +struct ChildrenBranch { + bracket_token: token::Bracket, + pattern_unparsed: TokenStream, + pattern: Punctuated, + fat_arrow_token: Token![=>], + body: Expr, +} + +#[derive(Debug, Clone)] +enum ChildrenBranchPatternItem { + Single { + rule_name: Ident, + paren_token: token::Paren, + binder: Pat, + }, + Multiple { + rule_name: Ident, + paren_token: token::Paren, + binder: Ident, + slice_token: Token![..], + }, +} + +impl Parse for Rules { + fn parse(input: ParseStream) 
-> Result { + let mut rules = Vec::new(); + while !input.is_empty() { + rules.push(input.parse()?) + } + Ok(Rules(rules)) + } +} + +impl Parse for Rule { + fn parse(input: ParseStream) -> Result { + let contents; + Ok(Rule { + rule_token: input.parse()?, + bang_token: input.parse()?, + paren_token: parenthesized!(contents in input), + name: contents.parse()?, + lt_token: contents.parse()?, + output_type: contents.parse()?, + gt_token: contents.parse()?, + contents: contents.parse()?, + semi_token: input.parse()?, + }) + } +} + +impl Parse for RuleContents { + fn parse(input: ParseStream) -> Result { + if input.is_empty() { + return Ok(RuleContents::Empty); + } + let _: Token![;] = input.parse()?; + let span = if input.peek(Ident) && input.peek2(Token![;]) { + let span: Ident = input.parse()?; + let _: Token![;] = input.parse()?; + Some(span) + } else { + None + }; + + let lookahead = input.lookahead1(); + if lookahead.peek(rule_kw::captured_str) { + let contents; + Ok(RuleContents::CapturedString { + span, + captured_str_token: input.parse()?, + bang_token: input.parse()?, + paren_token: parenthesized!(contents in input), + pattern: contents.parse()?, + fat_arrow_token: input.parse()?, + body: input.parse()?, + }) + } else if lookahead.peek(rule_kw::children) { + let contents; + Ok(RuleContents::Children { + span, + children_token: input.parse()?, + bang_token: input.parse()?, + paren_token: parenthesized!(contents in input), + branches: Punctuated::parse_terminated(&contents)?, + }) + } else if lookahead.peek(rule_kw::prec_climb) { + let contents; + Ok(RuleContents::PrecClimb { + span, + prec_climb_token: input.parse()?, + bang_token: input.parse()?, + paren_token: parenthesized!(contents in input), + child_rule: contents.parse()?, + comma_token: contents.parse()?, + climber: contents.parse()?, + comma_token2: contents.parse()?, + pattern: contents.parse()?, + fat_arrow_token: contents.parse()?, + body: contents.parse()?, + }) + } else { + Err(lookahead.error()) + } + } +} + +impl Parse for ChildrenBranch { + fn parse(input: ParseStream) -> Result { + let contents; + Ok(ChildrenBranch { + bracket_token: bracketed!(contents in input), + pattern_unparsed: contents.fork().parse()?, + pattern: Punctuated::parse_terminated(&contents)?, + fat_arrow_token: input.parse()?, + body: input.parse()?, + }) + } +} + +impl Parse for ChildrenBranchPatternItem { + fn parse(input: ParseStream) -> Result { + let rule_name = input.parse()?; + let contents; + let paren_token = parenthesized!(contents in input); + if input.peek(Token![..]) { + Ok(ChildrenBranchPatternItem::Multiple { + rule_name, + paren_token, + binder: contents.parse()?, + slice_token: input.parse()?, + }) + } else if input.is_empty() || input.peek(Token![,]) { + Ok(ChildrenBranchPatternItem::Single { + rule_name, + paren_token, + binder: contents.parse()?, + }) + } else { + Err(input.error("expected `..` or nothing")) + } + } +} + +fn make_construct_precclimbers(rules: &Rules) -> Result { + let mut entries: Vec = Vec::new(); + for rule in &rules.0 { + if let RuleContents::PrecClimb { climber, .. 
} = &rule.contents { + let name = &rule.name; + entries.push(quote!( + map.insert(Rule::#name, #climber); + )) + } + } + + Ok(quote!( + fn construct_precclimbers() -> HashMap> { + let mut map = HashMap::new(); + #(#entries)* + map + } + )) +} + +fn make_entrypoints(rules: &Rules) -> Result { + let mut entries: Vec = Vec::new(); + for rule in &rules.0 { + let name = &rule.name; + let output_type = &rule.output_type; + entries.push(quote!( + #[allow(non_snake_case, dead_code)] + fn #name<'a>( + input: Rc, + pair: Pair<'a, Rule>, + ) -> ParseResult<#output_type> { + let climbers = construct_precclimbers(); + Parsers::#name((&climbers, input), pair) + } + )) + } + + Ok(quote!( + struct EntryPoint; + impl EntryPoint { + #(#entries)* + } + )) +} + +fn make_parser_branch(branch: &ChildrenBranch) -> TokenStream { + let ChildrenBranch { + pattern, + body, + pattern_unparsed, + .. + } = branch; + let variable_pattern = Ident::new("variable_pattern", Span::call_site()); + let match_pat = pattern.iter().map(|item| match item { + ChildrenBranchPatternItem::Single { rule_name, .. } => { + quote!(Rule::#rule_name) + } + ChildrenBranchPatternItem::Multiple { .. } => { + quote!(#variable_pattern..) + } + }); + let match_filter = pattern.iter().map(|item| match item { + ChildrenBranchPatternItem::Single { .. } => quote!(true &&), + ChildrenBranchPatternItem::Multiple { rule_name, .. } => { + quote!(#variable_pattern.iter().all(|r| r == &Rule::#rule_name) &&) + } + }); + quote!( + [#(#match_pat),*] if #(#match_filter)* true => { + parse_children!((climbers, input.clone()), iter; + [#pattern_unparsed] => { + #[allow(unused_variables)] + let res: Result<_, String> = try { #body }; + res.map_err(|msg| + custom_parse_error(&pair, msg) + ) + } + ) + } + ) +} + +fn make_parser_expr(rule: &Rule) -> Result { + let name = &rule.name; + let expr = match &rule.contents { + RuleContents::Empty => quote!(Ok(())), + RuleContents::CapturedString { pattern, body, .. } => quote!( + let #pattern = pair.as_str(); + let res: Result<_, String> = try { #body }; + res.map_err(|msg| custom_parse_error(&pair, msg)) + ), + RuleContents::PrecClimb { + child_rule, + pattern, + body, + .. + } => quote!( + let climber = climbers.get(&Rule::#name).unwrap(); + climber.climb( + pair.clone().into_inner(), + |p| Parsers::#child_rule((climbers, input.clone()), p), + |l, op, r| { + let #pattern = (l?, op, r?); + let res: Result<_, String> = try { #body }; + res.map_err(|msg| custom_parse_error(&pair, msg)) + }, + ) + ), + RuleContents::Children { branches, .. } => { + let branches = branches.iter().map(make_parser_branch); + quote!( + let children_rules: Vec = pair + .clone() + .into_inner() + .map(|p| p.as_rule()) + .collect(); + + #[allow(unused_mut)] + let mut iter = pair.clone().into_inner(); + + #[allow(unreachable_code)] + match children_rules.as_slice() { + #(#branches,)* + [..] => Err(custom_parse_error( + &pair, + format!("Unexpected children: {:?}", children_rules) + )), + } + ) + } + }; + Ok(expr) +} + +fn make_parsers(rules: &Rules) -> Result { + let mut entries: Vec = Vec::new(); + for rule in &rules.0 { + let span_def = match &rule.contents { + RuleContents::CapturedString { + span: Some(span), .. + } + | RuleContents::Children { + span: Some(span), .. + } + | RuleContents::PrecClimb { + span: Some(span), .. 
+ } => Some(quote!( + let #span = Span::make(input.clone(), pair.as_span()); + )), + _ => None, + }; + + let name = &rule.name; + let output_type = &rule.output_type; + let expr = make_parser_expr(rule)?; + + entries.push(quote!( + #[allow(non_snake_case, dead_code)] + fn #name<'a>( + (climbers, input): (&HashMap>, Rc), + pair: Pair<'a, Rule>, + ) -> ParseResult<#output_type> { + #span_def + #expr + } + )) + } + + Ok(quote!( + struct Parsers; + impl Parsers { + #(#entries)* + } + )) +} + +pub fn make_parser( + input: proc_macro::TokenStream, +) -> Result { + let rules: Rules = syn::parse_macro_input::parse(input.clone())?; + + let construct_precclimbers = make_construct_precclimbers(&rules)?; + let entrypoints = make_entrypoints(&rules)?; + let parsers = make_parsers(&rules)?; + + Ok(quote!( + #construct_precclimbers + #entrypoints + #parsers + )) +} -- cgit v1.2.3 From 1baef509afe52ab285e73469fc597de8f4e166b6 Mon Sep 17 00:00:00 2001 From: Nadrieril Date: Sun, 1 Sep 2019 18:38:39 +0200 Subject: Change parser macros to use a function-like syntax This makes the parser code look much less magical. --- dhall_proc_macros/src/lib.rs | 8 + dhall_proc_macros/src/parser.rs | 494 ++++++++++++++++++++-------------------- 2 files changed, 252 insertions(+), 250 deletions(-) (limited to 'dhall_proc_macros') diff --git a/dhall_proc_macros/src/lib.rs b/dhall_proc_macros/src/lib.rs index 37c9985..46d93e9 100644 --- a/dhall_proc_macros/src/lib.rs +++ b/dhall_proc_macros/src/lib.rs @@ -22,3 +22,11 @@ pub fn make_parser(input: TokenStream) -> TokenStream { Err(err) => err.to_compile_error(), }) } + +#[proc_macro] +pub fn parse_children(input: TokenStream) -> TokenStream { + TokenStream::from(match parser::parse_children(input) { + Ok(tokens) => tokens, + Err(err) => err.to_compile_error(), + }) +} diff --git a/dhall_proc_macros/src/parser.rs b/dhall_proc_macros/src/parser.rs index bb4e894..2618bec 100644 --- a/dhall_proc_macros/src/parser.rs +++ b/dhall_proc_macros/src/parser.rs @@ -2,7 +2,11 @@ use proc_macro2::{Span, TokenStream}; use quote::quote; use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; -use syn::{bracketed, parenthesized, token, Expr, Ident, Pat, Token, Type}; +use syn::spanned::Spanned; +use syn::{ + bracketed, parenthesized, parse_quote, token, Error, Expr, Ident, ItemFn, + Pat, ReturnType, Token, Type, +}; mod rule_kw { syn::custom_keyword!(rule); @@ -16,73 +20,40 @@ struct Rules(Vec); #[derive(Debug, Clone)] struct Rule { - rule_token: rule_kw::rule, - bang_token: Token![!], - paren_token: token::Paren, name: Ident, - lt_token: token::Lt, output_type: Type, - gt_token: token::Gt, contents: RuleContents, - semi_token: Token![;], } #[derive(Debug, Clone)] enum RuleContents { - Empty, - CapturedString { - span: Option, - captured_str_token: rule_kw::captured_str, - bang_token: Token![!], - paren_token: token::Paren, - pattern: Pat, - fat_arrow_token: Token![=>], - body: Expr, - }, - Children { - span: Option, - children_token: rule_kw::children, - bang_token: Token![!], - paren_token: token::Paren, - branches: Punctuated, - }, PrecClimb { - span: Option, - prec_climb_token: rule_kw::prec_climb, - bang_token: Token![!], - paren_token: token::Paren, child_rule: Ident, - comma_token: Token![,], climber: Expr, - comma_token2: Token![,], - pattern: Pat, - fat_arrow_token: Token![=>], - body: Expr, + function: ItemFn, + }, + Function { + function: ItemFn, }, } #[derive(Debug, Clone)] struct ChildrenBranch { - bracket_token: token::Bracket, - pattern_unparsed: 
TokenStream, + pattern_span: Span, pattern: Punctuated, - fat_arrow_token: Token![=>], body: Expr, } #[derive(Debug, Clone)] enum ChildrenBranchPatternItem { - Single { - rule_name: Ident, - paren_token: token::Paren, - binder: Pat, - }, - Multiple { - rule_name: Ident, - paren_token: token::Paren, - binder: Ident, - slice_token: Token![..], - }, + Single { rule_name: Ident, binder: Pat }, + Multiple { rule_name: Ident, binder: Ident }, +} + +#[derive(Debug, Clone)] +struct ParseChildrenInput { + input_expr: Expr, + branches: Punctuated, } impl Parse for Rules { @@ -97,73 +68,50 @@ impl Parse for Rules { impl Parse for Rule { fn parse(input: ParseStream) -> Result { - let contents; - Ok(Rule { - rule_token: input.parse()?, - bang_token: input.parse()?, - paren_token: parenthesized!(contents in input), - name: contents.parse()?, - lt_token: contents.parse()?, - output_type: contents.parse()?, - gt_token: contents.parse()?, - contents: contents.parse()?, - semi_token: input.parse()?, - }) - } -} + let function: ItemFn = input.parse()?; + let (recognized_attrs, remaining_attrs) = function + .attrs + .iter() + .cloned() + .partition::, _>(|attr| attr.path.is_ident("prec_climb")); + let function = ItemFn { + attrs: remaining_attrs, + ..(function.clone()) + }; -impl Parse for RuleContents { - fn parse(input: ParseStream) -> Result { - if input.is_empty() { - return Ok(RuleContents::Empty); - } - let _: Token![;] = input.parse()?; - let span = if input.peek(Ident) && input.peek2(Token![;]) { - let span: Ident = input.parse()?; - let _: Token![;] = input.parse()?; - Some(span) - } else { - None + let name = function.sig.ident.clone(); + let output_type = match &function.sig.output { + ReturnType::Default => parse_quote!(()), + ReturnType::Type(_, t) => (**t).clone(), }; - let lookahead = input.lookahead1(); - if lookahead.peek(rule_kw::captured_str) { - let contents; - Ok(RuleContents::CapturedString { - span, - captured_str_token: input.parse()?, - bang_token: input.parse()?, - paren_token: parenthesized!(contents in input), - pattern: contents.parse()?, - fat_arrow_token: input.parse()?, - body: input.parse()?, - }) - } else if lookahead.peek(rule_kw::children) { - let contents; - Ok(RuleContents::Children { - span, - children_token: input.parse()?, - bang_token: input.parse()?, - paren_token: parenthesized!(contents in input), - branches: Punctuated::parse_terminated(&contents)?, - }) - } else if lookahead.peek(rule_kw::prec_climb) { - let contents; - Ok(RuleContents::PrecClimb { - span, - prec_climb_token: input.parse()?, - bang_token: input.parse()?, - paren_token: parenthesized!(contents in input), - child_rule: contents.parse()?, - comma_token: contents.parse()?, - climber: contents.parse()?, - comma_token2: contents.parse()?, - pattern: contents.parse()?, - fat_arrow_token: contents.parse()?, - body: contents.parse()?, + if recognized_attrs.is_empty() { + Ok(Rule { + name, + output_type, + contents: RuleContents::Function { function }, }) + } else if recognized_attrs.len() != 1 { + Err(input.error("expected a prec_climb attribute")) } else { - Err(lookahead.error()) + let attr = recognized_attrs.into_iter().next().unwrap(); + let (child_rule, climber) = + attr.parse_args_with(|input: ParseStream| { + let child_rule: Ident = input.parse()?; + let _: Token![,] = input.parse()?; + let climber: Expr = input.parse()?; + Ok((child_rule, climber)) + })?; + + Ok(Rule { + name, + output_type, + contents: RuleContents::PrecClimb { + child_rule, + climber, + function, + }, + }) } } } @@ -171,40 
+119,52 @@ impl Parse for RuleContents { impl Parse for ChildrenBranch { fn parse(input: ParseStream) -> Result { let contents; + let _: token::Bracket = bracketed!(contents in input); + let pattern_unparsed: TokenStream = contents.fork().parse()?; + let pattern_span = pattern_unparsed.span(); + let pattern = Punctuated::parse_terminated(&contents)?; + let _: Token![=>] = input.parse()?; + let body = input.parse()?; + Ok(ChildrenBranch { - bracket_token: bracketed!(contents in input), - pattern_unparsed: contents.fork().parse()?, - pattern: Punctuated::parse_terminated(&contents)?, - fat_arrow_token: input.parse()?, - body: input.parse()?, + pattern_span, + pattern, + body, }) } } impl Parse for ChildrenBranchPatternItem { fn parse(input: ParseStream) -> Result { - let rule_name = input.parse()?; let contents; - let paren_token = parenthesized!(contents in input); + let rule_name = input.parse()?; + parenthesized!(contents in input); if input.peek(Token![..]) { - Ok(ChildrenBranchPatternItem::Multiple { - rule_name, - paren_token, - binder: contents.parse()?, - slice_token: input.parse()?, - }) + let binder = contents.parse()?; + let _: Token![..] = input.parse()?; + Ok(ChildrenBranchPatternItem::Multiple { rule_name, binder }) } else if input.is_empty() || input.peek(Token![,]) { - Ok(ChildrenBranchPatternItem::Single { - rule_name, - paren_token, - binder: contents.parse()?, - }) + let binder = contents.parse()?; + Ok(ChildrenBranchPatternItem::Single { rule_name, binder }) } else { Err(input.error("expected `..` or nothing")) } } } +impl Parse for ParseChildrenInput { + fn parse(input: ParseStream) -> Result { + let input_expr = input.parse()?; + let _: Token![;] = input.parse()?; + let branches = Punctuated::parse_terminated(input)?; + + Ok(ParseChildrenInput { + input_expr, + branches, + }) + } +} + fn make_construct_precclimbers(rules: &Rules) -> Result { let mut entries: Vec = Vec::new(); for rule in &rules.0 { @@ -233,11 +193,16 @@ fn make_entrypoints(rules: &Rules) -> Result { entries.push(quote!( #[allow(non_snake_case, dead_code)] fn #name<'a>( - input: Rc, + input_str: &str, pair: Pair<'a, Rule>, - ) -> ParseResult<#output_type> { + ) -> #output_type { let climbers = construct_precclimbers(); - Parsers::#name((&climbers, input), pair) + let input = ParseInput { + climbers: &climbers, + original_input_str: input_str.to_string().into(), + pair + }; + Parsers::#name(input) } )) } @@ -250,128 +215,37 @@ fn make_entrypoints(rules: &Rules) -> Result { )) } -fn make_parser_branch(branch: &ChildrenBranch) -> TokenStream { - let ChildrenBranch { - pattern, - body, - pattern_unparsed, - .. - } = branch; - let variable_pattern = Ident::new("variable_pattern", Span::call_site()); - let match_pat = pattern.iter().map(|item| match item { - ChildrenBranchPatternItem::Single { rule_name, .. } => { - quote!(Rule::#rule_name) - } - ChildrenBranchPatternItem::Multiple { .. } => { - quote!(#variable_pattern..) - } - }); - let match_filter = pattern.iter().map(|item| match item { - ChildrenBranchPatternItem::Single { .. } => quote!(true &&), - ChildrenBranchPatternItem::Multiple { rule_name, .. 
} => { - quote!(#variable_pattern.iter().all(|r| r == &Rule::#rule_name) &&) - } - }); - quote!( - [#(#match_pat),*] if #(#match_filter)* true => { - parse_children!((climbers, input.clone()), iter; - [#pattern_unparsed] => { - #[allow(unused_variables)] - let res: Result<_, String> = try { #body }; - res.map_err(|msg| - custom_parse_error(&pair, msg) - ) - } - ) - } - ) -} - -fn make_parser_expr(rule: &Rule) -> Result { - let name = &rule.name; - let expr = match &rule.contents { - RuleContents::Empty => quote!(Ok(())), - RuleContents::CapturedString { pattern, body, .. } => quote!( - let #pattern = pair.as_str(); - let res: Result<_, String> = try { #body }; - res.map_err(|msg| custom_parse_error(&pair, msg)) - ), - RuleContents::PrecClimb { - child_rule, - pattern, - body, - .. - } => quote!( - let climber = climbers.get(&Rule::#name).unwrap(); - climber.climb( - pair.clone().into_inner(), - |p| Parsers::#child_rule((climbers, input.clone()), p), - |l, op, r| { - let #pattern = (l?, op, r?); - let res: Result<_, String> = try { #body }; - res.map_err(|msg| custom_parse_error(&pair, msg)) - }, - ) - ), - RuleContents::Children { branches, .. } => { - let branches = branches.iter().map(make_parser_branch); - quote!( - let children_rules: Vec = pair - .clone() - .into_inner() - .map(|p| p.as_rule()) - .collect(); - - #[allow(unused_mut)] - let mut iter = pair.clone().into_inner(); - - #[allow(unreachable_code)] - match children_rules.as_slice() { - #(#branches,)* - [..] => Err(custom_parse_error( - &pair, - format!("Unexpected children: {:?}", children_rules) - )), - } - ) - } - }; - Ok(expr) -} - fn make_parsers(rules: &Rules) -> Result { - let mut entries: Vec = Vec::new(); - for rule in &rules.0 { - let span_def = match &rule.contents { - RuleContents::CapturedString { - span: Some(span), .. - } - | RuleContents::Children { - span: Some(span), .. - } - | RuleContents::PrecClimb { - span: Some(span), .. - } => Some(quote!( - let #span = Span::make(input.clone(), pair.as_span()); - )), - _ => None, - }; - + let entries = rules.0.iter().map(|rule| { let name = &rule.name; let output_type = &rule.output_type; - let expr = make_parser_expr(rule)?; - - entries.push(quote!( - #[allow(non_snake_case, dead_code)] - fn #name<'a>( - (climbers, input): (&HashMap>, Rc), - pair: Pair<'a, Rule>, - ) -> ParseResult<#output_type> { - #span_def - #expr - } - )) - } + match &rule.contents { + RuleContents::PrecClimb { + child_rule, + function, + .. + } => quote!( + #[allow(non_snake_case, dead_code)] + fn #name<'a, 'climbers>( + input: ParseInput<'a, 'climbers, Rule>, + ) -> #output_type { + #function + let climber = input.climbers.get(&Rule::#name).unwrap(); + climber.climb( + input.pair.clone().into_inner(), + |p| Parsers::#child_rule(input.with_pair(p)), + |l, op, r| { + #name(input.clone(), l?, op, r?) 
+ }, + ) + } + ), + RuleContents::Function { function } => quote!( + #[allow(non_snake_case, dead_code)] + #function + ), + } + }); Ok(quote!( struct Parsers; @@ -384,7 +258,7 @@ fn make_parsers(rules: &Rules) -> Result { pub fn make_parser( input: proc_macro::TokenStream, ) -> Result { - let rules: Rules = syn::parse_macro_input::parse(input.clone())?; + let rules: Rules = syn::parse(input.clone())?; let construct_precclimbers = make_construct_precclimbers(&rules)?; let entrypoints = make_entrypoints(&rules)?; @@ -396,3 +270,123 @@ pub fn make_parser( #parsers )) } + +fn make_parser_branch(branch: &ChildrenBranch) -> Result { + use ChildrenBranchPatternItem::{Multiple, Single}; + + let body = &branch.body; + + // Convert the input pattern into a pattern-match on the Rules of the children. This uses + // slice_patterns. + // A single pattern just checks that the rule matches; a variable-length pattern binds the + // subslice and checks that they all match the chosen Rule in the `if`-condition. + let variable_pattern_ident = + Ident::new("variable_pattern", Span::call_site()); + let match_pat = branch.pattern.iter().map(|item| match item { + Single { rule_name, .. } => quote!(Rule::#rule_name), + Multiple { .. } => quote!(#variable_pattern_ident..), + }); + let match_filter = branch.pattern.iter().map(|item| match item { + Single { .. } => quote!(), + Multiple { rule_name, .. } => quote!( + #variable_pattern_ident.iter().all(|r| r == &Rule::#rule_name) && + ), + }); + + // Once we have found a branch that matches, we need to parse the children. + let mut singles_before_multiple = Vec::new(); + let mut multiple = None; + let mut singles_after_multiple = Vec::new(); + for item in &branch.pattern { + match item { + Single { + rule_name, binder, .. + } => { + if multiple.is_none() { + singles_before_multiple.push((rule_name, binder)) + } else { + singles_after_multiple.push((rule_name, binder)) + } + } + Multiple { + rule_name, binder, .. + } => { + if multiple.is_none() { + multiple = Some((rule_name, binder)) + } else { + return Err(Error::new( + branch.pattern_span.clone(), + "multiple variable-length patterns are not allowed", + )); + } + } + } + } + let mut parses = Vec::new(); + for (rule_name, binder) in singles_before_multiple.into_iter() { + parses.push(quote!( + let #binder = Parsers::#rule_name( + inputs.next().unwrap() + )?; + )) + } + // Note the `rev()`: we are taking inputs from the end of the iterator in reverse order, so that + // only the unmatched inputs are left for the variable-length pattern, if any. + for (rule_name, binder) in singles_after_multiple.into_iter().rev() { + parses.push(quote!( + let #binder = Parsers::#rule_name( + inputs.next_back().unwrap() + )?; + )) + } + if let Some((rule_name, binder)) = multiple { + parses.push(quote!( + let #binder = inputs + .map(|i| Parsers::#rule_name(i)) + .collect::, _>>()? 
+ .into_iter(); + )) + } + + Ok(quote!( + [#(#match_pat),*] if #(#match_filter)* true => { + #(#parses)* + #body + } + )) +} + +pub fn parse_children( + input: proc_macro::TokenStream, +) -> Result { + let input: ParseChildrenInput = syn::parse(input)?; + + let input_expr = &input.input_expr; + let branches = input + .branches + .iter() + .map(make_parser_branch) + .collect::>>()?; + Ok(quote!({ + let children_rules: Vec = #input_expr.pair + .clone() + .into_inner() + .map(|p| p.as_rule()) + .collect(); + + #[allow(unused_mut)] + let mut inputs = #input_expr + .pair + .clone() + .into_inner() + .map(|p| #input_expr.with_pair(p)); + + #[allow(unreachable_code)] + match children_rules.as_slice() { + #(#branches,)* + [..] => return Err(#input_expr.error( + format!("Unexpected children: {:?}", children_rules) + )), + } + })) +} -- cgit v1.2.3 From 3e9aa3e46bd5906469751c908a0daedfe26dac22 Mon Sep 17 00:00:00 2001 From: Nadrieril Date: Sun, 1 Sep 2019 22:46:03 +0200 Subject: Make make_parser into a proc_macro_attribute That way rustfmt will format the contents of the parser. --- dhall_proc_macros/src/lib.rs | 4 ++-- dhall_proc_macros/src/parser.rs | 12 ++++++++---- 2 files changed, 10 insertions(+), 6 deletions(-) (limited to 'dhall_proc_macros') diff --git a/dhall_proc_macros/src/lib.rs b/dhall_proc_macros/src/lib.rs index 46d93e9..92cf981 100644 --- a/dhall_proc_macros/src/lib.rs +++ b/dhall_proc_macros/src/lib.rs @@ -15,8 +15,8 @@ pub fn derive_static_type(input: TokenStream) -> TokenStream { derive::derive_static_type(input) } -#[proc_macro] -pub fn make_parser(input: TokenStream) -> TokenStream { +#[proc_macro_attribute] +pub fn make_parser(_attr: TokenStream, input: TokenStream) -> TokenStream { TokenStream::from(match parser::make_parser(input) { Ok(tokens) => tokens, Err(err) => err.to_compile_error(), diff --git a/dhall_proc_macros/src/parser.rs b/dhall_proc_macros/src/parser.rs index 2618bec..1c75279 100644 --- a/dhall_proc_macros/src/parser.rs +++ b/dhall_proc_macros/src/parser.rs @@ -4,8 +4,8 @@ use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::spanned::Spanned; use syn::{ - bracketed, parenthesized, parse_quote, token, Error, Expr, Ident, ItemFn, - Pat, ReturnType, Token, Type, + braced, bracketed, parenthesized, parse_quote, token, Error, Expr, Ident, + ItemFn, Pat, ReturnType, Token, Type, }; mod rule_kw { @@ -58,9 +58,13 @@ struct ParseChildrenInput { impl Parse for Rules { fn parse(input: ParseStream) -> Result { + let _: Token![impl ] = input.parse()?; + let _: Token![_] = input.parse()?; + let contents; + braced!(contents in input); let mut rules = Vec::new(); - while !input.is_empty() { - rules.push(input.parse()?) + while !contents.is_empty() { + rules.push(contents.parse()?) } Ok(Rules(rules)) } -- cgit v1.2.3 From 98b53bfe084fcdb58207de19fccbd9ac10a7baf7 Mon Sep 17 00:00:00 2001 From: Nadrieril Date: Mon, 2 Sep 2019 17:02:06 +0200 Subject: Avoid use of bind_by_move_pattern_guards feature --- dhall_proc_macros/src/parser.rs | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) (limited to 'dhall_proc_macros') diff --git a/dhall_proc_macros/src/parser.rs b/dhall_proc_macros/src/parser.rs index 1c75279..f2efb80 100644 --- a/dhall_proc_macros/src/parser.rs +++ b/dhall_proc_macros/src/parser.rs @@ -283,7 +283,7 @@ fn make_parser_branch(branch: &ChildrenBranch) -> Result { // Convert the input pattern into a pattern-match on the Rules of the children. This uses // slice_patterns. 
// A single pattern just checks that the rule matches; a variable-length pattern binds the - // subslice and checks that they all match the chosen Rule in the `if`-condition. + // subslice and checks, in the if-guard, that its elements all match the chosen Rule. let variable_pattern_ident = Ident::new("variable_pattern", Span::call_site()); let match_pat = branch.pattern.iter().map(|item| match item { @@ -293,7 +293,14 @@ fn make_parser_branch(branch: &ChildrenBranch) -> Result { let match_filter = branch.pattern.iter().map(|item| match item { Single { .. } => quote!(), Multiple { rule_name, .. } => quote!( - #variable_pattern_ident.iter().all(|r| r == &Rule::#rule_name) && + { + // We can't use .all() directly in the pattern guard without the + // bind_by_move_pattern_guards feature. + fn all_match(slice: &[Rule]) -> bool { + slice.iter().all(|r| r == &Rule::#rule_name) + } + all_match(#variable_pattern_ident) + } && ), }); -- cgit v1.2.3 From 4bc3380f57e6ce1c7766df0d6b720371b216490d Mon Sep 17 00:00:00 2001 From: Nadrieril Date: Mon, 2 Sep 2019 17:44:23 +0200 Subject: Get rid of EntryPoint magic --- dhall_proc_macros/src/lib.rs | 1 + dhall_proc_macros/src/parser.rs | 156 ++++++++-------------------------------- 2 files changed, 32 insertions(+), 125 deletions(-) (limited to 'dhall_proc_macros') diff --git a/dhall_proc_macros/src/lib.rs b/dhall_proc_macros/src/lib.rs index 92cf981..3e41254 100644 --- a/dhall_proc_macros/src/lib.rs +++ b/dhall_proc_macros/src/lib.rs @@ -1,3 +1,4 @@ +#![feature(drain_filter)] //! This crate contains the code-generation primitives for the [dhall-rust][dhall-rust] crate. //! This is highly unstable and breaks regularly; use at your own risk. //! diff --git a/dhall_proc_macros/src/parser.rs b/dhall_proc_macros/src/parser.rs index f2efb80..5d03cf5 100644 --- a/dhall_proc_macros/src/parser.rs +++ b/dhall_proc_macros/src/parser.rs @@ -5,7 +5,7 @@ use syn::punctuated::Punctuated; use syn::spanned::Spanned; use syn::{ braced, bracketed, parenthesized, parse_quote, token, Error, Expr, Ident, - ItemFn, Pat, ReturnType, Token, Type, + ItemFn, Pat, ReturnType, Token, }; mod rule_kw { @@ -20,21 +20,7 @@ struct Rules(Vec); #[derive(Debug, Clone)] struct Rule { - name: Ident, - output_type: Type, - contents: RuleContents, -} - -#[derive(Debug, Clone)] -enum RuleContents { - PrecClimb { - child_rule: Ident, - climber: Expr, - function: ItemFn, - }, - Function { - function: ItemFn, - }, + function: ItemFn, } #[derive(Debug, Clone)] @@ -72,16 +58,11 @@ impl Parse for Rules { impl Parse for Rule { fn parse(input: ParseStream) -> Result { - let function: ItemFn = input.parse()?; - let (recognized_attrs, remaining_attrs) = function + let mut function: ItemFn = input.parse()?; + let recognized_attrs: Vec<_> = function .attrs - .iter() - .cloned() - .partition::, _>(|attr| attr.path.is_ident("prec_climb")); - let function = ItemFn { - attrs: remaining_attrs, - ..(function.clone()) - }; + .drain_filter(|attr| attr.path.is_ident("prec_climb")) + .collect(); let name = function.sig.ident.clone(); let output_type = match &function.sig.output { @@ -90,11 +71,7 @@ impl Parse for Rule { }; if recognized_attrs.is_empty() { - Ok(Rule { - name, - output_type, - contents: RuleContents::Function { function }, - }) + Ok(Rule { function }) } else if recognized_attrs.len() != 1 { Err(input.error("expected a prec_climb attribute")) } else { @@ -107,15 +84,24 @@ impl Parse for Rule { Ok((child_rule, climber)) })?; - Ok(Rule { - name, - output_type, - contents: RuleContents::PrecClimb { - 
child_rule, - climber, - function, - }, - }) + let function = parse_quote!( + fn #name<'a>( + input: ParseInput<'a, Rule>, + ) -> #output_type { + #[allow(non_snake_case, dead_code)] + #function + + #climber.climb( + input.pair.clone().into_inner(), + |p| Parsers::#child_rule(input.with_pair(p)), + |l, op, r| { + #name(input.clone(), l?, op, r?) + }, + ) + } + ); + + Ok(Rule { function }) } } } @@ -169,86 +155,13 @@ impl Parse for ParseChildrenInput { } } -fn make_construct_precclimbers(rules: &Rules) -> Result { - let mut entries: Vec = Vec::new(); - for rule in &rules.0 { - if let RuleContents::PrecClimb { climber, .. } = &rule.contents { - let name = &rule.name; - entries.push(quote!( - map.insert(Rule::#name, #climber); - )) - } - } - - Ok(quote!( - fn construct_precclimbers() -> HashMap> { - let mut map = HashMap::new(); - #(#entries)* - map - } - )) -} - -fn make_entrypoints(rules: &Rules) -> Result { - let mut entries: Vec = Vec::new(); - for rule in &rules.0 { - let name = &rule.name; - let output_type = &rule.output_type; - entries.push(quote!( - #[allow(non_snake_case, dead_code)] - fn #name<'a>( - input_str: &str, - pair: Pair<'a, Rule>, - ) -> #output_type { - let climbers = construct_precclimbers(); - let input = ParseInput { - climbers: &climbers, - original_input_str: input_str.to_string().into(), - pair - }; - Parsers::#name(input) - } - )) - } - - Ok(quote!( - struct EntryPoint; - impl EntryPoint { - #(#entries)* - } - )) -} - fn make_parsers(rules: &Rules) -> Result { let entries = rules.0.iter().map(|rule| { - let name = &rule.name; - let output_type = &rule.output_type; - match &rule.contents { - RuleContents::PrecClimb { - child_rule, - function, - .. - } => quote!( - #[allow(non_snake_case, dead_code)] - fn #name<'a, 'climbers>( - input: ParseInput<'a, 'climbers, Rule>, - ) -> #output_type { - #function - let climber = input.climbers.get(&Rule::#name).unwrap(); - climber.climb( - input.pair.clone().into_inner(), - |p| Parsers::#child_rule(input.with_pair(p)), - |l, op, r| { - #name(input.clone(), l?, op, r?) 
- }, - ) - } - ), - RuleContents::Function { function } => quote!( - #[allow(non_snake_case, dead_code)] - #function - ), - } + let function = &rule.function; + quote!( + #[allow(non_snake_case, dead_code)] + #function + ) }); Ok(quote!( @@ -263,16 +176,9 @@ pub fn make_parser( input: proc_macro::TokenStream, ) -> Result { let rules: Rules = syn::parse(input.clone())?; - - let construct_precclimbers = make_construct_precclimbers(&rules)?; - let entrypoints = make_entrypoints(&rules)?; let parsers = make_parsers(&rules)?; - Ok(quote!( - #construct_precclimbers - #entrypoints - #parsers - )) + Ok(quote!( #parsers )) } fn make_parser_branch(branch: &ChildrenBranch) -> Result { -- cgit v1.2.3 From 49f142e3cd173549b8d63883380b5e780c9fefb1 Mon Sep 17 00:00:00 2001 From: Nadrieril Date: Mon, 2 Sep 2019 18:12:14 +0200 Subject: Properly parse the argument of make_parser as an impl --- dhall_proc_macros/src/parser.rs | 161 ++++++++++++++++------------------------ 1 file changed, 62 insertions(+), 99 deletions(-) (limited to 'dhall_proc_macros') diff --git a/dhall_proc_macros/src/parser.rs b/dhall_proc_macros/src/parser.rs index 5d03cf5..d775ca8 100644 --- a/dhall_proc_macros/src/parser.rs +++ b/dhall_proc_macros/src/parser.rs @@ -4,25 +4,10 @@ use syn::parse::{Parse, ParseStream, Result}; use syn::punctuated::Punctuated; use syn::spanned::Spanned; use syn::{ - braced, bracketed, parenthesized, parse_quote, token, Error, Expr, Ident, - ItemFn, Pat, ReturnType, Token, + bracketed, parenthesized, parse_quote, token, Error, Expr, Ident, ImplItem, + ImplItemMethod, ItemImpl, Pat, ReturnType, Token, }; -mod rule_kw { - syn::custom_keyword!(rule); - syn::custom_keyword!(captured_str); - syn::custom_keyword!(children); - syn::custom_keyword!(prec_climb); -} - -#[derive(Debug, Clone)] -struct Rules(Vec); - -#[derive(Debug, Clone)] -struct Rule { - function: ItemFn, -} - #[derive(Debug, Clone)] struct ChildrenBranch { pattern_span: Span, @@ -42,70 +27,6 @@ struct ParseChildrenInput { branches: Punctuated, } -impl Parse for Rules { - fn parse(input: ParseStream) -> Result { - let _: Token![impl ] = input.parse()?; - let _: Token![_] = input.parse()?; - let contents; - braced!(contents in input); - let mut rules = Vec::new(); - while !contents.is_empty() { - rules.push(contents.parse()?) - } - Ok(Rules(rules)) - } -} - -impl Parse for Rule { - fn parse(input: ParseStream) -> Result { - let mut function: ItemFn = input.parse()?; - let recognized_attrs: Vec<_> = function - .attrs - .drain_filter(|attr| attr.path.is_ident("prec_climb")) - .collect(); - - let name = function.sig.ident.clone(); - let output_type = match &function.sig.output { - ReturnType::Default => parse_quote!(()), - ReturnType::Type(_, t) => (**t).clone(), - }; - - if recognized_attrs.is_empty() { - Ok(Rule { function }) - } else if recognized_attrs.len() != 1 { - Err(input.error("expected a prec_climb attribute")) - } else { - let attr = recognized_attrs.into_iter().next().unwrap(); - let (child_rule, climber) = - attr.parse_args_with(|input: ParseStream| { - let child_rule: Ident = input.parse()?; - let _: Token![,] = input.parse()?; - let climber: Expr = input.parse()?; - Ok((child_rule, climber)) - })?; - - let function = parse_quote!( - fn #name<'a>( - input: ParseInput<'a, Rule>, - ) -> #output_type { - #[allow(non_snake_case, dead_code)] - #function - - #climber.climb( - input.pair.clone().into_inner(), - |p| Parsers::#child_rule(input.with_pair(p)), - |l, op, r| { - #name(input.clone(), l?, op, r?) 
- }, - ) - } - ); - - Ok(Rule { function }) - } - } -} - impl Parse for ChildrenBranch { fn parse(input: ParseStream) -> Result { let contents; @@ -155,30 +76,72 @@ impl Parse for ParseChildrenInput { } } -fn make_parsers(rules: &Rules) -> Result { - let entries = rules.0.iter().map(|rule| { - let function = &rule.function; - quote!( - #[allow(non_snake_case, dead_code)] - #function - ) - }); +fn apply_special_attrs(function: &mut ImplItemMethod) -> Result<()> { + let recognized_attrs: Vec<_> = function + .attrs + .drain_filter(|attr| attr.path.is_ident("prec_climb")) + .collect(); - Ok(quote!( - struct Parsers; - impl Parsers { - #(#entries)* - } - )) + let name = function.sig.ident.clone(); + let output_type = match &function.sig.output { + ReturnType::Default => parse_quote!(()), + ReturnType::Type(_, t) => (**t).clone(), + }; + + if recognized_attrs.is_empty() { + } else if recognized_attrs.len() > 1 { + return Err(Error::new( + recognized_attrs[1].span(), + "expected a single prec_climb attribute", + )); + } else { + let attr = recognized_attrs.into_iter().next().unwrap(); + let (child_rule, climber) = + attr.parse_args_with(|input: ParseStream| { + let child_rule: Ident = input.parse()?; + let _: Token![,] = input.parse()?; + let climber: Expr = input.parse()?; + Ok((child_rule, climber)) + })?; + + *function = parse_quote!( + fn #name<'a>( + input: ParseInput<'a, Rule>, + ) -> #output_type { + #[allow(non_snake_case, dead_code)] + #function + + #climber.climb( + input.pair.clone().into_inner(), + |p| Parsers::#child_rule(input.with_pair(p)), + |l, op, r| { + #name(input.clone(), l?, op, r?) + }, + ) + } + ); + } + + *function = parse_quote!( + #[allow(non_snake_case, dead_code)] + #function + ); + + Ok(()) } pub fn make_parser( input: proc_macro::TokenStream, ) -> Result { - let rules: Rules = syn::parse(input.clone())?; - let parsers = make_parsers(&rules)?; - - Ok(quote!( #parsers )) + let mut imp: ItemImpl = syn::parse(input)?; + imp.items + .iter_mut() + .map(|item| match item { + ImplItem::Method(m) => apply_special_attrs(m), + _ => Ok(()), + }) + .collect::>()?; + Ok(quote!( #imp )) } fn make_parser_branch(branch: &ChildrenBranch) -> Result { -- cgit v1.2.3 From 41f598a75de41665dd9ec0aad56b5ef526698151 Mon Sep 17 00:00:00 2001 From: Nadrieril Date: Mon, 2 Sep 2019 18:46:53 +0200 Subject: Use proper hygiene for `Parsers` and `Rule` --- dhall_proc_macros/src/lib.rs | 4 ++-- dhall_proc_macros/src/parser.rs | 49 ++++++++++++++++++++++++++++------------- 2 files changed, 36 insertions(+), 17 deletions(-) (limited to 'dhall_proc_macros') diff --git a/dhall_proc_macros/src/lib.rs b/dhall_proc_macros/src/lib.rs index 3e41254..63dd29a 100644 --- a/dhall_proc_macros/src/lib.rs +++ b/dhall_proc_macros/src/lib.rs @@ -17,8 +17,8 @@ pub fn derive_static_type(input: TokenStream) -> TokenStream { } #[proc_macro_attribute] -pub fn make_parser(_attr: TokenStream, input: TokenStream) -> TokenStream { - TokenStream::from(match parser::make_parser(input) { +pub fn make_parser(attrs: TokenStream, input: TokenStream) -> TokenStream { + TokenStream::from(match parser::make_parser(attrs, input) { Ok(tokens) => tokens, Err(err) => err.to_compile_error(), }) diff --git a/dhall_proc_macros/src/parser.rs b/dhall_proc_macros/src/parser.rs index d775ca8..a440b16 100644 --- a/dhall_proc_macros/src/parser.rs +++ b/dhall_proc_macros/src/parser.rs @@ -76,7 +76,10 @@ impl Parse for ParseChildrenInput { } } -fn apply_special_attrs(function: &mut ImplItemMethod) -> Result<()> { +fn apply_special_attrs( + 
rule_enum: &Ident, + function: &mut ImplItemMethod, +) -> Result<()> { let recognized_attrs: Vec<_> = function .attrs .drain_filter(|attr| attr.path.is_ident("prec_climb")) @@ -106,14 +109,14 @@ fn apply_special_attrs(function: &mut ImplItemMethod) -> Result<()> { *function = parse_quote!( fn #name<'a>( - input: ParseInput<'a, Rule>, + input: ParseInput<'a, #rule_enum>, ) -> #output_type { #[allow(non_snake_case, dead_code)] #function #climber.climb( input.pair.clone().into_inner(), - |p| Parsers::#child_rule(input.with_pair(p)), + |p| Self::#child_rule(input.with_pair(p)), |l, op, r| { #name(input.clone(), l?, op, r?) }, @@ -131,17 +134,29 @@ fn apply_special_attrs(function: &mut ImplItemMethod) -> Result<()> { } pub fn make_parser( + attrs: proc_macro::TokenStream, input: proc_macro::TokenStream, ) -> Result { + let rule_enum: Ident = syn::parse(attrs)?; + let mut imp: ItemImpl = syn::parse(input)?; imp.items .iter_mut() .map(|item| match item { - ImplItem::Method(m) => apply_special_attrs(m), + ImplItem::Method(m) => apply_special_attrs(&rule_enum, m), _ => Ok(()), }) .collect::>()?; - Ok(quote!( #imp )) + + let ty = &imp.self_ty; + let (impl_generics, _, where_clause) = imp.generics.split_for_impl(); + Ok(quote!( + impl #impl_generics PestConsumer for #ty #where_clause { + type RuleEnum = #rule_enum; + } + + #imp + )) } fn make_parser_branch(branch: &ChildrenBranch) -> Result { @@ -156,18 +171,22 @@ fn make_parser_branch(branch: &ChildrenBranch) -> Result { let variable_pattern_ident = Ident::new("variable_pattern", Span::call_site()); let match_pat = branch.pattern.iter().map(|item| match item { - Single { rule_name, .. } => quote!(Rule::#rule_name), + Single { rule_name, .. } => { + quote!(<::RuleEnum>::#rule_name) + } Multiple { .. } => quote!(#variable_pattern_ident..), }); let match_filter = branch.pattern.iter().map(|item| match item { Single { .. } => quote!(), Multiple { rule_name, .. } => quote!( { - // We can't use .all() directly in the pattern guard without the - // bind_by_move_pattern_guards feature. - fn all_match(slice: &[Rule]) -> bool { - slice.iter().all(|r| r == &Rule::#rule_name) - } + // We can't use .all() directly in the pattern guard; see + // https://github.com/rust-lang/rust/issues/59803. + let all_match = |slice: &[_]| { + slice.iter().all(|r| + r == &<::RuleEnum>::#rule_name + ) + }; all_match(#variable_pattern_ident) } && ), @@ -205,7 +224,7 @@ fn make_parser_branch(branch: &ChildrenBranch) -> Result { let mut parses = Vec::new(); for (rule_name, binder) in singles_before_multiple.into_iter() { parses.push(quote!( - let #binder = Parsers::#rule_name( + let #binder = Self::#rule_name( inputs.next().unwrap() )?; )) @@ -214,7 +233,7 @@ fn make_parser_branch(branch: &ChildrenBranch) -> Result { // only the unmatched inputs are left for the variable-length pattern, if any. for (rule_name, binder) in singles_after_multiple.into_iter().rev() { parses.push(quote!( - let #binder = Parsers::#rule_name( + let #binder = Self::#rule_name( inputs.next_back().unwrap() )?; )) @@ -222,7 +241,7 @@ fn make_parser_branch(branch: &ChildrenBranch) -> Result { if let Some((rule_name, binder)) = multiple { parses.push(quote!( let #binder = inputs - .map(|i| Parsers::#rule_name(i)) + .map(|i| Self::#rule_name(i)) .collect::, _>>()? 
.into_iter(); )) @@ -248,7 +267,7 @@ pub fn parse_children( .map(make_parser_branch) .collect::>>()?; Ok(quote!({ - let children_rules: Vec = #input_expr.pair + let children_rules: Vec<_> = #input_expr.pair .clone() .into_inner() .map(|p| p.as_rule()) -- cgit v1.2.3 From 97d74f514bd8c5c4b96fb4f4071f4a93ac28572d Mon Sep 17 00:00:00 2001 From: Nadrieril Date: Mon, 2 Sep 2019 18:59:30 +0200 Subject: Separate both parser proc_macros into their own files --- dhall_proc_macros/src/lib.rs | 7 +- dhall_proc_macros/src/make_parser.rs | 90 ++++++++++ dhall_proc_macros/src/parse_children.rs | 205 ++++++++++++++++++++++ dhall_proc_macros/src/parser.rs | 291 -------------------------------- 4 files changed, 299 insertions(+), 294 deletions(-) create mode 100644 dhall_proc_macros/src/make_parser.rs create mode 100644 dhall_proc_macros/src/parse_children.rs delete mode 100644 dhall_proc_macros/src/parser.rs (limited to 'dhall_proc_macros') diff --git a/dhall_proc_macros/src/lib.rs b/dhall_proc_macros/src/lib.rs index 63dd29a..37e8f9f 100644 --- a/dhall_proc_macros/src/lib.rs +++ b/dhall_proc_macros/src/lib.rs @@ -7,7 +7,8 @@ extern crate proc_macro; mod derive; -mod parser; +mod make_parser; +mod parse_children; use proc_macro::TokenStream; @@ -18,7 +19,7 @@ pub fn derive_static_type(input: TokenStream) -> TokenStream { #[proc_macro_attribute] pub fn make_parser(attrs: TokenStream, input: TokenStream) -> TokenStream { - TokenStream::from(match parser::make_parser(attrs, input) { + TokenStream::from(match make_parser::make_parser(attrs, input) { Ok(tokens) => tokens, Err(err) => err.to_compile_error(), }) @@ -26,7 +27,7 @@ pub fn make_parser(attrs: TokenStream, input: TokenStream) -> TokenStream { #[proc_macro] pub fn parse_children(input: TokenStream) -> TokenStream { - TokenStream::from(match parser::parse_children(input) { + TokenStream::from(match parse_children::parse_children(input) { Ok(tokens) => tokens, Err(err) => err.to_compile_error(), }) diff --git a/dhall_proc_macros/src/make_parser.rs b/dhall_proc_macros/src/make_parser.rs new file mode 100644 index 0000000..63ce779 --- /dev/null +++ b/dhall_proc_macros/src/make_parser.rs @@ -0,0 +1,90 @@ +use quote::quote; +use syn::parse::{ParseStream, Result}; +use syn::spanned::Spanned; +use syn::{ + parse_quote, Error, Expr, Ident, ImplItem, ImplItemMethod, ItemImpl, + ReturnType, Token, +}; + +fn apply_special_attrs( + rule_enum: &Ident, + function: &mut ImplItemMethod, +) -> Result<()> { + let recognized_attrs: Vec<_> = function + .attrs + .drain_filter(|attr| attr.path.is_ident("prec_climb")) + .collect(); + + let name = function.sig.ident.clone(); + let output_type = match &function.sig.output { + ReturnType::Default => parse_quote!(()), + ReturnType::Type(_, t) => (**t).clone(), + }; + + if recognized_attrs.is_empty() { + } else if recognized_attrs.len() > 1 { + return Err(Error::new( + recognized_attrs[1].span(), + "expected a single prec_climb attribute", + )); + } else { + let attr = recognized_attrs.into_iter().next().unwrap(); + let (child_rule, climber) = + attr.parse_args_with(|input: ParseStream| { + let child_rule: Ident = input.parse()?; + let _: Token![,] = input.parse()?; + let climber: Expr = input.parse()?; + Ok((child_rule, climber)) + })?; + + *function = parse_quote!( + fn #name<'a>( + input: ParseInput<'a, #rule_enum>, + ) -> #output_type { + #[allow(non_snake_case, dead_code)] + #function + + #climber.climb( + input.pair.clone().into_inner(), + |p| Self::#child_rule(input.with_pair(p)), + |l, op, r| { + #name(input.clone(), l?, 
op, r?) + }, + ) + } + ); + } + + *function = parse_quote!( + #[allow(non_snake_case, dead_code)] + #function + ); + + Ok(()) +} + +pub fn make_parser( + attrs: proc_macro::TokenStream, + input: proc_macro::TokenStream, +) -> Result { + let rule_enum: Ident = syn::parse(attrs)?; + + let mut imp: ItemImpl = syn::parse(input)?; + imp.items + .iter_mut() + .map(|item| match item { + ImplItem::Method(m) => apply_special_attrs(&rule_enum, m), + _ => Ok(()), + }) + .collect::>()?; + + let ty = &imp.self_ty; + let (impl_generics, _, where_clause) = imp.generics.split_for_impl(); + Ok(quote!( + impl #impl_generics PestConsumer for #ty #where_clause { + type RuleEnum = #rule_enum; + } + + #imp + )) +} diff --git a/dhall_proc_macros/src/parse_children.rs b/dhall_proc_macros/src/parse_children.rs new file mode 100644 index 0000000..ce6f66c --- /dev/null +++ b/dhall_proc_macros/src/parse_children.rs @@ -0,0 +1,205 @@ +use proc_macro2::{Span, TokenStream}; +use quote::quote; +use syn::parse::{Parse, ParseStream, Result}; +use syn::punctuated::Punctuated; +use syn::spanned::Spanned; +use syn::{bracketed, parenthesized, token, Error, Expr, Ident, Pat, Token}; + +#[derive(Debug, Clone)] +struct ChildrenBranch { + pattern_span: Span, + pattern: Punctuated, + body: Expr, +} + +#[derive(Debug, Clone)] +enum ChildrenBranchPatternItem { + Single { rule_name: Ident, binder: Pat }, + Multiple { rule_name: Ident, binder: Ident }, +} + +#[derive(Debug, Clone)] +struct ParseChildrenInput { + input_expr: Expr, + branches: Punctuated, +} + +impl Parse for ChildrenBranch { + fn parse(input: ParseStream) -> Result { + let contents; + let _: token::Bracket = bracketed!(contents in input); + let pattern_unparsed: TokenStream = contents.fork().parse()?; + let pattern_span = pattern_unparsed.span(); + let pattern = Punctuated::parse_terminated(&contents)?; + let _: Token![=>] = input.parse()?; + let body = input.parse()?; + + Ok(ChildrenBranch { + pattern_span, + pattern, + body, + }) + } +} + +impl Parse for ChildrenBranchPatternItem { + fn parse(input: ParseStream) -> Result { + let contents; + let rule_name = input.parse()?; + parenthesized!(contents in input); + if input.peek(Token![..]) { + let binder = contents.parse()?; + let _: Token![..] = input.parse()?; + Ok(ChildrenBranchPatternItem::Multiple { rule_name, binder }) + } else if input.is_empty() || input.peek(Token![,]) { + let binder = contents.parse()?; + Ok(ChildrenBranchPatternItem::Single { rule_name, binder }) + } else { + Err(input.error("expected `..` or nothing")) + } + } +} + +impl Parse for ParseChildrenInput { + fn parse(input: ParseStream) -> Result { + let input_expr = input.parse()?; + let _: Token![;] = input.parse()?; + let branches = Punctuated::parse_terminated(input)?; + + Ok(ParseChildrenInput { + input_expr, + branches, + }) + } +} + +fn make_parser_branch(branch: &ChildrenBranch) -> Result { + use ChildrenBranchPatternItem::{Multiple, Single}; + + let body = &branch.body; + + // Convert the input pattern into a pattern-match on the Rules of the children. This uses + // slice_patterns. + // A single pattern just checks that the rule matches; a variable-length pattern binds the + // subslice and checks, in the if-guard, that its elements all match the chosen Rule. + let variable_pattern_ident = + Ident::new("variable_pattern", Span::call_site()); + let match_pat = branch.pattern.iter().map(|item| match item { + Single { rule_name, .. } => { + quote!(<::RuleEnum>::#rule_name) + } + Multiple { .. 
} => quote!(#variable_pattern_ident..), + }); + let match_filter = branch.pattern.iter().map(|item| match item { + Single { .. } => quote!(), + Multiple { rule_name, .. } => quote!( + { + // We can't use .all() directly in the pattern guard; see + // https://github.com/rust-lang/rust/issues/59803. + let all_match = |slice: &[_]| { + slice.iter().all(|r| + r == &<::RuleEnum>::#rule_name + ) + }; + all_match(#variable_pattern_ident) + } && + ), + }); + + // Once we have found a branch that matches, we need to parse the children. + let mut singles_before_multiple = Vec::new(); + let mut multiple = None; + let mut singles_after_multiple = Vec::new(); + for item in &branch.pattern { + match item { + Single { + rule_name, binder, .. + } => { + if multiple.is_none() { + singles_before_multiple.push((rule_name, binder)) + } else { + singles_after_multiple.push((rule_name, binder)) + } + } + Multiple { + rule_name, binder, .. + } => { + if multiple.is_none() { + multiple = Some((rule_name, binder)) + } else { + return Err(Error::new( + branch.pattern_span.clone(), + "multiple variable-length patterns are not allowed", + )); + } + } + } + } + let mut parses = Vec::new(); + for (rule_name, binder) in singles_before_multiple.into_iter() { + parses.push(quote!( + let #binder = Self::#rule_name( + inputs.next().unwrap() + )?; + )) + } + // Note the `rev()`: we are taking inputs from the end of the iterator in reverse order, so that + // only the unmatched inputs are left for the variable-length pattern, if any. + for (rule_name, binder) in singles_after_multiple.into_iter().rev() { + parses.push(quote!( + let #binder = Self::#rule_name( + inputs.next_back().unwrap() + )?; + )) + } + if let Some((rule_name, binder)) = multiple { + parses.push(quote!( + let #binder = inputs + .map(|i| Self::#rule_name(i)) + .collect::, _>>()? + .into_iter(); + )) + } + + Ok(quote!( + [#(#match_pat),*] if #(#match_filter)* true => { + #(#parses)* + #body + } + )) +} + +pub fn parse_children( + input: proc_macro::TokenStream, +) -> Result { + let input: ParseChildrenInput = syn::parse(input)?; + + let input_expr = &input.input_expr; + let branches = input + .branches + .iter() + .map(make_parser_branch) + .collect::>>()?; + Ok(quote!({ + let children_rules: Vec<_> = #input_expr.pair + .clone() + .into_inner() + .map(|p| p.as_rule()) + .collect(); + + #[allow(unused_mut)] + let mut inputs = #input_expr + .pair + .clone() + .into_inner() + .map(|p| #input_expr.with_pair(p)); + + #[allow(unreachable_code)] + match children_rules.as_slice() { + #(#branches,)* + [..] 
=> return Err(#input_expr.error( + format!("Unexpected children: {:?}", children_rules) + )), + } + })) +} diff --git a/dhall_proc_macros/src/parser.rs b/dhall_proc_macros/src/parser.rs deleted file mode 100644 index a440b16..0000000 --- a/dhall_proc_macros/src/parser.rs +++ /dev/null @@ -1,291 +0,0 @@ -use proc_macro2::{Span, TokenStream}; -use quote::quote; -use syn::parse::{Parse, ParseStream, Result}; -use syn::punctuated::Punctuated; -use syn::spanned::Spanned; -use syn::{ - bracketed, parenthesized, parse_quote, token, Error, Expr, Ident, ImplItem, - ImplItemMethod, ItemImpl, Pat, ReturnType, Token, -}; - -#[derive(Debug, Clone)] -struct ChildrenBranch { - pattern_span: Span, - pattern: Punctuated, - body: Expr, -} - -#[derive(Debug, Clone)] -enum ChildrenBranchPatternItem { - Single { rule_name: Ident, binder: Pat }, - Multiple { rule_name: Ident, binder: Ident }, -} - -#[derive(Debug, Clone)] -struct ParseChildrenInput { - input_expr: Expr, - branches: Punctuated, -} - -impl Parse for ChildrenBranch { - fn parse(input: ParseStream) -> Result { - let contents; - let _: token::Bracket = bracketed!(contents in input); - let pattern_unparsed: TokenStream = contents.fork().parse()?; - let pattern_span = pattern_unparsed.span(); - let pattern = Punctuated::parse_terminated(&contents)?; - let _: Token![=>] = input.parse()?; - let body = input.parse()?; - - Ok(ChildrenBranch { - pattern_span, - pattern, - body, - }) - } -} - -impl Parse for ChildrenBranchPatternItem { - fn parse(input: ParseStream) -> Result { - let contents; - let rule_name = input.parse()?; - parenthesized!(contents in input); - if input.peek(Token![..]) { - let binder = contents.parse()?; - let _: Token![..] = input.parse()?; - Ok(ChildrenBranchPatternItem::Multiple { rule_name, binder }) - } else if input.is_empty() || input.peek(Token![,]) { - let binder = contents.parse()?; - Ok(ChildrenBranchPatternItem::Single { rule_name, binder }) - } else { - Err(input.error("expected `..` or nothing")) - } - } -} - -impl Parse for ParseChildrenInput { - fn parse(input: ParseStream) -> Result { - let input_expr = input.parse()?; - let _: Token![;] = input.parse()?; - let branches = Punctuated::parse_terminated(input)?; - - Ok(ParseChildrenInput { - input_expr, - branches, - }) - } -} - -fn apply_special_attrs( - rule_enum: &Ident, - function: &mut ImplItemMethod, -) -> Result<()> { - let recognized_attrs: Vec<_> = function - .attrs - .drain_filter(|attr| attr.path.is_ident("prec_climb")) - .collect(); - - let name = function.sig.ident.clone(); - let output_type = match &function.sig.output { - ReturnType::Default => parse_quote!(()), - ReturnType::Type(_, t) => (**t).clone(), - }; - - if recognized_attrs.is_empty() { - } else if recognized_attrs.len() > 1 { - return Err(Error::new( - recognized_attrs[1].span(), - "expected a single prec_climb attribute", - )); - } else { - let attr = recognized_attrs.into_iter().next().unwrap(); - let (child_rule, climber) = - attr.parse_args_with(|input: ParseStream| { - let child_rule: Ident = input.parse()?; - let _: Token![,] = input.parse()?; - let climber: Expr = input.parse()?; - Ok((child_rule, climber)) - })?; - - *function = parse_quote!( - fn #name<'a>( - input: ParseInput<'a, #rule_enum>, - ) -> #output_type { - #[allow(non_snake_case, dead_code)] - #function - - #climber.climb( - input.pair.clone().into_inner(), - |p| Self::#child_rule(input.with_pair(p)), - |l, op, r| { - #name(input.clone(), l?, op, r?) 
-                    },
-                )
-            }
-        );
-    }
-
-    *function = parse_quote!(
-        #[allow(non_snake_case, dead_code)]
-        #function
-    );
-
-    Ok(())
-}
-
-pub fn make_parser(
-    attrs: proc_macro::TokenStream,
-    input: proc_macro::TokenStream,
-) -> Result<TokenStream> {
-    let rule_enum: Ident = syn::parse(attrs)?;
-
-    let mut imp: ItemImpl = syn::parse(input)?;
-    imp.items
-        .iter_mut()
-        .map(|item| match item {
-            ImplItem::Method(m) => apply_special_attrs(&rule_enum, m),
-            _ => Ok(()),
-        })
-        .collect::<Result<()>>()?;
-
-    let ty = &imp.self_ty;
-    let (impl_generics, _, where_clause) = imp.generics.split_for_impl();
-    Ok(quote!(
-        impl #impl_generics PestConsumer for #ty #where_clause {
-            type RuleEnum = #rule_enum;
-        }
-
-        #imp
-    ))
-}
-
-fn make_parser_branch(branch: &ChildrenBranch) -> Result<TokenStream> {
-    use ChildrenBranchPatternItem::{Multiple, Single};
-
-    let body = &branch.body;
-
-    // Convert the input pattern into a pattern-match on the Rules of the children. This uses
-    // slice_patterns.
-    // A single pattern just checks that the rule matches; a variable-length pattern binds the
-    // subslice and checks, in the if-guard, that its elements all match the chosen Rule.
-    let variable_pattern_ident =
-        Ident::new("variable_pattern", Span::call_site());
-    let match_pat = branch.pattern.iter().map(|item| match item {
-        Single { rule_name, .. } => {
-            quote!(<<Self as PestConsumer>::RuleEnum>::#rule_name)
-        }
-        Multiple { .. } => quote!(#variable_pattern_ident..),
-    });
-    let match_filter = branch.pattern.iter().map(|item| match item {
-        Single { .. } => quote!(),
-        Multiple { rule_name, .. } => quote!(
-            {
-                // We can't use .all() directly in the pattern guard; see
-                // https://github.com/rust-lang/rust/issues/59803.
-                let all_match = |slice: &[_]| {
-                    slice.iter().all(|r|
-                        r == &<<Self as PestConsumer>::RuleEnum>::#rule_name
-                    )
-                };
-                all_match(#variable_pattern_ident)
-            } &&
-        ),
-    });
-
-    // Once we have found a branch that matches, we need to parse the children.
-    let mut singles_before_multiple = Vec::new();
-    let mut multiple = None;
-    let mut singles_after_multiple = Vec::new();
-    for item in &branch.pattern {
-        match item {
-            Single {
-                rule_name, binder, ..
-            } => {
-                if multiple.is_none() {
-                    singles_before_multiple.push((rule_name, binder))
-                } else {
-                    singles_after_multiple.push((rule_name, binder))
-                }
-            }
-            Multiple {
-                rule_name, binder, ..
-            } => {
-                if multiple.is_none() {
-                    multiple = Some((rule_name, binder))
-                } else {
-                    return Err(Error::new(
-                        branch.pattern_span.clone(),
-                        "multiple variable-length patterns are not allowed",
-                    ));
-                }
-            }
-        }
-    }
-    let mut parses = Vec::new();
-    for (rule_name, binder) in singles_before_multiple.into_iter() {
-        parses.push(quote!(
-            let #binder = Self::#rule_name(
-                inputs.next().unwrap()
-            )?;
-        ))
-    }
-    // Note the `rev()`: we are taking inputs from the end of the iterator in reverse order, so that
-    // only the unmatched inputs are left for the variable-length pattern, if any.
-    for (rule_name, binder) in singles_after_multiple.into_iter().rev() {
-        parses.push(quote!(
-            let #binder = Self::#rule_name(
-                inputs.next_back().unwrap()
-            )?;
-        ))
-    }
-    if let Some((rule_name, binder)) = multiple {
-        parses.push(quote!(
-            let #binder = inputs
-                .map(|i| Self::#rule_name(i))
-                .collect::<Result<Vec<_>, _>>()?
-                .into_iter();
-        ))
-    }
-
-    Ok(quote!(
-        [#(#match_pat),*] if #(#match_filter)* true => {
-            #(#parses)*
-            #body
-        }
-    ))
-}
-
-pub fn parse_children(
-    input: proc_macro::TokenStream,
-) -> Result<TokenStream> {
-    let input: ParseChildrenInput = syn::parse(input)?;
-
-    let input_expr = &input.input_expr;
-    let branches = input
-        .branches
-        .iter()
-        .map(make_parser_branch)
-        .collect::<Result<Vec<_>>>()?;
-    Ok(quote!({
-        let children_rules: Vec<_> = #input_expr.pair
-            .clone()
-            .into_inner()
-            .map(|p| p.as_rule())
-            .collect();
-
-        #[allow(unused_mut)]
-        let mut inputs = #input_expr
-            .pair
-            .clone()
-            .into_inner()
-            .map(|p| #input_expr.with_pair(p));
-
-        #[allow(unreachable_code)]
-        match children_rules.as_slice() {
-            #(#branches,)*
-            [..] => return Err(#input_expr.error(
-                format!("Unexpected children: {:?}", children_rules)
-            )),
-        }
-    }))
-}
-- cgit v1.2.3

From 5dde11c8ffb13fbaf5dbc9c2b544270c22a7d2f5 Mon Sep 17 00:00:00 2001
From: Nadrieril
Date: Mon, 2 Sep 2019 22:50:16 +0200
Subject: Parse polymorphically in the Embed parameter

---
 dhall_proc_macros/src/make_parser.rs | 83 ++++++++++++++++++++++--------------
 1 file changed, 52 insertions(+), 31 deletions(-)

(limited to 'dhall_proc_macros')

diff --git a/dhall_proc_macros/src/make_parser.rs b/dhall_proc_macros/src/make_parser.rs
index 63ce779..268a639 100644
--- a/dhall_proc_macros/src/make_parser.rs
+++ b/dhall_proc_macros/src/make_parser.rs
@@ -2,26 +2,25 @@ use quote::quote;
 use syn::parse::{ParseStream, Result};
 use syn::spanned::Spanned;
 use syn::{
-    parse_quote, Error, Expr, Ident, ImplItem, ImplItemMethod, ItemImpl,
-    ReturnType, Token,
+    parse_quote, Error, Expr, FnArg, Ident, ImplItem, ImplItemMethod, ItemImpl,
+    Pat, Token,
 };

-fn apply_special_attrs(
-    rule_enum: &Ident,
-    function: &mut ImplItemMethod,
-) -> Result<()> {
+fn apply_special_attrs(function: &mut ImplItemMethod) -> Result<()> {
+    *function = parse_quote!(
+        #[allow(non_snake_case, dead_code)]
+        #function
+    );
+
     let recognized_attrs: Vec<_> = function
         .attrs
         .drain_filter(|attr| attr.path.is_ident("prec_climb"))
         .collect();

     let name = function.sig.ident.clone();
-    let output_type = match &function.sig.output {
-        ReturnType::Default => parse_quote!(()),
-        ReturnType::Type(_, t) => (**t).clone(),
-    };

     if recognized_attrs.is_empty() {
+        // do nothing
     } else if recognized_attrs.len() > 1 {
         return Err(Error::new(
             recognized_attrs[1].span(),
@@ -37,28 +36,50 @@ fn apply_special_attrs(
             Ok((child_rule, climber))
         })?;

-        *function = parse_quote!(
-            fn #name<'a>(
-                input: ParseInput<'a, #rule_enum>,
-            ) -> #output_type {
-                #[allow(non_snake_case, dead_code)]
-                #function
-
-                #climber.climb(
-                    input.pair.clone().into_inner(),
-                    |p| Self::#child_rule(input.with_pair(p)),
-                    |l, op, r| {
-                        #name(input.clone(), l?, op, r?)
-                    },
-                )
+        // Get the name of the first (`input`) function argument
+        let first_arg = function.sig.inputs.first().ok_or_else(|| {
+            Error::new(
+                function.sig.inputs.span(),
+                "a prec_climb function needs 4 arguments",
+            )
+        })?;
+        let first_arg = match &first_arg {
+            FnArg::Receiver(_) => return Err(Error::new(
+                first_arg.span(),
+                "a prec_climb function should not have a `self` argument",
+            )),
+            FnArg::Typed(first_arg) => match &*first_arg.pat{
+                Pat::Ident(ident) => &ident.ident,
+                _ => return Err(Error::new(
+                    first_arg.span(),
+                    "this argument should be a plain identifier instead of a pattern",
+                )),
             }
-        );
-    }
+        };

-    *function = parse_quote!(
-        #[allow(non_snake_case, dead_code)]
-        #function
-    );
+        function.block = parse_quote!({
+            #function
+
+            #climber.climb(
+                #first_arg.pair.clone().into_inner(),
+                |p| Self::#child_rule(#first_arg.with_pair(p)),
+                |l, op, r| {
+                    #name(#first_arg.clone(), l?, op, r?)
+                },
+            )
+        });
+        // Remove the 3 last arguments to keep only the `input` one
+        function.sig.inputs.pop();
+        function.sig.inputs.pop();
+        function.sig.inputs.pop();
+        // Check that an argument remains
+        function.sig.inputs.first().ok_or_else(|| {
+            Error::new(
+                function.sig.inputs.span(),
+                "a prec_climb function needs 4 arguments",
+            )
+        })?;
+    }

     Ok(())
 }
@@ -73,7 +94,7 @@ pub fn make_parser(
     imp.items
         .iter_mut()
         .map(|item| match item {
-            ImplItem::Method(m) => apply_special_attrs(&rule_enum, m),
+            ImplItem::Method(m) => apply_special_attrs(m),
             _ => Ok(()),
         })
         .collect::<Result<()>>()?;
-- cgit v1.2.3

From f892f0bdb1dc9a8f2d1cda245e17c28fcf1090b3 Mon Sep 17 00:00:00 2001
From: Nadrieril
Date: Mon, 2 Sep 2019 23:03:05 +0200
Subject: Improve hygiene of identifiers

---
 dhall_proc_macros/src/parse_children.rs | 33 ++++++++++++++++++++-------------
 1 file changed, 20 insertions(+), 13 deletions(-)

(limited to 'dhall_proc_macros')

diff --git a/dhall_proc_macros/src/parse_children.rs b/dhall_proc_macros/src/parse_children.rs
index ce6f66c..c78adbc 100644
--- a/dhall_proc_macros/src/parse_children.rs
+++ b/dhall_proc_macros/src/parse_children.rs
@@ -73,7 +73,10 @@ impl Parse for ParseChildrenInput {
     }
 }

-fn make_parser_branch(branch: &ChildrenBranch) -> Result<TokenStream> {
+fn make_parser_branch(
+    branch: &ChildrenBranch,
+    i_inputs: &Ident,
+) -> Result<TokenStream> {
     use ChildrenBranchPatternItem::{Multiple, Single};

     let body = &branch.body;
@@ -82,13 +85,13 @@ fn make_parser_branch(branch: &ChildrenBranch) -> Result<TokenStream> {
     // slice_patterns.
     // A single pattern just checks that the rule matches; a variable-length pattern binds the
     // subslice and checks, in the if-guard, that its elements all match the chosen Rule.
-    let variable_pattern_ident =
-        Ident::new("variable_pattern", Span::call_site());
+    let i_variable_pattern =
+        Ident::new("___variable_pattern", Span::call_site());
     let match_pat = branch.pattern.iter().map(|item| match item {
         Single { rule_name, .. } => {
             quote!(<<Self as PestConsumer>::RuleEnum>::#rule_name)
         }
-        Multiple { .. } => quote!(#variable_pattern_ident..),
+        Multiple { .. } => quote!(#i_variable_pattern..),
     });
     let match_filter = branch.pattern.iter().map(|item| match item {
         Single { .. } => quote!(),
@@ -101,7 +104,7 @@ fn make_parser_branch(branch: &ChildrenBranch) -> Result<TokenStream> {
                         r == &<<Self as PestConsumer>::RuleEnum>::#rule_name
                     )
                 };
-                all_match(#variable_pattern_ident)
+                all_match(#i_variable_pattern)
             } &&
         ),
     });
@@ -139,7 +142,7 @@ fn make_parser_branch(branch: &ChildrenBranch) -> Result<TokenStream> {
     for (rule_name, binder) in singles_before_multiple.into_iter() {
         parses.push(quote!(
             let #binder = Self::#rule_name(
-                inputs.next().unwrap()
+                #i_inputs.next().unwrap()
             )?;
         ))
     }
@@ -148,13 +151,13 @@ fn make_parser_branch(branch: &ChildrenBranch) -> Result<TokenStream> {
     for (rule_name, binder) in singles_after_multiple.into_iter().rev() {
         parses.push(quote!(
             let #binder = Self::#rule_name(
-                inputs.next_back().unwrap()
+                #i_inputs.next_back().unwrap()
             )?;
         ))
     }
     if let Some((rule_name, binder)) = multiple {
         parses.push(quote!(
-            let #binder = inputs
+            let #binder = #i_inputs
                 .map(|i| Self::#rule_name(i))
                 .collect::<Result<Vec<_>, _>>()?
                 .into_iter();
@@ -174,31 +177,35 @@ pub fn parse_children(
 ) -> Result<TokenStream> {
     let input: ParseChildrenInput = syn::parse(input)?;

+    let i_children_rules = Ident::new("___children_rules", Span::call_site());
+    let i_inputs = Ident::new("___inputs", Span::call_site());
+
     let input_expr = &input.input_expr;
     let branches = input
         .branches
         .iter()
-        .map(make_parser_branch)
+        .map(|br| make_parser_branch(br, &i_inputs))
         .collect::<Result<Vec<_>>>()?;
+
     Ok(quote!({
-        let children_rules: Vec<_> = #input_expr.pair
+        let #i_children_rules: Vec<_> = #input_expr.pair
             .clone()
             .into_inner()
             .map(|p| p.as_rule())
             .collect();

         #[allow(unused_mut)]
-        let mut inputs = #input_expr
+        let mut #i_inputs = #input_expr
             .pair
             .clone()
             .into_inner()
             .map(|p| #input_expr.with_pair(p));

         #[allow(unreachable_code)]
-        match children_rules.as_slice() {
+        match #i_children_rules.as_slice() {
             #(#branches,)*
             [..] => return Err(#input_expr.error(
-                format!("Unexpected children: {:?}", children_rules)
+                format!("Unexpected children: {:?}", #i_children_rules)
             )),
         }
     }))
-- cgit v1.2.3
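
A standalone sketch of the dispatch code these patches generate may help when reading the quote! fragments above. The example below is illustrative only: the `Rule` variants, the rule names and the `dispatch` function are invented for this note, and it uses today's stable `name @ ..` subslice syntax rather than the unstable `slice_patterns` form (`name..`) that the generated code targets. It mirrors the shape produced by `make_parser_branch`: fixed-arity children are matched by position, the variable-length segment is bound as a subslice, and the guard (the `all_match` closure followed by the trailing `&& true`) checks that every rule in that segment matches.

    // Illustrative stand-in for the match that a call like
    // parse_children!(input; [label(l), entry(rest)..] => ...) expands to.
    // Everything here is hypothetical; only the shape follows the macro output.
    #[derive(Debug, PartialEq, Clone, Copy)]
    enum Rule {
        Label,
        Entry,
    }

    fn dispatch(children_rules: &[Rule]) -> Result<String, String> {
        Ok(match children_rules {
            // One fixed `Label` child followed by any number of `Entry` children.
            [Rule::Label, variable_pattern @ ..]
                if {
                    // Same trick as the generated `all_match` closure: the guard
                    // checks every rule in the variable-length segment.
                    let all_match =
                        |slice: &[_]| slice.iter().all(|r| r == &Rule::Entry);
                    all_match(variable_pattern)
                } && true =>
            {
                format!("a label followed by {} entries", variable_pattern.len())
            }
            // Catch-all arm, like the generated `[..] => return Err(...)`.
            [..] => {
                return Err(format!("Unexpected children: {:?}", children_rules))
            }
        })
    }

    fn main() {
        assert!(dispatch(&[Rule::Label, Rule::Entry, Rule::Entry]).is_ok());
        assert!(dispatch(&[Rule::Entry]).is_err());
    }

The real macro goes one step further than this sketch: once a branch matches, the generated `let #binder = Self::#rule_name(...)` statements recursively invoke the corresponding parser methods on each child before evaluating the branch body.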