-rw-r--r--  dhall_generated_parser/src/dhall.pest.visibility |  18
-rw-r--r--  dhall_proc_macros/src/make_parser.rs             | 187
-rw-r--r--  dhall_proc_macros/src/parse_children.rs          |  11
-rw-r--r--  dhall_syntax/src/parser.rs                       | 423
-rw-r--r--  tests_buffer                                     |   2
5 files changed, 344 insertions(+), 297 deletions(-)
diff --git a/dhall_generated_parser/src/dhall.pest.visibility b/dhall_generated_parser/src/dhall.pest.visibility
index dcebf45..17c1edc 100644
--- a/dhall_generated_parser/src/dhall.pest.visibility
+++ b/dhall_generated_parser/src/dhall.pest.visibility
@@ -18,7 +18,7 @@
simple_label
# quoted_label_char
quoted_label
-label
+# label
# nonreserved_label
# any_label
double_quote_chunk
@@ -31,13 +31,13 @@ escaped_quote_pair
escaped_interpolation
single_quote_char
single_quote_literal
-interpolation
+# interpolation
# text_literal
if_
# then
# else_
# let_
-in_
+# in_
# as_
# using
merge
@@ -49,9 +49,9 @@ toMap
assert
# keyword
builtin
-Optional
+# Optional
Text
-List
+# List
Location
# Bool
# True
@@ -95,7 +95,7 @@ arrow
numeric_double_literal
minus_infinity_literal
plus_infinity_literal
-double_literal
+# double_literal
natural_literal
integer_literal
identifier
@@ -104,7 +104,7 @@ variable
# quoted_path_character
unquoted_path_component
quoted_path_component
-path_component
+# path_component
path
local
parent_path
@@ -136,7 +136,7 @@ env
bash_environment_variable
posix_environment_variable
posix_environment_variable_character
-import_type
+# import_type
hash
import_hashed
import
@@ -160,7 +160,7 @@ not_equal_expression
equivalent_expression
application_expression
first_application_expression
-import_expression
+# import_expression
selector_expression
selector
labels
diff --git a/dhall_proc_macros/src/make_parser.rs b/dhall_proc_macros/src/make_parser.rs
index 268a639..3375c39 100644
--- a/dhall_proc_macros/src/make_parser.rs
+++ b/dhall_proc_macros/src/make_parser.rs
@@ -1,33 +1,101 @@
+use std::collections::HashMap;
+use std::iter;
+
use quote::quote;
use syn::parse::{ParseStream, Result};
use syn::spanned::Spanned;
use syn::{
parse_quote, Error, Expr, FnArg, Ident, ImplItem, ImplItemMethod, ItemImpl,
- Pat, Token,
+ Pat, Signature, Token,
};
-fn apply_special_attrs(function: &mut ImplItemMethod) -> Result<()> {
+fn collect_aliases(
+ imp: &mut ItemImpl,
+) -> Result<HashMap<Ident, (Signature, Vec<Ident>)>> {
+ let mut alias_map = HashMap::new();
+
+ for item in &mut imp.items {
+ if let ImplItem::Method(function) = item {
+ let fn_name = function.sig.ident.clone();
+ let mut alias_attrs = function
+ .attrs
+ .drain_filter(|attr| attr.path.is_ident("alias"))
+ .collect::<Vec<_>>()
+ .into_iter();
+
+ if let Some(attr) = alias_attrs.next() {
+ let tgt: Ident = attr.parse_args()?;
+ alias_map
+ .entry(tgt)
+ .or_insert_with(|| (function.sig.clone(), Vec::new()))
+ .1
+ .push(fn_name);
+ }
+ if let Some(attr) = alias_attrs.next() {
+ return Err(Error::new(
+ attr.span(),
+ "expected at most one alias attribute",
+ ));
+ }
+ }
+ }
+
+ Ok(alias_map)
+}
+
+fn parse_rulefn_sig(sig: &Signature) -> Result<(Ident, Ident)> {
+ let fn_name = sig.ident.clone();
+ // Get the name of the first (`input`) function argument
+ let input_arg = sig.inputs.first().ok_or_else(|| {
+ Error::new(
+ sig.inputs.span(),
+ "a rule function needs an `input` argument",
+ )
+ })?;
+ let input_arg = match &input_arg {
+ FnArg::Receiver(_) => return Err(Error::new(
+ input_arg.span(),
+ "a rule function should not have a `self` argument",
+ )),
+ FnArg::Typed(input_arg) => match &*input_arg.pat{
+ Pat::Ident(ident) => ident.ident.clone(),
+ _ => return Err(Error::new(
+ input_arg.span(),
+ "this argument should be a plain identifier instead of a pattern",
+ )),
+ }
+ };
+
+ Ok((fn_name, input_arg))
+}
+
+fn apply_special_attrs(
+ function: &mut ImplItemMethod,
+ alias_map: &mut HashMap<Ident, (Signature, Vec<Ident>)>,
+ rule_enum: &Ident,
+) -> Result<()> {
*function = parse_quote!(
#[allow(non_snake_case, dead_code)]
#function
);
- let recognized_attrs: Vec<_> = function
+ let (fn_name, input_arg) = parse_rulefn_sig(&function.sig)?;
+
+ // `prec_climb` attr
+ let prec_climb_attrs: Vec<_> = function
.attrs
.drain_filter(|attr| attr.path.is_ident("prec_climb"))
.collect();
- let name = function.sig.ident.clone();
-
- if recognized_attrs.is_empty() {
- // do nothing
- } else if recognized_attrs.len() > 1 {
+ if prec_climb_attrs.len() > 1 {
return Err(Error::new(
- recognized_attrs[1].span(),
- "expected a single prec_climb attribute",
+ prec_climb_attrs[1].span(),
+ "expected at most one prec_climb attribute",
));
+ } else if prec_climb_attrs.is_empty() {
+ // do nothing
} else {
- let attr = recognized_attrs.into_iter().next().unwrap();
+ let attr = prec_climb_attrs.into_iter().next().unwrap();
let (child_rule, climber) =
attr.parse_args_with(|input: ParseStream| {
let child_rule: Ident = input.parse()?;
@@ -36,35 +104,14 @@ fn apply_special_attrs(function: &mut ImplItemMethod) -> Result<()> {
Ok((child_rule, climber))
})?;
- // Get the name of the first (`input`) function argument
- let first_arg = function.sig.inputs.first().ok_or_else(|| {
- Error::new(
- function.sig.inputs.span(),
- "a prec_climb function needs 4 arguments",
- )
- })?;
- let first_arg = match &first_arg {
- FnArg::Receiver(_) => return Err(Error::new(
- first_arg.span(),
- "a prec_climb function should not have a `self` argument",
- )),
- FnArg::Typed(first_arg) => match &*first_arg.pat{
- Pat::Ident(ident) => &ident.ident,
- _ => return Err(Error::new(
- first_arg.span(),
- "this argument should be a plain identifier instead of a pattern",
- )),
- }
- };
-
function.block = parse_quote!({
#function
#climber.climb(
- #first_arg.pair.clone().into_inner(),
- |p| Self::#child_rule(#first_arg.with_pair(p)),
+ #input_arg.pair.clone().into_inner(),
+ |p| Self::#child_rule(#input_arg.with_pair(p)),
|l, op, r| {
- #name(#first_arg.clone(), l?, op, r?)
+ #fn_name(#input_arg.clone(), l?, op, r?)
},
)
});
@@ -81,6 +128,22 @@ fn apply_special_attrs(function: &mut ImplItemMethod) -> Result<()> {
})?;
}
+ // `alias` attr
+ if let Some((_, aliases)) = alias_map.remove(&fn_name) {
+ let block = &function.block;
+ function.block = parse_quote!({
+ match #input_arg.as_rule() {
+ #(#rule_enum::#aliases => Self::#aliases(#input_arg),)*
+ #rule_enum::#fn_name => #block,
+ r => unreachable!(
+ "make_parser: called {} on {:?}",
+ stringify!(#fn_name),
+ r
+ )
+ }
+ });
+ }
+
Ok(())
}
@@ -89,21 +152,69 @@ pub fn make_parser(
input: proc_macro::TokenStream,
) -> Result<proc_macro2::TokenStream> {
let rule_enum: Ident = syn::parse(attrs)?;
-
let mut imp: ItemImpl = syn::parse(input)?;
+
+ let mut alias_map = collect_aliases(&mut imp)?;
+ let rule_alias_branches: Vec<_> = alias_map
+ .iter()
+ .flat_map(|(tgt, (_, srcs))| iter::repeat(tgt).zip(srcs))
+ .map(|(tgt, src)| {
+ quote!(
+ #rule_enum::#src => stringify!(#tgt).to_string(),
+ )
+ })
+ .collect();
+
imp.items
.iter_mut()
.map(|item| match item {
- ImplItem::Method(m) => apply_special_attrs(m),
+ ImplItem::Method(m) => {
+ apply_special_attrs(m, &mut alias_map, &rule_enum)
+ }
_ => Ok(()),
})
.collect::<Result<()>>()?;
+ // Entries that remain in the alias map don't have a matching method, so we create one.
+ let extra_fns: Vec<_> = alias_map
+ .iter()
+ .map(|(tgt, (sig, srcs))| {
+ let mut sig = sig.clone();
+ sig.ident = tgt.clone();
+
+ let (_, input_arg) = parse_rulefn_sig(&sig)?;
+ Ok(ImplItem::Method(parse_quote!(
+ #sig {
+ match #input_arg.as_rule() {
+ #(#rule_enum::#srcs => Self::#srcs(#input_arg),)*
+ r if &format!("{:?}", r) == stringify!(#tgt) =>
+ return Err(#input_arg.error(format!(
+ "make_parser: missing method for rule {}",
+ stringify!(#tgt),
+ ))),
+ r => unreachable!(
+ "make_parser: called {} on {:?}",
+ stringify!(#tgt),
+ r
+ )
+ }
+ }
+ )))
+ })
+ .collect::<Result<_>>()?;
+ imp.items.extend(extra_fns);
+
let ty = &imp.self_ty;
let (impl_generics, _, where_clause) = imp.generics.split_for_impl();
Ok(quote!(
impl #impl_generics PestConsumer for #ty #where_clause {
- type RuleEnum = #rule_enum;
+ type Rule = #rule_enum;
+ fn rule_alias(rule: Self::Rule) -> String {
+ match rule {
+ #(#rule_alias_branches)*
+ r => format!("{:?}", r),
+ }
+ }
}
#imp
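
Note: the interesting mechanism in make_parser.rs is the new `#[alias(...)]` attribute. A rule method can declare itself an alias of another rule, and the macro rewrites (or synthesizes) the alias target so that it dispatches on the actual rule of its input. A minimal standalone sketch of that dispatch shape follows; the `Rule` variants and simplified signatures are illustrative only, not the crate's real types.

#[allow(non_camel_case_types, dead_code)]
#[derive(Debug, Clone, Copy, PartialEq)]
enum Rule {
    expression,
    builtin,
    import,
}

type ParseResult<T> = Result<T, String>;

struct Parsers;

impl Parsers {
    fn builtin(rule: Rule) -> ParseResult<&'static str> {
        let _ = rule;
        Ok("handled by `builtin`")
    }
    fn import(rule: Rule) -> ParseResult<&'static str> {
        let _ = rule;
        Ok("handled by `import`")
    }
    // Shape of the method the macro synthesizes for the alias target
    // `expression` when no hand-written `expression` method exists:
    // each aliased rule is forwarded to its own handler.
    fn expression(rule: Rule) -> ParseResult<&'static str> {
        match rule {
            Rule::builtin => Self::builtin(rule),
            Rule::import => Self::import(rule),
            r => unreachable!("make_parser: called expression on {:?}", r),
        }
    }
}

fn main() {
    assert_eq!(Parsers::expression(Rule::builtin), Ok("handled by `builtin`"));
}
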
diff --git a/dhall_proc_macros/src/parse_children.rs b/dhall_proc_macros/src/parse_children.rs
index b1d43fc..a35c03f 100644
--- a/dhall_proc_macros/src/parse_children.rs
+++ b/dhall_proc_macros/src/parse_children.rs
@@ -88,9 +88,7 @@ fn make_parser_branch(
let i_variable_pattern =
Ident::new("___variable_pattern", Span::call_site());
let match_pat = branch.pattern.iter().map(|item| match item {
- Single { rule_name, .. } => {
- quote!(<<Self as PestConsumer>::RuleEnum>::#rule_name)
- }
+ Single { rule_name, .. } => quote!(stringify!(#rule_name)),
Multiple { .. } => quote!(#i_variable_pattern @ ..),
});
let match_filter = branch.pattern.iter().map(|item| match item {
@@ -101,7 +99,7 @@ fn make_parser_branch(
// https://github.com/rust-lang/rust/issues/59803.
let all_match = |slice: &[_]| {
slice.iter().all(|r|
- r == &<<Self as PestConsumer>::RuleEnum>::#rule_name
+ *r == stringify!(#rule_name)
)
};
all_match(#i_variable_pattern)
@@ -192,6 +190,11 @@ pub fn parse_children(
.clone()
.into_inner()
.map(|p| p.as_rule())
+ .map(<Self as PestConsumer>::rule_alias)
+ .collect();
+ let #i_children_rules: Vec<&str> = #i_children_rules
+ .iter()
+ .map(String::as_str)
.collect();
#[allow(unused_mut)]
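
Note: with aliases in play, `parse_children!` can no longer compare children against `Rule` variants directly, since e.g. a `builtin` child must satisfy an `[expression(..)]` pattern. The branch patterns therefore compare rule names as strings after mapping each child through `rule_alias`. A standalone sketch of that strategy (illustrative `Rule` variants; the real enum comes from pest):

#[allow(non_camel_case_types, dead_code)]
#[derive(Debug, Clone, Copy)]
enum Rule {
    expression,
    builtin,
    natural_literal,
}

fn rule_alias(rule: Rule) -> String {
    match rule {
        // `builtin` is declared `#[alias(expression)]`, so it reports as "expression".
        Rule::builtin => "expression".to_string(),
        r => format!("{:?}", r),
    }
}

fn main() {
    let children = [Rule::builtin, Rule::natural_literal];
    let children_rules: Vec<String> =
        children.iter().copied().map(rule_alias).collect();
    let children_rules: Vec<&str> =
        children_rules.iter().map(String::as_str).collect();

    // Roughly what a `[expression(e), natural_literal(n)]` pattern expands to:
    match children_rules.as_slice() {
        ["expression", "natural_literal"] => println!("branch matched"),
        _ => println!("no branch matched"),
    }
}
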
diff --git a/dhall_syntax/src/parser.rs b/dhall_syntax/src/parser.rs
index c2ba19a..bd29a27 100644
--- a/dhall_syntax/src/parser.rs
+++ b/dhall_syntax/src/parser.rs
@@ -66,11 +66,15 @@ impl<'input> ParseInput<'input, Rule> {
fn as_str(&self) -> &'input str {
self.pair.as_str()
}
+ fn as_rule(&self) -> Rule {
+ self.pair.as_rule()
+ }
}
-// Used to retrieve the `Rule` enum associated with the `Self` type in `parse_children`.
+// Used by the macros.
trait PestConsumer {
- type RuleEnum: pest::RuleType;
+ type Rule: pest::RuleType;
+ fn rule_alias(rule: Self::Rule) -> String;
}
fn debug_pair(pair: Pair<Rule>) -> String {
@@ -231,21 +235,17 @@ struct Parsers;
#[make_parser(Rule)]
impl Parsers {
- fn EOI(_: ParseInput<Rule>) -> ParseResult<()> {
+ fn EOI(_input: ParseInput<Rule>) -> ParseResult<()> {
Ok(())
}
+ #[alias(label)]
fn simple_label(input: ParseInput<Rule>) -> ParseResult<Label> {
- Ok(Label::from(input.as_str().trim().to_owned()))
+ Ok(Label::from(input.as_str()))
}
+ #[alias(label)]
fn quoted_label(input: ParseInput<Rule>) -> ParseResult<Label> {
- Ok(Label::from(input.as_str().trim().to_owned()))
- }
- fn label(input: ParseInput<Rule>) -> ParseResult<Label> {
- Ok(parse_children!(input;
- [simple_label(l)] => l,
- [quoted_label(l)] => l,
- ))
+ Ok(Label::from(input.as_str()))
}
fn double_quote_literal<E: Clone>(
@@ -262,17 +262,15 @@ impl Parsers {
input: ParseInput<Rule>,
) -> ParseResult<ParsedTextContents<E>> {
Ok(parse_children!(input;
- [interpolation(e)] => {
+ [expression(e)] => {
InterpolatedTextContents::Expr(e)
},
- [double_quote_escaped(s)] => {
- InterpolatedTextContents::Text(s)
- },
[double_quote_char(s)] => {
- InterpolatedTextContents::Text(s.to_owned())
+ InterpolatedTextContents::Text(s)
},
))
}
+ #[alias(double_quote_char)]
fn double_quote_escaped(input: ParseInput<Rule>) -> ParseResult<String> {
Ok(match input.as_str() {
"\"" => "\"".to_owned(),
@@ -345,8 +343,8 @@ impl Parsers {
}
fn double_quote_char<'a>(
input: ParseInput<'a, Rule>,
- ) -> ParseResult<&'a str> {
- Ok(input.as_str())
+ ) -> ParseResult<String> {
+ Ok(input.as_str().to_owned())
}
fn single_quote_literal<E: Clone>(
@@ -356,6 +354,7 @@ impl Parsers {
[single_quote_continue(lines)] => {
let newline: ParsedText<E> = "\n".to_string().into();
+ // Reverse lines and chars in each line
let mut lines: Vec<ParsedText<E>> = lines
.into_iter()
.rev()
@@ -377,44 +376,27 @@ impl Parsers {
) -> ParseResult<&'a str> {
Ok(input.as_str())
}
- fn escaped_quote_pair<'a>(_: ParseInput<'a, Rule>) -> ParseResult<&'a str> {
+ #[alias(single_quote_char)]
+ fn escaped_quote_pair<'a>(
+ _input: ParseInput<'a, Rule>,
+ ) -> ParseResult<&'a str> {
Ok("''")
}
+ #[alias(single_quote_char)]
fn escaped_interpolation<'a>(
- _: ParseInput<'a, Rule>,
+ _input: ParseInput<'a, Rule>,
) -> ParseResult<&'a str> {
Ok("${")
}
- fn interpolation<E: Clone>(
- input: ParseInput<Rule>,
- ) -> ParseResult<Expr<E>> {
- Ok(parse_children!(input;
- [expression(e)] => e
- ))
- }
// Returns a vec of lines in reversed order, where each line is also in reversed order.
fn single_quote_continue<E: Clone>(
input: ParseInput<Rule>,
) -> ParseResult<Vec<Vec<ParsedTextContents<E>>>> {
Ok(parse_children!(input;
- [interpolation(c), single_quote_continue(lines)] => {
- let c = InterpolatedTextContents::Expr(c);
- let mut lines = lines;
- lines.last_mut().unwrap().push(c);
- lines
- },
- [escaped_quote_pair(c), single_quote_continue(lines)] => {
- let mut lines = lines;
- // TODO: don't allocate for every char
- let c = InterpolatedTextContents::Text(c.to_owned());
- lines.last_mut().unwrap().push(c);
- lines
- },
- [escaped_interpolation(c), single_quote_continue(lines)] => {
+ [expression(e), single_quote_continue(lines)] => {
+ let c = InterpolatedTextContents::Expr(e);
let mut lines = lines;
- // TODO: don't allocate for every char
- let c = InterpolatedTextContents::Text(c.to_owned());
lines.last_mut().unwrap().push(c);
lines
},
@@ -435,38 +417,41 @@ impl Parsers {
))
}
+ #[alias(expression)]
fn builtin<E: Clone>(input: ParseInput<Rule>) -> ParseResult<Expr<E>> {
let s = input.as_str();
- let span = input.as_span();
- Ok(spanned(
- span,
- match crate::Builtin::parse(s) {
- Some(b) => Builtin(b),
- None => {
- match s {
- "True" => BoolLit(true),
- "False" => BoolLit(false),
- "Type" => Const(crate::Const::Type),
- "Kind" => Const(crate::Const::Kind),
- "Sort" => Const(crate::Const::Sort),
- _ => Err(input
- .error(format!("Unrecognized builtin: '{}'", s)))?,
- }
- }
+ let e = match crate::Builtin::parse(s) {
+ Some(b) => Builtin(b),
+ None => match s {
+ "True" => BoolLit(true),
+ "False" => BoolLit(false),
+ "Type" => Const(crate::Const::Type),
+ "Kind" => Const(crate::Const::Kind),
+ "Sort" => Const(crate::Const::Sort),
+ _ => Err(input.error(format!("Unrecognized builtin: '{}'", s)))?,
},
- ))
+ };
+ Ok(spanned(input.as_span(), e))
}
- fn NaN(_: ParseInput<Rule>) -> ParseResult<()> {
- Ok(())
+ #[alias(double_literal)]
+ fn NaN(_input: ParseInput<Rule>) -> ParseResult<core::Double> {
+ Ok(std::f64::NAN.into())
}
- fn minus_infinity_literal(_: ParseInput<Rule>) -> ParseResult<()> {
- Ok(())
+ #[alias(double_literal)]
+ fn minus_infinity_literal(
+ _input: ParseInput<Rule>,
+ ) -> ParseResult<core::Double> {
+ Ok(std::f64::NEG_INFINITY.into())
}
- fn plus_infinity_literal(_: ParseInput<Rule>) -> ParseResult<()> {
- Ok(())
+ #[alias(double_literal)]
+ fn plus_infinity_literal(
+ _input: ParseInput<Rule>,
+ ) -> ParseResult<core::Double> {
+ Ok(std::f64::INFINITY.into())
}
+ #[alias(double_literal)]
fn numeric_double_literal(
input: ParseInput<Rule>,
) -> ParseResult<core::Double> {
@@ -481,15 +466,6 @@ impl Parsers {
}
}
- fn double_literal(input: ParseInput<Rule>) -> ParseResult<core::Double> {
- Ok(parse_children!(input;
- [numeric_double_literal(n)] => n,
- [minus_infinity_literal(_)] => std::f64::NEG_INFINITY.into(),
- [plus_infinity_literal(_)] => std::f64::INFINITY.into(),
- [NaN(_)] => std::f64::NAN.into(),
- ))
- }
-
fn natural_literal(input: ParseInput<Rule>) -> ParseResult<core::Natural> {
input
.as_str()
@@ -506,13 +482,13 @@ impl Parsers {
.map_err(|e| input.error(format!("{}", e)))
}
+ #[alias(expression)]
fn identifier<E: Clone>(input: ParseInput<Rule>) -> ParseResult<Expr<E>> {
- let span = input.as_span();
Ok(parse_children!(input;
[variable(v)] => {
- spanned(span, Var(v))
+ spanned(input.as_span(), Var(v))
},
- [builtin(e)] => e,
+ [expression(e)] => e,
))
}
@@ -527,42 +503,40 @@ impl Parsers {
))
}
+ #[alias(path_component)]
fn unquoted_path_component<'a>(
input: ParseInput<'a, Rule>,
- ) -> ParseResult<&'a str> {
- Ok(input.as_str())
+ ) -> ParseResult<String> {
+ Ok(input.as_str().to_string())
}
+ #[alias(path_component)]
fn quoted_path_component<'a>(
input: ParseInput<'a, Rule>,
- ) -> ParseResult<&'a str> {
- Ok(input.as_str())
- }
- fn path_component(input: ParseInput<Rule>) -> ParseResult<String> {
- Ok(parse_children!(input;
- [unquoted_path_component(s)] => s.to_string(),
- [quoted_path_component(s)] => {
- const RESERVED: &percent_encoding::AsciiSet =
- &percent_encoding::CONTROLS
- .add(b'=').add(b':').add(b'/').add(b'?')
- .add(b'#').add(b'[').add(b']').add(b'@')
- .add(b'!').add(b'$').add(b'&').add(b'\'')
- .add(b'(').add(b')').add(b'*').add(b'+')
- .add(b',').add(b';');
- s.chars()
- .map(|c| {
- // Percent-encode ascii chars
- if c.is_ascii() {
- percent_encoding::utf8_percent_encode(
- &c.to_string(),
- RESERVED,
- ).to_string()
- } else {
- c.to_string()
- }
- })
- .collect()
- },
- ))
+ ) -> ParseResult<String> {
+ #[rustfmt::skip]
+ const RESERVED: &percent_encoding::AsciiSet =
+ &percent_encoding::CONTROLS
+ .add(b'=').add(b':').add(b'/').add(b'?')
+ .add(b'#').add(b'[').add(b']').add(b'@')
+ .add(b'!').add(b'$').add(b'&').add(b'\'')
+ .add(b'(').add(b')').add(b'*').add(b'+')
+ .add(b',').add(b';');
+ Ok(input
+ .as_str()
+ .chars()
+ .map(|c| {
+ // Percent-encode ascii chars
+ if c.is_ascii() {
+ percent_encoding::utf8_percent_encode(
+ &c.to_string(),
+ RESERVED,
+ )
+ .to_string()
+ } else {
+ c.to_string()
+ }
+ })
+ .collect())
}
fn path(input: ParseInput<Rule>) -> ParseResult<Vec<String>> {
Ok(parse_children!(input;
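
Note: `quoted_path_component` now percent-encodes its text inline instead of deferring to the removed `path_component` wrapper. A small self-contained usage sketch of the same encoding (the function name is mine; it assumes the percent-encoding 2.x crate that the handler above already uses):

use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};

// Same reserved set as the handler above: C0 controls plus the
// delimiters that must not appear raw in a quoted path component.
const RESERVED: &AsciiSet = &CONTROLS
    .add(b'=').add(b':').add(b'/').add(b'?')
    .add(b'#').add(b'[').add(b']').add(b'@')
    .add(b'!').add(b'$').add(b'&').add(b'\'')
    .add(b'(').add(b')').add(b'*').add(b'+')
    .add(b',').add(b';');

fn encode_path_component(s: &str) -> String {
    s.chars()
        .map(|c| {
            // Only ASCII characters go through percent-encoding;
            // other Unicode characters are passed along unchanged.
            if c.is_ascii() {
                utf8_percent_encode(&c.to_string(), RESERVED).to_string()
            } else {
                c.to_string()
            }
        })
        .collect()
}

fn main() {
    assert_eq!(encode_path_component("a?b:c"), "a%3Fb%3Ac");
    println!("{}", encode_path_component("père/fils"));
}
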
@@ -572,17 +546,16 @@ impl Parsers {
))
}
- fn local(
+ #[alias(import_type)]
+ fn local<E: Clone>(
input: ParseInput<Rule>,
- ) -> ParseResult<(FilePrefix, Vec<String>)> {
+ ) -> ParseResult<ImportLocation<Expr<E>>> {
Ok(parse_children!(input;
- [parent_path(l)] => l,
- [here_path(l)] => l,
- [home_path(l)] => l,
- [absolute_path(l)] => l,
+ [local_path((prefix, p))] => ImportLocation::Local(prefix, p),
))
}
+ #[alias(local_path)]
fn parent_path(
input: ParseInput<Rule>,
) -> ParseResult<(FilePrefix, Vec<String>)> {
@@ -590,6 +563,7 @@ impl Parsers {
[path(p)] => (FilePrefix::Parent, p)
))
}
+ #[alias(local_path)]
fn here_path(
input: ParseInput<Rule>,
) -> ParseResult<(FilePrefix, Vec<String>)> {
@@ -597,6 +571,7 @@ impl Parsers {
[path(p)] => (FilePrefix::Here, p)
))
}
+ #[alias(local_path)]
fn home_path(
input: ParseInput<Rule>,
) -> ParseResult<(FilePrefix, Vec<String>)> {
@@ -604,6 +579,7 @@ impl Parsers {
[path(p)] => (FilePrefix::Home, p)
))
}
+ #[alias(local_path)]
fn absolute_path(
input: ParseInput<Rule>,
) -> ParseResult<(FilePrefix, Vec<String>)> {
@@ -649,25 +625,31 @@ impl Parsers {
Ok(input.as_str().to_owned())
}
- fn http<E: Clone>(input: ParseInput<Rule>) -> ParseResult<URL<Expr<E>>> {
- Ok(parse_children!(input;
- [http_raw(url)] => url,
- [http_raw(url), import_expression(e)] =>
- URL { headers: Some(e), ..url },
- ))
+ #[alias(import_type)]
+ fn http<E: Clone>(
+ input: ParseInput<Rule>,
+ ) -> ParseResult<ImportLocation<Expr<E>>> {
+ Ok(ImportLocation::Remote(parse_children!(input;
+ [http_raw(url)] => url,
+ [http_raw(url), expression(e)] => URL { headers: Some(e), ..url },
+ )))
}
- fn env(input: ParseInput<Rule>) -> ParseResult<String> {
+ #[alias(import_type)]
+ fn env<E: Clone>(
+ input: ParseInput<Rule>,
+ ) -> ParseResult<ImportLocation<Expr<E>>> {
Ok(parse_children!(input;
- [bash_environment_variable(s)] => s,
- [posix_environment_variable(s)] => s,
+ [environment_variable(v)] => ImportLocation::Env(v),
))
}
+ #[alias(environment_variable)]
fn bash_environment_variable(
input: ParseInput<Rule>,
) -> ParseResult<String> {
Ok(input.as_str().to_owned())
}
+ #[alias(environment_variable)]
fn posix_environment_variable(
input: ParseInput<Rule>,
) -> ParseResult<String> {
@@ -694,27 +676,11 @@ impl Parsers {
})
}
- fn missing(_: ParseInput<Rule>) -> ParseResult<()> {
- Ok(())
- }
-
- fn import_type<E: Clone>(
- input: ParseInput<Rule>,
+ #[alias(import_type)]
+ fn missing<E: Clone>(
+ _input: ParseInput<Rule>,
) -> ParseResult<ImportLocation<Expr<E>>> {
- Ok(parse_children!(input;
- [missing(_)] => {
- ImportLocation::Missing
- },
- [env(e)] => {
- ImportLocation::Env(e)
- },
- [http(url)] => {
- ImportLocation::Remote(url)
- },
- [local((prefix, p))] => {
- ImportLocation::Local(prefix, p)
- },
- ))
+ Ok(ImportLocation::Missing)
}
fn hash(input: ParseInput<Rule>) -> ParseResult<Hash> {
@@ -730,78 +696,65 @@ impl Parsers {
fn import_hashed<E: Clone>(
input: ParseInput<Rule>,
) -> ParseResult<crate::Import<Expr<E>>> {
+ use crate::Import;
+ let mode = ImportMode::Code;
Ok(parse_children!(input;
- [import_type(location)] =>
- crate::Import {mode: ImportMode::Code, location, hash: None },
- [import_type(location), hash(h)] =>
- crate::Import {mode: ImportMode::Code, location, hash: Some(h) },
+ [import_type(location)] => Import { mode, location, hash: None },
+ [import_type(location), hash(h)] => Import { mode, location, hash: Some(h) },
))
}
- fn Text(_: ParseInput<Rule>) -> ParseResult<()> {
- Ok(())
+ #[alias(import_mode)]
+ fn Text(_input: ParseInput<Rule>) -> ParseResult<ImportMode> {
+ Ok(ImportMode::RawText)
}
- fn Location(_: ParseInput<Rule>) -> ParseResult<()> {
- Ok(())
+ #[alias(import_mode)]
+ fn Location(_input: ParseInput<Rule>) -> ParseResult<ImportMode> {
+ Ok(ImportMode::Location)
}
+ #[alias(expression)]
fn import<E: Clone>(input: ParseInput<Rule>) -> ParseResult<Expr<E>> {
- let span = input.as_span();
- Ok(parse_children!(input;
+ use crate::Import;
+ let import = parse_children!(input;
[import_hashed(imp)] => {
- spanned(span, Import(crate::Import {
- mode: ImportMode::Code,
- ..imp
- }))
- },
- [import_hashed(imp), Text(_)] => {
- spanned(span, Import(crate::Import {
- mode: ImportMode::RawText,
- ..imp
- }))
+ Import { mode: ImportMode::Code, ..imp }
},
- [import_hashed(imp), Location(_)] => {
- spanned(span, Import(crate::Import {
- mode: ImportMode::Location,
- ..imp
- }))
+ [import_hashed(imp), import_mode(mode)] => {
+ Import { mode, ..imp }
},
- ))
+ );
+ Ok(spanned(input.as_span(), Import(import)))
}
- fn lambda(_: ParseInput<Rule>) -> ParseResult<()> {
+ fn lambda(_input: ParseInput<Rule>) -> ParseResult<()> {
Ok(())
}
- fn forall(_: ParseInput<Rule>) -> ParseResult<()> {
+ fn forall(_input: ParseInput<Rule>) -> ParseResult<()> {
Ok(())
}
- fn arrow(_: ParseInput<Rule>) -> ParseResult<()> {
+ fn arrow(_input: ParseInput<Rule>) -> ParseResult<()> {
Ok(())
}
- fn merge(_: ParseInput<Rule>) -> ParseResult<()> {
+ fn merge(_input: ParseInput<Rule>) -> ParseResult<()> {
Ok(())
}
- fn assert(_: ParseInput<Rule>) -> ParseResult<()> {
+ fn assert(_input: ParseInput<Rule>) -> ParseResult<()> {
Ok(())
}
- fn if_(_: ParseInput<Rule>) -> ParseResult<()> {
+ fn if_(_input: ParseInput<Rule>) -> ParseResult<()> {
Ok(())
}
- fn in_(_: ParseInput<Rule>) -> ParseResult<()> {
- Ok(())
- }
- fn toMap(_: ParseInput<Rule>) -> ParseResult<()> {
+ fn toMap(_input: ParseInput<Rule>) -> ParseResult<()> {
Ok(())
}
+ #[alias(expression)]
fn empty_list_literal<E: Clone>(
input: ParseInput<Rule>,
) -> ParseResult<Expr<E>> {
- let span = input.as_span();
Ok(parse_children!(input;
- [application_expression(e)] => {
- spanned(span, EmptyListLit(e))
- },
+ [expression(e)] => spanned(input.as_span(), EmptyListLit(e)),
))
}
@@ -816,7 +769,7 @@ impl Parsers {
expression(right)] => {
spanned(span, BoolIf(cond, left, right))
},
- [let_binding(bindings).., in_(()), expression(final_expr)] => {
+ [let_binding(bindings).., expression(final_expr)] => {
bindings.rev().fold(
final_expr,
|acc, x| unspanned(Let(x.0, x.1, x.2, acc))
@@ -826,24 +779,22 @@ impl Parsers {
arrow(()), expression(body)] => {
spanned(span, Pi(l, typ, body))
},
- [operator_expression(typ), arrow(()), expression(body)] => {
+ [expression(typ), arrow(()), expression(body)] => {
spanned(span, Pi("_".into(), typ, body))
},
- [merge(()), import_expression(x), import_expression(y),
- application_expression(z)] => {
+ [merge(()), expression(x), expression(y), expression(z)] => {
spanned(span, Merge(x, y, Some(z)))
},
- [empty_list_literal(e)] => e,
[assert(()), expression(x)] => {
spanned(span, Assert(x))
},
- [toMap(()), import_expression(x), application_expression(y)] => {
+ [toMap(()), expression(x), expression(y)] => {
spanned(span, ToMap(x, Some(y)))
},
- [operator_expression(e)] => e,
- [operator_expression(e), expression(annot)] => {
+ [expression(e), expression(annot)] => {
spanned(span, Annot(e, annot))
},
+ [expression(e)] => e,
))
}
@@ -858,14 +809,8 @@ impl Parsers {
))
}
- fn List(_: ParseInput<Rule>) -> ParseResult<()> {
- Ok(())
- }
- fn Optional(_: ParseInput<Rule>) -> ParseResult<()> {
- Ok(())
- }
-
- #[prec_climb(application_expression, PRECCLIMBER)]
+ #[alias(expression)]
+ #[prec_climb(expression, PRECCLIMBER)]
fn operator_expression<E: Clone>(
input: ParseInput<Rule>,
l: Expr<E>,
@@ -894,55 +839,48 @@ impl Parsers {
Ok(unspanned(BinOp(op, l, r)))
}
- fn Some_(_: ParseInput<Rule>) -> ParseResult<()> {
+ fn Some_(_input: ParseInput<Rule>) -> ParseResult<()> {
Ok(())
}
+ #[alias(expression)]
fn application_expression<E: Clone>(
input: ParseInput<Rule>,
) -> ParseResult<Expr<E>> {
Ok(parse_children!(input;
- [first_application_expression(e)] => e,
- [first_application_expression(first),
- import_expression(rest)..] => {
+ [expression(e)] => e,
+ [expression(first), expression(rest)..] => {
rest.fold(first, |acc, e| unspanned(App(acc, e)))
},
))
}
+ #[alias(expression)]
fn first_application_expression<E: Clone>(
input: ParseInput<Rule>,
) -> ParseResult<Expr<E>> {
let span = input.as_span();
Ok(parse_children!(input;
- [Some_(()), import_expression(e)] => {
+ [Some_(()), expression(e)] => {
spanned(span, SomeLit(e))
},
- [merge(()), import_expression(x), import_expression(y)] => {
+ [merge(()), expression(x), expression(y)] => {
spanned(span, Merge(x, y, None))
},
- [toMap(()), import_expression(x)] => {
+ [toMap(()), expression(x)] => {
spanned(span, ToMap(x, None))
},
- [import_expression(e)] => e,
- ))
- }
-
- fn import_expression<E: Clone>(
- input: ParseInput<Rule>,
- ) -> ParseResult<Expr<E>> {
- Ok(parse_children!(input;
- [selector_expression(e)] => e,
- [import(e)] => e,
+ [expression(e)] => e,
))
}
+ #[alias(expression)]
fn selector_expression<E: Clone>(
input: ParseInput<Rule>,
) -> ParseResult<Expr<E>> {
Ok(parse_children!(input;
- [primitive_expression(e)] => e,
- [primitive_expression(first), selector(rest)..] => {
+ [expression(e)] => e,
+ [expression(first), selector(rest)..] => {
rest.fold(first, |acc, e| unspanned(match e {
Either::Left(l) => Field(acc, l),
Either::Right(ls) => Projection(acc, ls),
@@ -967,6 +905,7 @@ impl Parsers {
))
}
+ #[alias(expression)]
fn primitive_expression<E: Clone>(
input: ParseInput<Rule>,
) -> ParseResult<Expr<E>> {
@@ -977,46 +916,41 @@ impl Parsers {
[integer_literal(n)] => spanned(span, IntegerLit(n)),
[double_quote_literal(s)] => spanned(span, TextLit(s)),
[single_quote_literal(s)] => spanned(span, TextLit(s)),
- [empty_record_type(e)] => e,
- [empty_record_literal(e)] => e,
- [non_empty_record_type_or_literal(e)] => e,
- [union_type(e)] => e,
- [non_empty_list_literal(e)] => e,
- [identifier(e)] => e,
[expression(e)] => e,
))
}
+ #[alias(expression)]
fn empty_record_literal<E: Clone>(
input: ParseInput<Rule>,
) -> ParseResult<Expr<E>> {
- let span = input.as_span();
- Ok(spanned(span, RecordLit(Default::default())))
+ Ok(spanned(input.as_span(), RecordLit(Default::default())))
}
+ #[alias(expression)]
fn empty_record_type<E: Clone>(
input: ParseInput<Rule>,
) -> ParseResult<Expr<E>> {
- let span = input.as_span();
- Ok(spanned(span, RecordType(Default::default())))
+ Ok(spanned(input.as_span(), RecordType(Default::default())))
}
+ #[alias(expression)]
fn non_empty_record_type_or_literal<E: Clone>(
input: ParseInput<Rule>,
) -> ParseResult<Expr<E>> {
- let span = input.as_span();
- Ok(parse_children!(input;
+ let e = parse_children!(input;
[label(first_label), non_empty_record_type(rest)] => {
let (first_expr, mut map) = rest;
map.insert(first_label, first_expr);
- spanned(span, RecordType(map))
+ RecordType(map)
},
[label(first_label), non_empty_record_literal(rest)] => {
let (first_expr, mut map) = rest;
map.insert(first_label, first_expr);
- spanned(span, RecordLit(map))
+ RecordLit(map)
},
- ))
+ );
+ Ok(spanned(input.as_span(), e))
}
fn non_empty_record_type<E: Clone>(
@@ -1055,19 +989,16 @@ impl Parsers {
))
}
+ #[alias(expression)]
fn union_type<E: Clone>(input: ParseInput<Rule>) -> ParseResult<Expr<E>> {
- let span = input.as_span();
- Ok(parse_children!(input;
- [empty_union_type(_)] => {
- spanned(span, UnionType(Default::default()))
- },
- [union_type_entry(entries)..] => {
- spanned(span, UnionType(entries.collect()))
- },
- ))
+ let map = parse_children!(input;
+ [empty_union_type(_)] => Default::default(),
+ [union_type_entry(entries)..] => entries.collect(),
+ );
+ Ok(spanned(input.as_span(), UnionType(map)))
}
- fn empty_union_type(_: ParseInput<Rule>) -> ParseResult<()> {
+ fn empty_union_type(_input: ParseInput<Rule>) -> ParseResult<()> {
Ok(())
}
@@ -1080,13 +1011,13 @@ impl Parsers {
))
}
+ #[alias(expression)]
fn non_empty_list_literal<E: Clone>(
input: ParseInput<Rule>,
) -> ParseResult<Expr<E>> {
- let span = input.as_span();
Ok(parse_children!(input;
[expression(items)..] => spanned(
- span,
+ input.as_span(),
NEListLit(items.collect())
)
))
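
Note: one consequence of the aliasing worth spelling out: `NaN`, the infinity literals and `numeric_double_literal` are all tagged `#[alias(double_literal)]` and each returns a `core::Double` directly, so the old `double_literal` dispatcher method disappears. A rough standalone sketch of the combined behaviour, using a plain f64 and an illustrative function name:

fn parse_double_literal(s: &str) -> Result<f64, std::num::ParseFloatError> {
    Ok(match s {
        // The keyword-like rules map straight to special values...
        "NaN" => f64::NAN,
        "Infinity" => f64::INFINITY,
        "-Infinity" => f64::NEG_INFINITY,
        // ...and numeric_double_literal parses an ordinary float.
        _ => s.parse::<f64>()?,
    })
}

fn main() {
    assert!(parse_double_literal("NaN").unwrap().is_nan());
    assert_eq!(parse_double_literal("-Infinity").unwrap(), f64::NEG_INFINITY);
    assert_eq!(parse_double_literal("3.25").unwrap(), 3.25);
}
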
diff --git a/tests_buffer b/tests_buffer
index 6597d69..1c4cde5 100644
--- a/tests_buffer
+++ b/tests_buffer
@@ -4,6 +4,7 @@ parser:
text interpolation and escapes
projection by expression unit tests
fix fakeurlencode test
+s/QuotedVariable/VariableQuoted/
success/
operators/
PrecedenceAll1 a ? b || c + d ++ e # f && g ∧ h ⫽ i ⩓ j * k == l != m n.o
@@ -13,6 +14,7 @@ success/
EmptyRecordLiteral {=}
ToMap toMap x
ToMapAnnot toMap x : T
+ VariableQuotedSpace ` x `
failure/
AssertNoAnnotation assert