Diffstat (limited to 'dhall/src/semantics/phase')
-rw-r--r--  dhall/src/semantics/phase/mod.rs        252
-rw-r--r--  dhall/src/semantics/phase/normalize.rs  794
-rw-r--r--  dhall/src/semantics/phase/parse.rs       37
-rw-r--r--  dhall/src/semantics/phase/resolve.rs    181
-rw-r--r--  dhall/src/semantics/phase/typecheck.rs  810
5 files changed, 2074 insertions, 0 deletions
diff --git a/dhall/src/semantics/phase/mod.rs b/dhall/src/semantics/phase/mod.rs
new file mode 100644
index 0000000..752c257
--- /dev/null
+++ b/dhall/src/semantics/phase/mod.rs
@@ -0,0 +1,252 @@
+use std::fmt::Display;
+use std::path::Path;
+
+use crate::semantics::core::value::{ToExprOptions, Value};
+use crate::semantics::core::valuef::ValueF;
+use crate::semantics::core::var::{AlphaVar, Shift, Subst};
+use crate::semantics::error::{EncodeError, Error, ImportError, TypeError};
+use crate::syntax::binary;
+use crate::syntax::{Builtin, Const, Expr};
+use resolve::ImportRoot;
+
+pub(crate) mod normalize;
+pub(crate) mod parse;
+pub(crate) mod resolve;
+pub(crate) mod typecheck;
+
+pub type ParsedExpr = Expr<Normalized>;
+pub type DecodedExpr = Expr<Normalized>;
+pub type ResolvedExpr = Expr<Normalized>;
+pub type NormalizedExpr = Expr<Normalized>;
+
+#[derive(Debug, Clone)]
+pub struct Parsed(ParsedExpr, ImportRoot);
+
+/// An expression where all imports have been resolved
+///
+/// Invariant: there must be no `Import` nodes or `ImportAlt` operations left.
+#[derive(Debug, Clone)]
+pub struct Resolved(ResolvedExpr);
+
+/// A typed expression
+#[derive(Debug, Clone)]
+pub struct Typed(Value);
+
+/// A normalized expression.
+///
+/// Invariant: the contained `Typed` expression must be in normal form.
+#[derive(Debug, Clone)]
+pub struct Normalized(Typed);
+
+impl Parsed {
+ pub fn parse_file(f: &Path) -> Result<Parsed, Error> {
+ parse::parse_file(f)
+ }
+ pub fn parse_str(s: &str) -> Result<Parsed, Error> {
+ parse::parse_str(s)
+ }
+ pub fn parse_binary_file(f: &Path) -> Result<Parsed, Error> {
+ parse::parse_binary_file(f)
+ }
+ pub fn parse_binary(data: &[u8]) -> Result<Parsed, Error> {
+ parse::parse_binary(data)
+ }
+
+ pub fn resolve(self) -> Result<Resolved, ImportError> {
+ resolve::resolve(self)
+ }
+ pub fn skip_resolve(self) -> Result<Resolved, ImportError> {
+ resolve::skip_resolve_expr(self)
+ }
+
+ pub fn encode(&self) -> Result<Vec<u8>, EncodeError> {
+ binary::encode(&self.0)
+ }
+}
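+
+// A minimal sketch of the full phase pipeline, shown here for orientation
+// (hypothetical caller; the distinct error types are glossed over with `?`):
+//
+//     let parsed = Parsed::parse_str("if True then 1 else 2")?;
+//     let resolved = parsed.resolve()?;   // nothing to do: no imports
+//     let typed = resolved.typecheck()?;  // infers the type `Natural`
+//     let normalized = typed.normalize(); // reduces the value to `1`
+//     let cbor: Vec<u8> = normalized.encode()?;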
+
+impl Resolved {
+ pub fn typecheck(self) -> Result<Typed, TypeError> {
+ Ok(typecheck::typecheck(self.0)?.into_typed())
+ }
+ pub fn typecheck_with(self, ty: &Typed) -> Result<Typed, TypeError> {
+ Ok(typecheck::typecheck_with(self.0, ty.normalize_to_expr())?
+ .into_typed())
+ }
+}
+
+impl Typed {
+ /// Reduce an expression to its normal form, performing beta reduction
+ ///
+ /// `normalize` does not type-check the expression. You may want to type-check
+ /// expressions before normalizing them since normalization can convert an
+ /// ill-typed expression into a well-typed expression.
+ ///
+ /// However, `normalize` will not fail if the expression is ill-typed and will
+ /// leave ill-typed sub-expressions unevaluated.
+ pub fn normalize(mut self) -> Normalized {
+ self.normalize_mut();
+ Normalized(self)
+ }
+
+ pub(crate) fn from_const(c: Const) -> Self {
+ Typed(Value::from_const(c))
+ }
+ pub(crate) fn from_valuef_and_type(v: ValueF, t: Typed) -> Self {
+ Typed(Value::from_valuef_and_type(v, t.into_value()))
+ }
+ pub(crate) fn from_value(th: Value) -> Self {
+ Typed(th)
+ }
+ pub(crate) fn const_type() -> Self {
+ Typed::from_const(Const::Type)
+ }
+
+ pub(crate) fn to_expr(&self) -> NormalizedExpr {
+ self.0.to_expr(ToExprOptions {
+ alpha: false,
+ normalize: false,
+ })
+ }
+ pub fn normalize_to_expr(&self) -> NormalizedExpr {
+ self.0.to_expr(ToExprOptions {
+ alpha: false,
+ normalize: true,
+ })
+ }
+ pub(crate) fn normalize_to_expr_alpha(&self) -> NormalizedExpr {
+ self.0.to_expr(ToExprOptions {
+ alpha: true,
+ normalize: true,
+ })
+ }
+ pub(crate) fn to_value(&self) -> Value {
+ self.0.clone()
+ }
+ pub(crate) fn into_value(self) -> Value {
+ self.0
+ }
+
+ pub(crate) fn normalize_mut(&mut self) {
+ self.0.normalize_mut()
+ }
+
+ pub(crate) fn get_type(&self) -> Result<Typed, TypeError> {
+ Ok(self.0.get_type()?.into_typed())
+ }
+
+ pub fn make_builtin_type(b: Builtin) -> Self {
+ Typed::from_value(Value::from_builtin(b))
+ }
+ pub fn make_optional_type(t: Typed) -> Self {
+ Typed::from_value(
+ Value::from_builtin(Builtin::Optional).app(t.to_value()),
+ )
+ }
+ pub fn make_list_type(t: Typed) -> Self {
+ Typed::from_value(Value::from_builtin(Builtin::List).app(t.to_value()))
+ }
+ pub fn make_record_type(
+ kts: impl Iterator<Item = (String, Typed)>,
+ ) -> Self {
+ Typed::from_valuef_and_type(
+ ValueF::RecordType(
+ kts.map(|(k, t)| (k.into(), t.into_value())).collect(),
+ ),
+ Typed::const_type(),
+ )
+ }
+ pub fn make_union_type(
+ kts: impl Iterator<Item = (String, Option<Typed>)>,
+ ) -> Self {
+ Typed::from_valuef_and_type(
+ ValueF::UnionType(
+ kts.map(|(k, t)| (k.into(), t.map(|t| t.into_value())))
+ .collect(),
+ ),
+ Typed::const_type(),
+ )
+ }
+}
+
+impl Normalized {
+ pub fn encode(&self) -> Result<Vec<u8>, EncodeError> {
+ binary::encode(&self.to_expr())
+ }
+
+ pub(crate) fn to_expr(&self) -> NormalizedExpr {
+ self.0.normalize_to_expr()
+ }
+ pub(crate) fn to_expr_alpha(&self) -> NormalizedExpr {
+ self.0.normalize_to_expr_alpha()
+ }
+ pub(crate) fn into_typed(self) -> Typed {
+ self.0
+ }
+}
+
+impl Shift for Typed {
+ fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> {
+ Some(Typed(self.0.shift(delta, var)?))
+ }
+}
+
+impl Shift for Normalized {
+ fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> {
+ Some(Normalized(self.0.shift(delta, var)?))
+ }
+}
+
+impl Subst<Value> for Typed {
+ fn subst_shift(&self, var: &AlphaVar, val: &Value) -> Self {
+ Typed(self.0.subst_shift(var, val))
+ }
+}
+
+macro_rules! derive_traits_for_wrapper_struct {
+ ($ty:ident) => {
+ impl std::cmp::PartialEq for $ty {
+ fn eq(&self, other: &Self) -> bool {
+ self.0 == other.0
+ }
+ }
+
+ impl std::cmp::Eq for $ty {}
+
+ impl std::fmt::Display for $ty {
+ fn fmt(
+ &self,
+ f: &mut std::fmt::Formatter,
+ ) -> Result<(), std::fmt::Error> {
+ self.0.fmt(f)
+ }
+ }
+ };
+}
+
+derive_traits_for_wrapper_struct!(Parsed);
+derive_traits_for_wrapper_struct!(Resolved);
+derive_traits_for_wrapper_struct!(Normalized);
+
+impl std::hash::Hash for Normalized {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: std::hash::Hasher,
+ {
+ if let Ok(vec) = self.encode() {
+ vec.hash(state)
+ }
+ }
+}
+
+impl Eq for Typed {}
+impl PartialEq for Typed {
+ fn eq(&self, other: &Self) -> bool {
+ self.0 == other.0
+ }
+}
+
+impl Display for Typed {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
+ self.to_expr().fmt(f)
+ }
+}
diff --git a/dhall/src/semantics/phase/normalize.rs b/dhall/src/semantics/phase/normalize.rs
new file mode 100644
index 0000000..81c3ce1
--- /dev/null
+++ b/dhall/src/semantics/phase/normalize.rs
@@ -0,0 +1,794 @@
+use std::collections::HashMap;
+
+use crate::semantics::core::value::Value;
+use crate::semantics::core::valuef::ValueF;
+use crate::semantics::core::var::{AlphaLabel, AlphaVar, Shift, Subst};
+use crate::semantics::phase::Normalized;
+use crate::syntax;
+use crate::syntax::Const::Type;
+use crate::syntax::{
+ BinOp, Builtin, ExprF, InterpolatedText, InterpolatedTextContents, Label,
+ NaiveDouble,
+};
+
+// Ad-hoc macro to help construct closures
+macro_rules! make_closure {
+ (#$var:ident) => { $var.clone() };
+ (var($var:ident, $n:expr, $($ty:tt)*)) => {{
+ let var = AlphaVar::from_var_and_alpha(
+ Label::from(stringify!($var)).into(),
+ $n
+ );
+ ValueF::Var(var)
+ .into_value_with_type(make_closure!($($ty)*))
+ }};
+ // Warning: assumes that $ty, as a dhall value, has type `Type`
+ (λ($var:ident : $($ty:tt)*) -> $($body:tt)*) => {{
+ let var: AlphaLabel = Label::from(stringify!($var)).into();
+ let ty = make_closure!($($ty)*);
+ let body = make_closure!($($body)*);
+ let body_ty = body.get_type_not_sort();
+ let lam_ty = ValueF::Pi(var.clone(), ty.clone(), body_ty)
+ .into_value_with_type(Value::from_const(Type));
+ ValueF::Lam(var, ty, body).into_value_with_type(lam_ty)
+ }};
+ (Natural) => {
+ Value::from_builtin(Builtin::Natural)
+ };
+ (List $($rest:tt)*) => {
+ Value::from_builtin(Builtin::List)
+ .app(make_closure!($($rest)*))
+ };
+ (Some($($rest:tt)*)) => {{
+ let v = make_closure!($($rest)*);
+ let v_type = v.get_type_not_sort();
+ let opt_v_type = Value::from_builtin(Builtin::Optional).app(v_type);
+ ValueF::NEOptionalLit(v).into_value_with_type(opt_v_type)
+ }};
+ (1 + $($rest:tt)*) => {
+ ValueF::PartialExpr(ExprF::BinOp(
+ syntax::BinOp::NaturalPlus,
+ make_closure!($($rest)*),
+ Value::from_valuef_and_type(
+ ValueF::NaturalLit(1),
+ make_closure!(Natural)
+ ),
+ )).into_value_with_type(
+ make_closure!(Natural)
+ )
+ };
+ ([ $($head:tt)* ] # $($tail:tt)*) => {{
+ let head = make_closure!($($head)*);
+ let tail = make_closure!($($tail)*);
+ let list_type = tail.get_type_not_sort();
+ ValueF::PartialExpr(ExprF::BinOp(
+ syntax::BinOp::ListAppend,
+ ValueF::NEListLit(vec![head])
+ .into_value_with_type(list_type.clone()),
+ tail,
+ )).into_value_with_type(list_type)
+ }};
+}
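+
+// For illustration: the invocation used below for `Natural/build`,
+//
+//     make_closure!(λ(x : Natural) -> 1 + var(x, 0, Natural))
+//
+// constructs the Dhall value `λ(x : Natural) → x + 1` as a `ValueF::Lam`
+// whose body is a `PartialExpr` binary operation, with each intermediate
+// node paired with its type via `into_value_with_type`.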
+
+#[allow(clippy::cognitive_complexity)]
+pub(crate) fn apply_builtin(
+ b: Builtin,
+ args: Vec<Value>,
+ ty: &Value,
+) -> ValueF {
+ use syntax::Builtin::*;
+ use ValueF::*;
+
+ // Small helper enum
+ enum Ret<'a> {
+ ValueF(ValueF),
+ Value(Value),
+ // For applications that can return a function, it's important to keep the remaining
+ // arguments to apply them to the resulting function.
+ ValueWithRemainingArgs(&'a [Value], Value),
+ DoneAsIs,
+ }
+
+ let ret = match (b, args.as_slice()) {
+ (OptionalNone, [t]) => Ret::ValueF(EmptyOptionalLit(t.clone())),
+ (NaturalIsZero, [n]) => match &*n.as_whnf() {
+ NaturalLit(n) => Ret::ValueF(BoolLit(*n == 0)),
+ _ => Ret::DoneAsIs,
+ },
+ (NaturalEven, [n]) => match &*n.as_whnf() {
+ NaturalLit(n) => Ret::ValueF(BoolLit(*n % 2 == 0)),
+ _ => Ret::DoneAsIs,
+ },
+ (NaturalOdd, [n]) => match &*n.as_whnf() {
+ NaturalLit(n) => Ret::ValueF(BoolLit(*n % 2 != 0)),
+ _ => Ret::DoneAsIs,
+ },
+ (NaturalToInteger, [n]) => match &*n.as_whnf() {
+ NaturalLit(n) => Ret::ValueF(IntegerLit(*n as isize)),
+ _ => Ret::DoneAsIs,
+ },
+ (NaturalShow, [n]) => match &*n.as_whnf() {
+ NaturalLit(n) => {
+ Ret::ValueF(TextLit(vec![InterpolatedTextContents::Text(
+ n.to_string(),
+ )]))
+ }
+ _ => Ret::DoneAsIs,
+ },
+ (NaturalSubtract, [a, b]) => match (&*a.as_whnf(), &*b.as_whnf()) {
+ (NaturalLit(a), NaturalLit(b)) => {
+ Ret::ValueF(NaturalLit(if b > a { b - a } else { 0 }))
+ }
+ (NaturalLit(0), _) => Ret::Value(b.clone()),
+ (_, NaturalLit(0)) => Ret::ValueF(NaturalLit(0)),
+ _ if a == b => Ret::ValueF(NaturalLit(0)),
+ _ => Ret::DoneAsIs,
+ },
+ (IntegerShow, [n]) => match &*n.as_whnf() {
+ IntegerLit(n) => {
+ let s = if *n < 0 {
+ n.to_string()
+ } else {
+ format!("+{}", n)
+ };
+ Ret::ValueF(TextLit(vec![InterpolatedTextContents::Text(s)]))
+ }
+ _ => Ret::DoneAsIs,
+ },
+ (IntegerToDouble, [n]) => match &*n.as_whnf() {
+ IntegerLit(n) => {
+ Ret::ValueF(DoubleLit(NaiveDouble::from(*n as f64)))
+ }
+ _ => Ret::DoneAsIs,
+ },
+ (DoubleShow, [n]) => match &*n.as_whnf() {
+ DoubleLit(n) => {
+ Ret::ValueF(TextLit(vec![InterpolatedTextContents::Text(
+ n.to_string(),
+ )]))
+ }
+ _ => Ret::DoneAsIs,
+ },
+ (TextShow, [v]) => match &*v.as_whnf() {
+ TextLit(elts) => {
+ match elts.as_slice() {
+ // Empty string literal.
+ [] => {
+ // Printing InterpolatedText takes care of all the escaping
+ let txt: InterpolatedText<Normalized> =
+ std::iter::empty().collect();
+ let s = txt.to_string();
+ Ret::ValueF(TextLit(vec![
+ InterpolatedTextContents::Text(s),
+ ]))
+ }
+ // A literal with no interpolations. Invariants ensure that in this
+ // case the literal consists of a single Text item.
+ [InterpolatedTextContents::Text(s)] => {
+ // Printing InterpolatedText takes care of all the escaping
+ let txt: InterpolatedText<Normalized> =
+ std::iter::once(InterpolatedTextContents::Text(
+ s.clone(),
+ ))
+ .collect();
+ let s = txt.to_string();
+ Ret::ValueF(TextLit(vec![
+ InterpolatedTextContents::Text(s),
+ ]))
+ }
+ _ => Ret::DoneAsIs,
+ }
+ }
+ _ => Ret::DoneAsIs,
+ },
+ (ListLength, [_, l]) => match &*l.as_whnf() {
+ EmptyListLit(_) => Ret::ValueF(NaturalLit(0)),
+ NEListLit(xs) => Ret::ValueF(NaturalLit(xs.len())),
+ _ => Ret::DoneAsIs,
+ },
+ (ListHead, [_, l]) => match &*l.as_whnf() {
+ EmptyListLit(n) => Ret::ValueF(EmptyOptionalLit(n.clone())),
+ NEListLit(xs) => {
+ Ret::ValueF(NEOptionalLit(xs.iter().next().unwrap().clone()))
+ }
+ _ => Ret::DoneAsIs,
+ },
+ (ListLast, [_, l]) => match &*l.as_whnf() {
+ EmptyListLit(n) => Ret::ValueF(EmptyOptionalLit(n.clone())),
+ NEListLit(xs) => Ret::ValueF(NEOptionalLit(
+ xs.iter().rev().next().unwrap().clone(),
+ )),
+ _ => Ret::DoneAsIs,
+ },
+ (ListReverse, [_, l]) => match &*l.as_whnf() {
+ EmptyListLit(n) => Ret::ValueF(EmptyListLit(n.clone())),
+ NEListLit(xs) => {
+ Ret::ValueF(NEListLit(xs.iter().rev().cloned().collect()))
+ }
+ _ => Ret::DoneAsIs,
+ },
+ (ListIndexed, [_, l]) => {
+ let l_whnf = l.as_whnf();
+ match &*l_whnf {
+ EmptyListLit(_) | NEListLit(_) => {
+ // Extract the type of the list elements
+ let t = match &*l_whnf {
+ EmptyListLit(t) => t.clone(),
+ NEListLit(xs) => xs[0].get_type_not_sort(),
+ _ => unreachable!(),
+ };
+
+ // Construct the returned record type: { index: Natural, value: t }
+ let mut kts = HashMap::new();
+ kts.insert("index".into(), Value::from_builtin(Natural));
+ kts.insert("value".into(), t.clone());
+ let t = Value::from_valuef_and_type(
+ RecordType(kts),
+ Value::from_const(Type),
+ );
+
+ // Construct the new list, with added indices
+ let list = match &*l_whnf {
+ EmptyListLit(_) => EmptyListLit(t),
+ NEListLit(xs) => NEListLit(
+ xs.iter()
+ .enumerate()
+ .map(|(i, e)| {
+ let mut kvs = HashMap::new();
+ kvs.insert(
+ "index".into(),
+ Value::from_valuef_and_type(
+ NaturalLit(i),
+ Value::from_builtin(
+ Builtin::Natural,
+ ),
+ ),
+ );
+ kvs.insert("value".into(), e.clone());
+ Value::from_valuef_and_type(
+ RecordLit(kvs),
+ t.clone(),
+ )
+ })
+ .collect(),
+ ),
+ _ => unreachable!(),
+ };
+ Ret::ValueF(list)
+ }
+ _ => Ret::DoneAsIs,
+ }
+ }
+ (ListBuild, [t, f]) => match &*f.as_whnf() {
+ // fold/build fusion
+ ValueF::AppliedBuiltin(ListFold, args) => {
+ if args.len() >= 2 {
+ Ret::Value(args[1].clone())
+ } else {
+ // Do we really need to handle this case?
+ unimplemented!()
+ }
+ }
+ _ => {
+ let list_t = Value::from_builtin(List).app(t.clone());
+ Ret::Value(
+ f.app(list_t.clone())
+ .app({
+ // Move `t` under new variables
+ let t1 = t.under_binder(Label::from("x"));
+ let t2 = t1.under_binder(Label::from("xs"));
+ make_closure!(
+ λ(x : #t) ->
+ λ(xs : List #t1) ->
+ [ var(x, 1, #t2) ] # var(xs, 0, List #t2)
+ )
+ })
+ .app(
+ EmptyListLit(t.clone())
+ .into_value_with_type(list_t),
+ ),
+ )
+ }
+ },
+ (ListFold, [_, l, _, cons, nil, r @ ..]) => match &*l.as_whnf() {
+ EmptyListLit(_) => Ret::ValueWithRemainingArgs(r, nil.clone()),
+ NEListLit(xs) => {
+ let mut v = nil.clone();
+ for x in xs.iter().cloned().rev() {
+ v = cons.app(x).app(v);
+ }
+ Ret::ValueWithRemainingArgs(r, v)
+ }
+ _ => Ret::DoneAsIs,
+ },
+ (OptionalBuild, [t, f]) => match &*f.as_whnf() {
+ // fold/build fusion
+ ValueF::AppliedBuiltin(OptionalFold, args) => {
+ if args.len() >= 2 {
+ Ret::Value(args[1].clone())
+ } else {
+ // Do we really need to handle this case?
+ unimplemented!()
+ }
+ }
+ _ => {
+ let optional_t = Value::from_builtin(Optional).app(t.clone());
+ Ret::Value(
+ f.app(optional_t.clone())
+ .app({
+ let t1 = t.under_binder(Label::from("x"));
+ make_closure!(λ(x: #t) -> Some(var(x, 0, #t1)))
+ })
+ .app(
+ EmptyOptionalLit(t.clone())
+ .into_value_with_type(optional_t),
+ ),
+ )
+ }
+ },
+ (OptionalFold, [_, v, _, just, nothing, r @ ..]) => match &*v.as_whnf()
+ {
+ EmptyOptionalLit(_) => {
+ Ret::ValueWithRemainingArgs(r, nothing.clone())
+ }
+ NEOptionalLit(x) => {
+ Ret::ValueWithRemainingArgs(r, just.app(x.clone()))
+ }
+ _ => Ret::DoneAsIs,
+ },
+ (NaturalBuild, [f]) => match &*f.as_whnf() {
+ // fold/build fusion
+ ValueF::AppliedBuiltin(NaturalFold, args) => {
+ if !args.is_empty() {
+ Ret::Value(args[0].clone())
+ } else {
+ // Do we really need to handle this case?
+ unimplemented!()
+ }
+ }
+ _ => Ret::Value(
+ f.app(Value::from_builtin(Natural))
+ .app(make_closure!(
+ λ(x : Natural) -> 1 + var(x, 0, Natural)
+ ))
+ .app(
+ NaturalLit(0)
+ .into_value_with_type(Value::from_builtin(Natural)),
+ ),
+ ),
+ },
+ (NaturalFold, [n, t, succ, zero, r @ ..]) => match &*n.as_whnf() {
+ NaturalLit(0) => Ret::ValueWithRemainingArgs(r, zero.clone()),
+ NaturalLit(n) => {
+ let fold = Value::from_builtin(NaturalFold)
+ .app(
+ NaturalLit(n - 1)
+ .into_value_with_type(Value::from_builtin(Natural)),
+ )
+ .app(t.clone())
+ .app(succ.clone())
+ .app(zero.clone());
+ Ret::ValueWithRemainingArgs(r, succ.app(fold))
+ }
+ _ => Ret::DoneAsIs,
+ },
+ _ => Ret::DoneAsIs,
+ };
+ match ret {
+ Ret::ValueF(v) => v,
+ Ret::Value(v) => v.to_whnf_check_type(ty),
+ Ret::ValueWithRemainingArgs(unconsumed_args, mut v) => {
+ let n_consumed_args = args.len() - unconsumed_args.len();
+ for x in args.into_iter().skip(n_consumed_args) {
+ v = v.app(x);
+ }
+ v.to_whnf_check_type(ty)
+ }
+ Ret::DoneAsIs => AppliedBuiltin(b, args),
+ }
+}
+
+pub(crate) fn apply_any(f: Value, a: Value, ty: &Value) -> ValueF {
+ let f_borrow = f.as_whnf();
+ match &*f_borrow {
+ ValueF::Lam(x, _, e) => {
+ e.subst_shift(&x.into(), &a).to_whnf_check_type(ty)
+ }
+ ValueF::AppliedBuiltin(b, args) => {
+ use std::iter::once;
+ let args = args.iter().cloned().chain(once(a.clone())).collect();
+ apply_builtin(*b, args, ty)
+ }
+ ValueF::UnionConstructor(l, kts) => {
+ ValueF::UnionLit(l.clone(), a, kts.clone())
+ }
+ _ => {
+ drop(f_borrow);
+ ValueF::PartialExpr(ExprF::App(f, a))
+ }
+ }
+}
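+
+// Sketch of the dispatch above, on illustrative inputs: applying
+// `λ(x : Natural) → x + 1` to `2` substitutes and reduces to `3`; applying the
+// constructor `< Some : Natural | None >.Some` to `2` yields a `UnionLit`;
+// applying a neutral value (e.g. a free variable) stays stuck as a
+// `PartialExpr` application.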
+
+pub(crate) fn squash_textlit(
+ elts: impl Iterator<Item = InterpolatedTextContents<Value>>,
+) -> Vec<InterpolatedTextContents<Value>> {
+ use std::mem::replace;
+ use InterpolatedTextContents::{Expr, Text};
+
+ fn inner(
+ elts: impl Iterator<Item = InterpolatedTextContents<Value>>,
+ crnt_str: &mut String,
+ ret: &mut Vec<InterpolatedTextContents<Value>>,
+ ) {
+ for contents in elts {
+ match contents {
+ Text(s) => crnt_str.push_str(&s),
+ Expr(e) => {
+ let e_borrow = e.as_whnf();
+ match &*e_borrow {
+ ValueF::TextLit(elts2) => {
+ inner(elts2.iter().cloned(), crnt_str, ret)
+ }
+ _ => {
+ drop(e_borrow);
+ if !crnt_str.is_empty() {
+ ret.push(Text(replace(crnt_str, String::new())))
+ }
+ ret.push(Expr(e.clone()))
+ }
+ }
+ }
+ }
+ }
+ }
+
+ let mut crnt_str = String::new();
+ let mut ret = Vec::new();
+ inner(elts, &mut crnt_str, &mut ret);
+ if !crnt_str.is_empty() {
+ ret.push(Text(replace(&mut crnt_str, String::new())))
+ }
+ ret
+}
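+
+// For example (a sketch, with `x` standing for any value that is not itself a
+// text literal): squashing `[Text("a"), Text("b"), Expr(x), Text(""), Text("c")]`
+// yields `[Text("ab"), Expr(x), Text("c")]`; adjacent text chunks are
+// concatenated, empty chunks disappear, and a nested `TextLit` inside an
+// interpolated value is inlined into the surrounding literal.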
+
+pub(crate) fn merge_maps<K, V, F, Err>(
+ map1: &HashMap<K, V>,
+ map2: &HashMap<K, V>,
+ mut f: F,
+) -> Result<HashMap<K, V>, Err>
+where
+ F: FnMut(&K, &V, &V) -> Result<V, Err>,
+ K: std::hash::Hash + Eq + Clone,
+ V: Clone,
+{
+ let mut kvs = HashMap::new();
+ for (x, v2) in map2 {
+ let newv = if let Some(v1) = map1.get(x) {
+ f(x, v1, v2)?
+ } else {
+ v2.clone()
+ };
+ kvs.insert(x.clone(), newv);
+ }
+ for (x, v1) in map1 {
+ // Insert only if key not already present
+ kvs.entry(x.clone()).or_insert_with(|| v1.clone());
+ }
+ Ok(kvs)
+}
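+
+// A small sketch of the merge semantics on hypothetical maps: merging
+// `{x: 1, y: 2}` with `{y: 20, z: 30}` using `|_, a, b| Ok::<_, !>(a + b)`
+// yields `{x: 1, y: 22, z: 30}`; keys present in both maps go through `f`,
+// and keys present in only one map are copied over unchanged.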
+
+// Small helper enum to avoid repetition
+enum Ret<'a> {
+ ValueF(ValueF),
+ Value(Value),
+ ValueRef(&'a Value),
+ Expr(ExprF<Value, Normalized>),
+}
+
+fn apply_binop<'a>(
+ o: BinOp,
+ x: &'a Value,
+ y: &'a Value,
+ ty: &Value,
+) -> Option<Ret<'a>> {
+ use BinOp::{
+ BoolAnd, BoolEQ, BoolNE, BoolOr, Equivalence, ListAppend, NaturalPlus,
+ NaturalTimes, RecursiveRecordMerge, RecursiveRecordTypeMerge,
+ RightBiasedRecordMerge, TextAppend,
+ };
+ use ValueF::{
+ BoolLit, EmptyListLit, NEListLit, NaturalLit, RecordLit, RecordType,
+ TextLit,
+ };
+ let x_borrow = x.as_whnf();
+ let y_borrow = y.as_whnf();
+ Some(match (o, &*x_borrow, &*y_borrow) {
+ (BoolAnd, BoolLit(true), _) => Ret::ValueRef(y),
+ (BoolAnd, _, BoolLit(true)) => Ret::ValueRef(x),
+ (BoolAnd, BoolLit(false), _) => Ret::ValueF(BoolLit(false)),
+ (BoolAnd, _, BoolLit(false)) => Ret::ValueF(BoolLit(false)),
+ (BoolAnd, _, _) if x == y => Ret::ValueRef(x),
+ (BoolOr, BoolLit(true), _) => Ret::ValueF(BoolLit(true)),
+ (BoolOr, _, BoolLit(true)) => Ret::ValueF(BoolLit(true)),
+ (BoolOr, BoolLit(false), _) => Ret::ValueRef(y),
+ (BoolOr, _, BoolLit(false)) => Ret::ValueRef(x),
+ (BoolOr, _, _) if x == y => Ret::ValueRef(x),
+ (BoolEQ, BoolLit(true), _) => Ret::ValueRef(y),
+ (BoolEQ, _, BoolLit(true)) => Ret::ValueRef(x),
+ (BoolEQ, BoolLit(x), BoolLit(y)) => Ret::ValueF(BoolLit(x == y)),
+ (BoolEQ, _, _) if x == y => Ret::ValueF(BoolLit(true)),
+ (BoolNE, BoolLit(false), _) => Ret::ValueRef(y),
+ (BoolNE, _, BoolLit(false)) => Ret::ValueRef(x),
+ (BoolNE, BoolLit(x), BoolLit(y)) => Ret::ValueF(BoolLit(x != y)),
+ (BoolNE, _, _) if x == y => Ret::ValueF(BoolLit(false)),
+
+ (NaturalPlus, NaturalLit(0), _) => Ret::ValueRef(y),
+ (NaturalPlus, _, NaturalLit(0)) => Ret::ValueRef(x),
+ (NaturalPlus, NaturalLit(x), NaturalLit(y)) => {
+ Ret::ValueF(NaturalLit(x + y))
+ }
+ (NaturalTimes, NaturalLit(0), _) => Ret::ValueF(NaturalLit(0)),
+ (NaturalTimes, _, NaturalLit(0)) => Ret::ValueF(NaturalLit(0)),
+ (NaturalTimes, NaturalLit(1), _) => Ret::ValueRef(y),
+ (NaturalTimes, _, NaturalLit(1)) => Ret::ValueRef(x),
+ (NaturalTimes, NaturalLit(x), NaturalLit(y)) => {
+ Ret::ValueF(NaturalLit(x * y))
+ }
+
+ (ListAppend, EmptyListLit(_), _) => Ret::ValueRef(y),
+ (ListAppend, _, EmptyListLit(_)) => Ret::ValueRef(x),
+ (ListAppend, NEListLit(xs), NEListLit(ys)) => Ret::ValueF(NEListLit(
+ xs.iter().chain(ys.iter()).cloned().collect(),
+ )),
+
+ (TextAppend, TextLit(x), _) if x.is_empty() => Ret::ValueRef(y),
+ (TextAppend, _, TextLit(y)) if y.is_empty() => Ret::ValueRef(x),
+ (TextAppend, TextLit(x), TextLit(y)) => Ret::ValueF(TextLit(
+ squash_textlit(x.iter().chain(y.iter()).cloned()),
+ )),
+ (TextAppend, TextLit(x), _) => {
+ use std::iter::once;
+ let y = InterpolatedTextContents::Expr(y.clone());
+ Ret::ValueF(TextLit(squash_textlit(
+ x.iter().cloned().chain(once(y)),
+ )))
+ }
+ (TextAppend, _, TextLit(y)) => {
+ use std::iter::once;
+ let x = InterpolatedTextContents::Expr(x.clone());
+ Ret::ValueF(TextLit(squash_textlit(
+ once(x).chain(y.iter().cloned()),
+ )))
+ }
+
+ (RightBiasedRecordMerge, _, RecordLit(kvs)) if kvs.is_empty() => {
+ Ret::ValueRef(x)
+ }
+ (RightBiasedRecordMerge, RecordLit(kvs), _) if kvs.is_empty() => {
+ Ret::ValueRef(y)
+ }
+ (RightBiasedRecordMerge, RecordLit(kvs1), RecordLit(kvs2)) => {
+ let mut kvs = kvs2.clone();
+ for (x, v) in kvs1 {
+ // Insert only if key not already present
+ kvs.entry(x.clone()).or_insert_with(|| v.clone());
+ }
+ Ret::ValueF(RecordLit(kvs))
+ }
+
+ (RecursiveRecordMerge, _, RecordLit(kvs)) if kvs.is_empty() => {
+ Ret::ValueRef(x)
+ }
+ (RecursiveRecordMerge, RecordLit(kvs), _) if kvs.is_empty() => {
+ Ret::ValueRef(y)
+ }
+ (RecursiveRecordMerge, RecordLit(kvs1), RecordLit(kvs2)) => {
+ let ty_borrow = ty.as_whnf();
+ let kts = match &*ty_borrow {
+ RecordType(kts) => kts,
+ _ => unreachable!("Internal type error"),
+ };
+ let kvs = merge_maps::<_, _, _, !>(kvs1, kvs2, |k, v1, v2| {
+ Ok(Value::from_valuef_and_type(
+ ValueF::PartialExpr(ExprF::BinOp(
+ RecursiveRecordMerge,
+ v1.clone(),
+ v2.clone(),
+ )),
+ kts.get(k).expect("Internal type error").clone(),
+ ))
+ })?;
+ Ret::ValueF(RecordLit(kvs))
+ }
+
+ (RecursiveRecordTypeMerge, _, _) | (Equivalence, _, _) => {
+ unreachable!("This case should have been handled in typecheck")
+ }
+
+ _ => return None,
+ })
+}
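+
+// A few of the simplifications encoded above, for concreteness: `True && b`
+// reduces to `b`, `n + 0` to `n`, `xs # []` to `xs`, and `"a" ++ "b"` to the
+// squashed literal `"ab"`; any combination not covered returns `None` and is
+// left by the caller as an unevaluated `BinOp`.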
+
+pub(crate) fn normalize_one_layer(
+ expr: ExprF<Value, Normalized>,
+ ty: &Value,
+) -> ValueF {
+ use ValueF::{
+ AppliedBuiltin, BoolLit, DoubleLit, EmptyListLit, IntegerLit,
+ NEListLit, NEOptionalLit, NaturalLit, RecordLit, TextLit,
+ UnionConstructor, UnionLit, UnionType,
+ };
+
+ let ret = match expr {
+ ExprF::Import(_) => unreachable!(
+ "There should remain no imports in a resolved expression"
+ ),
+ // Those cases have already been completely handled in the typechecking phase (using
+ // `RetWhole`), so they won't appear here.
+ ExprF::Lam(_, _, _)
+ | ExprF::Pi(_, _, _)
+ | ExprF::Let(_, _, _, _)
+ | ExprF::Embed(_)
+ | ExprF::Const(_)
+ | ExprF::Builtin(_)
+ | ExprF::Var(_)
+ | ExprF::Annot(_, _)
+ | ExprF::RecordType(_)
+ | ExprF::UnionType(_) => {
+ unreachable!("This case should have been handled in typecheck")
+ }
+ ExprF::Assert(_) => Ret::Expr(expr),
+ ExprF::App(v, a) => Ret::Value(v.app(a)),
+ ExprF::BoolLit(b) => Ret::ValueF(BoolLit(b)),
+ ExprF::NaturalLit(n) => Ret::ValueF(NaturalLit(n)),
+ ExprF::IntegerLit(n) => Ret::ValueF(IntegerLit(n)),
+ ExprF::DoubleLit(n) => Ret::ValueF(DoubleLit(n)),
+ ExprF::SomeLit(e) => Ret::ValueF(NEOptionalLit(e)),
+ ExprF::EmptyListLit(ref t) => {
+ // Check if the type is of the form `List x`
+ let t_borrow = t.as_whnf();
+ match &*t_borrow {
+ AppliedBuiltin(Builtin::List, args) if args.len() == 1 => {
+ Ret::ValueF(EmptyListLit(args[0].clone()))
+ }
+ _ => {
+ drop(t_borrow);
+ Ret::Expr(expr)
+ }
+ }
+ }
+ ExprF::NEListLit(elts) => {
+ Ret::ValueF(NEListLit(elts.into_iter().collect()))
+ }
+ ExprF::RecordLit(kvs) => {
+ Ret::ValueF(RecordLit(kvs.into_iter().collect()))
+ }
+ ExprF::TextLit(elts) => {
+ use InterpolatedTextContents::Expr;
+ let elts: Vec<_> = squash_textlit(elts.into_iter());
+ // Simplify bare interpolation
+ if let [Expr(th)] = elts.as_slice() {
+ Ret::Value(th.clone())
+ } else {
+ Ret::ValueF(TextLit(elts))
+ }
+ }
+ ExprF::BoolIf(ref b, ref e1, ref e2) => {
+ let b_borrow = b.as_whnf();
+ match &*b_borrow {
+ BoolLit(true) => Ret::ValueRef(e1),
+ BoolLit(false) => Ret::ValueRef(e2),
+ _ => {
+ let e1_borrow = e1.as_whnf();
+ let e2_borrow = e2.as_whnf();
+ match (&*e1_borrow, &*e2_borrow) {
+ // Simplify `if b then True else False`
+ (BoolLit(true), BoolLit(false)) => Ret::ValueRef(b),
+ _ if e1 == e2 => Ret::ValueRef(e1),
+ _ => {
+ drop(b_borrow);
+ drop(e1_borrow);
+ drop(e2_borrow);
+ Ret::Expr(expr)
+ }
+ }
+ }
+ }
+ }
+ ExprF::BinOp(o, ref x, ref y) => match apply_binop(o, x, y, ty) {
+ Some(ret) => ret,
+ None => Ret::Expr(expr),
+ },
+
+ ExprF::Projection(_, ref ls) if ls.is_empty() => {
+ Ret::ValueF(RecordLit(HashMap::new()))
+ }
+ ExprF::Projection(ref v, ref ls) => {
+ let v_borrow = v.as_whnf();
+ match &*v_borrow {
+ RecordLit(kvs) => Ret::ValueF(RecordLit(
+ ls.iter()
+ .filter_map(|l| {
+ kvs.get(l).map(|x| (l.clone(), x.clone()))
+ })
+ .collect(),
+ )),
+ _ => {
+ drop(v_borrow);
+ Ret::Expr(expr)
+ }
+ }
+ }
+ ExprF::Field(ref v, ref l) => {
+ let v_borrow = v.as_whnf();
+ match &*v_borrow {
+ RecordLit(kvs) => match kvs.get(l) {
+ Some(r) => Ret::Value(r.clone()),
+ None => {
+ drop(v_borrow);
+ Ret::Expr(expr)
+ }
+ },
+ UnionType(kts) => {
+ Ret::ValueF(UnionConstructor(l.clone(), kts.clone()))
+ }
+ _ => {
+ drop(v_borrow);
+ Ret::Expr(expr)
+ }
+ }
+ }
+ ExprF::ProjectionByExpr(_, _) => {
+ unimplemented!("selection by expression")
+ }
+
+ ExprF::Merge(ref handlers, ref variant, _) => {
+ let handlers_borrow = handlers.as_whnf();
+ let variant_borrow = variant.as_whnf();
+ match (&*handlers_borrow, &*variant_borrow) {
+ (RecordLit(kvs), UnionConstructor(l, _)) => match kvs.get(l) {
+ Some(h) => Ret::Value(h.clone()),
+ None => {
+ drop(handlers_borrow);
+ drop(variant_borrow);
+ Ret::Expr(expr)
+ }
+ },
+ (RecordLit(kvs), UnionLit(l, v, _)) => match kvs.get(l) {
+ Some(h) => Ret::Value(h.app(v.clone())),
+ None => {
+ drop(handlers_borrow);
+ drop(variant_borrow);
+ Ret::Expr(expr)
+ }
+ },
+ _ => {
+ drop(handlers_borrow);
+ drop(variant_borrow);
+ Ret::Expr(expr)
+ }
+ }
+ }
+ ExprF::ToMap(_, _) => unimplemented!("toMap"),
+ };
+
+ match ret {
+ Ret::ValueF(v) => v,
+ Ret::Value(v) => v.to_whnf_check_type(ty),
+ Ret::ValueRef(v) => v.to_whnf_check_type(ty),
+ Ret::Expr(expr) => ValueF::PartialExpr(expr),
+ }
+}
+
+/// Normalize a ValueF into WHNF
+pub(crate) fn normalize_whnf(v: ValueF, ty: &Value) -> ValueF {
+ match v {
+ ValueF::AppliedBuiltin(b, args) => apply_builtin(b, args, ty),
+ ValueF::PartialExpr(e) => normalize_one_layer(e, ty),
+ ValueF::TextLit(elts) => {
+ ValueF::TextLit(squash_textlit(elts.into_iter()))
+ }
+ // All other cases are already in WHNF
+ v => v,
+ }
+}
diff --git a/dhall/src/semantics/phase/parse.rs b/dhall/src/semantics/phase/parse.rs
new file mode 100644
index 0000000..4c8ad7b
--- /dev/null
+++ b/dhall/src/semantics/phase/parse.rs
@@ -0,0 +1,37 @@
+use std::fs::File;
+use std::io::Read;
+use std::path::Path;
+
+use crate::semantics::error::Error;
+use crate::semantics::phase::resolve::ImportRoot;
+use crate::semantics::phase::Parsed;
+use crate::syntax::binary;
+use crate::syntax::parse_expr;
+
+pub(crate) fn parse_file(f: &Path) -> Result<Parsed, Error> {
+ let mut buffer = String::new();
+ File::open(f)?.read_to_string(&mut buffer)?;
+ let expr = parse_expr(&*buffer)?;
+ let root = ImportRoot::LocalDir(f.parent().unwrap().to_owned());
+ Ok(Parsed(expr, root))
+}
+
+pub(crate) fn parse_str(s: &str) -> Result<Parsed, Error> {
+ let expr = parse_expr(s)?;
+ let root = ImportRoot::LocalDir(std::env::current_dir()?);
+ Ok(Parsed(expr, root))
+}
+
+pub(crate) fn parse_binary(data: &[u8]) -> Result<Parsed, Error> {
+ let expr = binary::decode(data)?;
+ let root = ImportRoot::LocalDir(std::env::current_dir()?);
+ Ok(Parsed(expr, root))
+}
+
+pub(crate) fn parse_binary_file(f: &Path) -> Result<Parsed, Error> {
+ let mut buffer = Vec::new();
+ File::open(f)?.read_to_end(&mut buffer)?;
+ let expr = binary::decode(&buffer)?;
+ let root = ImportRoot::LocalDir(f.parent().unwrap().to_owned());
+ Ok(Parsed(expr, root))
+}
diff --git a/dhall/src/semantics/phase/resolve.rs b/dhall/src/semantics/phase/resolve.rs
new file mode 100644
index 0000000..86dc7ae
--- /dev/null
+++ b/dhall/src/semantics/phase/resolve.rs
@@ -0,0 +1,181 @@
+use std::collections::HashMap;
+use std::path::{Path, PathBuf};
+
+use crate::semantics::error::{Error, ImportError};
+use crate::semantics::phase::{Normalized, NormalizedExpr, Parsed, Resolved};
+use crate::syntax;
+use crate::syntax::{FilePath, ImportLocation, URL};
+
+type Import = syntax::Import<NormalizedExpr>;
+
+/// A root from which to resolve relative imports.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(crate) enum ImportRoot {
+ LocalDir(PathBuf),
+}
+
+type ImportCache = HashMap<Import, Normalized>;
+
+pub(crate) type ImportStack = Vec<Import>;
+
+fn resolve_import(
+ import: &Import,
+ root: &ImportRoot,
+ import_cache: &mut ImportCache,
+ import_stack: &ImportStack,
+) -> Result<Normalized, ImportError> {
+ use self::ImportRoot::*;
+ use syntax::FilePrefix::*;
+ use syntax::ImportLocation::*;
+ let cwd = match root {
+ LocalDir(cwd) => cwd,
+ };
+ match &import.location {
+ Local(prefix, path) => {
+ let path_buf: PathBuf = path.file_path.iter().collect();
+ let path_buf = match prefix {
+ // TODO: fail gracefully
+ Parent => cwd.parent().unwrap().join(path_buf),
+ Here => cwd.join(path_buf),
+ _ => unimplemented!("{:?}", import),
+ };
+ Ok(load_import(&path_buf, import_cache, import_stack).map_err(
+ |e| ImportError::Recursive(import.clone(), Box::new(e)),
+ )?)
+ }
+ _ => unimplemented!("{:?}", import),
+ }
+}
+
+fn load_import(
+ f: &Path,
+ import_cache: &mut ImportCache,
+ import_stack: &ImportStack,
+) -> Result<Normalized, Error> {
+ Ok(
+ do_resolve_expr(Parsed::parse_file(f)?, import_cache, import_stack)?
+ .typecheck()?
+ .normalize(),
+ )
+}
+
+fn do_resolve_expr(
+ parsed: Parsed,
+ import_cache: &mut ImportCache,
+ import_stack: &ImportStack,
+) -> Result<Resolved, ImportError> {
+ let Parsed(mut expr, root) = parsed;
+ let mut resolve = |import: Import| -> Result<Normalized, ImportError> {
+ if import_stack.contains(&import) {
+ return Err(ImportError::ImportCycle(import_stack.clone(), import));
+ }
+ match import_cache.get(&import) {
+ Some(expr) => Ok(expr.clone()),
+ None => {
+ // Copy the import stack and push the current import
+ let mut import_stack = import_stack.clone();
+ import_stack.push(import.clone());
+
+ // Resolve the import recursively
+ let expr = resolve_import(
+ &import,
+ &root,
+ import_cache,
+ &import_stack,
+ )?;
+
+ // Add the import to the cache
+ import_cache.insert(import, expr.clone());
+ Ok(expr)
+ }
+ }
+ };
+ expr.traverse_resolve_mut(&mut resolve)?;
+ Ok(Resolved(expr))
+}
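+
+// Note: `do_resolve_expr` threads two pieces of state through recursive
+// imports: `import_cache` memoizes already-loaded imports, and `import_stack`
+// records the chain currently being resolved, so that (for example)
+// `a.dhall` importing `b.dhall` which imports `a.dhall` again is reported as
+// an `ImportError::ImportCycle` instead of recursing forever.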
+
+pub(crate) fn resolve(e: Parsed) -> Result<Resolved, ImportError> {
+ do_resolve_expr(e, &mut HashMap::new(), &Vec::new())
+}
+
+pub(crate) fn skip_resolve_expr(
+ parsed: Parsed,
+) -> Result<Resolved, ImportError> {
+ let mut expr = parsed.0;
+ let mut resolve = |import: Import| -> Result<Normalized, ImportError> {
+ Err(ImportError::UnexpectedImport(import))
+ };
+ expr.traverse_resolve_mut(&mut resolve)?;
+ Ok(Resolved(expr))
+}
+
+pub trait Canonicalize {
+ fn canonicalize(&self) -> Self;
+}
+
+impl Canonicalize for FilePath {
+ fn canonicalize(&self) -> FilePath {
+ let mut file_path = Vec::new();
+ let mut file_path_components = self.file_path.clone().into_iter();
+
+ loop {
+ let component = file_path_components.next();
+ match component.as_ref() {
+ // ───────────────────
+ // canonicalize(ε) = ε
+ None => break,
+
+ // canonicalize(directory₀) = directory₁
+ // ───────────────────────────────────────
+ // canonicalize(directory₀/.) = directory₁
+ Some(c) if c == "." => continue,
+
+ Some(c) if c == ".." => match file_path_components.next() {
+ // canonicalize(directory₀) = ε
+ // ────────────────────────────
+ // canonicalize(directory₀/..) = /..
+ None => file_path.push("..".to_string()),
+
+ // canonicalize(directory₀) = directory₁/..
+ // ──────────────────────────────────────────────
+ // canonicalize(directory₀/..) = directory₁/../..
+ Some(ref c) if c == ".." => {
+ file_path.push("..".to_string());
+ file_path.push("..".to_string());
+ }
+
+ // canonicalize(directory₀) = directory₁/component
+ // ─────────────────────────────────────────────── ; If "component" is not
+ // canonicalize(directory₀/..) = directory₁ ; ".."
+ Some(_) => continue,
+ },
+
+ // canonicalize(directory₀) = directory₁
+ // ───────────────────────────────────────────────────────── ; If no other
+ // canonicalize(directory₀/component) = directory₁/component ; rule matches
+ Some(c) => file_path.push(c.clone()),
+ }
+ }
+
+ FilePath { file_path }
+ }
+}
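+
+// For illustration (on hypothetical component lists): canonicalizing a
+// `file_path` of `[".", "a", ".", "b"]` drops the `.` components and yields
+// `["a", "b"]`, while `..` components that cannot be cancelled, as in
+// `["..", ".."]`, are kept as-is.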
+
+impl<SE: Copy> Canonicalize for ImportLocation<SE> {
+ fn canonicalize(&self) -> ImportLocation<SE> {
+ match self {
+ ImportLocation::Local(prefix, file) => {
+ ImportLocation::Local(*prefix, file.canonicalize())
+ }
+ ImportLocation::Remote(url) => ImportLocation::Remote(URL {
+ scheme: url.scheme,
+ authority: url.authority.clone(),
+ path: url.path.canonicalize(),
+ query: url.query.clone(),
+ headers: url.headers.clone(),
+ }),
+ ImportLocation::Env(name) => ImportLocation::Env(name.to_string()),
+ ImportLocation::Missing => ImportLocation::Missing,
+ }
+ }
+}
diff --git a/dhall/src/semantics/phase/typecheck.rs b/dhall/src/semantics/phase/typecheck.rs
new file mode 100644
index 0000000..59380a3
--- /dev/null
+++ b/dhall/src/semantics/phase/typecheck.rs
@@ -0,0 +1,810 @@
+use std::cmp::max;
+use std::collections::HashMap;
+
+use crate::semantics::core::context::TypecheckContext;
+use crate::semantics::core::value::Value;
+use crate::semantics::core::valuef::ValueF;
+use crate::semantics::core::var::{Shift, Subst};
+use crate::semantics::error::{TypeError, TypeMessage};
+use crate::semantics::phase::normalize::merge_maps;
+use crate::semantics::phase::Normalized;
+use crate::syntax;
+use crate::syntax::{
+ Builtin, Const, Expr, ExprF, InterpolatedTextContents, Label, RawExpr, Span,
+};
+
+fn tck_pi_type(
+ ctx: &TypecheckContext,
+ x: Label,
+ tx: Value,
+ te: Value,
+) -> Result<Value, TypeError> {
+ use TypeMessage::*;
+ let ctx2 = ctx.insert_type(&x, tx.clone());
+
+ let ka = match tx.get_type()?.as_const() {
+ Some(k) => k,
+ _ => return Err(TypeError::new(ctx, InvalidInputType(tx))),
+ };
+
+ let kb = match te.get_type()?.as_const() {
+ Some(k) => k,
+ _ => {
+ return Err(TypeError::new(
+ &ctx2,
+ InvalidOutputType(te.get_type()?),
+ ))
+ }
+ };
+
+ let k = function_check(ka, kb);
+
+ Ok(Value::from_valuef_and_type(
+ ValueF::Pi(x.into(), tx, te),
+ Value::from_const(k),
+ ))
+}
+
+fn tck_record_type(
+ ctx: &TypecheckContext,
+ kts: impl IntoIterator<Item = Result<(Label, Value), TypeError>>,
+) -> Result<Value, TypeError> {
+ use std::collections::hash_map::Entry;
+ use TypeMessage::*;
+ let mut new_kts = HashMap::new();
+ // An empty record type has type Type
+ let mut k = Const::Type;
+ for e in kts {
+ let (x, t) = e?;
+ // Construct the union of the contained `Const`s
+ match t.get_type()?.as_const() {
+ Some(k2) => k = max(k, k2),
+ None => return Err(TypeError::new(ctx, InvalidFieldType(x, t))),
+ }
+ // Check for duplicated entries
+ let entry = new_kts.entry(x);
+ match &entry {
+ Entry::Occupied(_) => {
+ return Err(TypeError::new(ctx, RecordTypeDuplicateField))
+ }
+ Entry::Vacant(_) => entry.or_insert_with(|| t),
+ };
+ }
+
+ Ok(Value::from_valuef_and_type(
+ ValueF::RecordType(new_kts),
+ Value::from_const(k),
+ ))
+}
+
+fn tck_union_type<Iter>(
+ ctx: &TypecheckContext,
+ kts: Iter,
+) -> Result<Value, TypeError>
+where
+ Iter: IntoIterator<Item = Result<(Label, Option<Value>), TypeError>>,
+{
+ use std::collections::hash_map::Entry;
+ use TypeMessage::*;
+ let mut new_kts = HashMap::new();
+ // Check that all types are the same const
+ let mut k = None;
+ for e in kts {
+ let (x, t) = e?;
+ if let Some(t) = &t {
+ match (k, t.get_type()?.as_const()) {
+ (None, Some(k2)) => k = Some(k2),
+ (Some(k1), Some(k2)) if k1 == k2 => {}
+ _ => {
+ return Err(TypeError::new(
+ ctx,
+ InvalidFieldType(x, t.clone()),
+ ))
+ }
+ }
+ }
+ let entry = new_kts.entry(x);
+ match &entry {
+ Entry::Occupied(_) => {
+ return Err(TypeError::new(ctx, UnionTypeDuplicateField))
+ }
+ Entry::Vacant(_) => entry.or_insert_with(|| t),
+ };
+ }
+
+ // An empty union type has type Type;
+ // a union type whose alternatives all lack a type annotation also has type Type
+ let k = k.unwrap_or(Const::Type);
+
+ Ok(Value::from_valuef_and_type(
+ ValueF::UnionType(new_kts),
+ Value::from_const(k),
+ ))
+}
+
+fn function_check(a: Const, b: Const) -> Const {
+ if b == Const::Type {
+ Const::Type
+ } else {
+ max(a, b)
+ }
+}
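+
+// A few worked instances of this rule: `function_check(Kind, Type) == Type`
+// (a function returning a term lives in `Type`), while
+// `function_check(Type, Kind) == Kind` and `function_check(Sort, Kind) == Sort`
+// (otherwise the larger constant wins).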
+
+pub(crate) fn const_to_value(c: Const) -> Value {
+ let v = ValueF::Const(c);
+ match c {
+ Const::Type => {
+ Value::from_valuef_and_type(v, const_to_value(Const::Kind))
+ }
+ Const::Kind => {
+ Value::from_valuef_and_type(v, const_to_value(Const::Sort))
+ }
+ Const::Sort => Value::const_sort(),
+ }
+}
+
+pub fn rc<E>(x: RawExpr<E>) -> Expr<E> {
+ Expr::new(x, Span::Artificial)
+}
+
+// Ad-hoc macro to help construct the types of builtins
+macro_rules! make_type {
+ (Type) => { ExprF::Const(Const::Type) };
+ (Bool) => { ExprF::Builtin(Builtin::Bool) };
+ (Natural) => { ExprF::Builtin(Builtin::Natural) };
+ (Integer) => { ExprF::Builtin(Builtin::Integer) };
+ (Double) => { ExprF::Builtin(Builtin::Double) };
+ (Text) => { ExprF::Builtin(Builtin::Text) };
+ ($var:ident) => {
+ ExprF::Var(syntax::V(stringify!($var).into(), 0))
+ };
+ (Optional $ty:ident) => {
+ ExprF::App(
+ rc(ExprF::Builtin(Builtin::Optional)),
+ rc(make_type!($ty))
+ )
+ };
+ (List $($rest:tt)*) => {
+ ExprF::App(
+ rc(ExprF::Builtin(Builtin::List)),
+ rc(make_type!($($rest)*))
+ )
+ };
+ ({ $($label:ident : $ty:ident),* }) => {{
+ let mut kts = syntax::map::DupTreeMap::new();
+ $(
+ kts.insert(
+ Label::from(stringify!($label)),
+ rc(make_type!($ty)),
+ );
+ )*
+ ExprF::RecordType(kts)
+ }};
+ ($ty:ident -> $($rest:tt)*) => {
+ ExprF::Pi(
+ "_".into(),
+ rc(make_type!($ty)),
+ rc(make_type!($($rest)*))
+ )
+ };
+ (($($arg:tt)*) -> $($rest:tt)*) => {
+ ExprF::Pi(
+ "_".into(),
+ rc(make_type!($($arg)*)),
+ rc(make_type!($($rest)*))
+ )
+ };
+ (forall ($var:ident : $($ty:tt)*) -> $($rest:tt)*) => {
+ ExprF::Pi(
+ stringify!($var).into(),
+ rc(make_type!($($ty)*)),
+ rc(make_type!($($rest)*))
+ )
+ };
+}
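+
+// For illustration, `make_type!(Natural -> Bool)` expands (roughly) to
+//
+//     ExprF::Pi(
+//         "_".into(),
+//         rc(ExprF::Builtin(Builtin::Natural)),
+//         rc(ExprF::Builtin(Builtin::Bool)),
+//     )
+//
+// i.e. the type `∀(_ : Natural) → Bool` given to `Natural/isZero` and the
+// other unary predicates below.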
+
+fn type_of_builtin<E>(b: Builtin) -> Expr<E> {
+ use syntax::Builtin::*;
+ rc(match b {
+ Bool | Natural | Integer | Double | Text => make_type!(Type),
+ List | Optional => make_type!(
+ Type -> Type
+ ),
+
+ NaturalFold => make_type!(
+ Natural ->
+ forall (natural: Type) ->
+ forall (succ: natural -> natural) ->
+ forall (zero: natural) ->
+ natural
+ ),
+ NaturalBuild => make_type!(
+ (forall (natural: Type) ->
+ forall (succ: natural -> natural) ->
+ forall (zero: natural) ->
+ natural) ->
+ Natural
+ ),
+ NaturalIsZero | NaturalEven | NaturalOdd => make_type!(
+ Natural -> Bool
+ ),
+ NaturalToInteger => make_type!(Natural -> Integer),
+ NaturalShow => make_type!(Natural -> Text),
+ NaturalSubtract => make_type!(Natural -> Natural -> Natural),
+
+ IntegerToDouble => make_type!(Integer -> Double),
+ IntegerShow => make_type!(Integer -> Text),
+ DoubleShow => make_type!(Double -> Text),
+ TextShow => make_type!(Text -> Text),
+
+ ListBuild => make_type!(
+ forall (a: Type) ->
+ (forall (list: Type) ->
+ forall (cons: a -> list -> list) ->
+ forall (nil: list) ->
+ list) ->
+ List a
+ ),
+ ListFold => make_type!(
+ forall (a: Type) ->
+ (List a) ->
+ forall (list: Type) ->
+ forall (cons: a -> list -> list) ->
+ forall (nil: list) ->
+ list
+ ),
+ ListLength => make_type!(forall (a: Type) -> (List a) -> Natural),
+ ListHead | ListLast => {
+ make_type!(forall (a: Type) -> (List a) -> Optional a)
+ }
+ ListIndexed => make_type!(
+ forall (a: Type) ->
+ (List a) ->
+ List { index: Natural, value: a }
+ ),
+ ListReverse => make_type!(
+ forall (a: Type) -> (List a) -> List a
+ ),
+
+ OptionalBuild => make_type!(
+ forall (a: Type) ->
+ (forall (optional: Type) ->
+ forall (just: a -> optional) ->
+ forall (nothing: optional) ->
+ optional) ->
+ Optional a
+ ),
+ OptionalFold => make_type!(
+ forall (a: Type) ->
+ (Optional a) ->
+ forall (optional: Type) ->
+ forall (just: a -> optional) ->
+ forall (nothing: optional) ->
+ optional
+ ),
+ OptionalNone => make_type!(
+ forall (a: Type) -> Optional a
+ ),
+ })
+}
+
+pub(crate) fn builtin_to_value(b: Builtin) -> Value {
+ let ctx = TypecheckContext::new();
+ Value::from_valuef_and_type(
+ ValueF::from_builtin(b),
+ type_with(&ctx, type_of_builtin(b)).unwrap(),
+ )
+}
+
+/// Type-check an expression and return the expression alongside its type if type-checking
+/// succeeded, or an error if type-checking failed.
+/// Some normalization is done while typechecking, so the returned expression might be partially
+/// normalized as well.
+fn type_with(
+ ctx: &TypecheckContext,
+ e: Expr<Normalized>,
+) -> Result<Value, TypeError> {
+ use syntax::ExprF::{Annot, Embed, Lam, Let, Pi, Var};
+ let span = e.span();
+
+ Ok(match e.as_ref() {
+ Lam(var, annot, body) => {
+ let annot = type_with(ctx, annot.clone())?;
+ let ctx2 = ctx.insert_type(var, annot.clone());
+ let body = type_with(&ctx2, body.clone())?;
+ let body_type = body.get_type()?;
+ Value::from_valuef_and_type(
+ ValueF::Lam(var.clone().into(), annot.clone(), body),
+ tck_pi_type(ctx, var.clone(), annot, body_type)?,
+ )
+ }
+ Pi(x, ta, tb) => {
+ let ta = type_with(ctx, ta.clone())?;
+ let ctx2 = ctx.insert_type(x, ta.clone());
+ let tb = type_with(&ctx2, tb.clone())?;
+ return tck_pi_type(ctx, x.clone(), ta, tb);
+ }
+ Let(x, t, v, e) => {
+ let v = if let Some(t) = t {
+ t.rewrap(Annot(v.clone(), t.clone()))
+ } else {
+ v.clone()
+ };
+
+ let v = type_with(ctx, v)?;
+ return type_with(&ctx.insert_value(x, v.clone())?, e.clone());
+ }
+ Embed(p) => p.clone().into_typed().into_value(),
+ Var(var) => match ctx.lookup(&var) {
+ Some(typed) => typed.clone(),
+ None => {
+ return Err(TypeError::new(
+ ctx,
+ TypeMessage::UnboundVariable(span),
+ ))
+ }
+ },
+ e => {
+ // Typecheck recursively all subexpressions
+ let expr = e.traverse_ref_with_special_handling_of_binders(
+ |e| type_with(ctx, e.clone()),
+ |_, _| unreachable!(),
+ )?;
+ type_last_layer(ctx, expr, span)?
+ }
+ })
+}
+
+/// When all sub-expressions have been typed, check the remaining toplevel
+/// layer.
+fn type_last_layer(
+ ctx: &TypecheckContext,
+ e: ExprF<Value, Normalized>,
+ span: Span,
+) -> Result<Value, TypeError> {
+ use syntax::BinOp::*;
+ use syntax::Builtin::*;
+ use syntax::Const::Type;
+ use syntax::ExprF::*;
+ use TypeMessage::*;
+ let mkerr = |msg: TypeMessage| Err(TypeError::new(ctx, msg));
+
+ /// Intermediary return type
+ enum Ret {
+ /// Returns the contained value as is
+ RetWhole(Value),
+ /// Returns the input expression `e` with the contained value as its type
+ RetTypeOnly(Value),
+ }
+ use Ret::*;
+
+ let ret = match &e {
+ Import(_) => unreachable!(
+ "There should remain no imports in a resolved expression"
+ ),
+ Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => {
+ unreachable!()
+ }
+ App(f, a) => {
+ let tf = f.get_type()?;
+ let tf_borrow = tf.as_whnf();
+ let (x, tx, tb) = match &*tf_borrow {
+ ValueF::Pi(x, tx, tb) => (x, tx, tb),
+ _ => return mkerr(NotAFunction(f.clone())),
+ };
+ if &a.get_type()? != tx {
+ return mkerr(TypeMismatch(f.clone(), tx.clone(), a.clone()));
+ }
+
+ RetTypeOnly(tb.subst_shift(&x.into(), a))
+ }
+ Annot(x, t) => {
+ if &x.get_type()? != t {
+ return mkerr(AnnotMismatch(x.clone(), t.clone()));
+ }
+ RetWhole(x.clone())
+ }
+ Assert(t) => {
+ match &*t.as_whnf() {
+ ValueF::Equivalence(x, y) if x == y => {}
+ ValueF::Equivalence(x, y) => {
+ return mkerr(AssertMismatch(x.clone(), y.clone()))
+ }
+ _ => return mkerr(AssertMustTakeEquivalence),
+ }
+ RetTypeOnly(t.clone())
+ }
+ BoolIf(x, y, z) => {
+ if *x.get_type()?.as_whnf() != ValueF::from_builtin(Bool) {
+ return mkerr(InvalidPredicate(x.clone()));
+ }
+
+ if y.get_type()?.get_type()?.as_const() != Some(Type) {
+ return mkerr(IfBranchMustBeTerm(true, y.clone()));
+ }
+
+ if z.get_type()?.get_type()?.as_const() != Some(Type) {
+ return mkerr(IfBranchMustBeTerm(false, z.clone()));
+ }
+
+ if y.get_type()? != z.get_type()? {
+ return mkerr(IfBranchMismatch(y.clone(), z.clone()));
+ }
+
+ RetTypeOnly(y.get_type()?)
+ }
+ EmptyListLit(t) => {
+ match &*t.as_whnf() {
+ ValueF::AppliedBuiltin(syntax::Builtin::List, args)
+ if args.len() == 1 => {}
+ _ => return mkerr(InvalidListType(t.clone())),
+ }
+ RetTypeOnly(t.clone())
+ }
+ NEListLit(xs) => {
+ let mut iter = xs.iter().enumerate();
+ let (_, x) = iter.next().unwrap();
+ for (i, y) in iter {
+ if x.get_type()? != y.get_type()? {
+ return mkerr(InvalidListElement(
+ i,
+ x.get_type()?,
+ y.clone(),
+ ));
+ }
+ }
+ let t = x.get_type()?;
+ if t.get_type()?.as_const() != Some(Type) {
+ return mkerr(InvalidListType(t));
+ }
+
+ RetTypeOnly(Value::from_builtin(syntax::Builtin::List).app(t))
+ }
+ SomeLit(x) => {
+ let t = x.get_type()?;
+ if t.get_type()?.as_const() != Some(Type) {
+ return mkerr(InvalidOptionalType(t));
+ }
+
+ RetTypeOnly(Value::from_builtin(syntax::Builtin::Optional).app(t))
+ }
+ RecordType(kts) => RetWhole(tck_record_type(
+ ctx,
+ kts.iter().map(|(x, t)| Ok((x.clone(), t.clone()))),
+ )?),
+ UnionType(kts) => RetWhole(tck_union_type(
+ ctx,
+ kts.iter().map(|(x, t)| Ok((x.clone(), t.clone()))),
+ )?),
+ RecordLit(kvs) => RetTypeOnly(tck_record_type(
+ ctx,
+ kvs.iter().map(|(x, v)| Ok((x.clone(), v.get_type()?))),
+ )?),
+ Field(r, x) => {
+ match &*r.get_type()?.as_whnf() {
+ ValueF::RecordType(kts) => match kts.get(&x) {
+ Some(tth) => {
+ RetTypeOnly(tth.clone())
+ },
+ None => return mkerr(MissingRecordField(x.clone(),
+ r.clone())),
+ },
+ // TODO: branch here only when r.get_type() is a Const
+ _ => {
+ match &*r.as_whnf() {
+ ValueF::UnionType(kts) => match kts.get(&x) {
+ // Constructor has type T -> < x: T, ... >
+ Some(Some(t)) => {
+ RetTypeOnly(
+ tck_pi_type(
+ ctx,
+ "_".into(),
+ t.clone(),
+ r.under_binder(Label::from("_")),
+ )?
+ )
+ },
+ Some(None) => {
+ RetTypeOnly(r.clone())
+ },
+ None => {
+ return mkerr(MissingUnionField(
+ x.clone(),
+ r.clone(),
+ ))
+ },
+ },
+ _ => {
+ return mkerr(NotARecord(
+ x.clone(),
+ r.clone()
+ ))
+ },
+ }
+ }
+ // _ => mkerr(NotARecord(
+ // x,
+ // r?,
+ // )),
+ }
+ }
+ Const(c) => RetWhole(const_to_value(*c)),
+ Builtin(b) => RetWhole(builtin_to_value(*b)),
+ BoolLit(_) => RetTypeOnly(builtin_to_value(Bool)),
+ NaturalLit(_) => RetTypeOnly(builtin_to_value(Natural)),
+ IntegerLit(_) => RetTypeOnly(builtin_to_value(Integer)),
+ DoubleLit(_) => RetTypeOnly(builtin_to_value(Double)),
+ TextLit(interpolated) => {
+ let text_type = builtin_to_value(Text);
+ for contents in interpolated.iter() {
+ use InterpolatedTextContents::Expr;
+ if let Expr(x) = contents {
+ if x.get_type()? != text_type {
+ return mkerr(InvalidTextInterpolation(x.clone()));
+ }
+ }
+ }
+ RetTypeOnly(text_type)
+ }
+ BinOp(RightBiasedRecordMerge, l, r) => {
+ let l_type = l.get_type()?;
+ let r_type = r.get_type()?;
+
+ // Extract the LHS record type
+ let l_type_borrow = l_type.as_whnf();
+ let kts_x = match &*l_type_borrow {
+ ValueF::RecordType(kts) => kts,
+ _ => return mkerr(MustCombineRecord(l.clone())),
+ };
+
+ // Extract the RHS record type
+ let r_type_borrow = r_type.as_whnf();
+ let kts_y = match &*r_type_borrow {
+ ValueF::RecordType(kts) => kts,
+ _ => return mkerr(MustCombineRecord(r.clone())),
+ };
+
+ // Union the two records, preferring
+ // the values found in the RHS.
+ let kts = merge_maps::<_, _, _, !>(kts_x, kts_y, |_, _, r_t| {
+ Ok(r_t.clone())
+ })?;
+
+ // Construct the final record type from the union
+ RetTypeOnly(tck_record_type(
+ ctx,
+ kts.into_iter().map(|(x, v)| Ok((x.clone(), v))),
+ )?)
+ }
+ BinOp(RecursiveRecordMerge, l, r) => RetTypeOnly(type_last_layer(
+ ctx,
+ ExprF::BinOp(
+ RecursiveRecordTypeMerge,
+ l.get_type()?,
+ r.get_type()?,
+ ),
+ Span::Artificial,
+ )?),
+ BinOp(RecursiveRecordTypeMerge, l, r) => {
+ // Extract the LHS record type
+ let borrow_l = l.as_whnf();
+ let kts_x = match &*borrow_l {
+ ValueF::RecordType(kts) => kts,
+ _ => {
+ return mkerr(RecordTypeMergeRequiresRecordType(l.clone()))
+ }
+ };
+
+ // Extract the RHS record type
+ let borrow_r = r.as_whnf();
+ let kts_y = match &*borrow_r {
+ ValueF::RecordType(kts) => kts,
+ _ => {
+ return mkerr(RecordTypeMergeRequiresRecordType(r.clone()))
+ }
+ };
+
+ // Ensure that the records combine without a type error
+ let kts = merge_maps(
+ kts_x,
+ kts_y,
+ // If the Label exists for both records, then we hit the recursive case.
+ |_, l: &Value, r: &Value| {
+ type_last_layer(
+ ctx,
+ ExprF::BinOp(
+ RecursiveRecordTypeMerge,
+ l.clone(),
+ r.clone(),
+ ),
+ Span::Artificial,
+ )
+ },
+ )?;
+
+ RetWhole(tck_record_type(ctx, kts.into_iter().map(Ok))?)
+ }
+ BinOp(o @ ListAppend, l, r) => {
+ match &*l.get_type()?.as_whnf() {
+ ValueF::AppliedBuiltin(List, _) => {}
+ _ => return mkerr(BinOpTypeMismatch(*o, l.clone())),
+ }
+
+ if l.get_type()? != r.get_type()? {
+ return mkerr(BinOpTypeMismatch(*o, r.clone()));
+ }
+
+ RetTypeOnly(l.get_type()?)
+ }
+ BinOp(Equivalence, l, r) => {
+ if l.get_type()?.get_type()?.as_const() != Some(Type) {
+ return mkerr(EquivalenceArgumentMustBeTerm(true, l.clone()));
+ }
+ if r.get_type()?.get_type()?.as_const() != Some(Type) {
+ return mkerr(EquivalenceArgumentMustBeTerm(false, r.clone()));
+ }
+
+ if l.get_type()? != r.get_type()? {
+ return mkerr(EquivalenceTypeMismatch(r.clone(), l.clone()));
+ }
+
+ RetWhole(Value::from_valuef_and_type(
+ ValueF::Equivalence(l.clone(), r.clone()),
+ Value::from_const(Type),
+ ))
+ }
+ BinOp(o, l, r) => {
+ let t = builtin_to_value(match o {
+ BoolAnd => Bool,
+ BoolOr => Bool,
+ BoolEQ => Bool,
+ BoolNE => Bool,
+ NaturalPlus => Natural,
+ NaturalTimes => Natural,
+ TextAppend => Text,
+ ListAppend => unreachable!(),
+ RightBiasedRecordMerge => unreachable!(),
+ RecursiveRecordMerge => unreachable!(),
+ RecursiveRecordTypeMerge => unreachable!(),
+ ImportAlt => unreachable!("There should remain no import alternatives in a resolved expression"),
+ Equivalence => unreachable!(),
+ });
+
+ if l.get_type()? != t {
+ return mkerr(BinOpTypeMismatch(*o, l.clone()));
+ }
+
+ if r.get_type()? != t {
+ return mkerr(BinOpTypeMismatch(*o, r.clone()));
+ }
+
+ RetTypeOnly(t)
+ }
+ Merge(record, union, type_annot) => {
+ let record_type = record.get_type()?;
+ let record_borrow = record_type.as_whnf();
+ let handlers = match &*record_borrow {
+ ValueF::RecordType(kts) => kts,
+ _ => return mkerr(Merge1ArgMustBeRecord(record.clone())),
+ };
+
+ let union_type = union.get_type()?;
+ let union_borrow = union_type.as_whnf();
+ let variants = match &*union_borrow {
+ ValueF::UnionType(kts) => kts,
+ _ => return mkerr(Merge2ArgMustBeUnion(union.clone())),
+ };
+
+ let mut inferred_type = None;
+ for (x, handler_type) in handlers {
+ let handler_return_type =
+ match variants.get(x) {
+ // Union alternative with type
+ Some(Some(variant_type)) => {
+ let handler_type_borrow = handler_type.as_whnf();
+ let (x, tx, tb) = match &*handler_type_borrow {
+ ValueF::Pi(x, tx, tb) => (x, tx, tb),
+ _ => {
+ return mkerr(NotAFunction(
+ handler_type.clone(),
+ ))
+ }
+ };
+
+ if variant_type != tx {
+ return mkerr(TypeMismatch(
+ handler_type.clone(),
+ tx.clone(),
+ variant_type.clone(),
+ ));
+ }
+
+ // Extract `tb` from under the `x` binder. Fails if `x` appears free in `tb`.
+ match tb.over_binder(x) {
+ Some(x) => x,
+ None => return mkerr(
+ MergeHandlerReturnTypeMustNotBeDependent,
+ ),
+ }
+ }
+ // Union alternative without type
+ Some(None) => handler_type.clone(),
+ None => {
+ return mkerr(MergeHandlerMissingVariant(x.clone()))
+ }
+ };
+ match &inferred_type {
+ None => inferred_type = Some(handler_return_type),
+ Some(t) => {
+ if t != &handler_return_type {
+ return mkerr(MergeHandlerTypeMismatch);
+ }
+ }
+ }
+ }
+ for x in variants.keys() {
+ if !handlers.contains_key(x) {
+ return mkerr(MergeVariantMissingHandler(x.clone()));
+ }
+ }
+
+ match (inferred_type, type_annot) {
+ (Some(ref t1), Some(t2)) => {
+ if t1 != t2 {
+ return mkerr(MergeAnnotMismatch);
+ }
+ RetTypeOnly(t2.clone())
+ }
+ (Some(t), None) => RetTypeOnly(t),
+ (None, Some(t)) => RetTypeOnly(t.clone()),
+ (None, None) => return mkerr(MergeEmptyNeedsAnnotation),
+ }
+ }
+ ToMap(_, _) => unimplemented!("toMap"),
+ Projection(record, labels) => {
+ let record_type = record.get_type()?;
+ let record_type_borrow = record_type.as_whnf();
+ let kts = match &*record_type_borrow {
+ ValueF::RecordType(kts) => kts,
+ _ => return mkerr(ProjectionMustBeRecord),
+ };
+
+ let mut new_kts = HashMap::new();
+ for l in labels {
+ match kts.get(l) {
+ None => return mkerr(ProjectionMissingEntry),
+ Some(t) => new_kts.insert(l.clone(), t.clone()),
+ };
+ }
+
+ RetTypeOnly(Value::from_valuef_and_type(
+ ValueF::RecordType(new_kts),
+ record_type.get_type()?,
+ ))
+ }
+ ProjectionByExpr(_, _) => unimplemented!("selection by expression"),
+ };
+
+ Ok(match ret {
+ RetTypeOnly(typ) => Value::from_valuef_and_type_and_span(
+ ValueF::PartialExpr(e),
+ typ,
+ span,
+ ),
+ RetWhole(v) => v.with_span(span),
+ })
+}
+
+/// `typecheck` is the same as `type_with` with an empty context, meaning that
+/// the expression must be closed (i.e. no free variables), otherwise
+/// type-checking will fail.
+pub(crate) fn typecheck(e: Expr<Normalized>) -> Result<Value, TypeError> {
+ type_with(&TypecheckContext::new(), e)
+}
+
+pub(crate) fn typecheck_with(
+ expr: Expr<Normalized>,
+ ty: Expr<Normalized>,
+) -> Result<Value, TypeError> {
+ typecheck(expr.rewrap(ExprF::Annot(expr.clone(), ty)))
+}