author     Fintan Halpenny  2019-09-02 23:09:26 +0100
committer  Fintan Halpenny  2019-09-02 23:09:26 +0100
commit     8553b398a5f97eed240f5360282e911392cab6ff (patch)
tree       076d554b7e066cf854aa50f350096ce55e3bd691 /dhall/src/phase
parent     e73f822b6972e8fa2e72b56ff5378b91bea1a5e6 (diff)
parent     737abd9be6d35bbce784d9cf249edf7ad14677d6 (diff)
Merge remote-tracking branch 'origin/master' into fintan/canonicalize
Diffstat (limited to 'dhall/src/phase')
-rw-r--r--  dhall/src/phase/.resolve.rs.swp   bin 0 -> 16384 bytes
-rw-r--r--  dhall/src/phase/binary.rs         148
-rw-r--r--  dhall/src/phase/mod.rs            235
-rw-r--r--  dhall/src/phase/normalize.rs      953
-rw-r--r--  dhall/src/phase/parse.rs           12
-rw-r--r--  dhall/src/phase/resolve.rs         34
-rw-r--r--  dhall/src/phase/typecheck.rs      846
7 files changed, 984 insertions, 1244 deletions
diff --git a/dhall/src/phase/.resolve.rs.swp b/dhall/src/phase/.resolve.rs.swp
new file mode 100644
index 0000000..5314300
--- /dev/null
+++ b/dhall/src/phase/.resolve.rs.swp
Binary files differ
diff --git a/dhall/src/phase/binary.rs b/dhall/src/phase/binary.rs
index 89b6db2..16e7ce9 100644
--- a/dhall/src/phase/binary.rs
+++ b/dhall/src/phase/binary.rs
@@ -5,30 +5,26 @@ use std::vec;
use dhall_syntax::map::DupTreeMap;
use dhall_syntax::{
- rc, ExprF, FilePrefix, Hash, Import, ImportHashed, ImportLocation,
- ImportMode, Integer, InterpolatedText, Label, Natural, Scheme, SubExpr,
- URL, V, File,
+ rc, Expr, ExprF, FilePrefix, Hash, Import, ImportLocation, ImportMode,
+ Integer, InterpolatedText, Label, Natural, Scheme, URL, V, File
};
use crate::error::{DecodeError, EncodeError};
-use crate::phase::{DecodedSubExpr, ParsedSubExpr};
+use crate::phase::DecodedExpr;
-pub fn decode(data: &[u8]) -> Result<DecodedSubExpr, DecodeError> {
+pub(crate) fn decode(data: &[u8]) -> Result<DecodedExpr, DecodeError> {
match serde_cbor::de::from_slice(data) {
Ok(v) => cbor_value_to_dhall(&v),
Err(e) => Err(DecodeError::CBORError(e)),
}
}
-//TODO: encode normalized expression too
-pub fn encode(expr: &ParsedSubExpr) -> Result<Vec<u8>, EncodeError> {
+pub(crate) fn encode<E>(expr: &Expr<E>) -> Result<Vec<u8>, EncodeError> {
serde_cbor::ser::to_vec(&Serialize::Expr(expr))
.map_err(|e| EncodeError::CBORError(e))
}
-fn cbor_value_to_dhall(
- data: &cbor::Value,
-) -> Result<DecodedSubExpr, DecodeError> {
+fn cbor_value_to_dhall(data: &cbor::Value) -> Result<DecodedExpr, DecodeError> {
use cbor::Value::*;
use dhall_syntax::{BinOp, Builtin, Const};
use ExprF::*;
@@ -262,20 +258,12 @@ fn cbor_value_to_dhall(
};
let headers = match rest.next() {
Some(Null) => None,
- // TODO
- // Some(x) => {
- // match cbor_value_to_dhall(&x)?.as_ref() {
- // Embed(import) => Some(Box::new(
- // import.location_hashed.clone(),
- // )),
- // _ => Err(DecodeError::WrongFormatError(
- // "import/remote/headers".to_owned(),
- // ))?,
- // }
- // }
+ Some(x) => {
+ let x = cbor_value_to_dhall(&x)?;
+ Some(x)
+ }
_ => Err(DecodeError::WrongFormatError(
- "import/remote/headers is unimplemented"
- .to_owned(),
+ "import/remote/headers".to_owned(),
))?,
};
let authority = match rest.next() {
@@ -343,9 +331,10 @@ fn cbor_value_to_dhall(
"import/type".to_owned(),
))?,
};
- Embed(Import {
+ Import(dhall_syntax::Import {
mode,
- location_hashed: ImportHashed { hash, location },
+ hash,
+ location,
})
}
[U64(25), bindings..] => {
@@ -380,6 +369,15 @@ fn cbor_value_to_dhall(
let y = cbor_value_to_dhall(&y)?;
Annot(x, y)
}
+ [U64(27), x] => {
+ let x = cbor_value_to_dhall(&x)?;
+ ToMap(x, None)
+ }
+ [U64(27), x, y] => {
+ let x = cbor_value_to_dhall(&x)?;
+ let y = cbor_value_to_dhall(&y)?;
+ ToMap(x, Some(y))
+ }
[U64(28), x] => {
let x = cbor_value_to_dhall(&x)?;
EmptyListLit(x)
@@ -394,7 +392,7 @@ fn cbor_map_to_dhall_map<'a, T>(
map: impl IntoIterator<Item = (&'a cbor::ObjectKey, &'a cbor::Value)>,
) -> Result<T, DecodeError>
where
- T: FromIterator<(Label, DecodedSubExpr)>,
+ T: FromIterator<(Label, DecodedExpr)>,
{
map.into_iter()
.map(|(k, v)| -> Result<(_, _), _> {
@@ -411,7 +409,7 @@ fn cbor_map_to_dhall_opt_map<'a, T>(
map: impl IntoIterator<Item = (&'a cbor::ObjectKey, &'a cbor::Value)>,
) -> Result<T, DecodeError>
where
- T: FromIterator<(Label, Option<DecodedSubExpr>)>,
+ T: FromIterator<(Label, Option<DecodedExpr>)>,
{
map.into_iter()
.map(|(k, v)| -> Result<(_, _), _> {
@@ -427,11 +425,11 @@ where
.collect::<Result<_, _>>()
}
-enum Serialize<'a> {
- Expr(&'a ParsedSubExpr),
+enum Serialize<'a, E> {
+ Expr(&'a Expr<E>),
CBOR(cbor::Value),
- RecordMap(&'a DupTreeMap<Label, ParsedSubExpr>),
- UnionMap(&'a DupTreeMap<Label, Option<ParsedSubExpr>>),
+ RecordMap(&'a DupTreeMap<Label, Expr<E>>),
+ UnionMap(&'a DupTreeMap<Label, Option<Expr<E>>>),
}
macro_rules! count {
@@ -451,7 +449,7 @@ macro_rules! ser_seq {
}};
}
-fn serialize_subexpr<S>(ser: S, e: &ParsedSubExpr) -> Result<S::Ok, S::Error>
+fn serialize_subexpr<S, E>(ser: S, e: &Expr<E>) -> Result<S::Ok, S::Error>
where
S: serde::ser::Serializer,
{
@@ -461,21 +459,14 @@ where
use std::iter::once;
use self::Serialize::{RecordMap, UnionMap};
- fn expr(x: &ParsedSubExpr) -> self::Serialize<'_> {
+ fn expr<E>(x: &Expr<E>) -> self::Serialize<'_, E> {
self::Serialize::Expr(x)
}
- fn cbor<'a>(v: cbor::Value) -> self::Serialize<'a> {
- self::Serialize::CBOR(v)
- }
- fn tag<'a>(x: u64) -> self::Serialize<'a> {
- cbor(U64(x))
- }
- fn null<'a>() -> self::Serialize<'a> {
- cbor(cbor::Value::Null)
- }
- fn label<'a>(l: &Label) -> self::Serialize<'a> {
- cbor(cbor::Value::String(l.into()))
- }
+ let cbor =
+ |v: cbor::Value| -> self::Serialize<'_, E> { self::Serialize::CBOR(v) };
+ let tag = |x: u64| cbor(U64(x));
+ let null = || cbor(cbor::Value::Null);
+ let label = |l: &Label| cbor(cbor::Value::String(l.into()));
match e.as_ref() {
Const(c) => ser.serialize_str(&c.to_string()),
@@ -571,25 +562,33 @@ where
Merge(x, y, Some(z)) => {
ser_seq!(ser; tag(6), expr(x), expr(y), expr(z))
}
+ ToMap(x, None) => ser_seq!(ser; tag(27), expr(x)),
+ ToMap(x, Some(y)) => ser_seq!(ser; tag(27), expr(x), expr(y)),
Projection(x, ls) => ser.collect_seq(
once(tag(10))
.chain(once(expr(x)))
.chain(ls.iter().map(label)),
),
- Embed(import) => serialize_import(ser, import),
+ Import(import) => serialize_import(ser, import),
+ Embed(_) => unimplemented!(
+ "An expression with resolved imports cannot be binary-encoded"
+ ),
}
}
-fn serialize_import<S>(ser: S, import: &Import) -> Result<S::Ok, S::Error>
+fn serialize_import<S, E>(
+ ser: S,
+ import: &Import<Expr<E>>,
+) -> Result<S::Ok, S::Error>
where
S: serde::ser::Serializer,
{
use cbor::Value::{Bytes, Null, U64};
use serde::ser::SerializeSeq;
- let count = 4 + match &import.location_hashed.location {
- ImportLocation::Remote(url) => 3 + url.path.clone().into_iter().len(),
- ImportLocation::Local(_, path) => path.clone().into_iter().len(),
+ let count = 4 + match &import.location {
+ ImportLocation::Remote(url) => 3 + url.path.file_path.len(),
+ ImportLocation::Local(_, path) => path.file_path.len(),
ImportLocation::Env(_) => 1,
ImportLocation::Missing => 0,
};
@@ -597,7 +596,7 @@ where
ser_seq.serialize_element(&U64(24))?;
- let hash = match &import.location_hashed.hash {
+ let hash = match &import.hash {
None => Null,
Some(Hash::SHA256(h)) => {
let mut bytes = vec![18, 32];
@@ -614,7 +613,7 @@ where
};
ser_seq.serialize_element(&U64(mode))?;
- let scheme = match &import.location_hashed.location {
+ let scheme = match &import.location {
ImportLocation::Remote(url) => match url.scheme {
Scheme::HTTP => 0,
Scheme::HTTPS => 1,
@@ -630,21 +629,16 @@ where
};
ser_seq.serialize_element(&U64(scheme))?;
- match &import.location_hashed.location {
+ match &import.location {
ImportLocation::Remote(url) => {
match &url.headers {
None => ser_seq.serialize_element(&Null)?,
- Some(location_hashed) => {
- ser_seq.serialize_element(&self::Serialize::Expr(
- &SubExpr::from_expr_no_note(ExprF::Embed(Import {
- mode: ImportMode::Code,
- location_hashed: location_hashed.as_ref().clone(),
- })),
- ))?
+ Some(e) => {
+ ser_seq.serialize_element(&self::Serialize::Expr(e))?
}
};
ser_seq.serialize_element(&url.authority)?;
- for p in url.path.clone().into_iter() {
+ for p in url.path.file_path.iter() {
ser_seq.serialize_element(&p)?;
}
match &url.query {
@@ -653,7 +647,7 @@ where
};
}
ImportLocation::Local(_, path) => {
- for p in path.clone().into_iter() {
+ for p in path.file_path.iter() {
ser_seq.serialize_element(&p)?;
}
}
@@ -666,7 +660,7 @@ where
ser_seq.end()
}
-impl<'a> serde::ser::Serialize for Serialize<'a> {
+impl<'a, E> serde::ser::Serialize for Serialize<'a, E> {
fn serialize<S>(&self, ser: S) -> Result<S::Ok, S::Error>
where
S: serde::ser::Serializer,
@@ -692,13 +686,10 @@ impl<'a> serde::ser::Serialize for Serialize<'a> {
}
}
-fn collect_nested_applications<'a, N, E>(
- e: &'a SubExpr<N, E>,
-) -> (&'a SubExpr<N, E>, Vec<&'a SubExpr<N, E>>) {
- fn go<'a, N, E>(
- e: &'a SubExpr<N, E>,
- vec: &mut Vec<&'a SubExpr<N, E>>,
- ) -> &'a SubExpr<N, E> {
+fn collect_nested_applications<'a, E>(
+ e: &'a Expr<E>,
+) -> (&'a Expr<E>, Vec<&'a Expr<E>>) {
+ fn go<'a, E>(e: &'a Expr<E>, vec: &mut Vec<&'a Expr<E>>) -> &'a Expr<E> {
match e.as_ref() {
ExprF::App(f, a) => {
vec.push(a);
@@ -712,16 +703,15 @@ fn collect_nested_applications<'a, N, E>(
(e, vec)
}
-type LetBinding<'a, N, E> =
- (&'a Label, &'a Option<SubExpr<N, E>>, &'a SubExpr<N, E>);
+type LetBinding<'a, E> = (&'a Label, &'a Option<Expr<E>>, &'a Expr<E>);
-fn collect_nested_lets<'a, N, E>(
- e: &'a SubExpr<N, E>,
-) -> (&'a SubExpr<N, E>, Vec<LetBinding<'a, N, E>>) {
- fn go<'a, N, E>(
- e: &'a SubExpr<N, E>,
- vec: &mut Vec<LetBinding<'a, N, E>>,
- ) -> &'a SubExpr<N, E> {
+fn collect_nested_lets<'a, E>(
+ e: &'a Expr<E>,
+) -> (&'a Expr<E>, Vec<LetBinding<'a, E>>) {
+ fn go<'a, E>(
+ e: &'a Expr<E>,
+ vec: &mut Vec<LetBinding<'a, E>>,
+ ) -> &'a Expr<E> {
match e.as_ref() {
ExprF::Let(l, t, v, e) => {
vec.push((l, t, v));
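
Note: `binary::decode` and `binary::encode` above go through `serde_cbor::de::from_slice` and `serde_cbor::ser::to_vec`. Below is a minimal standalone round trip through those same entry points, on a plain Rust tuple rather than the crate's `Expr` types; illustration only, and serde_cbor version details may differ from the one pinned by this crate.

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Same serde_cbor entry points used by binary.rs: ser::to_vec to
    // produce CBOR bytes, de::from_slice to parse them back.
    let v: (u64, Option<String>) = (24, None);
    let bytes = serde_cbor::ser::to_vec(&v)?;
    let back: (u64, Option<String>) = serde_cbor::de::from_slice(&bytes)?;
    assert_eq!(v, back);
    println!("round-tripped {} bytes", bytes.len());
    Ok(())
}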
diff --git a/dhall/src/phase/mod.rs b/dhall/src/phase/mod.rs
index ccedff2..2c5505c 100644
--- a/dhall/src/phase/mod.rs
+++ b/dhall/src/phase/mod.rs
@@ -1,17 +1,14 @@
-use std::borrow::Cow;
use std::fmt::Display;
use std::path::Path;
-use dhall_syntax::{Const, Import, Span, SubExpr, X};
+use dhall_syntax::{Builtin, Const, Expr};
-use crate::core::context::TypecheckContext;
-use crate::core::thunk::Thunk;
-use crate::core::value::Value;
+use crate::core::value::{ToExprOptions, Value};
+use crate::core::valuef::ValueF;
use crate::core::var::{AlphaVar, Shift, Subst};
-use crate::error::{EncodeError, Error, ImportError, TypeError, TypeMessage};
+use crate::error::{EncodeError, Error, ImportError, TypeError};
use resolve::ImportRoot;
-use typecheck::type_of_const;
pub(crate) mod binary;
pub(crate) mod normalize;
@@ -19,30 +16,23 @@ pub(crate) mod parse;
pub(crate) mod resolve;
pub(crate) mod typecheck;
-pub type ParsedSubExpr = SubExpr<Span, Import>;
-pub type DecodedSubExpr = SubExpr<X, Import>;
-pub type ResolvedSubExpr = SubExpr<Span, Normalized>;
-pub type NormalizedSubExpr = SubExpr<X, X>;
+pub type ParsedExpr = Expr<!>;
+pub type DecodedExpr = Expr<!>;
+pub type ResolvedExpr = Expr<Normalized>;
+pub type NormalizedExpr = Expr<Normalized>;
#[derive(Debug, Clone)]
-pub struct Parsed(ParsedSubExpr, ImportRoot);
+pub struct Parsed(ParsedExpr, ImportRoot);
/// An expression where all imports have been resolved
+///
+/// Invariant: there must be no `Import` nodes or `ImportAlt` operations left.
#[derive(Debug, Clone)]
-pub struct Resolved(ResolvedSubExpr);
+pub struct Resolved(ResolvedExpr);
/// A typed expression
#[derive(Debug, Clone)]
-pub enum Typed {
- // Any value, along with (optionally) its type
- Untyped(Thunk),
- Typed(Thunk, Box<Type>),
- // One of the base higher-kinded typed.
- // Used to avoid storing the same tower ot Type->Kind->Sort
- // over and over again. Also enables having Sort as a type
- // even though it doesn't itself have a type.
- Const(Const),
-}
+pub struct Typed(Value);
/// A normalized expression.
///
@@ -50,8 +40,6 @@ pub enum Typed {
#[derive(Debug, Clone)]
pub struct Normalized(Typed);
-pub type Type = Typed;
-
impl Parsed {
pub fn parse_file(f: &Path) -> Result<Parsed, Error> {
parse::parse_file(f)
@@ -59,11 +47,9 @@ impl Parsed {
pub fn parse_str(s: &str) -> Result<Parsed, Error> {
parse::parse_str(s)
}
- #[allow(dead_code)]
pub fn parse_binary_file(f: &Path) -> Result<Parsed, Error> {
parse::parse_binary_file(f)
}
- #[allow(dead_code)]
pub fn parse_binary(data: &[u8]) -> Result<Parsed, Error> {
parse::parse_binary(data)
}
@@ -71,12 +57,10 @@ impl Parsed {
pub fn resolve(self) -> Result<Resolved, ImportError> {
resolve::resolve(self)
}
- #[allow(dead_code)]
pub fn skip_resolve(self) -> Result<Resolved, ImportError> {
resolve::skip_resolve_expr(self)
}
- #[allow(dead_code)]
pub fn encode(&self) -> Result<Vec<u8>, EncodeError> {
crate::phase::binary::encode(&self.0)
}
@@ -84,15 +68,11 @@ impl Parsed {
impl Resolved {
pub fn typecheck(self) -> Result<Typed, TypeError> {
- typecheck::typecheck(self)
+ Ok(typecheck::typecheck(self.0)?.into_typed())
}
- pub fn typecheck_with(self, ty: &Type) -> Result<Typed, TypeError> {
- typecheck::typecheck_with(self, ty)
- }
- /// Pretends this expression has been typechecked. Use with care.
- #[allow(dead_code)]
- pub fn skip_typecheck(self) -> Typed {
- typecheck::skip_typecheck(self)
+ pub fn typecheck_with(self, ty: &Typed) -> Result<Typed, TypeError> {
+ Ok(typecheck::typecheck_with(self.0, ty.normalize_to_expr())?
+ .into_typed())
}
}
@@ -105,117 +85,110 @@ impl Typed {
///
/// However, `normalize` will not fail if the expression is ill-typed and will
/// leave ill-typed sub-expressions unevaluated.
- pub fn normalize(self) -> Normalized {
- match &self {
- Typed::Const(_) => {}
- Typed::Untyped(thunk) | Typed::Typed(thunk, _) => {
- thunk.normalize_nf();
- }
- }
+ pub fn normalize(mut self) -> Normalized {
+ self.normalize_mut();
Normalized(self)
}
- pub fn from_thunk_and_type(th: Thunk, t: Type) -> Self {
- Typed::Typed(th, Box::new(t))
+ pub(crate) fn from_const(c: Const) -> Self {
+ Typed(Value::from_const(c))
}
- pub fn from_thunk_untyped(th: Thunk) -> Self {
- Typed::Untyped(th)
+ pub(crate) fn from_valuef_and_type(v: ValueF, t: Typed) -> Self {
+ Typed(Value::from_valuef_and_type(v, t.into_value()))
}
- pub fn from_const(c: Const) -> Self {
- Typed::Const(c)
+ pub(crate) fn from_value(th: Value) -> Self {
+ Typed(th)
}
- pub fn from_value_untyped(v: Value) -> Self {
- Typed::from_thunk_untyped(Thunk::from_value(v))
+ pub(crate) fn const_type() -> Self {
+ Typed::from_const(Const::Type)
}
- // TODO: Avoid cloning if possible
- pub fn to_value(&self) -> Value {
- match self {
- Typed::Untyped(th) | Typed::Typed(th, _) => th.to_value(),
- Typed::Const(c) => Value::Const(*c),
- }
- }
- pub fn to_expr(&self) -> NormalizedSubExpr {
- self.to_value().normalize_to_expr()
- }
- pub fn to_expr_alpha(&self) -> NormalizedSubExpr {
- self.to_value().normalize_to_expr_maybe_alpha(true)
+ pub(crate) fn to_expr(&self) -> NormalizedExpr {
+ self.0.to_expr(ToExprOptions {
+ alpha: false,
+ normalize: false,
+ })
}
- pub fn to_thunk(&self) -> Thunk {
- match self {
- Typed::Untyped(th) | Typed::Typed(th, _) => th.clone(),
- Typed::Const(c) => Thunk::from_value(Value::Const(*c)),
- }
+ pub fn normalize_to_expr(&self) -> NormalizedExpr {
+ self.0.to_expr(ToExprOptions {
+ alpha: false,
+ normalize: true,
+ })
}
- // Deprecated
- pub fn to_type(&self) -> Type {
- self.clone().into_type()
+ pub(crate) fn normalize_to_expr_alpha(&self) -> NormalizedExpr {
+ self.0.to_expr(ToExprOptions {
+ alpha: true,
+ normalize: true,
+ })
}
- // Deprecated
- pub fn into_type(self) -> Type {
- self
+ pub(crate) fn to_value(&self) -> Value {
+ self.0.clone()
}
- pub fn to_normalized(&self) -> Normalized {
- self.clone().normalize()
+ pub(crate) fn into_value(self) -> Value {
+ self.0
}
- pub fn as_const(&self) -> Option<Const> {
- // TODO: avoid clone
- match &self.to_value() {
- Value::Const(c) => Some(*c),
- _ => None,
- }
+
+ pub(crate) fn normalize_mut(&mut self) {
+ self.0.normalize_mut()
}
- pub fn normalize_mut(&mut self) {
- match self {
- Typed::Untyped(th) | Typed::Typed(th, _) => th.normalize_mut(),
- Typed::Const(_) => {}
- }
+ pub(crate) fn get_type(&self) -> Result<Typed, TypeError> {
+ Ok(self.0.get_type()?.into_typed())
}
- pub fn get_type(&self) -> Result<Cow<'_, Type>, TypeError> {
- match self {
- Typed::Untyped(_) => Err(TypeError::new(
- &TypecheckContext::new(),
- TypeMessage::Untyped,
- )),
- Typed::Typed(_, t) => Ok(Cow::Borrowed(t)),
- Typed::Const(c) => Ok(Cow::Owned(type_of_const(*c)?)),
- }
+ pub fn make_builtin_type(b: Builtin) -> Self {
+ Typed::from_value(Value::from_builtin(b))
+ }
+ pub fn make_optional_type(t: Typed) -> Self {
+ Typed::from_value(
+ Value::from_builtin(Builtin::Optional).app(t.to_value()),
+ )
+ }
+ pub fn make_list_type(t: Typed) -> Self {
+ Typed::from_value(Value::from_builtin(Builtin::List).app(t.to_value()))
+ }
+ pub fn make_record_type(
+ kts: impl Iterator<Item = (String, Typed)>,
+ ) -> Self {
+ Typed::from_valuef_and_type(
+ ValueF::RecordType(
+ kts.map(|(k, t)| (k.into(), t.into_value())).collect(),
+ ),
+ Typed::const_type(),
+ )
+ }
+ pub fn make_union_type(
+ kts: impl Iterator<Item = (String, Option<Typed>)>,
+ ) -> Self {
+ Typed::from_valuef_and_type(
+ ValueF::UnionType(
+ kts.map(|(k, t)| (k.into(), t.map(|t| t.into_value())))
+ .collect(),
+ ),
+ Typed::const_type(),
+ )
}
}
impl Normalized {
- #[allow(dead_code)]
- pub fn to_expr(&self) -> NormalizedSubExpr {
- self.0.to_expr()
- }
- #[allow(dead_code)]
- pub fn to_expr_alpha(&self) -> NormalizedSubExpr {
- self.0.to_expr_alpha()
+ pub fn encode(&self) -> Result<Vec<u8>, EncodeError> {
+ crate::phase::binary::encode(&self.to_expr())
}
- #[allow(dead_code)]
- pub fn to_type(&self) -> Type {
- self.0.to_type()
+
+ pub(crate) fn to_expr(&self) -> NormalizedExpr {
+ self.0.normalize_to_expr()
}
- pub fn to_value(&self) -> Value {
- self.0.to_value()
+ pub(crate) fn to_expr_alpha(&self) -> NormalizedExpr {
+ self.0.normalize_to_expr_alpha()
}
- pub fn into_typed(self) -> Typed {
+ pub(crate) fn into_typed(self) -> Typed {
self.0
}
}
impl Shift for Typed {
fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> {
- Some(match self {
- Typed::Untyped(th) => Typed::Untyped(th.shift(delta, var)?),
- Typed::Typed(th, t) => Typed::Typed(
- th.shift(delta, var)?,
- Box::new(t.shift(delta, var)?),
- ),
- Typed::Const(c) => Typed::Const(*c),
- })
+ Some(Typed(self.0.shift(delta, var)?))
}
}
@@ -225,16 +198,9 @@ impl Shift for Normalized {
}
}
-impl Subst<Typed> for Typed {
- fn subst_shift(&self, var: &AlphaVar, val: &Typed) -> Self {
- match self {
- Typed::Untyped(th) => Typed::Untyped(th.subst_shift(var, val)),
- Typed::Typed(th, t) => Typed::Typed(
- th.subst_shift(var, val),
- Box::new(t.subst_shift(var, val)),
- ),
- Typed::Const(c) => Typed::Const(*c),
- }
+impl Subst<Value> for Typed {
+ fn subst_shift(&self, var: &AlphaVar, val: &Value) -> Self {
+ Typed(self.0.subst_shift(var, val))
}
}
@@ -263,10 +229,21 @@ derive_traits_for_wrapper_struct!(Parsed);
derive_traits_for_wrapper_struct!(Resolved);
derive_traits_for_wrapper_struct!(Normalized);
+impl std::hash::Hash for Normalized {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: std::hash::Hasher,
+ {
+ if let Ok(vec) = self.encode() {
+ vec.hash(state)
+ }
+ }
+}
+
impl Eq for Typed {}
impl PartialEq for Typed {
fn eq(&self, other: &Self) -> bool {
- self.to_value() == other.to_value()
+ self.0 == other.0
}
}
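
Note: the new `Hash` impl for `Normalized` above hashes the binary (CBOR) encoding of the expression instead of walking the structure. A standalone sketch of that pattern with a hypothetical stand-in type and a toy `encode`; the real impl calls `phase::binary::encode`.

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hypothetical stand-in for `Normalized`; only the
// "hash the serialized bytes" pattern is shown.
struct Normalized(String);

impl Normalized {
    // Toy encoder standing in for the crate's CBOR encoding.
    fn encode(&self) -> Result<Vec<u8>, ()> {
        Ok(self.0.as_bytes().to_vec())
    }
}

impl Hash for Normalized {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Mirror the diff: hash the encoded bytes, and hash nothing
        // if encoding fails.
        if let Ok(bytes) = self.encode() {
            bytes.hash(state);
        }
    }
}

fn main() {
    let mut h = DefaultHasher::new();
    Normalized("Natural/even 2".into()).hash(&mut h);
    println!("{:x}", h.finish());
}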
diff --git a/dhall/src/phase/normalize.rs b/dhall/src/phase/normalize.rs
index 405677a..3f6e99c 100644
--- a/dhall/src/phase/normalize.rs
+++ b/dhall/src/phase/normalize.rs
@@ -1,348 +1,431 @@
use std::collections::HashMap;
+use dhall_syntax::Const::Type;
use dhall_syntax::{
- BinOp, Builtin, ExprF, InterpolatedText, InterpolatedTextContents,
- NaiveDouble, X,
+ BinOp, Builtin, ExprF, InterpolatedText, InterpolatedTextContents, Label,
+ NaiveDouble,
};
-use crate::core::context::NormalizationContext;
-use crate::core::thunk::{Thunk, TypeThunk};
use crate::core::value::Value;
-use crate::core::var::Subst;
-use crate::phase::{NormalizedSubExpr, ResolvedSubExpr, Typed};
-
-pub type InputSubExpr = ResolvedSubExpr;
-pub type OutputSubExpr = NormalizedSubExpr;
+use crate::core::valuef::ValueF;
+use crate::core::var::{AlphaLabel, Shift, Subst};
+use crate::phase::Normalized;
+
+// Ad-hoc macro to help construct closures
+macro_rules! make_closure {
+ (#$var:ident) => { $var.clone() };
+ (var($var:ident, $n:expr, $($ty:tt)*)) => {{
+ let var = crate::core::var::AlphaVar::from_var_and_alpha(
+ Label::from(stringify!($var)).into(),
+ $n
+ );
+ ValueF::Var(var)
+ .into_value_with_type(make_closure!($($ty)*))
+ }};
+ // Warning: assumes that $ty, as a dhall value, has type `Type`
+ (λ($var:ident : $($ty:tt)*) -> $($body:tt)*) => {{
+ let var: AlphaLabel = Label::from(stringify!($var)).into();
+ let ty = make_closure!($($ty)*);
+ let body = make_closure!($($body)*);
+ let body_ty = body.get_type_not_sort();
+ let lam_ty = ValueF::Pi(var.clone(), ty.clone(), body_ty)
+ .into_value_with_type(Value::from_const(Type));
+ ValueF::Lam(var, ty, body).into_value_with_type(lam_ty)
+ }};
+ (Natural) => {
+ Value::from_builtin(Builtin::Natural)
+ };
+ (List $($rest:tt)*) => {
+ Value::from_builtin(Builtin::List)
+ .app(make_closure!($($rest)*))
+ };
+ (Some($($rest:tt)*)) => {{
+ let v = make_closure!($($rest)*);
+ let v_type = v.get_type_not_sort();
+ let opt_v_type = Value::from_builtin(Builtin::Optional).app(v_type);
+ ValueF::NEOptionalLit(v).into_value_with_type(opt_v_type)
+ }};
+ (1 + $($rest:tt)*) => {
+ ValueF::PartialExpr(ExprF::BinOp(
+ dhall_syntax::BinOp::NaturalPlus,
+ make_closure!($($rest)*),
+ Value::from_valuef_and_type(
+ ValueF::NaturalLit(1),
+ make_closure!(Natural)
+ ),
+ )).into_value_with_type(
+ make_closure!(Natural)
+ )
+ };
+ ([ $($head:tt)* ] # $($tail:tt)*) => {{
+ let head = make_closure!($($head)*);
+ let tail = make_closure!($($tail)*);
+ let list_type = tail.get_type_not_sort();
+ ValueF::PartialExpr(ExprF::BinOp(
+ dhall_syntax::BinOp::ListAppend,
+ ValueF::NEListLit(vec![head])
+ .into_value_with_type(list_type.clone()),
+ tail,
+ )).into_value_with_type(list_type)
+ }};
+}
#[allow(clippy::cognitive_complexity)]
-pub fn apply_builtin(b: Builtin, args: Vec<Thunk>) -> Value {
+pub(crate) fn apply_builtin(
+ b: Builtin,
+ args: Vec<Value>,
+ ty: &Value,
+) -> ValueF {
use dhall_syntax::Builtin::*;
- use Value::*;
+ use ValueF::*;
+
+ // Small helper enum
+ enum Ret<'a> {
+ ValueF(ValueF),
+ Value(Value),
+ // For applications that can return a function, it's important to keep the remaining
+ // arguments to apply them to the resulting function.
+ ValueWithRemainingArgs(&'a [Value], Value),
+ DoneAsIs,
+ }
- // Return Ok((unconsumed args, returned value)), or Err(()) if value could not be produced.
let ret = match (b, args.as_slice()) {
- (OptionalNone, [t, r..]) => {
- Ok((r, EmptyOptionalLit(TypeThunk::from_thunk(t.clone()))))
- }
- (NaturalIsZero, [n, r..]) => match &*n.as_value() {
- NaturalLit(n) => Ok((r, BoolLit(*n == 0))),
- _ => Err(()),
+ (OptionalNone, [t]) => Ret::ValueF(EmptyOptionalLit(t.clone())),
+ (NaturalIsZero, [n]) => match &*n.as_whnf() {
+ NaturalLit(n) => Ret::ValueF(BoolLit(*n == 0)),
+ _ => Ret::DoneAsIs,
},
- (NaturalEven, [n, r..]) => match &*n.as_value() {
- NaturalLit(n) => Ok((r, BoolLit(*n % 2 == 0))),
- _ => Err(()),
+ (NaturalEven, [n]) => match &*n.as_whnf() {
+ NaturalLit(n) => Ret::ValueF(BoolLit(*n % 2 == 0)),
+ _ => Ret::DoneAsIs,
},
- (NaturalOdd, [n, r..]) => match &*n.as_value() {
- NaturalLit(n) => Ok((r, BoolLit(*n % 2 != 0))),
- _ => Err(()),
+ (NaturalOdd, [n]) => match &*n.as_whnf() {
+ NaturalLit(n) => Ret::ValueF(BoolLit(*n % 2 != 0)),
+ _ => Ret::DoneAsIs,
},
- (NaturalToInteger, [n, r..]) => match &*n.as_value() {
- NaturalLit(n) => Ok((r, IntegerLit(*n as isize))),
- _ => Err(()),
+ (NaturalToInteger, [n]) => match &*n.as_whnf() {
+ NaturalLit(n) => Ret::ValueF(IntegerLit(*n as isize)),
+ _ => Ret::DoneAsIs,
},
- (NaturalShow, [n, r..]) => match &*n.as_value() {
- NaturalLit(n) => Ok((
- r,
- TextLit(vec![InterpolatedTextContents::Text(n.to_string())]),
- )),
- _ => Err(()),
+ (NaturalShow, [n]) => match &*n.as_whnf() {
+ NaturalLit(n) => {
+ Ret::ValueF(TextLit(vec![InterpolatedTextContents::Text(
+ n.to_string(),
+ )]))
+ }
+ _ => Ret::DoneAsIs,
},
- (NaturalSubtract, [a, b, r..]) => {
- match (&*a.as_value(), &*b.as_value()) {
- (NaturalLit(a), NaturalLit(b)) => {
- Ok((r, NaturalLit(if b > a { b - a } else { 0 })))
- }
- (NaturalLit(0), b) => Ok((r, b.clone())),
- (_, NaturalLit(0)) => Ok((r, NaturalLit(0))),
- _ if a == b => Ok((r, NaturalLit(0))),
- _ => Err(()),
+ (NaturalSubtract, [a, b]) => match (&*a.as_whnf(), &*b.as_whnf()) {
+ (NaturalLit(a), NaturalLit(b)) => {
+ Ret::ValueF(NaturalLit(if b > a { b - a } else { 0 }))
}
- }
- (IntegerShow, [n, r..]) => match &*n.as_value() {
+ (NaturalLit(0), _) => Ret::Value(b.clone()),
+ (_, NaturalLit(0)) => Ret::ValueF(NaturalLit(0)),
+ _ if a == b => Ret::ValueF(NaturalLit(0)),
+ _ => Ret::DoneAsIs,
+ },
+ (IntegerShow, [n]) => match &*n.as_whnf() {
IntegerLit(n) => {
let s = if *n < 0 {
n.to_string()
} else {
format!("+{}", n)
};
- Ok((r, TextLit(vec![InterpolatedTextContents::Text(s)])))
+ Ret::ValueF(TextLit(vec![InterpolatedTextContents::Text(s)]))
}
- _ => Err(()),
+ _ => Ret::DoneAsIs,
},
- (IntegerToDouble, [n, r..]) => match &*n.as_value() {
- IntegerLit(n) => Ok((r, DoubleLit(NaiveDouble::from(*n as f64)))),
- _ => Err(()),
+ (IntegerToDouble, [n]) => match &*n.as_whnf() {
+ IntegerLit(n) => {
+ Ret::ValueF(DoubleLit(NaiveDouble::from(*n as f64)))
+ }
+ _ => Ret::DoneAsIs,
},
- (DoubleShow, [n, r..]) => match &*n.as_value() {
- DoubleLit(n) => Ok((
- r,
- TextLit(vec![InterpolatedTextContents::Text(n.to_string())]),
- )),
- _ => Err(()),
+ (DoubleShow, [n]) => match &*n.as_whnf() {
+ DoubleLit(n) => {
+ Ret::ValueF(TextLit(vec![InterpolatedTextContents::Text(
+ n.to_string(),
+ )]))
+ }
+ _ => Ret::DoneAsIs,
},
- (TextShow, [v, r..]) => match &*v.as_value() {
+ (TextShow, [v]) => match &*v.as_whnf() {
TextLit(elts) => {
match elts.as_slice() {
// Empty string literal.
[] => {
// Printing InterpolatedText takes care of all the escaping
- let txt: InterpolatedText<X> =
+ let txt: InterpolatedText<Normalized> =
std::iter::empty().collect();
let s = txt.to_string();
- Ok((
- r,
- TextLit(vec![InterpolatedTextContents::Text(s)]),
- ))
+ Ret::ValueF(TextLit(vec![
+ InterpolatedTextContents::Text(s),
+ ]))
}
// If there are no interpolations (invariants ensure that when there are no
// interpolations, there is a single Text item) in the literal.
[InterpolatedTextContents::Text(s)] => {
// Printing InterpolatedText takes care of all the escaping
- let txt: InterpolatedText<X> = std::iter::once(
- InterpolatedTextContents::Text(s.clone()),
- )
- .collect();
+ let txt: InterpolatedText<Normalized> =
+ std::iter::once(InterpolatedTextContents::Text(
+ s.clone(),
+ ))
+ .collect();
let s = txt.to_string();
- Ok((
- r,
- TextLit(vec![InterpolatedTextContents::Text(s)]),
- ))
+ Ret::ValueF(TextLit(vec![
+ InterpolatedTextContents::Text(s),
+ ]))
}
- _ => Err(()),
+ _ => Ret::DoneAsIs,
}
}
- _ => Err(()),
+ _ => Ret::DoneAsIs,
},
- (ListLength, [_, l, r..]) => match &*l.as_value() {
- EmptyListLit(_) => Ok((r, NaturalLit(0))),
- NEListLit(xs) => Ok((r, NaturalLit(xs.len()))),
- _ => Err(()),
+ (ListLength, [_, l]) => match &*l.as_whnf() {
+ EmptyListLit(_) => Ret::ValueF(NaturalLit(0)),
+ NEListLit(xs) => Ret::ValueF(NaturalLit(xs.len())),
+ _ => Ret::DoneAsIs,
},
- (ListHead, [_, l, r..]) => match &*l.as_value() {
- EmptyListLit(n) => Ok((r, EmptyOptionalLit(n.clone()))),
+ (ListHead, [_, l]) => match &*l.as_whnf() {
+ EmptyListLit(n) => Ret::ValueF(EmptyOptionalLit(n.clone())),
NEListLit(xs) => {
- Ok((r, NEOptionalLit(xs.iter().next().unwrap().clone())))
+ Ret::ValueF(NEOptionalLit(xs.iter().next().unwrap().clone()))
}
- _ => Err(()),
+ _ => Ret::DoneAsIs,
},
- (ListLast, [_, l, r..]) => match &*l.as_value() {
- EmptyListLit(n) => Ok((r, EmptyOptionalLit(n.clone()))),
- NEListLit(xs) => {
- Ok((r, NEOptionalLit(xs.iter().rev().next().unwrap().clone())))
- }
- _ => Err(()),
+ (ListLast, [_, l]) => match &*l.as_whnf() {
+ EmptyListLit(n) => Ret::ValueF(EmptyOptionalLit(n.clone())),
+ NEListLit(xs) => Ret::ValueF(NEOptionalLit(
+ xs.iter().rev().next().unwrap().clone(),
+ )),
+ _ => Ret::DoneAsIs,
},
- (ListReverse, [_, l, r..]) => match &*l.as_value() {
- EmptyListLit(n) => Ok((r, EmptyListLit(n.clone()))),
+ (ListReverse, [_, l]) => match &*l.as_whnf() {
+ EmptyListLit(n) => Ret::ValueF(EmptyListLit(n.clone())),
NEListLit(xs) => {
- Ok((r, NEListLit(xs.iter().rev().cloned().collect())))
+ Ret::ValueF(NEListLit(xs.iter().rev().cloned().collect()))
}
- _ => Err(()),
+ _ => Ret::DoneAsIs,
},
- (ListIndexed, [_, l, r..]) => match &*l.as_value() {
- EmptyListLit(t) => {
- let mut kts = HashMap::new();
- kts.insert(
- "index".into(),
- TypeThunk::from_value(Value::from_builtin(Natural)),
- );
- kts.insert("value".into(), t.clone());
- Ok((r, EmptyListLit(TypeThunk::from_value(RecordType(kts)))))
- }
- NEListLit(xs) => {
- let xs = xs
- .iter()
- .enumerate()
- .map(|(i, e)| {
- let i = NaturalLit(i);
- let mut kvs = HashMap::new();
- kvs.insert("index".into(), Thunk::from_value(i));
- kvs.insert("value".into(), e.clone());
- Thunk::from_value(RecordLit(kvs))
- })
- .collect();
- Ok((r, NEListLit(xs)))
+ (ListIndexed, [_, l]) => {
+ let l_whnf = l.as_whnf();
+ match &*l_whnf {
+ EmptyListLit(_) | NEListLit(_) => {
+ // Extract the type of the list elements
+ let t = match &*l_whnf {
+ EmptyListLit(t) => t.clone(),
+ NEListLit(xs) => xs[0].get_type_not_sort(),
+ _ => unreachable!(),
+ };
+
+ // Construct the returned record type: { index: Natural, value: t }
+ let mut kts = HashMap::new();
+ kts.insert("index".into(), Value::from_builtin(Natural));
+ kts.insert("value".into(), t.clone());
+ let t = Value::from_valuef_and_type(
+ RecordType(kts),
+ Value::from_const(Type),
+ );
+
+ // Construct the new list, with added indices
+ let list = match &*l_whnf {
+ EmptyListLit(_) => EmptyListLit(t),
+ NEListLit(xs) => NEListLit(
+ xs.iter()
+ .enumerate()
+ .map(|(i, e)| {
+ let mut kvs = HashMap::new();
+ kvs.insert(
+ "index".into(),
+ Value::from_valuef_and_type(
+ NaturalLit(i),
+ Value::from_builtin(
+ Builtin::Natural,
+ ),
+ ),
+ );
+ kvs.insert("value".into(), e.clone());
+ Value::from_valuef_and_type(
+ RecordLit(kvs),
+ t.clone(),
+ )
+ })
+ .collect(),
+ ),
+ _ => unreachable!(),
+ };
+ Ret::ValueF(list)
+ }
+ _ => Ret::DoneAsIs,
}
- _ => Err(()),
- },
- (ListBuild, [t, f, r..]) => match &*f.as_value() {
+ }
+ (ListBuild, [t, f]) => match &*f.as_whnf() {
// fold/build fusion
- Value::AppliedBuiltin(ListFold, args) => {
+ ValueF::AppliedBuiltin(ListFold, args) => {
if args.len() >= 2 {
- Ok((r, args[1].to_value()))
+ Ret::Value(args[1].clone())
} else {
// Do we really need to handle this case ?
unimplemented!()
}
}
- _ => Ok((
- r,
- f.app_val(Value::from_builtin(List).app_thunk(t.clone()))
- .app_val(ListConsClosure(
- TypeThunk::from_thunk(t.clone()),
- None,
- ))
- .app_val(EmptyListLit(TypeThunk::from_thunk(t.clone()))),
- )),
+ _ => {
+ let list_t = Value::from_builtin(List).app(t.clone());
+ Ret::Value(
+ f.app(list_t.clone())
+ .app({
+ // Move `t` under new variables
+ let t1 = t.under_binder(Label::from("x"));
+ let t2 = t1.under_binder(Label::from("xs"));
+ make_closure!(
+ λ(x : #t) ->
+ λ(xs : List #t1) ->
+ [ var(x, 1, #t2) ] # var(xs, 0, List #t2)
+ )
+ })
+ .app(
+ EmptyListLit(t.clone())
+ .into_value_with_type(list_t),
+ ),
+ )
+ }
},
- (ListFold, [_, l, _, cons, nil, r..]) => match &*l.as_value() {
- EmptyListLit(_) => Ok((r, nil.to_value())),
+ (ListFold, [_, l, _, cons, nil, r..]) => match &*l.as_whnf() {
+ EmptyListLit(_) => Ret::ValueWithRemainingArgs(r, nil.clone()),
NEListLit(xs) => {
let mut v = nil.clone();
- for x in xs.iter().rev() {
- v = cons
- .clone()
- .app_thunk(x.clone())
- .app_thunk(v)
- .into_thunk();
+ for x in xs.iter().cloned().rev() {
+ v = cons.app(x).app(v);
}
- Ok((r, v.to_value()))
+ Ret::ValueWithRemainingArgs(r, v)
}
- _ => Err(()),
+ _ => Ret::DoneAsIs,
},
- (OptionalBuild, [t, f, r..]) => match &*f.as_value() {
+ (OptionalBuild, [t, f]) => match &*f.as_whnf() {
// fold/build fusion
- Value::AppliedBuiltin(OptionalFold, args) => {
+ ValueF::AppliedBuiltin(OptionalFold, args) => {
if args.len() >= 2 {
- Ok((r, args[1].to_value()))
+ Ret::Value(args[1].clone())
} else {
// Do we really need to handle this case ?
unimplemented!()
}
}
- _ => Ok((
- r,
- f.app_val(Value::from_builtin(Optional).app_thunk(t.clone()))
- .app_val(OptionalSomeClosure(TypeThunk::from_thunk(
- t.clone(),
- )))
- .app_val(EmptyOptionalLit(TypeThunk::from_thunk(
- t.clone(),
- ))),
- )),
+ _ => {
+ let optional_t = Value::from_builtin(Optional).app(t.clone());
+ Ret::Value(
+ f.app(optional_t.clone())
+ .app({
+ let t1 = t.under_binder(Label::from("x"));
+ make_closure!(λ(x: #t) -> Some(var(x, 0, #t1)))
+ })
+ .app(
+ EmptyOptionalLit(t.clone())
+ .into_value_with_type(optional_t),
+ ),
+ )
+ }
},
- (OptionalFold, [_, v, _, just, nothing, r..]) => match &*v.as_value() {
- EmptyOptionalLit(_) => Ok((r, nothing.to_value())),
- NEOptionalLit(x) => Ok((r, just.app_thunk(x.clone()))),
- _ => Err(()),
+ (OptionalFold, [_, v, _, just, nothing, r..]) => match &*v.as_whnf() {
+ EmptyOptionalLit(_) => {
+ Ret::ValueWithRemainingArgs(r, nothing.clone())
+ }
+ NEOptionalLit(x) => {
+ Ret::ValueWithRemainingArgs(r, just.app(x.clone()))
+ }
+ _ => Ret::DoneAsIs,
},
- (NaturalBuild, [f, r..]) => match &*f.as_value() {
+ (NaturalBuild, [f]) => match &*f.as_whnf() {
// fold/build fusion
- Value::AppliedBuiltin(NaturalFold, args) => {
+ ValueF::AppliedBuiltin(NaturalFold, args) => {
if !args.is_empty() {
- Ok((r, args[0].to_value()))
+ Ret::Value(args[0].clone())
} else {
// Do we really need to handle this case ?
unimplemented!()
}
}
- _ => Ok((
- r,
- f.app_val(Value::from_builtin(Natural))
- .app_val(NaturalSuccClosure)
- .app_val(NaturalLit(0)),
- )),
+ _ => Ret::Value(
+ f.app(Value::from_builtin(Natural))
+ .app(make_closure!(
+ λ(x : Natural) -> 1 + var(x, 0, Natural)
+ ))
+ .app(
+ NaturalLit(0)
+ .into_value_with_type(Value::from_builtin(Natural)),
+ ),
+ ),
},
- (NaturalFold, [n, t, succ, zero, r..]) => match &*n.as_value() {
- NaturalLit(0) => Ok((r, zero.to_value())),
+ (NaturalFold, [n, t, succ, zero, r..]) => match &*n.as_whnf() {
+ NaturalLit(0) => Ret::ValueWithRemainingArgs(r, zero.clone()),
NaturalLit(n) => {
let fold = Value::from_builtin(NaturalFold)
- .app(NaturalLit(n - 1))
- .app_thunk(t.clone())
- .app_thunk(succ.clone())
- .app_thunk(zero.clone());
- Ok((r, succ.app_val(fold)))
+ .app(
+ NaturalLit(n - 1)
+ .into_value_with_type(Value::from_builtin(Natural)),
+ )
+ .app(t.clone())
+ .app(succ.clone())
+ .app(zero.clone());
+ Ret::ValueWithRemainingArgs(r, succ.app(fold))
}
- _ => Err(()),
+ _ => Ret::DoneAsIs,
},
- _ => Err(()),
+ _ => Ret::DoneAsIs,
};
match ret {
- Ok((unconsumed_args, mut v)) => {
+ Ret::ValueF(v) => v,
+ Ret::Value(v) => v.to_whnf_check_type(ty),
+ Ret::ValueWithRemainingArgs(unconsumed_args, mut v) => {
let n_consumed_args = args.len() - unconsumed_args.len();
for x in args.into_iter().skip(n_consumed_args) {
- v = v.app_thunk(x);
+ v = v.app(x);
}
- v
+ v.to_whnf_check_type(ty)
}
- Err(()) => AppliedBuiltin(b, args),
+ Ret::DoneAsIs => AppliedBuiltin(b, args),
}
}
-pub fn apply_any(f: Thunk, a: Thunk) -> Value {
- let fallback = |f: Thunk, a: Thunk| Value::PartialExpr(ExprF::App(f, a));
-
- let f_borrow = f.as_value();
+pub(crate) fn apply_any(f: Value, a: Value, ty: &Value) -> ValueF {
+ let f_borrow = f.as_whnf();
match &*f_borrow {
- Value::Lam(x, _, e) => {
- let val = Typed::from_thunk_untyped(a);
- e.subst_shift(&x.into(), &val).to_value()
+ ValueF::Lam(x, _, e) => {
+ e.subst_shift(&x.into(), &a).to_whnf_check_type(ty)
}
- Value::AppliedBuiltin(b, args) => {
+ ValueF::AppliedBuiltin(b, args) => {
use std::iter::once;
let args = args.iter().cloned().chain(once(a.clone())).collect();
- apply_builtin(*b, args)
- }
- Value::OptionalSomeClosure(_) => Value::NEOptionalLit(a),
- Value::ListConsClosure(t, None) => {
- Value::ListConsClosure(t.clone(), Some(a))
+ apply_builtin(*b, args, ty)
}
- Value::ListConsClosure(_, Some(x)) => {
- let a_borrow = a.as_value();
- match &*a_borrow {
- Value::EmptyListLit(_) => Value::NEListLit(vec![x.clone()]),
- Value::NEListLit(xs) => {
- use std::iter::once;
- let xs =
- once(x.clone()).chain(xs.iter().cloned()).collect();
- Value::NEListLit(xs)
- }
- _ => {
- drop(f_borrow);
- drop(a_borrow);
- fallback(f, a)
- }
- }
- }
- Value::NaturalSuccClosure => {
- let a_borrow = a.as_value();
- match &*a_borrow {
- Value::NaturalLit(n) => Value::NaturalLit(n + 1),
- _ => {
- drop(f_borrow);
- drop(a_borrow);
- fallback(f, a)
- }
- }
- }
- Value::UnionConstructor(l, kts) => {
- Value::UnionLit(l.clone(), a, kts.clone())
+ ValueF::UnionConstructor(l, kts) => {
+ ValueF::UnionLit(l.clone(), a, kts.clone())
}
_ => {
drop(f_borrow);
- fallback(f, a)
+ ValueF::PartialExpr(ExprF::App(f, a))
}
}
}
-pub fn squash_textlit(
- elts: impl Iterator<Item = InterpolatedTextContents<Thunk>>,
-) -> Vec<InterpolatedTextContents<Thunk>> {
+pub(crate) fn squash_textlit(
+ elts: impl Iterator<Item = InterpolatedTextContents<Value>>,
+) -> Vec<InterpolatedTextContents<Value>> {
use std::mem::replace;
use InterpolatedTextContents::{Expr, Text};
fn inner(
- elts: impl Iterator<Item = InterpolatedTextContents<Thunk>>,
+ elts: impl Iterator<Item = InterpolatedTextContents<Value>>,
crnt_str: &mut String,
- ret: &mut Vec<InterpolatedTextContents<Thunk>>,
+ ret: &mut Vec<InterpolatedTextContents<Value>>,
) {
for contents in elts {
match contents {
Text(s) => crnt_str.push_str(&s),
Expr(e) => {
- let e_borrow = e.as_value();
+ let e_borrow = e.as_whnf();
match &*e_borrow {
- Value::TextLit(elts2) => {
+ ValueF::TextLit(elts2) => {
inner(elts2.iter().cloned(), crnt_str, ret)
}
_ => {
@@ -367,134 +450,20 @@ pub fn squash_textlit(
ret
}
-/// Reduces the imput expression to a Value. Evaluates as little as possible.
-pub fn normalize_whnf(ctx: NormalizationContext, expr: InputSubExpr) -> Value {
- match expr.as_ref() {
- ExprF::Embed(e) => return e.to_value(),
- ExprF::Var(v) => return ctx.lookup(v),
- _ => {}
- }
-
- // Thunk subexpressions
- let expr: ExprF<Thunk, X> =
- expr.as_ref().map_ref_with_special_handling_of_binders(
- |e| Thunk::new(ctx.clone(), e.clone()),
- |x, e| Thunk::new(ctx.skip(x), e.clone()),
- |_| unreachable!(),
- );
-
- normalize_one_layer(expr)
-}
-
-// Small helper enum to avoid repetition
-enum Ret<'a> {
- Value(Value),
- Thunk(Thunk),
- ThunkRef(&'a Thunk),
- Expr(ExprF<Thunk, X>),
-}
-
-/// Performs an intersection of two HashMaps.
-///
-/// # Arguments
-///
-/// * `f` - Will compute the final value from the intersecting
-/// key and the values from both maps.
-///
-/// # Description
-///
-/// If the key is present in both maps then the final value for
-/// that key is computed via the `f` function.
-///
-/// The final map will contain the shared keys from the
-/// two input maps with the final computed value from `f`.
-pub(crate) fn intersection_with_key<K, T, U, V>(
- mut f: impl FnMut(&K, &T, &U) -> V,
- map1: &HashMap<K, T>,
- map2: &HashMap<K, U>,
-) -> HashMap<K, V>
-where
- K: std::hash::Hash + Eq + Clone,
-{
- let mut kvs = HashMap::new();
-
- for (k, t) in map1 {
- // Only insert in the final map if the key exists in both
- if let Some(u) = map2.get(k) {
- kvs.insert(k.clone(), f(k, t, u));
- }
- }
-
- kvs
-}
-
-/// Performs an outer join of two HashMaps.
-///
-/// # Arguments
-///
-/// * `ft` - Will convert the values of the first map
-/// into the target value.
-///
-/// * `fu` - Will convert the values of the second map
-/// into the target value.
-///
-/// * `fktu` - Will convert the key and values from both maps
-/// into the target type.
-///
-/// # Description
-///
-/// If the key is present in both maps then the final value for
-/// that key is computed via the `fktu` function. Otherwise, the
-/// final value will be calculated by either the `ft` or `fu` value
-/// depending on which map the key is present in.
-///
-/// The final map will contain all keys from the two input maps with
-/// also values computed as per above.
-pub(crate) fn outer_join<K, T, U, V>(
- mut ft: impl FnMut(&T) -> V,
- mut fu: impl FnMut(&U) -> V,
- mut fktu: impl FnMut(&K, &T, &U) -> V,
- map1: &HashMap<K, T>,
- map2: &HashMap<K, U>,
-) -> HashMap<K, V>
-where
- K: std::hash::Hash + Eq + Clone,
-{
- let mut kvs = HashMap::new();
-
- for (k1, t) in map1 {
- let v = if let Some(u) = map2.get(k1) {
- // The key exists in both maps
- // so use all values for computation
- fktu(k1, t, u)
- } else {
- // Key only exists in map1
- ft(t)
- };
- kvs.insert(k1.clone(), v);
- }
-
- for (k1, u) in map2 {
- // Insert if key was missing in map1
- kvs.entry(k1.clone()).or_insert(fu(u));
- }
-
- kvs
-}
-
-pub(crate) fn merge_maps<K, V>(
+pub(crate) fn merge_maps<K, V, F, Err>(
map1: &HashMap<K, V>,
map2: &HashMap<K, V>,
- mut f: impl FnMut(&V, &V) -> V,
-) -> HashMap<K, V>
+ mut f: F,
+) -> Result<HashMap<K, V>, Err>
where
+ F: FnMut(&K, &V, &V) -> Result<V, Err>,
K: std::hash::Hash + Eq + Clone,
V: Clone,
{
let mut kvs = HashMap::new();
for (x, v2) in map2 {
let newv = if let Some(v1) = map1.get(x) {
- f(v1, v2)
+ f(x, v1, v2)?
} else {
v2.clone()
};
@@ -504,85 +473,98 @@ where
// Insert only if key not already present
kvs.entry(x.clone()).or_insert_with(|| v1.clone());
}
- kvs
+ Ok(kvs)
}
-fn apply_binop<'a>(o: BinOp, x: &'a Thunk, y: &'a Thunk) -> Option<Ret<'a>> {
+// Small helper enum to avoid repetition
+enum Ret<'a> {
+ ValueF(ValueF),
+ Value(Value),
+ ValueRef(&'a Value),
+ Expr(ExprF<Value, Normalized>),
+}
+
+fn apply_binop<'a>(
+ o: BinOp,
+ x: &'a Value,
+ y: &'a Value,
+ ty: &Value,
+) -> Option<Ret<'a>> {
use BinOp::{
BoolAnd, BoolEQ, BoolNE, BoolOr, Equivalence, ListAppend, NaturalPlus,
NaturalTimes, RecursiveRecordMerge, RecursiveRecordTypeMerge,
RightBiasedRecordMerge, TextAppend,
};
- use Value::{
+ use ValueF::{
BoolLit, EmptyListLit, NEListLit, NaturalLit, RecordLit, RecordType,
TextLit,
};
- let x_borrow = x.as_value();
- let y_borrow = y.as_value();
+ let x_borrow = x.as_whnf();
+ let y_borrow = y.as_whnf();
Some(match (o, &*x_borrow, &*y_borrow) {
- (BoolAnd, BoolLit(true), _) => Ret::ThunkRef(y),
- (BoolAnd, _, BoolLit(true)) => Ret::ThunkRef(x),
- (BoolAnd, BoolLit(false), _) => Ret::Value(BoolLit(false)),
- (BoolAnd, _, BoolLit(false)) => Ret::Value(BoolLit(false)),
- (BoolAnd, _, _) if x == y => Ret::ThunkRef(x),
- (BoolOr, BoolLit(true), _) => Ret::Value(BoolLit(true)),
- (BoolOr, _, BoolLit(true)) => Ret::Value(BoolLit(true)),
- (BoolOr, BoolLit(false), _) => Ret::ThunkRef(y),
- (BoolOr, _, BoolLit(false)) => Ret::ThunkRef(x),
- (BoolOr, _, _) if x == y => Ret::ThunkRef(x),
- (BoolEQ, BoolLit(true), _) => Ret::ThunkRef(y),
- (BoolEQ, _, BoolLit(true)) => Ret::ThunkRef(x),
- (BoolEQ, BoolLit(x), BoolLit(y)) => Ret::Value(BoolLit(x == y)),
- (BoolEQ, _, _) if x == y => Ret::Value(BoolLit(true)),
- (BoolNE, BoolLit(false), _) => Ret::ThunkRef(y),
- (BoolNE, _, BoolLit(false)) => Ret::ThunkRef(x),
- (BoolNE, BoolLit(x), BoolLit(y)) => Ret::Value(BoolLit(x != y)),
- (BoolNE, _, _) if x == y => Ret::Value(BoolLit(false)),
-
- (NaturalPlus, NaturalLit(0), _) => Ret::ThunkRef(y),
- (NaturalPlus, _, NaturalLit(0)) => Ret::ThunkRef(x),
+ (BoolAnd, BoolLit(true), _) => Ret::ValueRef(y),
+ (BoolAnd, _, BoolLit(true)) => Ret::ValueRef(x),
+ (BoolAnd, BoolLit(false), _) => Ret::ValueF(BoolLit(false)),
+ (BoolAnd, _, BoolLit(false)) => Ret::ValueF(BoolLit(false)),
+ (BoolAnd, _, _) if x == y => Ret::ValueRef(x),
+ (BoolOr, BoolLit(true), _) => Ret::ValueF(BoolLit(true)),
+ (BoolOr, _, BoolLit(true)) => Ret::ValueF(BoolLit(true)),
+ (BoolOr, BoolLit(false), _) => Ret::ValueRef(y),
+ (BoolOr, _, BoolLit(false)) => Ret::ValueRef(x),
+ (BoolOr, _, _) if x == y => Ret::ValueRef(x),
+ (BoolEQ, BoolLit(true), _) => Ret::ValueRef(y),
+ (BoolEQ, _, BoolLit(true)) => Ret::ValueRef(x),
+ (BoolEQ, BoolLit(x), BoolLit(y)) => Ret::ValueF(BoolLit(x == y)),
+ (BoolEQ, _, _) if x == y => Ret::ValueF(BoolLit(true)),
+ (BoolNE, BoolLit(false), _) => Ret::ValueRef(y),
+ (BoolNE, _, BoolLit(false)) => Ret::ValueRef(x),
+ (BoolNE, BoolLit(x), BoolLit(y)) => Ret::ValueF(BoolLit(x != y)),
+ (BoolNE, _, _) if x == y => Ret::ValueF(BoolLit(false)),
+
+ (NaturalPlus, NaturalLit(0), _) => Ret::ValueRef(y),
+ (NaturalPlus, _, NaturalLit(0)) => Ret::ValueRef(x),
(NaturalPlus, NaturalLit(x), NaturalLit(y)) => {
- Ret::Value(NaturalLit(x + y))
+ Ret::ValueF(NaturalLit(x + y))
}
- (NaturalTimes, NaturalLit(0), _) => Ret::Value(NaturalLit(0)),
- (NaturalTimes, _, NaturalLit(0)) => Ret::Value(NaturalLit(0)),
- (NaturalTimes, NaturalLit(1), _) => Ret::ThunkRef(y),
- (NaturalTimes, _, NaturalLit(1)) => Ret::ThunkRef(x),
+ (NaturalTimes, NaturalLit(0), _) => Ret::ValueF(NaturalLit(0)),
+ (NaturalTimes, _, NaturalLit(0)) => Ret::ValueF(NaturalLit(0)),
+ (NaturalTimes, NaturalLit(1), _) => Ret::ValueRef(y),
+ (NaturalTimes, _, NaturalLit(1)) => Ret::ValueRef(x),
(NaturalTimes, NaturalLit(x), NaturalLit(y)) => {
- Ret::Value(NaturalLit(x * y))
+ Ret::ValueF(NaturalLit(x * y))
}
- (ListAppend, EmptyListLit(_), _) => Ret::ThunkRef(y),
- (ListAppend, _, EmptyListLit(_)) => Ret::ThunkRef(x),
- (ListAppend, NEListLit(xs), NEListLit(ys)) => {
- Ret::Value(NEListLit(xs.iter().chain(ys.iter()).cloned().collect()))
- }
+ (ListAppend, EmptyListLit(_), _) => Ret::ValueRef(y),
+ (ListAppend, _, EmptyListLit(_)) => Ret::ValueRef(x),
+ (ListAppend, NEListLit(xs), NEListLit(ys)) => Ret::ValueF(NEListLit(
+ xs.iter().chain(ys.iter()).cloned().collect(),
+ )),
- (TextAppend, TextLit(x), _) if x.is_empty() => Ret::ThunkRef(y),
- (TextAppend, _, TextLit(y)) if y.is_empty() => Ret::ThunkRef(x),
- (TextAppend, TextLit(x), TextLit(y)) => Ret::Value(TextLit(
+ (TextAppend, TextLit(x), _) if x.is_empty() => Ret::ValueRef(y),
+ (TextAppend, _, TextLit(y)) if y.is_empty() => Ret::ValueRef(x),
+ (TextAppend, TextLit(x), TextLit(y)) => Ret::ValueF(TextLit(
squash_textlit(x.iter().chain(y.iter()).cloned()),
)),
(TextAppend, TextLit(x), _) => {
use std::iter::once;
let y = InterpolatedTextContents::Expr(y.clone());
- Ret::Value(TextLit(squash_textlit(
+ Ret::ValueF(TextLit(squash_textlit(
x.iter().cloned().chain(once(y)),
)))
}
(TextAppend, _, TextLit(y)) => {
use std::iter::once;
let x = InterpolatedTextContents::Expr(x.clone());
- Ret::Value(TextLit(squash_textlit(
+ Ret::ValueF(TextLit(squash_textlit(
once(x).chain(y.iter().cloned()),
)))
}
(RightBiasedRecordMerge, _, RecordLit(kvs)) if kvs.is_empty() => {
- Ret::ThunkRef(x)
+ Ret::ValueRef(x)
}
(RightBiasedRecordMerge, RecordLit(kvs), _) if kvs.is_empty() => {
- Ret::ThunkRef(y)
+ Ret::ValueRef(y)
}
(RightBiasedRecordMerge, RecordLit(kvs1), RecordLit(kvs2)) => {
let mut kvs = kvs2.clone();
@@ -590,92 +572,83 @@ fn apply_binop<'a>(o: BinOp, x: &'a Thunk, y: &'a Thunk) -> Option<Ret<'a>> {
// Insert only if key not already present
kvs.entry(x.clone()).or_insert_with(|| v.clone());
}
- Ret::Value(RecordLit(kvs))
+ Ret::ValueF(RecordLit(kvs))
}
(RecursiveRecordMerge, _, RecordLit(kvs)) if kvs.is_empty() => {
- Ret::ThunkRef(x)
+ Ret::ValueRef(x)
}
(RecursiveRecordMerge, RecordLit(kvs), _) if kvs.is_empty() => {
- Ret::ThunkRef(y)
+ Ret::ValueRef(y)
}
(RecursiveRecordMerge, RecordLit(kvs1), RecordLit(kvs2)) => {
- let kvs = merge_maps(kvs1, kvs2, |v1, v2| {
- Thunk::from_partial_expr(ExprF::BinOp(
- RecursiveRecordMerge,
- v1.clone(),
- v2.clone(),
+ let ty_borrow = ty.as_whnf();
+ let kts = match &*ty_borrow {
+ RecordType(kts) => kts,
+ _ => unreachable!("Internal type error"),
+ };
+ let kvs = merge_maps::<_, _, _, !>(kvs1, kvs2, |k, v1, v2| {
+ Ok(Value::from_valuef_and_type(
+ ValueF::PartialExpr(ExprF::BinOp(
+ RecursiveRecordMerge,
+ v1.clone(),
+ v2.clone(),
+ )),
+ kts.get(k).expect("Internal type error").clone(),
))
- });
- Ret::Value(RecordLit(kvs))
+ })?;
+ Ret::ValueF(RecordLit(kvs))
}
- (RecursiveRecordTypeMerge, _, RecordType(kvs)) if kvs.is_empty() => {
- Ret::ThunkRef(x)
- }
- (RecursiveRecordTypeMerge, RecordType(kvs), _) if kvs.is_empty() => {
- Ret::ThunkRef(y)
+ (RecursiveRecordTypeMerge, _, _) | (Equivalence, _, _) => {
+ unreachable!("This case should have been handled in typecheck")
}
- (RecursiveRecordTypeMerge, RecordType(kvs1), RecordType(kvs2)) => {
- let kvs = merge_maps(kvs1, kvs2, |v1, v2| {
- TypeThunk::from_thunk(Thunk::from_partial_expr(ExprF::BinOp(
- RecursiveRecordTypeMerge,
- v1.to_thunk(),
- v2.to_thunk(),
- )))
- });
- Ret::Value(RecordType(kvs))
- }
-
- (Equivalence, _, _) => Ret::Value(Value::Equivalence(
- TypeThunk::from_thunk(x.clone()),
- TypeThunk::from_thunk(y.clone()),
- )),
_ => return None,
})
}
-pub fn normalize_one_layer(expr: ExprF<Thunk, X>) -> Value {
- use Value::{
- AppliedBuiltin, BoolLit, DoubleLit, EmptyListLit, IntegerLit, Lam,
- NEListLit, NEOptionalLit, NaturalLit, Pi, RecordLit, RecordType,
- TextLit, UnionConstructor, UnionLit, UnionType,
+pub(crate) fn normalize_one_layer(
+ expr: ExprF<Value, Normalized>,
+ ty: &Value,
+) -> ValueF {
+ use ValueF::{
+ AppliedBuiltin, BoolLit, DoubleLit, EmptyListLit, IntegerLit,
+ NEListLit, NEOptionalLit, NaturalLit, RecordLit, TextLit,
+ UnionConstructor, UnionLit, UnionType,
};
let ret = match expr {
- ExprF::Embed(_) => unreachable!(),
- ExprF::Var(_) => unreachable!(),
- ExprF::Annot(x, _) => Ret::Thunk(x),
- ExprF::Assert(_) => Ret::Expr(expr),
- ExprF::Lam(x, t, e) => {
- Ret::Value(Lam(x.into(), TypeThunk::from_thunk(t), e))
+ ExprF::Import(_) => unreachable!(
+ "There should remain no imports in a resolved expression"
+ ),
+ // Those cases have already been completely handled in the typechecking phase (using
+ // `RetWhole`), so they won't appear here.
+ ExprF::Lam(_, _, _)
+ | ExprF::Pi(_, _, _)
+ | ExprF::Let(_, _, _, _)
+ | ExprF::Embed(_)
+ | ExprF::Const(_)
+ | ExprF::Builtin(_)
+ | ExprF::Var(_)
+ | ExprF::Annot(_, _)
+ | ExprF::RecordType(_)
+ | ExprF::UnionType(_) => {
+ unreachable!("This case should have been handled in typecheck")
}
- ExprF::Pi(x, t, e) => Ret::Value(Pi(
- x.into(),
- TypeThunk::from_thunk(t),
- TypeThunk::from_thunk(e),
- )),
- ExprF::Let(x, _, v, b) => {
- let v = Typed::from_thunk_untyped(v);
- Ret::Thunk(b.subst_shift(&x.into(), &v))
- }
- ExprF::App(v, a) => Ret::Value(v.app_thunk(a)),
- ExprF::Builtin(b) => Ret::Value(Value::from_builtin(b)),
- ExprF::Const(c) => Ret::Value(Value::Const(c)),
- ExprF::BoolLit(b) => Ret::Value(BoolLit(b)),
- ExprF::NaturalLit(n) => Ret::Value(NaturalLit(n)),
- ExprF::IntegerLit(n) => Ret::Value(IntegerLit(n)),
- ExprF::DoubleLit(n) => Ret::Value(DoubleLit(n)),
- ExprF::SomeLit(e) => Ret::Value(NEOptionalLit(e)),
+ ExprF::Assert(_) => Ret::Expr(expr),
+ ExprF::App(v, a) => Ret::Value(v.app(a)),
+ ExprF::BoolLit(b) => Ret::ValueF(BoolLit(b)),
+ ExprF::NaturalLit(n) => Ret::ValueF(NaturalLit(n)),
+ ExprF::IntegerLit(n) => Ret::ValueF(IntegerLit(n)),
+ ExprF::DoubleLit(n) => Ret::ValueF(DoubleLit(n)),
+ ExprF::SomeLit(e) => Ret::ValueF(NEOptionalLit(e)),
ExprF::EmptyListLit(ref t) => {
// Check if the type is of the form `List x`
- let t_borrow = t.as_value();
+ let t_borrow = t.as_whnf();
match &*t_borrow {
AppliedBuiltin(Builtin::List, args) if args.len() == 1 => {
- Ret::Value(EmptyListLit(TypeThunk::from_thunk(
- args[0].clone(),
- )))
+ Ret::ValueF(EmptyListLit(args[0].clone()))
}
_ => {
drop(t_borrow);
@@ -684,43 +657,33 @@ pub fn normalize_one_layer(expr: ExprF<Thunk, X>) -> Value {
}
}
ExprF::NEListLit(elts) => {
- Ret::Value(NEListLit(elts.into_iter().collect()))
+ Ret::ValueF(NEListLit(elts.into_iter().collect()))
}
ExprF::RecordLit(kvs) => {
- Ret::Value(RecordLit(kvs.into_iter().collect()))
+ Ret::ValueF(RecordLit(kvs.into_iter().collect()))
}
- ExprF::RecordType(kts) => Ret::Value(RecordType(
- kts.into_iter()
- .map(|(k, t)| (k, TypeThunk::from_thunk(t)))
- .collect(),
- )),
- ExprF::UnionType(kts) => Ret::Value(UnionType(
- kts.into_iter()
- .map(|(k, t)| (k, t.map(|t| TypeThunk::from_thunk(t))))
- .collect(),
- )),
ExprF::TextLit(elts) => {
use InterpolatedTextContents::Expr;
let elts: Vec<_> = squash_textlit(elts.into_iter());
// Simplify bare interpolation
if let [Expr(th)] = elts.as_slice() {
- Ret::Thunk(th.clone())
+ Ret::Value(th.clone())
} else {
- Ret::Value(TextLit(elts))
+ Ret::ValueF(TextLit(elts))
}
}
ExprF::BoolIf(ref b, ref e1, ref e2) => {
- let b_borrow = b.as_value();
+ let b_borrow = b.as_whnf();
match &*b_borrow {
- BoolLit(true) => Ret::ThunkRef(e1),
- BoolLit(false) => Ret::ThunkRef(e2),
+ BoolLit(true) => Ret::ValueRef(e1),
+ BoolLit(false) => Ret::ValueRef(e2),
_ => {
- let e1_borrow = e1.as_value();
- let e2_borrow = e2.as_value();
+ let e1_borrow = e1.as_whnf();
+ let e2_borrow = e2.as_whnf();
match (&*e1_borrow, &*e2_borrow) {
// Simplify `if b then True else False`
- (BoolLit(true), BoolLit(false)) => Ret::ThunkRef(b),
- _ if e1 == e2 => Ret::ThunkRef(e1),
+ (BoolLit(true), BoolLit(false)) => Ret::ValueRef(b),
+ _ if e1 == e2 => Ret::ValueRef(e1),
_ => {
drop(b_borrow);
drop(e1_borrow);
@@ -731,18 +694,18 @@ pub fn normalize_one_layer(expr: ExprF<Thunk, X>) -> Value {
}
}
}
- ExprF::BinOp(o, ref x, ref y) => match apply_binop(o, x, y) {
+ ExprF::BinOp(o, ref x, ref y) => match apply_binop(o, x, y, ty) {
Some(ret) => ret,
None => Ret::Expr(expr),
},
- ExprF::Projection(_, ls) if ls.is_empty() => {
- Ret::Value(RecordLit(HashMap::new()))
+ ExprF::Projection(_, ref ls) if ls.is_empty() => {
+ Ret::ValueF(RecordLit(HashMap::new()))
}
ExprF::Projection(ref v, ref ls) => {
- let v_borrow = v.as_value();
+ let v_borrow = v.as_whnf();
match &*v_borrow {
- RecordLit(kvs) => Ret::Value(RecordLit(
+ RecordLit(kvs) => Ret::ValueF(RecordLit(
ls.iter()
.filter_map(|l| {
kvs.get(l).map(|x| (l.clone(), x.clone()))
@@ -756,17 +719,17 @@ pub fn normalize_one_layer(expr: ExprF<Thunk, X>) -> Value {
}
}
ExprF::Field(ref v, ref l) => {
- let v_borrow = v.as_value();
+ let v_borrow = v.as_whnf();
match &*v_borrow {
RecordLit(kvs) => match kvs.get(l) {
- Some(r) => Ret::Thunk(r.clone()),
+ Some(r) => Ret::Value(r.clone()),
None => {
drop(v_borrow);
Ret::Expr(expr)
}
},
UnionType(kts) => {
- Ret::Value(UnionConstructor(l.clone(), kts.clone()))
+ Ret::ValueF(UnionConstructor(l.clone(), kts.clone()))
}
_ => {
drop(v_borrow);
@@ -776,11 +739,11 @@ pub fn normalize_one_layer(expr: ExprF<Thunk, X>) -> Value {
}
ExprF::Merge(ref handlers, ref variant, _) => {
- let handlers_borrow = handlers.as_value();
- let variant_borrow = variant.as_value();
+ let handlers_borrow = handlers.as_whnf();
+ let variant_borrow = variant.as_whnf();
match (&*handlers_borrow, &*variant_borrow) {
(RecordLit(kvs), UnionConstructor(l, _)) => match kvs.get(l) {
- Some(h) => Ret::Thunk(h.clone()),
+ Some(h) => Ret::Value(h.clone()),
None => {
drop(handlers_borrow);
drop(variant_borrow);
@@ -788,7 +751,7 @@ pub fn normalize_one_layer(expr: ExprF<Thunk, X>) -> Value {
}
},
(RecordLit(kvs), UnionLit(l, v, _)) => match kvs.get(l) {
- Some(h) => Ret::Value(h.app_thunk(v.clone())),
+ Some(h) => Ret::Value(h.app(v.clone())),
None => {
drop(handlers_borrow);
drop(variant_borrow);
@@ -802,12 +765,26 @@ pub fn normalize_one_layer(expr: ExprF<Thunk, X>) -> Value {
}
}
}
+ ExprF::ToMap(_, _) => unimplemented!("toMap"),
};
match ret {
- Ret::Value(v) => v,
- Ret::Thunk(th) => th.to_value(),
- Ret::ThunkRef(th) => th.to_value(),
- Ret::Expr(expr) => Value::PartialExpr(expr),
+ Ret::ValueF(v) => v,
+ Ret::Value(v) => v.to_whnf_check_type(ty),
+ Ret::ValueRef(v) => v.to_whnf_check_type(ty),
+ Ret::Expr(expr) => ValueF::PartialExpr(expr),
+ }
+}
+
+/// Normalize a ValueF into WHNF
+pub(crate) fn normalize_whnf(v: ValueF, ty: &Value) -> ValueF {
+ match v {
+ ValueF::AppliedBuiltin(b, args) => apply_builtin(b, args, ty),
+ ValueF::PartialExpr(e) => normalize_one_layer(e, ty),
+ ValueF::TextLit(elts) => {
+ ValueF::TextLit(squash_textlit(elts.into_iter()))
+ }
+ // All other cases are already in WHNF
+ v => v,
}
}
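
Note: `merge_maps` above now takes a fallible, key-aware closure and returns `Result`. A standalone sketch of that shape with a small usage example; this is the generic helper only, the closures used in the crate build typed `Value`s rather than adding integers.

use std::collections::HashMap;
use std::hash::Hash;

// Merge two maps: keys present in only one map are kept as-is; keys
// present in both are combined with `f`, which may fail.
fn merge_maps<K, V, F, Err>(
    map1: &HashMap<K, V>,
    map2: &HashMap<K, V>,
    mut f: F,
) -> Result<HashMap<K, V>, Err>
where
    F: FnMut(&K, &V, &V) -> Result<V, Err>,
    K: Hash + Eq + Clone,
    V: Clone,
{
    let mut kvs = HashMap::new();
    for (k, v2) in map2 {
        let newv = match map1.get(k) {
            Some(v1) => f(k, v1, v2)?,
            None => v2.clone(),
        };
        kvs.insert(k.clone(), newv);
    }
    for (k, v1) in map1 {
        // Insert only if the key was not already handled above.
        kvs.entry(k.clone()).or_insert_with(|| v1.clone());
    }
    Ok(kvs)
}

fn main() -> Result<(), String> {
    let a: HashMap<_, _> = vec![("x", 1), ("y", 2)].into_iter().collect();
    let b: HashMap<_, _> = vec![("y", 10), ("z", 3)].into_iter().collect();
    let merged = merge_maps(&a, &b, |_k, v1, v2| Ok::<_, String>(v1 + v2))?;
    assert_eq!(merged["y"], 12);
    println!("{:?}", merged);
    Ok(())
}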
diff --git a/dhall/src/phase/parse.rs b/dhall/src/phase/parse.rs
index 734f6e1..540ceea 100644
--- a/dhall/src/phase/parse.rs
+++ b/dhall/src/phase/parse.rs
@@ -8,7 +8,7 @@ use crate::error::Error;
use crate::phase::resolve::ImportRoot;
use crate::phase::Parsed;
-pub fn parse_file(f: &Path) -> Result<Parsed, Error> {
+pub(crate) fn parse_file(f: &Path) -> Result<Parsed, Error> {
let mut buffer = String::new();
File::open(f)?.read_to_string(&mut buffer)?;
let expr = parse_expr(&*buffer)?;
@@ -16,22 +16,22 @@ pub fn parse_file(f: &Path) -> Result<Parsed, Error> {
Ok(Parsed(expr, root))
}
-pub fn parse_str(s: &str) -> Result<Parsed, Error> {
+pub(crate) fn parse_str(s: &str) -> Result<Parsed, Error> {
let expr = parse_expr(s)?;
let root = ImportRoot::LocalDir(std::env::current_dir()?);
Ok(Parsed(expr, root))
}
-pub fn parse_binary(data: &[u8]) -> Result<Parsed, Error> {
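+/// Decode a binary-encoded (CBOR) Dhall expression; relative imports will
+/// resolve against the current directory.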
+pub(crate) fn parse_binary(data: &[u8]) -> Result<Parsed, Error> {
let expr = crate::phase::binary::decode(data)?;
let root = ImportRoot::LocalDir(std::env::current_dir()?);
- Ok(Parsed(expr.note_absurd(), root))
+ Ok(Parsed(expr, root))
}
-pub fn parse_binary_file(f: &Path) -> Result<Parsed, Error> {
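+/// Decode a binary-encoded (CBOR) Dhall file; relative imports will resolve
+/// against the file's parent directory.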
+pub(crate) fn parse_binary_file(f: &Path) -> Result<Parsed, Error> {
let mut buffer = Vec::new();
File::open(f)?.read_to_end(&mut buffer)?;
let expr = crate::phase::binary::decode(&buffer)?;
let root = ImportRoot::LocalDir(f.parent().unwrap().to_owned());
- Ok(Parsed(expr.note_absurd(), root))
+ Ok(Parsed(expr, root))
}
diff --git a/dhall/src/phase/resolve.rs b/dhall/src/phase/resolve.rs
index ac264e6..5bde68a 100644
--- a/dhall/src/phase/resolve.rs
+++ b/dhall/src/phase/resolve.rs
@@ -1,20 +1,20 @@
use std::collections::HashMap;
use std::path::{Path, PathBuf};
-use dhall_syntax::Import;
-
use crate::error::{Error, ImportError};
-use crate::phase::{Normalized, Parsed, Resolved};
+use crate::phase::{Normalized, NormalizedExpr, Parsed, Resolved};
+
+type Import = dhall_syntax::Import<NormalizedExpr>;
/// A root from which to resolve relative imports.
#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum ImportRoot {
+pub(crate) enum ImportRoot {
LocalDir(PathBuf),
}
type ImportCache = HashMap<Import, Normalized>;
-pub type ImportStack = Vec<Import>;
+pub(crate) type ImportStack = Vec<Import>;
fn resolve_import(
@@ -29,9 +29,9 @@ fn resolve_import(
let cwd = match root {
LocalDir(cwd) => cwd,
};
- match &import.location_hashed.location {
+ match &import.location {
Local(prefix, path) => {
- let path_buf: PathBuf = path.clone().into_iter().collect();
+ let path_buf: PathBuf = path.file_path.iter().collect();
let path_buf = match prefix {
// TODO: fail gracefully
Parent => cwd.parent().unwrap().join(path_buf),
@@ -91,11 +91,11 @@ fn do_resolve_expr(
Ok(Resolved(expr))
}
-pub fn resolve(e: Parsed) -> Result<Resolved, ImportError> {
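+/// Resolve all imports in a parsed expression, starting from an empty import
+/// cache and import stack.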
+pub(crate) fn resolve(e: Parsed) -> Result<Resolved, ImportError> {
do_resolve_expr(e, &mut HashMap::new(), &Vec::new())
}
-pub fn skip_resolve_expr(
+pub(crate) fn skip_resolve_expr(
Parsed(expr, _root): Parsed,
) -> Result<Resolved, ImportError> {
let resolve = |import: &Import| -> Result<Normalized, ImportError> {
@@ -106,18 +106,26 @@ pub fn skip_resolve_expr(
}
#[cfg(test)]
+#[rustfmt::skip]
mod spec_tests {
- #![rustfmt::skip]
-
macro_rules! import_success {
($name:ident, $path:expr) => {
- make_spec_test!(Import, Success, $name, &("../dhall-lang/tests/import/success/".to_owned() + $path));
+ make_spec_test!(
+ ImportSuccess(
+ &("../dhall-lang/tests/import/success/".to_owned() + $path + "A.dhall"),
+ &("../dhall-lang/tests/import/success/".to_owned() + $path + "B.dhall")
+ ),
+ $name
+ );
};
}
// macro_rules! import_failure {
// ($name:ident, $path:expr) => {
- // make_spec_test!(Import, Failure, $name, &("../dhall-lang/tests/import/failure/".to_owned() + $path));
+ // make_spec_test!(
+ // ImportFailure(&("../dhall-lang/tests/import/failure/".to_owned() + $path + ".dhall")),
+ // $name
+ // );
// };
// }
diff --git a/dhall/src/phase/typecheck.rs b/dhall/src/phase/typecheck.rs
index 299997a..9013c1f 100644
--- a/dhall/src/phase/typecheck.rs
+++ b/dhall/src/phase/typecheck.rs
@@ -1,34 +1,29 @@
+use std::cmp::max;
use std::collections::HashMap;
use dhall_syntax::{
- rc, Builtin, Const, Expr, ExprF, InterpolatedTextContents, Label, Span,
- SubExpr, X,
+ rc, Builtin, Const, Expr, ExprF, InterpolatedTextContents, Label,
};
-use crate::core::context::{NormalizationContext, TypecheckContext};
-use crate::core::thunk::{Thunk, TypeThunk};
+use crate::core::context::TypecheckContext;
use crate::core::value::Value;
+use crate::core::valuef::ValueF;
use crate::core::var::{Shift, Subst};
use crate::error::{TypeError, TypeMessage};
-use crate::phase::{Normalized, Resolved, Type, Typed};
+use crate::phase::Normalized;
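+/// Build a Pi type from its binder, input type and output type, combining
+/// their sorts with `function_check`.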
fn tck_pi_type(
ctx: &TypecheckContext,
x: Label,
- tx: Type,
- te: Type,
-) -> Result<Typed, TypeError> {
+ tx: Value,
+ te: Value,
+) -> Result<Value, TypeError> {
use crate::error::TypeMessage::*;
let ctx2 = ctx.insert_type(&x, tx.clone());
let ka = match tx.get_type()?.as_const() {
Some(k) => k,
- _ => {
- return Err(TypeError::new(
- ctx,
- InvalidInputType(tx.to_normalized()),
- ))
- }
+ _ => return Err(TypeError::new(ctx, InvalidInputType(tx))),
};
let kb = match te.get_type()?.as_const() {
@@ -36,64 +31,58 @@ fn tck_pi_type(
_ => {
return Err(TypeError::new(
&ctx2,
- InvalidOutputType(te.get_type()?.to_normalized()),
+ InvalidOutputType(te.get_type()?),
))
}
};
let k = function_check(ka, kb);
- Ok(Typed::from_thunk_and_type(
- Value::Pi(x.into(), TypeThunk::from_type(tx), TypeThunk::from_type(te))
- .into_thunk(),
- Type::from_const(k),
+ Ok(Value::from_valuef_and_type(
+ ValueF::Pi(x.into(), tx, te),
+ Value::from_const(k),
))
}
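+/// Typecheck a record type: it lives in the maximum of the sorts of its field
+/// types, and duplicate fields are an error.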
fn tck_record_type(
ctx: &TypecheckContext,
- kts: impl IntoIterator<Item = Result<(Label, Type), TypeError>>,
-) -> Result<Typed, TypeError> {
+ kts: impl IntoIterator<Item = Result<(Label, Value), TypeError>>,
+) -> Result<Value, TypeError> {
use crate::error::TypeMessage::*;
use std::collections::hash_map::Entry;
let mut new_kts = HashMap::new();
- // Check that all types are the same const
- let mut k = None;
+ // An empty record type has type Type
+ let mut k = Const::Type;
for e in kts {
let (x, t) = e?;
- match (k, t.get_type()?.as_const()) {
- (None, Some(k2)) => k = Some(k2),
- (Some(k1), Some(k2)) if k1 == k2 => {}
- _ => {
- return Err(TypeError::new(
- ctx,
- InvalidFieldType(x.clone(), t.clone()),
- ))
- }
+ // Construct the union of the contained `Const`s
+ match t.get_type()?.as_const() {
+ Some(k2) => k = max(k, k2),
+ None => return Err(TypeError::new(ctx, InvalidFieldType(x, t))),
}
- let entry = new_kts.entry(x.clone());
+ // Check for duplicated entries
+ let entry = new_kts.entry(x);
match &entry {
Entry::Occupied(_) => {
return Err(TypeError::new(ctx, RecordTypeDuplicateField))
}
- Entry::Vacant(_) => {
- entry.or_insert_with(|| TypeThunk::from_type(t.clone()))
- }
+ Entry::Vacant(_) => entry.or_insert_with(|| t),
};
}
- // An empty record type has type Type
- let k = k.unwrap_or(dhall_syntax::Const::Type);
- Ok(Typed::from_thunk_and_type(
- Value::RecordType(new_kts).into_thunk(),
- Type::from_const(k),
+ Ok(Value::from_valuef_and_type(
+ ValueF::RecordType(new_kts),
+ Value::from_const(k),
))
}
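+/// Typecheck a union type, rejecting duplicate alternatives.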
-fn tck_union_type(
+fn tck_union_type<Iter>(
ctx: &TypecheckContext,
- kts: impl IntoIterator<Item = Result<(Label, Option<Type>), TypeError>>,
-) -> Result<Typed, TypeError> {
+ kts: Iter,
+) -> Result<Value, TypeError>
+where
+ Iter: IntoIterator<Item = Result<(Label, Option<Value>), TypeError>>,
+{
use crate::error::TypeMessage::*;
use std::collections::hash_map::Entry;
let mut new_kts = HashMap::new();
@@ -108,49 +97,48 @@ fn tck_union_type(
_ => {
return Err(TypeError::new(
ctx,
- InvalidFieldType(x.clone(), t.clone()),
+ InvalidFieldType(x, t.clone()),
))
}
}
}
- let entry = new_kts.entry(x.clone());
+ let entry = new_kts.entry(x);
match &entry {
Entry::Occupied(_) => {
return Err(TypeError::new(ctx, UnionTypeDuplicateField))
}
- Entry::Vacant(_) => entry.or_insert_with(|| {
- t.as_ref().map(|t| TypeThunk::from_type(t.clone()))
- }),
+ Entry::Vacant(_) => entry.or_insert_with(|| t),
};
}
     // An empty union type has type Type;
     // a union type with only unary variants also has type Type
- let k = k.unwrap_or(dhall_syntax::Const::Type);
+ let k = k.unwrap_or(Const::Type);
- Ok(Typed::from_thunk_and_type(
- Value::UnionType(new_kts).into_thunk(),
- Type::from_const(k),
+ Ok(Value::from_valuef_and_type(
+ ValueF::UnionType(new_kts),
+ Value::from_const(k),
))
}
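+/// Sort of a function type: a function returning a `Type` lives in `Type`;
+/// otherwise take the maximum of the two sorts.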
fn function_check(a: Const, b: Const) -> Const {
- use dhall_syntax::Const::Type;
- use std::cmp::max;
- if b == Type {
- Type
+ if b == Const::Type {
+ Const::Type
} else {
max(a, b)
}
}
-pub fn type_of_const(c: Const) -> Result<Type, TypeError> {
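+/// Wrap a `Const` into a `Value`, attaching its type according to the
+/// `Type : Kind : Sort` hierarchy.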
+pub(crate) fn const_to_value(c: Const) -> Value {
+ let v = ValueF::Const(c);
match c {
- Const::Type => Ok(Type::from_const(Const::Kind)),
- Const::Kind => Ok(Type::from_const(Const::Sort)),
- Const::Sort => {
- Err(TypeError::new(&TypecheckContext::new(), TypeMessage::Sort))
+ Const::Type => {
+ Value::from_valuef_and_type(v, const_to_value(Const::Kind))
}
+ Const::Kind => {
+ Value::from_valuef_and_type(v, const_to_value(Const::Sort))
+ }
+ Const::Sort => Value::const_sort(),
}
}
@@ -210,9 +198,9 @@ macro_rules! make_type {
};
}
-fn type_of_builtin(b: Builtin) -> Expr<X, X> {
+fn type_of_builtin<E>(b: Builtin) -> Expr<E> {
use dhall_syntax::Builtin::*;
- match b {
+ rc(match b {
Bool | Natural | Integer | Double | Text => make_type!(Type),
List | Optional => make_type!(
Type -> Type
@@ -292,28 +280,15 @@ fn type_of_builtin(b: Builtin) -> Expr<X, X> {
OptionalNone => make_type!(
forall (a: Type) -> Optional a
),
- }
-}
-
-/// Takes an expression that is meant to contain a Type
-/// and turn it into a type, typechecking it along the way.
-pub fn mktype(
- ctx: &TypecheckContext,
- e: SubExpr<Span, Normalized>,
-) -> Result<Type, TypeError> {
- Ok(type_with(ctx, e)?.to_type())
-}
-
-pub fn builtin_to_type(b: Builtin) -> Result<Type, TypeError> {
- mktype(&TypecheckContext::new(), SubExpr::from_builtin(b))
+ })
}
-/// Intermediary return type
-enum Ret {
- /// Returns the contained value as is
- RetWhole(Typed),
- /// Use the contained Type as the type of the input expression
- RetTypeOnly(Type),
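+/// Build the `Value` for a builtin by typechecking the builtin's type
+/// signature.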
+pub(crate) fn builtin_to_value(b: Builtin) -> Value {
+ let ctx = TypecheckContext::new();
+ Value::from_valuef_and_type(
+ ValueF::from_builtin(b),
+ type_with(&ctx, type_of_builtin(b)).unwrap(),
+ )
}
/// Type-check an expression and return the expression alongside its type if type-checking
@@ -322,29 +297,25 @@ enum Ret {
/// normalized as well.
fn type_with(
ctx: &TypecheckContext,
- e: SubExpr<Span, Normalized>,
-) -> Result<Typed, TypeError> {
+ e: Expr<Normalized>,
+) -> Result<Value, TypeError> {
use dhall_syntax::ExprF::{Annot, Embed, Lam, Let, Pi, Var};
- use Ret::*;
Ok(match e.as_ref() {
- Lam(x, t, b) => {
- let tx = mktype(ctx, t.clone())?;
- let ctx2 = ctx.insert_type(x, tx.clone());
- let b = type_with(&ctx2, b.clone())?;
- let v = Value::Lam(
- x.clone().into(),
- TypeThunk::from_type(tx.clone()),
- b.to_thunk(),
- );
- let tb = b.get_type()?.into_owned();
- let t = tck_pi_type(ctx, x.clone(), tx, tb)?.to_type();
- Typed::from_thunk_and_type(Thunk::from_value(v), t)
+ Lam(var, annot, body) => {
+ let annot = type_with(ctx, annot.clone())?;
+ let ctx2 = ctx.insert_type(var, annot.clone());
+ let body = type_with(&ctx2, body.clone())?;
+ let body_type = body.get_type()?;
+ Value::from_valuef_and_type(
+ ValueF::Lam(var.clone().into(), annot.clone(), body),
+ tck_pi_type(ctx, var.clone(), annot, body_type)?,
+ )
}
Pi(x, ta, tb) => {
- let ta = mktype(ctx, ta.clone())?;
+ let ta = type_with(ctx, ta.clone())?;
let ctx2 = ctx.insert_type(x, ta.clone());
- let tb = mktype(&ctx2, tb.clone())?;
+ let tb = type_with(&ctx2, tb.clone())?;
return tck_pi_type(ctx, x.clone(), ta, tb);
}
Let(x, t, v, e) => {
@@ -357,9 +328,9 @@ fn type_with(
let v = type_with(ctx, v)?;
return type_with(&ctx.insert_value(x, v.clone())?, e.clone());
}
- Embed(p) => p.clone().into_typed(),
+ Embed(p) => p.clone().into_typed().into_value(),
Var(var) => match ctx.lookup(&var) {
- Some(typed) => typed,
+ Some(typed) => typed.clone(),
None => {
return Err(TypeError::new(
ctx,
@@ -367,25 +338,13 @@ fn type_with(
))
}
},
- _ => {
+ e => {
             // Recursively typecheck all subexpressions
- let expr =
- e.as_ref().traverse_ref_with_special_handling_of_binders(
- |e| type_with(ctx, e.clone()),
- |_, _| unreachable!(),
- |_| unreachable!(),
- )?;
- let ret = type_last_layer(ctx, &expr)?;
- match ret {
- RetTypeOnly(typ) => {
- let expr = expr.map_ref_simple(|typed| typed.to_thunk());
- Typed::from_thunk_and_type(
- Thunk::from_partial_expr(expr),
- typ,
- )
- }
- RetWhole(tt) => tt,
- }
+ let expr = e.traverse_ref_with_special_handling_of_binders(
+ |e| type_with(ctx, e.clone()),
+ |_, _| unreachable!(),
+ )?;
+ type_last_layer(ctx, expr)?
}
})
}
@@ -394,473 +353,305 @@ fn type_with(
/// layer.
fn type_last_layer(
ctx: &TypecheckContext,
- e: &ExprF<Typed, X>,
-) -> Result<Ret, TypeError> {
+ e: ExprF<Value, Normalized>,
+) -> Result<Value, TypeError> {
use crate::error::TypeMessage::*;
use dhall_syntax::BinOp::*;
use dhall_syntax::Builtin::*;
use dhall_syntax::Const::Type;
use dhall_syntax::ExprF::*;
+ let mkerr = |msg: TypeMessage| Err(TypeError::new(ctx, msg));
+
+ /// Intermediary return type
+ enum Ret {
+ /// Returns the contained value as is
+ RetWhole(Value),
+ /// Returns the input expression `e` with the contained value as its type
+ RetTypeOnly(Value),
+ }
use Ret::*;
- let mkerr = |msg: TypeMessage| TypeError::new(ctx, msg);
- match e {
+ let ret = match &e {
+ Import(_) => unreachable!(
+ "There should remain no imports in a resolved expression"
+ ),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => {
unreachable!()
}
App(f, a) => {
let tf = f.get_type()?;
- let (x, tx, tb) = match &tf.to_value() {
- Value::Pi(x, tx, tb) => (x.clone(), tx.to_type(), tb.to_type()),
- _ => return Err(mkerr(NotAFunction(f.clone()))),
+ let tf_borrow = tf.as_whnf();
+ let (x, tx, tb) = match &*tf_borrow {
+ ValueF::Pi(x, tx, tb) => (x, tx, tb),
+ _ => return mkerr(NotAFunction(f.clone())),
};
- if a.get_type()?.as_ref() != &tx {
- return Err(mkerr(TypeMismatch(
- f.clone(),
- tx.to_normalized(),
- a.clone(),
- )));
+ if &a.get_type()? != tx {
+ return mkerr(TypeMismatch(f.clone(), tx.clone(), a.clone()));
}
- Ok(RetTypeOnly(tb.subst_shift(&x.into(), &a)))
+ RetTypeOnly(tb.subst_shift(&x.into(), a))
}
Annot(x, t) => {
- let t = t.to_type();
- if &t != x.get_type()?.as_ref() {
- return Err(mkerr(AnnotMismatch(x.clone(), t.to_normalized())));
+ if &x.get_type()? != t {
+ return mkerr(AnnotMismatch(x.clone(), t.clone()));
}
- Ok(RetTypeOnly(x.get_type()?.into_owned()))
+ RetWhole(x.clone())
}
Assert(t) => {
- match t.to_value() {
- Value::Equivalence(x, y) if x == y => {}
- Value::Equivalence(x, y) => {
- return Err(mkerr(AssertMismatch(
- x.to_typed(),
- y.to_typed(),
- )))
+ match &*t.as_whnf() {
+ ValueF::Equivalence(x, y) if x == y => {}
+ ValueF::Equivalence(x, y) => {
+ return mkerr(AssertMismatch(x.clone(), y.clone()))
}
- _ => return Err(mkerr(AssertMustTakeEquivalence)),
+ _ => return mkerr(AssertMustTakeEquivalence),
}
- Ok(RetTypeOnly(t.to_type()))
+ RetTypeOnly(t.clone())
}
BoolIf(x, y, z) => {
- if x.get_type()?.as_ref() != &builtin_to_type(Bool)? {
- return Err(mkerr(InvalidPredicate(x.clone())));
+ if *x.get_type()?.as_whnf() != ValueF::from_builtin(Bool) {
+ return mkerr(InvalidPredicate(x.clone()));
}
if y.get_type()?.get_type()?.as_const() != Some(Type) {
- return Err(mkerr(IfBranchMustBeTerm(true, y.clone())));
+ return mkerr(IfBranchMustBeTerm(true, y.clone()));
}
if z.get_type()?.get_type()?.as_const() != Some(Type) {
- return Err(mkerr(IfBranchMustBeTerm(false, z.clone())));
+ return mkerr(IfBranchMustBeTerm(false, z.clone()));
}
if y.get_type()? != z.get_type()? {
- return Err(mkerr(IfBranchMismatch(y.clone(), z.clone())));
+ return mkerr(IfBranchMismatch(y.clone(), z.clone()));
}
- Ok(RetTypeOnly(y.get_type()?.into_owned()))
+ RetTypeOnly(y.get_type()?)
}
EmptyListLit(t) => {
- let t = t.to_type();
- match &t.to_value() {
- Value::AppliedBuiltin(dhall_syntax::Builtin::List, args)
+ match &*t.as_whnf() {
+ ValueF::AppliedBuiltin(dhall_syntax::Builtin::List, args)
if args.len() == 1 => {}
- _ => {
- return Err(TypeError::new(
- ctx,
- InvalidListType(t.to_normalized()),
- ))
- }
+ _ => return mkerr(InvalidListType(t.clone())),
}
- Ok(RetTypeOnly(t))
+ RetTypeOnly(t.clone())
}
NEListLit(xs) => {
let mut iter = xs.iter().enumerate();
let (_, x) = iter.next().unwrap();
for (i, y) in iter {
if x.get_type()? != y.get_type()? {
- return Err(mkerr(InvalidListElement(
+ return mkerr(InvalidListElement(
i,
- x.get_type()?.to_normalized(),
+ x.get_type()?,
y.clone(),
- )));
+ ));
}
}
let t = x.get_type()?;
if t.get_type()?.as_const() != Some(Type) {
- return Err(TypeError::new(
- ctx,
- InvalidListType(t.to_normalized()),
- ));
+ return mkerr(InvalidListType(t));
}
- Ok(RetTypeOnly(
- Typed::from_thunk_and_type(
- Value::from_builtin(dhall_syntax::Builtin::List)
- .app(t.to_value())
- .into_thunk(),
- Typed::from_const(Type),
- )
- .to_type(),
- ))
+ RetTypeOnly(Value::from_builtin(dhall_syntax::Builtin::List).app(t))
}
SomeLit(x) => {
- let t = x.get_type()?.into_owned();
+ let t = x.get_type()?;
if t.get_type()?.as_const() != Some(Type) {
- return Err(TypeError::new(
- ctx,
- InvalidOptionalType(t.to_normalized()),
- ));
+ return mkerr(InvalidOptionalType(t));
}
- Ok(RetTypeOnly(
- Typed::from_thunk_and_type(
- Value::from_builtin(dhall_syntax::Builtin::Optional)
- .app(t.to_value())
- .into_thunk(),
- Typed::from_const(Type).into_type(),
- )
- .to_type(),
- ))
+ RetTypeOnly(
+ Value::from_builtin(dhall_syntax::Builtin::Optional).app(t),
+ )
}
- RecordType(kts) => Ok(RetWhole(tck_record_type(
+ RecordType(kts) => RetWhole(tck_record_type(
ctx,
- kts.iter().map(|(x, t)| Ok((x.clone(), t.to_type()))),
- )?)),
- UnionType(kts) => Ok(RetWhole(tck_union_type(
+ kts.iter().map(|(x, t)| Ok((x.clone(), t.clone()))),
+ )?),
+ UnionType(kts) => RetWhole(tck_union_type(
ctx,
- kts.iter()
- .map(|(x, t)| Ok((x.clone(), t.as_ref().map(|t| t.to_type())))),
- )?)),
- RecordLit(kvs) => Ok(RetTypeOnly(
- tck_record_type(
- ctx,
- kvs.iter()
- .map(|(x, v)| Ok((x.clone(), v.get_type()?.into_owned()))),
- )?
- .into_type(),
- )),
+ kts.iter().map(|(x, t)| Ok((x.clone(), t.clone()))),
+ )?),
+ RecordLit(kvs) => RetTypeOnly(tck_record_type(
+ ctx,
+ kvs.iter().map(|(x, v)| Ok((x.clone(), v.get_type()?))),
+ )?),
Field(r, x) => {
- match &r.get_type()?.to_value() {
- Value::RecordType(kts) => match kts.get(&x) {
+ match &*r.get_type()?.as_whnf() {
+ ValueF::RecordType(kts) => match kts.get(&x) {
Some(tth) => {
- Ok(RetTypeOnly(tth.to_type()))
+ RetTypeOnly(tth.clone())
},
- None => Err(mkerr(MissingRecordField(x.clone(),
- r.clone()))),
+ None => return mkerr(MissingRecordField(x.clone(),
+ r.clone())),
},
// TODO: branch here only when r.get_type() is a Const
_ => {
- let r = r.to_type();
- match &r.to_value() {
- Value::UnionType(kts) => match kts.get(&x) {
+ match &*r.as_whnf() {
+ ValueF::UnionType(kts) => match kts.get(&x) {
// Constructor has type T -> < x: T, ... >
Some(Some(t)) => {
- // TODO: avoid capture
- Ok(RetTypeOnly(
+ RetTypeOnly(
tck_pi_type(
ctx,
"_".into(),
- t.to_type(),
- r.clone(),
- )?.to_type()
- ))
+ t.clone(),
+ r.under_binder(Label::from("_")),
+ )?
+ )
},
Some(None) => {
- Ok(RetTypeOnly(r.clone()))
+ RetTypeOnly(r.clone())
},
None => {
- Err(mkerr(MissingUnionField(
+ return mkerr(MissingUnionField(
x.clone(),
- r.to_normalized(),
- )))
+ r.clone(),
+ ))
},
},
_ => {
- Err(mkerr(NotARecord(
+ return mkerr(NotARecord(
x.clone(),
- r.to_normalized()
- )))
+ r.clone()
+ ))
},
}
}
- // _ => Err(mkerr(NotARecord(
+ // _ => mkerr(NotARecord(
// x,
- // r.to_type()?.to_normalized(),
- // ))),
+ // r?,
+ // )),
}
}
- Const(c) => Ok(RetWhole(Typed::from_const(*c))),
- Builtin(b) => {
- Ok(RetTypeOnly(mktype(ctx, rc(type_of_builtin(*b)).absurd())?))
- }
- BoolLit(_) => Ok(RetTypeOnly(builtin_to_type(Bool)?)),
- NaturalLit(_) => Ok(RetTypeOnly(builtin_to_type(Natural)?)),
- IntegerLit(_) => Ok(RetTypeOnly(builtin_to_type(Integer)?)),
- DoubleLit(_) => Ok(RetTypeOnly(builtin_to_type(Double)?)),
+ Const(c) => RetWhole(const_to_value(*c)),
+ Builtin(b) => RetWhole(builtin_to_value(*b)),
+ BoolLit(_) => RetTypeOnly(builtin_to_value(Bool)),
+ NaturalLit(_) => RetTypeOnly(builtin_to_value(Natural)),
+ IntegerLit(_) => RetTypeOnly(builtin_to_value(Integer)),
+ DoubleLit(_) => RetTypeOnly(builtin_to_value(Double)),
TextLit(interpolated) => {
- let text_type = builtin_to_type(Text)?;
+ let text_type = builtin_to_value(Text);
for contents in interpolated.iter() {
use InterpolatedTextContents::Expr;
if let Expr(x) = contents {
- if x.get_type()?.as_ref() != &text_type {
- return Err(mkerr(InvalidTextInterpolation(x.clone())));
+ if x.get_type()? != text_type {
+ return mkerr(InvalidTextInterpolation(x.clone()));
}
}
}
- Ok(RetTypeOnly(text_type))
+ RetTypeOnly(text_type)
}
BinOp(RightBiasedRecordMerge, l, r) => {
use crate::phase::normalize::merge_maps;
let l_type = l.get_type()?;
- let l_kind = l_type.get_type()?;
let r_type = r.get_type()?;
- let r_kind = r_type.get_type()?;
-
- // Check the equality of kinds.
- // This is to disallow expression such as:
- // "{ x = Text } // { y = 1 }"
- if l_kind != r_kind {
- return Err(mkerr(RecordMismatch(l.clone(), r.clone())));
- }
// Extract the LHS record type
- let kts_x = match l_type.to_value() {
- Value::RecordType(kts) => kts,
- _ => return Err(mkerr(MustCombineRecord(l.clone()))),
+ let l_type_borrow = l_type.as_whnf();
+ let kts_x = match &*l_type_borrow {
+ ValueF::RecordType(kts) => kts,
+ _ => return mkerr(MustCombineRecord(l.clone())),
};
// Extract the RHS record type
- let kts_y = match r_type.to_value() {
- Value::RecordType(kts) => kts,
- _ => return Err(mkerr(MustCombineRecord(r.clone()))),
+ let r_type_borrow = r_type.as_whnf();
+ let kts_y = match &*r_type_borrow {
+ ValueF::RecordType(kts) => kts,
+ _ => return mkerr(MustCombineRecord(r.clone())),
};
             // Union the two records, preferring
             // the values found in the RHS.
- let kts = merge_maps(&kts_x, &kts_y, |_, r_t| r_t.clone());
+ let kts = merge_maps::<_, _, _, !>(kts_x, kts_y, |_, _, r_t| {
+ Ok(r_t.clone())
+ })?;
// Construct the final record type from the union
- Ok(RetTypeOnly(
- tck_record_type(
- ctx,
- kts.iter().map(|(x, v)| Ok((x.clone(), v.to_type()))),
- )?
- .into_type(),
- ))
- }
- BinOp(RecursiveRecordMerge, l, r) => {
- // A recursive function to dig down into
- // records of records when merging.
- fn combine_record_types(
- ctx: &TypecheckContext,
- kts_l: HashMap<Label, TypeThunk>,
- kts_r: HashMap<Label, TypeThunk>,
- ) -> Result<Typed, TypeError> {
- use crate::phase::normalize::outer_join;
-
- // If the Label exists for both records and Type for the values
- // are records themselves, then we hit the recursive case.
- // Otherwise we have a field collision.
- let combine = |k: &Label,
- inner_l: &TypeThunk,
- inner_r: &TypeThunk|
- -> Result<Typed, TypeError> {
- match (inner_l.to_value(), inner_r.to_value()) {
- (
- Value::RecordType(inner_l_kvs),
- Value::RecordType(inner_r_kvs),
- ) => {
- combine_record_types(ctx, inner_l_kvs, inner_r_kvs)
- }
- (_, _) => {
- Err(TypeError::new(ctx, FieldCollision(k.clone())))
- }
- }
- };
-
- let kts: HashMap<Label, Result<Typed, TypeError>> = outer_join(
- |l| Ok(l.to_type()),
- |r| Ok(r.to_type()),
- |k: &Label, l: &TypeThunk, r: &TypeThunk| combine(k, l, r),
- &kts_l,
- &kts_r,
- );
-
- Ok(tck_record_type(
- ctx,
- kts.into_iter().map(|(x, v)| v.map(|r| (x.clone(), r))),
- )?
- .into_type())
- };
-
- let l_type = l.get_type()?;
- let l_kind = l_type.get_type()?;
- let r_type = r.get_type()?;
- let r_kind = r_type.get_type()?;
-
- // Check the equality of kinds.
- // This is to disallow expression such as:
- // "{ x = Text } // { y = 1 }"
- if l_kind != r_kind {
- return Err(mkerr(RecordMismatch(l.clone(), r.clone())));
- }
-
- // Extract the LHS record type
- let kts_x = match l_type.to_value() {
- Value::RecordType(kts) => kts,
- _ => return Err(mkerr(MustCombineRecord(l.clone()))),
- };
-
- // Extract the RHS record type
- let kts_y = match r_type.to_value() {
- Value::RecordType(kts) => kts,
- _ => return Err(mkerr(MustCombineRecord(r.clone()))),
- };
-
- combine_record_types(ctx, kts_x, kts_y).map(|r| RetTypeOnly(r))
+ RetTypeOnly(tck_record_type(
+ ctx,
+ kts.into_iter().map(|(x, v)| Ok((x.clone(), v))),
+ )?)
}
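+        // Deep record merges typecheck by recursively merging the two record
+        // types, reusing the RecursiveRecordTypeMerge rule on them.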
+ BinOp(RecursiveRecordMerge, l, r) => RetTypeOnly(type_last_layer(
+ ctx,
+ ExprF::BinOp(
+ RecursiveRecordTypeMerge,
+ l.get_type()?,
+ r.get_type()?,
+ ),
+ )?),
BinOp(RecursiveRecordTypeMerge, l, r) => {
- // A recursive function to dig down into
- // records of records when merging.
- fn combine_record_types(
- ctx: &TypecheckContext,
- kts_l: HashMap<Label, TypeThunk>,
- kts_r: HashMap<Label, TypeThunk>,
- ) -> Result<Typed, TypeError> {
- use crate::phase::normalize::intersection_with_key;
-
- // If the Label exists for both records and Type for the values
- // are records themselves, then we hit the recursive case.
- // Otherwise we have a field collision.
- let combine = |k: &Label,
- kts_l_inner: &TypeThunk,
- kts_r_inner: &TypeThunk|
- -> Result<Typed, TypeError> {
- match (kts_l_inner.to_value(), kts_r_inner.to_value()) {
- (
- Value::RecordType(kvs_l_inner),
- Value::RecordType(kvs_r_inner),
- ) => {
- combine_record_types(ctx, kvs_l_inner, kvs_r_inner)
- }
- (_, _) => {
- Err(TypeError::new(ctx, FieldCollision(k.clone())))
- }
- }
- };
-
- let kts = intersection_with_key(
- |k: &Label, l: &TypeThunk, r: &TypeThunk| combine(k, l, r),
- &kts_l,
- &kts_r,
- );
-
- Ok(tck_record_type(
- ctx,
- kts.into_iter().map(|(x, v)| v.map(|r| (x.clone(), r))),
- )?
- .into_type())
- };
-
- // Extract the Const of the LHS
- let k_l = match l.get_type()?.to_value() {
- Value::Const(k) => k,
- _ => {
- return Err(mkerr(RecordTypeMergeRequiresRecordType(
- l.clone(),
- )))
- }
- };
-
- // Extract the Const of the RHS
- let k_r = match r.get_type()?.to_value() {
- Value::Const(k) => k,
- _ => {
- return Err(mkerr(RecordTypeMergeRequiresRecordType(
- r.clone(),
- )))
- }
- };
-
- // Const values must match for the Records
- let k = if k_l == k_r {
- k_l
- } else {
- return Err(mkerr(RecordTypeMismatch(
- Typed::from_const(k_l),
- Typed::from_const(k_r),
- l.clone(),
- r.clone(),
- )));
- };
+ use crate::phase::normalize::merge_maps;
// Extract the LHS record type
- let kts_x = match l.to_value() {
- Value::RecordType(kts) => kts,
+ let borrow_l = l.as_whnf();
+ let kts_x = match &*borrow_l {
+ ValueF::RecordType(kts) => kts,
_ => {
- return Err(mkerr(RecordTypeMergeRequiresRecordType(
- l.clone(),
- )))
+ return mkerr(RecordTypeMergeRequiresRecordType(l.clone()))
}
};
// Extract the RHS record type
- let kts_y = match r.to_value() {
- Value::RecordType(kts) => kts,
+ let borrow_r = r.as_whnf();
+ let kts_y = match &*borrow_r {
+ ValueF::RecordType(kts) => kts,
_ => {
- return Err(mkerr(RecordTypeMergeRequiresRecordType(
- r.clone(),
- )))
+ return mkerr(RecordTypeMergeRequiresRecordType(r.clone()))
}
};
// Ensure that the records combine without a type error
- // and if not output the final Const value.
- combine_record_types(ctx, kts_x, kts_y)
- .and(Ok(RetTypeOnly(Typed::from_const(k))))
+ let kts = merge_maps(
+ kts_x,
+ kts_y,
+ // If the Label exists for both records, then we hit the recursive case.
+ |_, l: &Value, r: &Value| {
+ type_last_layer(
+ ctx,
+ ExprF::BinOp(
+ RecursiveRecordTypeMerge,
+ l.clone(),
+ r.clone(),
+ ),
+ )
+ },
+ )?;
+
+ RetWhole(tck_record_type(ctx, kts.into_iter().map(Ok))?)
}
BinOp(o @ ListAppend, l, r) => {
- match l.get_type()?.to_value() {
- Value::AppliedBuiltin(List, _) => {}
- _ => return Err(mkerr(BinOpTypeMismatch(*o, l.clone()))),
+ match &*l.get_type()?.as_whnf() {
+ ValueF::AppliedBuiltin(List, _) => {}
+ _ => return mkerr(BinOpTypeMismatch(*o, l.clone())),
}
if l.get_type()? != r.get_type()? {
- return Err(mkerr(BinOpTypeMismatch(*o, r.clone())));
+ return mkerr(BinOpTypeMismatch(*o, r.clone()));
}
- Ok(RetTypeOnly(l.get_type()?.into_owned()))
+ RetTypeOnly(l.get_type()?)
}
BinOp(Equivalence, l, r) => {
if l.get_type()?.get_type()?.as_const() != Some(Type) {
- return Err(mkerr(EquivalenceArgumentMustBeTerm(
- true,
- l.clone(),
- )));
+ return mkerr(EquivalenceArgumentMustBeTerm(true, l.clone()));
}
if r.get_type()?.get_type()?.as_const() != Some(Type) {
- return Err(mkerr(EquivalenceArgumentMustBeTerm(
- false,
- r.clone(),
- )));
+ return mkerr(EquivalenceArgumentMustBeTerm(false, r.clone()));
}
if l.get_type()? != r.get_type()? {
- return Err(mkerr(EquivalenceTypeMismatch(
- r.clone(),
- l.clone(),
- )));
+ return mkerr(EquivalenceTypeMismatch(r.clone(), l.clone()));
}
- Ok(RetTypeOnly(Typed::from_const(Type).into_type()))
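+            // `l === r` is itself a type: return the `Equivalence` value,
+            // which lives in `Type`.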
+ RetWhole(Value::from_valuef_and_type(
+ ValueF::Equivalence(l.clone(), r.clone()),
+ Value::from_const(Type),
+ ))
}
BinOp(o, l, r) => {
- let t = builtin_to_type(match o {
+ let t = builtin_to_value(match o {
BoolAnd => Bool,
BoolOr => Bool,
BoolEQ => Bool,
@@ -874,143 +665,140 @@ fn type_last_layer(
RecursiveRecordTypeMerge => unreachable!(),
ImportAlt => unreachable!("There should remain no import alternatives in a resolved expression"),
Equivalence => unreachable!(),
- })?;
+ });
- if l.get_type()?.as_ref() != &t {
- return Err(mkerr(BinOpTypeMismatch(*o, l.clone())));
+ if l.get_type()? != t {
+ return mkerr(BinOpTypeMismatch(*o, l.clone()));
}
- if r.get_type()?.as_ref() != &t {
- return Err(mkerr(BinOpTypeMismatch(*o, r.clone())));
+ if r.get_type()? != t {
+ return mkerr(BinOpTypeMismatch(*o, r.clone()));
}
- Ok(RetTypeOnly(t))
+ RetTypeOnly(t)
}
Merge(record, union, type_annot) => {
- let handlers = match record.get_type()?.to_value() {
- Value::RecordType(kts) => kts,
- _ => return Err(mkerr(Merge1ArgMustBeRecord(record.clone()))),
+ let record_type = record.get_type()?;
+ let record_borrow = record_type.as_whnf();
+ let handlers = match &*record_borrow {
+ ValueF::RecordType(kts) => kts,
+ _ => return mkerr(Merge1ArgMustBeRecord(record.clone())),
};
- let variants = match union.get_type()?.to_value() {
- Value::UnionType(kts) => kts,
- _ => return Err(mkerr(Merge2ArgMustBeUnion(union.clone()))),
+ let union_type = union.get_type()?;
+ let union_borrow = union_type.as_whnf();
+ let variants = match &*union_borrow {
+ ValueF::UnionType(kts) => kts,
+ _ => return mkerr(Merge2ArgMustBeUnion(union.clone())),
};
let mut inferred_type = None;
- for (x, handler) in handlers.iter() {
- let handler_return_type = match variants.get(x) {
- // Union alternative with type
- Some(Some(variant_type)) => {
- let variant_type = variant_type.to_type();
- let handler_type = handler.to_type();
- let (x, tx, tb) = match &handler_type.to_value() {
- Value::Pi(x, tx, tb) => {
- (x.clone(), tx.to_type(), tb.to_type())
+ for (x, handler_type) in handlers {
+ let handler_return_type =
+ match variants.get(x) {
+ // Union alternative with type
+ Some(Some(variant_type)) => {
+ let handler_type_borrow = handler_type.as_whnf();
+ let (x, tx, tb) = match &*handler_type_borrow {
+ ValueF::Pi(x, tx, tb) => (x, tx, tb),
+ _ => {
+ return mkerr(NotAFunction(
+ handler_type.clone(),
+ ))
+ }
+ };
+
+ if variant_type != tx {
+ return mkerr(TypeMismatch(
+ handler_type.clone(),
+ tx.clone(),
+ variant_type.clone(),
+ ));
}
- _ => return Err(mkerr(NotAFunction(handler_type))),
- };
-
- if &variant_type != &tx {
- return Err(mkerr(TypeMismatch(
- handler_type,
- tx.to_normalized(),
- variant_type,
- )));
- }
- // Extract `tb` from under the `x` binder. Fails is `x` was free in `tb`.
- match tb.over_binder(x) {
- Some(x) => x,
- None => {
- return Err(mkerr(
+                            // Extract `tb` from under the `x` binder. Fails if `x` was free in `tb`.
+ match tb.over_binder(x) {
+ Some(x) => x,
+ None => return mkerr(
MergeHandlerReturnTypeMustNotBeDependent,
- ))
+ ),
}
}
- }
- // Union alternative without type
- Some(None) => handler.to_type(),
- None => {
- return Err(mkerr(MergeHandlerMissingVariant(
- x.clone(),
- )))
- }
- };
+ // Union alternative without type
+ Some(None) => handler_type.clone(),
+ None => {
+ return mkerr(MergeHandlerMissingVariant(x.clone()))
+ }
+ };
match &inferred_type {
None => inferred_type = Some(handler_return_type),
Some(t) => {
if t != &handler_return_type {
- return Err(mkerr(MergeHandlerTypeMismatch));
+ return mkerr(MergeHandlerTypeMismatch);
}
}
}
}
for x in variants.keys() {
if !handlers.contains_key(x) {
- return Err(mkerr(MergeVariantMissingHandler(x.clone())));
+ return mkerr(MergeVariantMissingHandler(x.clone()));
}
}
match (inferred_type, type_annot) {
(Some(ref t1), Some(t2)) => {
- let t2 = t2.to_type();
- if t1 != &t2 {
- return Err(mkerr(MergeAnnotMismatch));
+ if t1 != t2 {
+ return mkerr(MergeAnnotMismatch);
}
- Ok(RetTypeOnly(t2))
+ RetTypeOnly(t2.clone())
}
- (Some(t), None) => Ok(RetTypeOnly(t)),
- (None, Some(t)) => Ok(RetTypeOnly(t.to_type())),
- (None, None) => Err(mkerr(MergeEmptyNeedsAnnotation)),
+ (Some(t), None) => RetTypeOnly(t),
+ (None, Some(t)) => RetTypeOnly(t.clone()),
+ (None, None) => return mkerr(MergeEmptyNeedsAnnotation),
}
}
+ ToMap(_, _) => unimplemented!("toMap"),
Projection(record, labels) => {
- let trecord = record.get_type()?;
- let kts = match trecord.to_value() {
- Value::RecordType(kts) => kts,
- _ => return Err(mkerr(ProjectionMustBeRecord)),
+ let record_type = record.get_type()?;
+ let record_borrow = record_type.as_whnf();
+ let kts = match &*record_borrow {
+ ValueF::RecordType(kts) => kts,
+ _ => return mkerr(ProjectionMustBeRecord),
};
let mut new_kts = HashMap::new();
for l in labels {
match kts.get(l) {
- None => return Err(mkerr(ProjectionMissingEntry)),
+ None => return mkerr(ProjectionMissingEntry),
Some(t) => new_kts.insert(l.clone(), t.clone()),
};
}
- Ok(RetTypeOnly(
- Typed::from_thunk_and_type(
- Value::RecordType(new_kts).into_thunk(),
- trecord.get_type()?.into_owned(),
- )
- .to_type(),
+ RetTypeOnly(Value::from_valuef_and_type(
+ ValueF::RecordType(new_kts),
+ record_type.get_type()?,
))
}
- }
+ };
+
+ Ok(match ret {
+ RetTypeOnly(typ) => {
+ Value::from_valuef_and_type(ValueF::PartialExpr(e), typ)
+ }
+ RetWhole(v) => v,
+ })
}
-/// `typeOf` is the same as `type_with` with an empty context, meaning that the
+/// `typecheck` is the same as `type_with` with an empty context, meaning that the
/// expression must be closed (i.e. no free variables), otherwise type-checking
/// will fail.
-fn type_of(e: SubExpr<Span, Normalized>) -> Result<Typed, TypeError> {
- let ctx = TypecheckContext::new();
- let e = type_with(&ctx, e)?;
- // Ensure `e` has a type (i.e. `e` is not `Sort`)
- e.get_type()?;
- Ok(e)
+pub(crate) fn typecheck(e: Expr<Normalized>) -> Result<Value, TypeError> {
+ type_with(&TypecheckContext::new(), e)
}
-pub fn typecheck(e: Resolved) -> Result<Typed, TypeError> {
- type_of(e.0)
-}
-
-pub fn typecheck_with(e: Resolved, ty: &Type) -> Result<Typed, TypeError> {
- let expr: SubExpr<_, _> = e.0;
- let ty: SubExpr<_, _> = ty.to_expr().absurd();
- type_of(expr.rewrap(ExprF::Annot(expr.clone(), ty)))
-}
-pub fn skip_typecheck(e: Resolved) -> Typed {
- Typed::from_thunk_untyped(Thunk::new(NormalizationContext::new(), e.0))
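+/// Typecheck `expr` against the type `ty` by typechecking the annotated
+/// expression `expr : ty`.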
+pub(crate) fn typecheck_with(
+ expr: Expr<Normalized>,
+ ty: Expr<Normalized>,
+) -> Result<Value, TypeError> {
+ typecheck(expr.rewrap(ExprF::Annot(expr.clone(), ty)))
}