author | Fintan Halpenny | 2019-09-02 23:09:26 +0100
committer | Fintan Halpenny | 2019-09-02 23:09:26 +0100
commit | 8553b398a5f97eed240f5360282e911392cab6ff (patch)
tree | 076d554b7e066cf854aa50f350096ce55e3bd691 /dhall
parent | e73f822b6972e8fa2e72b56ff5378b91bea1a5e6 (diff)
parent | 737abd9be6d35bbce784d9cf249edf7ad14677d6 (diff)
Merge remote-tracking branch 'origin/master' into fintan/canonicalize
Diffstat (limited to 'dhall')
-rw-r--r-- | dhall/Cargo.toml | 4
-rw-r--r-- | dhall/build.rs | 490
-rw-r--r-- | dhall/src/api/mod.rs | 159
-rw-r--r-- | dhall/src/api/serde.rs | 70
-rw-r--r-- | dhall/src/api/static_type.rs | 93
-rw-r--r-- | dhall/src/core/context.rs | 171
-rw-r--r-- | dhall/src/core/mod.rs | 8
-rw-r--r-- | dhall/src/core/thunk.rs | 312
-rw-r--r-- | dhall/src/core/value.rs | 842
-rw-r--r-- | dhall/src/core/valuef.rs | 335
-rw-r--r-- | dhall/src/core/var.rs | 221
-rw-r--r-- | dhall/src/error/mod.rs | 63
-rw-r--r-- | dhall/src/lib.rs | 119
-rw-r--r-- | dhall/src/phase/.resolve.rs.swp | bin 0 -> 16384 bytes
-rw-r--r-- | dhall/src/phase/binary.rs | 148
-rw-r--r-- | dhall/src/phase/mod.rs | 235
-rw-r--r-- | dhall/src/phase/normalize.rs | 953
-rw-r--r-- | dhall/src/phase/parse.rs | 12
-rw-r--r-- | dhall/src/phase/resolve.rs | 34
-rw-r--r-- | dhall/src/phase/typecheck.rs | 846
-rw-r--r-- | dhall/src/tests.rs | 327
-rw-r--r-- | dhall/tests/traits.rs | 68
22 files changed, 2327 insertions(+), 3183 deletions(-)
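For orientation before the diff body: the largest chunk of this merge rewrites `dhall/build.rs` so that each spec-test suite is described by a `TestFeature` value (module name, test directory, test variant, path filter, input/output file types), and the generator emits one `make_spec_test!` invocation per matching file. The following is a minimal, hypothetical sketch of that descriptor-driven pattern, not the code from the diff: it deliberately simplifies (no `walkdir`, no A/B input/output pairs, an invented directory layout) and the names are illustrative only.

```rust
// Sketch of the TestFeature-style codegen pattern (assumptions noted above).
use std::ffi::OsStr;
use std::fs;
use std::io::{self, Write};
use std::path::PathBuf;

struct TestFeature<F: FnMut(&str) -> bool> {
    /// Name of the generated Rust module (appears in `cargo test` output).
    module_name: &'static str,
    /// Directory containing the spec test files.
    directory: PathBuf,
    /// Variant of the test enum to construct, e.g. "ParserFailure".
    variant: &'static str,
    /// Returns true for file stems that should be skipped.
    path_filter: F,
}

fn make_test_module<F: FnMut(&str) -> bool>(
    w: &mut impl Write,
    mut feature: TestFeature<F>,
) -> io::Result<()> {
    writeln!(w, "mod {} {{", feature.module_name)?;
    for entry in fs::read_dir(&feature.directory)? {
        let path = entry?.path();
        // Only consider `.dhall` files.
        if path.extension() != Some(OsStr::new("dhall")) {
            continue;
        }
        // Safe: a path with an extension also has a file stem.
        let stem = path.file_stem().unwrap().to_string_lossy().into_owned();
        if (feature.path_filter)(&stem) {
            continue;
        }
        // Turn the file name into a valid Rust identifier for the test name.
        let name = stem.replace('-', "_");
        writeln!(
            w,
            "make_spec_test!({}(\"{}\"), {});",
            feature.variant,
            path.display(),
            name
        )?;
    }
    writeln!(w, "}}")?;
    Ok(())
}

fn main() -> io::Result<()> {
    let mut out = Vec::new();
    make_test_module(
        &mut out,
        TestFeature {
            module_name: "parser_failure",
            directory: PathBuf::from("../dhall-lang/tests/parser/failure/"),
            variant: "ParserFailure",
            path_filter: |_path: &str| false,
        },
    )?;
    io::stdout().write_all(&out)
}
```

The design point visible in the diff is the same as in this sketch: each suite becomes one declarative descriptor instead of a separate hand-written loop, so adding a suite (e.g. `binary_decoding_failure` or `type_inference_success`) is a matter of adding one `TestFeature` literal rather than duplicating generator code.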
diff --git a/dhall/Cargo.toml b/dhall/Cargo.toml index d08627d..16c1c78 100644 --- a/dhall/Cargo.toml +++ b/dhall/Cargo.toml @@ -9,12 +9,12 @@ build = "build.rs" [dependencies] bytecount = "0.5.1" itertools = "0.8.0" +take_mut = "0.2.2" term-painter = "0.2.3" -serde = { version = "1.0", features = ["derive"] } +serde = { version = "1.0" } serde_cbor = "0.9.0" improved_slice_patterns = { version = "2.0.0", path = "../improved_slice_patterns" } dhall_syntax = { path = "../dhall_syntax" } -dhall_proc_macros = { path = "../dhall_proc_macros" } [dev-dependencies] pretty_assertions = "0.6.1" diff --git a/dhall/build.rs b/dhall/build.rs index 790ad8e..757eed8 100644 --- a/dhall/build.rs +++ b/dhall/build.rs @@ -2,77 +2,99 @@ use std::env; use std::ffi::OsString; use std::fs::File; use std::io::Write; -use std::path::Path; +use std::path::{Path, PathBuf}; use walkdir::WalkDir; +#[derive(Debug, Clone, Copy)] +enum FileType { + Text, + Binary, +} + +impl FileType { + fn to_ext(self) -> &'static str { + match self { + FileType::Text => "dhall", + FileType::Binary => "dhallb", + } + } +} + fn dhall_files_in_dir<'a>( dir: &'a Path, take_a_suffix: bool, + filetype: FileType, ) -> impl Iterator<Item = (String, String)> + 'a { WalkDir::new(dir) .into_iter() .filter_map(|e| e.ok()) .filter_map(move |path| { - let path = path.path(); - let path = path.strip_prefix(dir).unwrap(); - let ext = path.extension(); - if ext != Some(&OsString::from("dhall")) - && ext != Some(&OsString::from("dhallb")) - { + let path = path.path().strip_prefix(dir).unwrap(); + let ext = path.extension()?; + if ext != &OsString::from(filetype.to_ext()) { return None; } - let ext = ext.unwrap(); let path = path.to_string_lossy(); let path = &path[..path.len() - 1 - ext.len()]; - let path = if take_a_suffix { - if &path[path.len() - 1..] != "A" { - return None; - } else { - path[..path.len() - 1].to_owned() - } + let path = if take_a_suffix && &path[path.len() - 1..] 
!= "A" { + return None; + } else if take_a_suffix { + path[..path.len() - 1].to_owned() } else { path.to_owned() }; + // Transform path into a valie Rust identifier let name = path.replace("/", "_").replace("-", "_"); Some((name, path)) }) } +#[derive(Debug, Clone)] +struct TestFeature<F> { + /// Name of the module, used in the output of `cargo test` + module_name: &'static str, + /// Directory containing the tests files + directory: PathBuf, + /// Relevant variant of `dhall::tests::Test` + variant: &'static str, + /// Given a file name, whether to exclude it + path_filter: F, + /// Type of the input file + input_type: FileType, + /// Type of the output file, if any + output_type: Option<FileType>, +} + fn make_test_module( w: &mut impl Write, // Where to output the generated code - mod_name: &str, // Name of the module, used in the output of `cargo test` - subdir: &str, // Directory containing the tests files - feature: &str, // Relevant variant of `dhall::tests::Feature` - mut exclude: impl FnMut(&str) -> bool, // Given a file name, whether to exclude it + mut feature: TestFeature<impl FnMut(&str) -> bool>, ) -> std::io::Result<()> { - let all_tests_dir = Path::new("../dhall-lang/tests/"); - let tests_dir = all_tests_dir.join(subdir); - writeln!(w, "mod {} {{", mod_name)?; - for (name, path) in dhall_files_in_dir(&tests_dir.join("success/"), true) { - if exclude(&("success/".to_owned() + &path)) { - continue; - } - writeln!( - w, - r#"make_spec_test!({}, Success, success_{}, "{}/success/{}");"#, - feature, - name, - tests_dir.to_string_lossy(), - path - )?; - } - for (name, path) in dhall_files_in_dir(&tests_dir.join("failure/"), false) { - if exclude(&("failure/".to_owned() + &path)) { + let tests_dir = feature.directory; + writeln!(w, "mod {} {{", feature.module_name)?; + let take_a_suffix = feature.output_type.is_some(); + for (name, path) in + dhall_files_in_dir(&tests_dir, take_a_suffix, feature.input_type) + { + if (feature.path_filter)(&path) { continue; } - writeln!( - w, - r#"make_spec_test!({}, Failure, failure_{}, "{}/failure/{}");"#, - feature, - name, - tests_dir.to_string_lossy(), - path - )?; + let path = tests_dir.join(path); + let path = path.to_string_lossy(); + let test = match feature.output_type { + None => { + let input_file = + format!("\"{}.{}\"", path, feature.input_type.to_ext()); + format!("{}({})", feature.variant, input_file) + } + Some(output_type) => { + let input_file = + format!("\"{}A.{}\"", path, feature.input_type.to_ext()); + let output_file = + format!("\"{}B.{}\"", path, output_type.to_ext()); + format!("{}({}, {})", feature.variant, input_file, output_file) + } + }; + writeln!(w, "make_spec_test!({}, {});", test, name)?; } writeln!(w, "}}")?; Ok(()) @@ -88,187 +110,265 @@ fn main() -> std::io::Result<()> { let out_dir = env::var("OUT_DIR").unwrap(); let parser_tests_path = Path::new(&out_dir).join("spec_tests.rs"); + let spec_tests_dir = Path::new("../dhall-lang/tests/"); let mut file = File::create(parser_tests_path)?; - make_test_module(&mut file, "parse", "parser/", "Parser", |path| { - // Too slow in debug mode - path == "success/largeExpression" - // TODO: Inline headers - || path == "success/unit/import/inlineUsing" - || path == "success/unit/import/Headers" - || path == "success/unit/import/HeadersDoubleHash" - || path == "success/unit/import/HeadersDoubleHashPrecedence" - || path == "success/unit/import/HeadersHashPrecedence" - || path == "success/unit/import/HeadersInteriorHash" - // TODO: projection by expression - || path == 
"success/recordProjectionByExpression" - || path == "success/RecordProjectionByType" - || path == "success/unit/RecordProjectionByType" - || path == "success/unit/RecordProjectionByTypeEmpty" - || path == "success/unit/RecordProjectFields" - // TODO: RFC3986 URLs - || path == "success/unit/import/urls/emptyPath0" - || path == "success/unit/import/urls/emptyPath1" - || path == "success/unit/import/urls/emptyPathSegment" - // TODO: toMap - || path == "success/toMap" - })?; + make_test_module( + &mut file, + TestFeature { + module_name: "parser_success", + directory: spec_tests_dir.join("parser/success/"), + variant: "ParserSuccess", + path_filter: |path: &str| { + false + // Too slow in debug mode + || path == "largeExpression" + // Pretty sure the test is incorrect + || path == "unit/import/urls/quotedPathFakeUrlEncode" + // TODO: projection by expression + || path == "recordProjectionByExpression" + || path == "RecordProjectionByType" + || path == "unit/RecordProjectionByType" + || path == "unit/RecordProjectionByTypeEmpty" + || path == "unit/RecordProjectFields" + // TODO: RFC3986 URLs + || path == "unit/import/urls/emptyPath0" + || path == "unit/import/urls/emptyPath1" + || path == "unit/import/urls/emptyPathSegment" + }, + input_type: FileType::Text, + output_type: Some(FileType::Binary), + }, + )?; - make_test_module(&mut file, "printer", "parser/", "Printer", |path| { - // Failure tests are only for the parser - path.starts_with("failure/") - // Too slow in debug mode - || path == "success/largeExpression" - // TODO: Inline headers - || path == "success/unit/import/inlineUsing" - || path == "success/unit/import/Headers" - // TODO: projection by expression - || path == "success/recordProjectionByExpression" - || path == "success/RecordProjectionByType" - || path == "success/unit/RecordProjectionByType" - || path == "success/unit/RecordProjectionByTypeEmpty" - // TODO: RFC3986 URLs - || path == "success/unit/import/urls/emptyPath0" - || path == "success/unit/import/urls/emptyPath1" - || path == "success/unit/import/urls/emptyPathSegment" - // TODO: toMap - || path == "success/toMap" - })?; + make_test_module( + &mut file, + TestFeature { + module_name: "parser_failure", + directory: spec_tests_dir.join("parser/failure/"), + variant: "ParserFailure", + path_filter: |_path: &str| false, + input_type: FileType::Text, + output_type: None, + }, + )?; make_test_module( &mut file, - "binary_encoding", - "parser/", - "BinaryEncoding", - |path| { - // Failure tests are only for the parser - path.starts_with("failure/") - // Too slow in debug mode - || path == "success/largeExpression" - // See https://github.com/pyfisch/cbor/issues/109 - || path == "success/double" - || path == "success/unit/DoubleLitExponentNoDot" - || path == "success/unit/DoubleLitSecretelyInt" - // TODO: Inline headers - || path == "success/unit/import/inlineUsing" - || path == "success/unit/import/Headers" - // TODO: projection by expression - || path == "success/recordProjectionByExpression" - || path == "success/RecordProjectionByType" - || path == "success/unit/RecordProjectionByType" - || path == "success/unit/RecordProjectionByTypeEmpty" - // TODO: RFC3986 URLs - || path == "success/unit/import/urls/emptyPath0" - || path == "success/unit/import/urls/emptyPath1" - || path == "success/unit/import/urls/emptyPathSegment" - // TODO: toMap - || path == "success/toMap" + TestFeature { + module_name: "printer", + directory: spec_tests_dir.join("parser/success/"), + variant: "Printer", + path_filter: |path: &str| { + false + // 
Too slow in debug mode + || path == "largeExpression" + // TODO: projection by expression + || path == "recordProjectionByExpression" + || path == "RecordProjectionByType" + || path == "unit/RecordProjectionByType" + || path == "unit/RecordProjectionByTypeEmpty" + // TODO: RFC3986 URLs + || path == "unit/import/urls/emptyPath0" + || path == "unit/import/urls/emptyPath1" + || path == "unit/import/urls/emptyPathSegment" + }, + input_type: FileType::Text, + output_type: Some(FileType::Binary), }, )?; make_test_module( &mut file, - "binary_decoding", - "binary-decode/", - "BinaryDecoding", - |path| { - false - // TODO: projection by expression - || path == "success/unit/RecordProjectFields" - || path == "success/unit/recordProjectionByExpression" - // TODO: toMap - || path == "success/unit/ToMap" - || path == "success/unit/ToMapAnnotated" + TestFeature { + module_name: "binary_encoding", + directory: spec_tests_dir.join("parser/success/"), + variant: "BinaryEncoding", + path_filter: |path: &str| { + false + // Too slow in debug mode + || path == "largeExpression" + // Pretty sure the test is incorrect + || path == "unit/import/urls/quotedPathFakeUrlEncode" + // See https://github.com/pyfisch/cbor/issues/109 + || path == "double" + || path == "unit/DoubleLitExponentNoDot" + || path == "unit/DoubleLitSecretelyInt" + // TODO: projection by expression + || path == "recordProjectionByExpression" + || path == "RecordProjectionByType" + || path == "unit/RecordProjectionByType" + || path == "unit/RecordProjectionByTypeEmpty" + // TODO: RFC3986 URLs + || path == "unit/import/urls/emptyPath0" + || path == "unit/import/urls/emptyPath1" + || path == "unit/import/urls/emptyPathSegment" + }, + input_type: FileType::Text, + output_type: Some(FileType::Binary), }, )?; make_test_module( &mut file, - "beta_normalize", - "normalization/", - "Normalization", - |path| { - // We don't support bignums - path == "success/simple/integerToDouble" - // Too slow - || path == "success/remoteSystems" - // TODO: projection by expression - || path == "success/unit/RecordProjectionByTypeEmpty" - || path == "success/unit/RecordProjectionByTypeNonEmpty" - || path == "success/unit/RecordProjectionByTypeNormalizeProjection" - // TODO: fix Double/show - || path == "success/prelude/JSON/number/1" - // TODO: toMap - || path == "success/unit/EmptyToMap" - || path == "success/unit/ToMap" - || path == "success/unit/ToMapWithType" - // TODO: Normalize field selection further by inspecting the argument - || path == "success/simplifications/rightBiasedMergeWithinRecordProjectionWithinFieldSelection0" - || path == "success/simplifications/rightBiasedMergeWithinRecordProjectionWithinFieldSelection1" - || path == "success/simplifications/rightBiasedMergeWithinRecursiveRecordMergeWithinFieldselection" - || path == "success/unit/RecordProjectionByTypeWithinFieldSelection" - || path == "success/unit/RecordProjectionWithinFieldSelection" - || path == "success/unit/RecursiveRecordMergeWithinFieldSelection0" - || path == "success/unit/RecursiveRecordMergeWithinFieldSelection1" - || path == "success/unit/RecursiveRecordMergeWithinFieldSelection2" - || path == "success/unit/RecursiveRecordMergeWithinFieldSelection3" - || path == "success/unit/RightBiasedMergeWithinFieldSelection0" - || path == "success/unit/RightBiasedMergeWithinFieldSelection1" - || path == "success/unit/RightBiasedMergeWithinFieldSelection2" - || path == "success/unit/RightBiasedMergeWithinFieldSelection3" - || path == "success/unit/RightBiasedMergeEquivalentArguments" + 
TestFeature { + module_name: "binary_decoding_success", + directory: spec_tests_dir.join("binary-decode/success/"), + variant: "BinaryDecodingSuccess", + path_filter: |path: &str| { + false + // TODO: projection by expression + || path == "unit/RecordProjectFields" + || path == "unit/recordProjectionByExpression" + }, + input_type: FileType::Binary, + output_type: Some(FileType::Text), }, )?; make_test_module( &mut file, - "alpha_normalize", - "alpha-normalization/", - "AlphaNormalization", - |_| false, + TestFeature { + module_name: "binary_decoding_failure", + directory: spec_tests_dir.join("binary-decode/failure/"), + variant: "BinaryDecodingFailure", + path_filter: |_path: &str| false, + input_type: FileType::Binary, + output_type: None, + }, + )?; + + make_test_module( + &mut file, + TestFeature { + module_name: "beta_normalize", + directory: spec_tests_dir.join("normalization/success/"), + variant: "Normalization", + path_filter: |path: &str| { + // We don't support bignums + path == "simple/integerToDouble" + // Too slow + || path == "remoteSystems" + // TODO: projection by expression + || path == "unit/RecordProjectionByTypeEmpty" + || path == "unit/RecordProjectionByTypeNonEmpty" + || path == "unit/RecordProjectionByTypeNormalizeProjection" + // TODO: fix Double/show + || path == "prelude/JSON/number/1" + // TODO: toMap + || path == "unit/EmptyToMap" + || path == "unit/ToMap" + || path == "unit/ToMapWithType" + // TODO: Normalize field selection further by inspecting the argument + || path == "simplifications/rightBiasedMergeWithinRecordProjectionWithinFieldSelection0" + || path == "simplifications/rightBiasedMergeWithinRecordProjectionWithinFieldSelection1" + || path == "simplifications/rightBiasedMergeWithinRecursiveRecordMergeWithinFieldselection" + || path == "unit/RecordProjectionByTypeWithinFieldSelection" + || path == "unit/RecordProjectionWithinFieldSelection" + || path == "unit/RecursiveRecordMergeWithinFieldSelection0" + || path == "unit/RecursiveRecordMergeWithinFieldSelection1" + || path == "unit/RecursiveRecordMergeWithinFieldSelection2" + || path == "unit/RecursiveRecordMergeWithinFieldSelection3" + || path == "unit/RightBiasedMergeWithinFieldSelection0" + || path == "unit/RightBiasedMergeWithinFieldSelection1" + || path == "unit/RightBiasedMergeWithinFieldSelection2" + || path == "unit/RightBiasedMergeWithinFieldSelection3" + || path == "unit/RightBiasedMergeEquivalentArguments" + }, + input_type: FileType::Text, + output_type: Some(FileType::Text), + }, + )?; + + make_test_module( + &mut file, + TestFeature { + module_name: "alpha_normalize", + directory: spec_tests_dir.join("alpha-normalization/success/"), + variant: "AlphaNormalization", + path_filter: |path: &str| { + // This test doesn't typecheck + path == "unit/FunctionNestedBindingXXFree" + }, + input_type: FileType::Text, + output_type: Some(FileType::Text), + }, + )?; + + make_test_module( + &mut file, + TestFeature { + module_name: "typecheck_success", + directory: spec_tests_dir.join("typecheck/success/"), + variant: "TypecheckSuccess", + path_filter: |path: &str| { + false + // Too slow + || path == "prelude" + }, + input_type: FileType::Text, + output_type: Some(FileType::Text), + }, + )?; + + make_test_module( + &mut file, + TestFeature { + module_name: "typecheck_failure", + directory: spec_tests_dir.join("typecheck/failure/"), + variant: "TypecheckFailure", + path_filter: |path: &str| { + false + // TODO: Enable imports in typecheck tests + || path == "importBoundary" + || path == 
"customHeadersUsingBoundVariable" + // TODO: projection by expression + || path == "unit/RecordProjectionByTypeFieldTypeMismatch" + || path == "unit/RecordProjectionByTypeNotPresent" + // TODO: toMap + || path == "unit/EmptyToMap" + || path == "unit/HeterogenousToMap" + || path == "unit/MistypedToMap1" + || path == "unit/MistypedToMap2" + || path == "unit/MistypedToMap3" + || path == "unit/MistypedToMap4" + || path == "unit/NonRecordToMap" + }, + input_type: FileType::Text, + output_type: None, + }, )?; make_test_module( &mut file, - "typecheck", - "typecheck/", - "Typecheck", - |path| { - false - // TODO: Enable imports in typecheck tests - || path == "failure/importBoundary" - // Too slow - || path == "success/prelude" - // TODO: Inline headers - || path == "failure/customHeadersUsingBoundVariable" - // TODO: projection by expression - || path == "failure/unit/RecordProjectionByTypeFieldTypeMismatch" - || path == "failure/unit/RecordProjectionByTypeNotPresent" - // TODO: toMap - || path == "failure/unit/EmptyToMap" - || path == "failure/unit/HeterogenousToMap" - || path == "failure/unit/MistypedToMap1" - || path == "failure/unit/MistypedToMap2" - || path == "failure/unit/MistypedToMap3" - || path == "failure/unit/MistypedToMap4" - || path == "failure/unit/NonRecordToMap" + TestFeature { + module_name: "type_inference_success", + directory: spec_tests_dir.join("type-inference/success/"), + variant: "TypeInferenceSuccess", + path_filter: |path: &str| { + false + // TODO: projection by expression + || path == "unit/RecordProjectionByType" + || path == "unit/RecordProjectionByTypeEmpty" + || path == "unit/RecordProjectionByTypeJudgmentalEquality" + // TODO: toMap + || path == "unit/ToMap" + || path == "unit/ToMapAnnotated" + }, + input_type: FileType::Text, + output_type: Some(FileType::Text), }, )?; make_test_module( &mut file, - "type_inference", - "type-inference/", - "TypeInference", - |path| { - false - // TODO: projection by expression - || path == "success/unit/RecordProjectionByType" - || path == "success/unit/RecordProjectionByTypeEmpty" - || path == "success/unit/RecordProjectionByTypeJudgmentalEquality" - // TODO: toMap - || path == "success/unit/ToMap" - || path == "success/unit/ToMapAnnotated" + TestFeature { + module_name: "type_inference_failure", + directory: spec_tests_dir.join("type-inference/failure/"), + variant: "TypeInferenceFailure", + path_filter: |_path: &str| false, + input_type: FileType::Text, + output_type: None, }, )?; diff --git a/dhall/src/api/mod.rs b/dhall/src/api/mod.rs deleted file mode 100644 index 188b6c0..0000000 --- a/dhall/src/api/mod.rs +++ /dev/null @@ -1,159 +0,0 @@ -mod serde; -pub(crate) mod static_type; - -pub use value::Value; - -mod value { - use super::Type; - use crate::error::Result; - use crate::phase::{NormalizedSubExpr, Parsed, Typed}; - - // A Dhall value - pub struct Value(Typed); - - impl Value { - pub fn from_str(s: &str, ty: Option<&Type>) -> Result<Self> { - let resolved = Parsed::parse_str(s)?.resolve()?; - let typed = match ty { - None => resolved.typecheck()?, - Some(t) => resolved.typecheck_with(&t.to_type())?, - }; - Ok(Value(typed)) - } - pub(crate) fn to_expr(&self) -> NormalizedSubExpr { - self.0.to_expr() - } - pub(crate) fn to_typed(&self) -> Typed { - self.0.clone() - } - } -} - -pub use typ::Type; - -mod typ { - use dhall_syntax::Builtin; - - use crate::core::thunk::{Thunk, TypeThunk}; - use crate::core::value::Value; - use crate::error::Result; - use crate::phase::{NormalizedSubExpr, Typed}; - - /// A Dhall expression 
representing a type. - /// - /// This captures what is usually simply called a "type", like - /// `Bool`, `{ x: Integer }` or `Natural -> Text`. - #[derive(Debug, Clone, PartialEq, Eq)] - pub struct Type(Typed); - - impl Type { - pub(crate) fn from_value(v: Value) -> Self { - Type(Typed::from_value_untyped(v)) - } - pub(crate) fn make_builtin_type(b: Builtin) -> Self { - Self::from_value(Value::from_builtin(b)) - } - pub(crate) fn make_optional_type(t: Type) -> Self { - Self::from_value(Value::AppliedBuiltin( - Builtin::Optional, - vec![t.to_thunk()], - )) - } - pub(crate) fn make_list_type(t: Type) -> Self { - Self::from_value(Value::AppliedBuiltin( - Builtin::List, - vec![t.to_thunk()], - )) - } - #[doc(hidden)] - pub fn make_record_type( - kts: impl Iterator<Item = (String, Type)>, - ) -> Self { - Self::from_value(Value::RecordType( - kts.map(|(k, t)| { - (k.into(), TypeThunk::from_thunk(t.to_thunk())) - }) - .collect(), - )) - } - #[doc(hidden)] - pub fn make_union_type( - kts: impl Iterator<Item = (String, Option<Type>)>, - ) -> Self { - Self::from_value(Value::UnionType( - kts.map(|(k, t)| { - (k.into(), t.map(|t| TypeThunk::from_thunk(t.to_thunk()))) - }) - .collect(), - )) - } - - pub(crate) fn to_thunk(&self) -> Thunk { - self.0.to_thunk() - } - #[allow(dead_code)] - pub(crate) fn to_expr(&self) -> NormalizedSubExpr { - self.0.to_expr() - } - pub(crate) fn to_type(&self) -> crate::phase::Type { - self.0.to_type() - } - } - - impl crate::de::Deserialize for Type { - fn from_dhall(v: &crate::api::Value) -> Result<Self> { - Ok(Type(v.to_typed())) - } - } -} - -/// Deserialization of Dhall expressions into Rust -pub mod de { - pub use super::static_type::StaticType; - pub use super::{Type, Value}; - use crate::error::Result; - #[doc(hidden)] - pub use dhall_proc_macros::StaticType; - - /// A data structure that can be deserialized from a Dhall expression - /// - /// This is automatically implemented for any type that [serde][serde] - /// can deserialize. - /// - /// This trait cannot be implemented manually. - // TODO: seal trait - pub trait Deserialize: Sized { - /// See [dhall::de::from_str][crate::de::from_str] - fn from_dhall(v: &Value) -> Result<Self>; - } - - /// Deserialize an instance of type T from a string of Dhall text. - /// - /// This will recursively resolve all imports in the expression, and - /// typecheck it before deserialization. Relative imports will be resolved relative to the - /// provided file. More control over this process is not yet available - /// but will be in a coming version of this crate. - /// - /// If a type is provided, this additionally checks that the provided - /// expression has that type. - pub fn from_str<T>(s: &str, ty: Option<&Type>) -> Result<T> - where - T: Deserialize, - { - T::from_dhall(&Value::from_str(s, ty)?) - } - - /// Deserialize an instance of type T from a string of Dhall text, - /// additionally checking that it matches the type of T. - /// - /// This will recursively resolve all imports in the expression, and - /// typecheck it before deserialization. Relative imports will be resolved relative to the - /// provided file. More control over this process is not yet available - /// but will be in a coming version of this crate. 
- pub fn from_str_auto_type<T>(s: &str) -> Result<T> - where - T: Deserialize + StaticType, - { - from_str(s, Some(&<T as StaticType>::static_type())) - } -} diff --git a/dhall/src/api/serde.rs b/dhall/src/api/serde.rs deleted file mode 100644 index e1c8eef..0000000 --- a/dhall/src/api/serde.rs +++ /dev/null @@ -1,70 +0,0 @@ -use crate::api::de::{Deserialize, Value}; -use crate::error::{Error, Result}; -use dhall_syntax::{ExprF, SubExpr, X}; -use std::borrow::Cow; - -impl<'a, T> Deserialize for T -where - T: serde::Deserialize<'a>, -{ - fn from_dhall(v: &Value) -> Result<Self> { - T::deserialize(Deserializer(Cow::Owned(v.to_expr()))) - } -} - -struct Deserializer<'a>(Cow<'a, SubExpr<X, X>>); - -impl serde::de::Error for Error { - fn custom<T>(msg: T) -> Self - where - T: std::fmt::Display, - { - Error::Deserialize(msg.to_string()) - } -} - -impl<'de: 'a, 'a> serde::de::IntoDeserializer<'de, Error> for Deserializer<'a> { - type Deserializer = Deserializer<'a>; - fn into_deserializer(self) -> Self::Deserializer { - self - } -} - -impl<'de: 'a, 'a> serde::Deserializer<'de> for Deserializer<'a> { - type Error = Error; - fn deserialize_any<V>(self, visitor: V) -> Result<V::Value> - where - V: serde::de::Visitor<'de>, - { - use std::convert::TryInto; - use ExprF::*; - match self.0.as_ref().as_ref() { - NaturalLit(n) => match (*n).try_into() { - Ok(n64) => visitor.visit_u64(n64), - Err(_) => match (*n).try_into() { - Ok(n32) => visitor.visit_u32(n32), - Err(_) => unimplemented!(), - }, - }, - IntegerLit(n) => match (*n).try_into() { - Ok(n64) => visitor.visit_i64(n64), - Err(_) => match (*n).try_into() { - Ok(n32) => visitor.visit_i32(n32), - Err(_) => unimplemented!(), - }, - }, - RecordLit(m) => visitor.visit_map( - serde::de::value::MapDeserializer::new(m.iter().map( - |(k, v)| (k.as_ref(), Deserializer(Cow::Borrowed(v))), - )), - ), - _ => unimplemented!(), - } - } - - serde::forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct enum identifier ignored_any - } -} diff --git a/dhall/src/api/static_type.rs b/dhall/src/api/static_type.rs deleted file mode 100644 index 906bcef..0000000 --- a/dhall/src/api/static_type.rs +++ /dev/null @@ -1,93 +0,0 @@ -use dhall_syntax::{Builtin, Integer, Natural}; - -use crate::api::Type; - -/// A Rust type that can be represented as a Dhall type. -/// -/// A typical example is `Option<bool>`, -/// represented by the dhall expression `Optional Bool`. -/// -/// This trait can and should be automatically derived. -/// -/// The representation needs to be independent of the value. -/// For this reason, something like `HashMap<String, bool>` cannot implement -/// [StaticType] because each different value would -/// have a different Dhall record type. -pub trait StaticType { - fn static_type() -> Type; -} - -macro_rules! 
derive_builtin { - ($ty:ty, $builtin:ident) => { - impl StaticType for $ty { - fn static_type() -> Type { - Type::make_builtin_type(Builtin::$builtin) - } - } - }; -} - -derive_builtin!(bool, Bool); -derive_builtin!(Natural, Natural); -derive_builtin!(u64, Natural); -derive_builtin!(Integer, Integer); -derive_builtin!(String, Text); - -impl<A, B> StaticType for (A, B) -where - A: StaticType, - B: StaticType, -{ - fn static_type() -> Type { - Type::make_record_type( - vec![ - ("_1".to_owned(), A::static_type()), - ("_2".to_owned(), B::static_type()), - ] - .into_iter(), - ) - } -} - -impl<T, E> StaticType for std::result::Result<T, E> -where - T: StaticType, - E: StaticType, -{ - fn static_type() -> Type { - Type::make_union_type( - vec![ - ("Ok".to_owned(), Some(T::static_type())), - ("Err".to_owned(), Some(E::static_type())), - ] - .into_iter(), - ) - } -} - -impl<T> StaticType for Option<T> -where - T: StaticType, -{ - fn static_type() -> Type { - Type::make_optional_type(T::static_type()) - } -} - -impl<T> StaticType for Vec<T> -where - T: StaticType, -{ - fn static_type() -> Type { - Type::make_list_type(T::static_type()) - } -} - -impl<'a, T> StaticType for &'a T -where - T: StaticType, -{ - fn static_type() -> Type { - T::static_type() - } -} diff --git a/dhall/src/core/context.rs b/dhall/src/core/context.rs index 62affcf..2bf39c5 100644 --- a/dhall/src/core/context.rs +++ b/dhall/src/core/context.rs @@ -3,64 +3,56 @@ use std::rc::Rc; use dhall_syntax::{Label, V}; -use crate::core::thunk::Thunk; use crate::core::value::Value; +use crate::core::valuef::ValueF; use crate::core::var::{AlphaVar, Shift, Subst}; use crate::error::TypeError; -use crate::phase::{Type, Typed}; #[derive(Debug, Clone)] -pub enum CtxItem<T> { - Kept(AlphaVar, T), - Replaced(Thunk, T), +enum CtxItem { + Kept(AlphaVar, Value), + Replaced(Value), } #[derive(Debug, Clone)] -pub struct Context<T>(Rc<Vec<(Label, CtxItem<T>)>>); +pub(crate) struct TypecheckContext(Rc<Vec<(Label, CtxItem)>>); -#[derive(Debug, Clone)] -pub struct NormalizationContext(Context<()>); - -#[derive(Debug, Clone)] -pub struct TypecheckContext(Context<Type>); - -impl<T> Context<T> { +impl TypecheckContext { pub fn new() -> Self { - Context(Rc::new(Vec::new())) + TypecheckContext(Rc::new(Vec::new())) } - pub fn insert_kept(&self, x: &Label, t: T) -> Self - where - T: Shift + Clone, - { + pub fn insert_type(&self, x: &Label, t: Value) -> Self { let mut vec = self.0.as_ref().clone(); vec.push((x.clone(), CtxItem::Kept(x.into(), t.under_binder(x)))); - Context(Rc::new(vec)) + TypecheckContext(Rc::new(vec)) } - pub fn insert_replaced(&self, x: &Label, th: Thunk, t: T) -> Self - where - T: Clone, - { + pub fn insert_value(&self, x: &Label, e: Value) -> Result<Self, TypeError> { let mut vec = self.0.as_ref().clone(); - vec.push((x.clone(), CtxItem::Replaced(th, t))); - Context(Rc::new(vec)) + vec.push((x.clone(), CtxItem::Replaced(e))); + Ok(TypecheckContext(Rc::new(vec))) } - pub fn lookup(&self, var: &V<Label>) -> Result<CtxItem<T>, V<Label>> - where - T: Clone + Shift, - { + pub fn lookup(&self, var: &V<Label>) -> Option<Value> { let mut var = var.clone(); let mut shift_map: HashMap<Label, _> = HashMap::new(); for (l, i) in self.0.iter().rev() { match var.over_binder(l) { - None => return Ok(i.under_multiple_binders(&shift_map)), + None => { + let i = i.under_multiple_binders(&shift_map); + return Some(match i { + CtxItem::Kept(newvar, t) => { + Value::from_valuef_and_type(ValueF::Var(newvar), t) + } + CtxItem::Replaced(v) => v, + }); + } 
Some(newvar) => var = newvar, }; if let CtxItem::Kept(_, _) = i { *shift_map.entry(l.clone()).or_insert(0) += 1; } } - // Free variable - Err(var) + // Unbound variable + None } /// Given a var that makes sense in the current context, map the given function in such a way /// that the passed variable always makes sense in the context of the passed item. @@ -69,11 +61,8 @@ impl<T> Context<T> { fn do_with_var<E>( &self, var: &AlphaVar, - mut f: impl FnMut(&AlphaVar, &CtxItem<T>) -> Result<CtxItem<T>, E>, - ) -> Result<Self, E> - where - T: Clone, - { + mut f: impl FnMut(&AlphaVar, &CtxItem) -> Result<CtxItem, E>, + ) -> Result<Self, E> { let mut vec = Vec::new(); vec.reserve(self.0.len()); let mut var = var.clone(); @@ -91,125 +80,65 @@ impl<T> Context<T> { vec.push((l.clone(), (*i).clone())); } vec.reverse(); - Ok(Context(Rc::new(vec))) + Ok(TypecheckContext(Rc::new(vec))) } - fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> - where - T: Clone + Shift, - { - Some(self.do_with_var(var, |var, i| Ok(i.shift(delta, &var)?))?) + fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { + if delta < 0 { + Some(self.do_with_var(var, |var, i| Ok(i.shift(delta, &var)?))?) + } else { + Some(TypecheckContext(Rc::new( + self.0 + .iter() + .map(|(l, i)| Ok((l.clone(), i.shift(delta, &var)?))) + .collect::<Result<_, _>>()?, + ))) + } } - fn subst_shift(&self, var: &AlphaVar, val: &Typed) -> Self - where - T: Clone + Subst<Typed>, - { + fn subst_shift(&self, var: &AlphaVar, val: &Value) -> Self { self.do_with_var(var, |var, i| Ok::<_, !>(i.subst_shift(&var, val))) .unwrap() } } -impl NormalizationContext { - pub fn new() -> Self { - NormalizationContext(Context::new()) - } - pub fn skip(&self, x: &Label) -> Self { - NormalizationContext(self.0.insert_kept(x, ())) - } - pub fn lookup(&self, var: &V<Label>) -> Value { - match self.0.lookup(var) { - Ok(CtxItem::Replaced(t, ())) => t.to_value(), - Ok(CtxItem::Kept(newvar, ())) => Value::Var(newvar.clone()), - Err(var) => Value::Var(AlphaVar::from_var(var)), - } - } -} - -impl TypecheckContext { - pub fn new() -> Self { - TypecheckContext(Context::new()) - } - pub fn insert_type(&self, x: &Label, t: Type) -> Self { - TypecheckContext(self.0.insert_kept(x, t)) - } - pub fn insert_value(&self, x: &Label, e: Typed) -> Result<Self, TypeError> { - Ok(TypecheckContext(self.0.insert_replaced( - x, - e.to_thunk(), - e.get_type()?.into_owned(), - ))) - } - pub fn lookup(&self, var: &V<Label>) -> Option<Typed> { - match self.0.lookup(var) { - Ok(CtxItem::Kept(newvar, t)) => Some(Typed::from_thunk_and_type( - Thunk::from_value(Value::Var(newvar.clone())), - t.clone(), - )), - Ok(CtxItem::Replaced(th, t)) => { - Some(Typed::from_thunk_and_type(th.clone(), t.clone())) - } - Err(_) => None, - } - } -} - -impl<T: Shift> Shift for CtxItem<T> { +impl Shift for CtxItem { fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { Some(match self { CtxItem::Kept(v, t) => { CtxItem::Kept(v.shift(delta, var)?, t.shift(delta, var)?) } - CtxItem::Replaced(e, t) => { - CtxItem::Replaced(e.shift(delta, var)?, t.shift(delta, var)?) 
- } + CtxItem::Replaced(e) => CtxItem::Replaced(e.shift(delta, var)?), }) } } -impl<T: Clone + Shift> Shift for Context<T> { +impl Shift for TypecheckContext { fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { self.shift(delta, var) } } -impl Shift for NormalizationContext { - fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { - Some(NormalizationContext(self.0.shift(delta, var)?)) - } -} - -impl<T: Subst<Typed>> Subst<Typed> for CtxItem<T> { - fn subst_shift(&self, var: &AlphaVar, val: &Typed) -> Self { +impl Subst<Value> for CtxItem { + fn subst_shift(&self, var: &AlphaVar, val: &Value) -> Self { match self { - CtxItem::Replaced(e, t) => CtxItem::Replaced( - e.subst_shift(var, val), - t.subst_shift(var, val), - ), + CtxItem::Replaced(e) => CtxItem::Replaced(e.subst_shift(var, val)), CtxItem::Kept(v, t) => match v.shift(-1, var) { - None => { - CtxItem::Replaced(val.to_thunk(), t.subst_shift(var, val)) - } + None => CtxItem::Replaced(val.clone()), Some(newvar) => CtxItem::Kept(newvar, t.subst_shift(var, val)), }, } } } -impl<T: Clone + Subst<Typed>> Subst<Typed> for Context<T> { - fn subst_shift(&self, var: &AlphaVar, val: &Typed) -> Self { +impl Subst<Value> for TypecheckContext { + fn subst_shift(&self, var: &AlphaVar, val: &Value) -> Self { self.subst_shift(var, val) } } -impl Subst<Typed> for NormalizationContext { - fn subst_shift(&self, var: &AlphaVar, val: &Typed) -> Self { - NormalizationContext(self.0.subst_shift(var, val)) - } -} - +/// Don't count contexts when comparing stuff. +/// This is dirty but needed. impl PartialEq for TypecheckContext { fn eq(&self, _: &Self) -> bool { - // don't count contexts when comparing stuff - // this is dirty but needed for now true } } diff --git a/dhall/src/core/mod.rs b/dhall/src/core/mod.rs index a202e72..08213f7 100644 --- a/dhall/src/core/mod.rs +++ b/dhall/src/core/mod.rs @@ -1,4 +1,4 @@ -pub(crate) mod context; -pub(crate) mod thunk; -pub(crate) mod value; -pub(crate) mod var; +pub mod context; +pub mod value; +pub mod valuef; +pub mod var; diff --git a/dhall/src/core/thunk.rs b/dhall/src/core/thunk.rs deleted file mode 100644 index f41579c..0000000 --- a/dhall/src/core/thunk.rs +++ /dev/null @@ -1,312 +0,0 @@ -use std::cell::{Ref, RefCell}; -use std::rc::Rc; - -use dhall_syntax::{ExprF, X}; - -use crate::core::context::NormalizationContext; -use crate::core::value::Value; -use crate::core::var::{AlphaVar, Shift, Subst}; -use crate::phase::normalize::{ - apply_any, normalize_one_layer, normalize_whnf, InputSubExpr, OutputSubExpr, -}; -use crate::phase::{Type, Typed}; - -#[derive(Debug, Clone, Copy)] -enum Marker { - /// Weak Head Normal Form, i.e. subexpressions may not be normalized - WHNF, - /// Normal form, i.e. completely normalized - NF, -} -use Marker::{NF, WHNF}; - -#[derive(Debug)] -enum ThunkInternal { - /// Non-normalized value alongside a normalization context - Unnormalized(NormalizationContext, InputSubExpr), - /// Partially normalized value whose subexpressions have been thunked (this is returned from - /// typechecking). Note that this is different from `Value::PartialExpr` because there is no - /// requirement of WHNF here. - PartialExpr(ExprF<Thunk, X>), - /// Partially normalized value. - /// Invariant: if the marker is `NF`, the value must be fully normalized - Value(Marker, Value), -} - -/// Stores a possibly unevaluated value. Gets (partially) normalized on-demand, -/// sharing computation automatically. -/// Uses a RefCell to share computation. 
-#[derive(Debug, Clone)] -pub struct Thunk(Rc<RefCell<ThunkInternal>>); - -/// A thunk in type position. Can optionally store a Type from the typechecking phase to preserve -/// type information through the normalization phase. -#[derive(Debug, Clone)] -pub struct TypeThunk(Typed); - -impl ThunkInternal { - fn into_thunk(self) -> Thunk { - Thunk(Rc::new(RefCell::new(self))) - } - - fn normalize_whnf(&mut self) { - match self { - ThunkInternal::Unnormalized(ctx, e) => { - *self = ThunkInternal::Value( - WHNF, - normalize_whnf(ctx.clone(), e.clone()), - ) - } - ThunkInternal::PartialExpr(e) => { - *self = - ThunkInternal::Value(WHNF, normalize_one_layer(e.clone())) - } - // Already at least in WHNF - ThunkInternal::Value(_, _) => {} - } - } - - fn normalize_nf(&mut self) { - match self { - ThunkInternal::Unnormalized(_, _) - | ThunkInternal::PartialExpr(_) => { - self.normalize_whnf(); - self.normalize_nf(); - } - ThunkInternal::Value(m @ WHNF, v) => { - v.normalize_mut(); - *m = NF; - } - // Already in NF - ThunkInternal::Value(NF, _) => {} - } - } - - // Always use normalize_whnf before - fn as_whnf(&self) -> &Value { - match self { - ThunkInternal::Unnormalized(_, _) - | ThunkInternal::PartialExpr(_) => unreachable!(), - ThunkInternal::Value(_, v) => v, - } - } - - // Always use normalize_nf before - fn as_nf(&self) -> &Value { - match self { - ThunkInternal::Unnormalized(_, _) - | ThunkInternal::PartialExpr(_) - | ThunkInternal::Value(WHNF, _) => unreachable!(), - ThunkInternal::Value(NF, v) => v, - } - } -} - -impl Thunk { - pub fn new(ctx: NormalizationContext, e: InputSubExpr) -> Thunk { - ThunkInternal::Unnormalized(ctx, e).into_thunk() - } - - pub fn from_value(v: Value) -> Thunk { - ThunkInternal::Value(WHNF, v).into_thunk() - } - - pub fn from_partial_expr(e: ExprF<Thunk, X>) -> Thunk { - ThunkInternal::PartialExpr(e).into_thunk() - } - - // Normalizes contents to normal form; faster than `normalize_nf` if - // no one else shares this thunk - pub fn normalize_mut(&mut self) { - match Rc::get_mut(&mut self.0) { - // Mutate directly if sole owner - Some(refcell) => RefCell::get_mut(refcell).normalize_nf(), - // Otherwise mutate through the refcell - None => self.0.borrow_mut().normalize_nf(), - } - } - - fn do_normalize_whnf(&self) { - let borrow = self.0.borrow(); - match &*borrow { - ThunkInternal::Unnormalized(_, _) - | ThunkInternal::PartialExpr(_) => { - drop(borrow); - self.0.borrow_mut().normalize_whnf(); - } - // Already at least in WHNF - ThunkInternal::Value(_, _) => {} - } - } - - fn do_normalize_nf(&self) { - let borrow = self.0.borrow(); - match &*borrow { - ThunkInternal::Unnormalized(_, _) - | ThunkInternal::PartialExpr(_) - | ThunkInternal::Value(WHNF, _) => { - drop(borrow); - self.0.borrow_mut().normalize_nf(); - } - // Already in NF - ThunkInternal::Value(NF, _) => {} - } - } - - // WARNING: avoid normalizing any thunk while holding on to this ref - // or you could run into BorrowMut panics - pub fn normalize_nf(&self) -> Ref<Value> { - self.do_normalize_nf(); - Ref::map(self.0.borrow(), ThunkInternal::as_nf) - } - - // WARNING: avoid normalizing any thunk while holding on to this ref - // or you could run into BorrowMut panics - pub fn as_value(&self) -> Ref<Value> { - self.do_normalize_whnf(); - Ref::map(self.0.borrow(), ThunkInternal::as_whnf) - } - - pub fn to_value(&self) -> Value { - self.as_value().clone() - } - - pub fn normalize_to_expr_maybe_alpha(&self, alpha: bool) -> OutputSubExpr { - self.normalize_nf().normalize_to_expr_maybe_alpha(alpha) - } - - 
pub fn app_val(&self, val: Value) -> Value { - self.app_thunk(val.into_thunk()) - } - - pub fn app_thunk(&self, th: Thunk) -> Value { - apply_any(self.clone(), th) - } -} - -impl TypeThunk { - pub fn from_value(v: Value) -> TypeThunk { - TypeThunk::from_thunk(Thunk::from_value(v)) - } - - pub fn from_thunk(th: Thunk) -> TypeThunk { - TypeThunk(Typed::from_thunk_untyped(th)) - } - - pub fn from_type(t: Type) -> TypeThunk { - TypeThunk(t) - } - - pub fn normalize_mut(&mut self) { - self.0.normalize_mut() - } - - pub fn normalize_nf(&self) -> Value { - self.0.to_value().normalize() - } - - pub fn to_value(&self) -> Value { - self.0.to_value() - } - - pub fn to_thunk(&self) -> Thunk { - self.0.to_thunk() - } - - pub fn to_type(&self) -> Type { - self.0.to_type() - } - - pub fn to_typed(&self) -> Typed { - self.0.clone() - } - - pub fn normalize_to_expr_maybe_alpha(&self, alpha: bool) -> OutputSubExpr { - self.normalize_nf().normalize_to_expr_maybe_alpha(alpha) - } -} - -impl Shift for Thunk { - fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { - Some(self.0.borrow().shift(delta, var)?.into_thunk()) - } -} - -impl Shift for ThunkInternal { - fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { - Some(match self { - ThunkInternal::Unnormalized(ctx, e) => { - ThunkInternal::Unnormalized(ctx.shift(delta, var)?, e.clone()) - } - ThunkInternal::PartialExpr(e) => ThunkInternal::PartialExpr( - e.traverse_ref_with_special_handling_of_binders( - |v| Ok(v.shift(delta, var)?), - |x, v| Ok(v.shift(delta, &var.under_binder(x))?), - |x| Ok(X::clone(x)), - )?, - ), - ThunkInternal::Value(m, v) => { - ThunkInternal::Value(*m, v.shift(delta, var)?) - } - }) - } -} - -impl Shift for TypeThunk { - fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { - Some(TypeThunk(self.0.shift(delta, var)?)) - } -} - -impl Subst<Typed> for Thunk { - fn subst_shift(&self, var: &AlphaVar, val: &Typed) -> Self { - self.0.borrow().subst_shift(var, val).into_thunk() - } -} - -impl Subst<Typed> for ThunkInternal { - fn subst_shift(&self, var: &AlphaVar, val: &Typed) -> Self { - match self { - ThunkInternal::Unnormalized(ctx, e) => ThunkInternal::Unnormalized( - ctx.subst_shift(var, val), - e.clone(), - ), - ThunkInternal::PartialExpr(e) => ThunkInternal::PartialExpr( - e.map_ref_with_special_handling_of_binders( - |v| v.subst_shift(var, val), - |x, v| { - v.subst_shift( - &var.under_binder(x), - &val.under_binder(x), - ) - }, - X::clone, - ), - ), - ThunkInternal::Value(_, v) => { - // The resulting value may not stay in normal form after substitution - ThunkInternal::Value(WHNF, v.subst_shift(var, val)) - } - } - } -} - -impl Subst<Typed> for TypeThunk { - fn subst_shift(&self, var: &AlphaVar, val: &Typed) -> Self { - TypeThunk(self.0.subst_shift(var, val)) - } -} - -impl std::cmp::PartialEq for Thunk { - fn eq(&self, other: &Self) -> bool { - *self.as_value() == *other.as_value() - } -} -impl std::cmp::Eq for Thunk {} - -impl std::cmp::PartialEq for TypeThunk { - fn eq(&self, other: &Self) -> bool { - self.to_value() == other.to_value() - } -} -impl std::cmp::Eq for TypeThunk {} diff --git a/dhall/src/core/value.rs b/dhall/src/core/value.rs index 0b68bf6..3cccb1d 100644 --- a/dhall/src/core/value.rs +++ b/dhall/src/core/value.rs @@ -1,595 +1,327 @@ -use std::collections::HashMap; +use std::cell::{Ref, RefCell, RefMut}; +use std::rc::Rc; -use dhall_syntax::{ - rc, Builtin, Const, ExprF, Integer, InterpolatedTextContents, Label, - NaiveDouble, Natural, X, -}; +use dhall_syntax::{Builtin, Const}; 
-use crate::core::thunk::{Thunk, TypeThunk}; -use crate::core::var::{AlphaLabel, AlphaVar, Shift, Subst}; -use crate::phase::normalize::{ - apply_builtin, normalize_one_layer, squash_textlit, OutputSubExpr, -}; -use crate::phase::Typed; +use crate::core::context::TypecheckContext; +use crate::core::valuef::ValueF; +use crate::core::var::{AlphaVar, Shift, Subst}; +use crate::error::{TypeError, TypeMessage}; +use crate::phase::normalize::{apply_any, normalize_whnf}; +use crate::phase::typecheck::{builtin_to_value, const_to_value}; +use crate::phase::{NormalizedExpr, Typed}; -/// A semantic value. The invariants ensure this value represents a Weak-Head -/// Normal Form (WHNF). This means that this first constructor is the first constructor of the -/// final Normal Form (NF). -/// This WHNF must be preserved by operations on `Value`s. In particular, `subst_shift` could break -/// the invariants so need to be careful to reevaluate as needed. -/// Subexpressions are Thunks, which are partially evaluated expressions that are normalized -/// on-demand. When all the Thunks in a Value are at least in WHNF, and recursively so, then the -/// Value is in NF. This is because WHNF ensures that we have the first constructor of the NF; so -/// if we have the first constructor of the NF at all levels, we actually have the NF. -/// Equality is up to alpha-equivalence (renaming of bound variables) and beta-equivalence -/// (normalization). Equality will normalize only as needed. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum Value { - /// Closures - Lam(AlphaLabel, TypeThunk, Thunk), - Pi(AlphaLabel, TypeThunk, TypeThunk), - // Invariant: the evaluation must not be able to progress further. - AppliedBuiltin(Builtin, Vec<Thunk>), - /// `λ(x: a) -> Some x` - OptionalSomeClosure(TypeThunk), - /// `λ(x : a) -> λ(xs : List a) -> [ x ] # xs` - /// `λ(xs : List a) -> [ x ] # xs` - ListConsClosure(TypeThunk, Option<Thunk>), - /// `λ(x : Natural) -> x + 1` - NaturalSuccClosure, +#[derive(Debug, Clone, Copy)] +pub(crate) enum Form { + /// No constraints; expression may not be normalized at all. + Unevaled, + /// Weak Head Normal Form, i.e. normalized up to the first constructor, but subexpressions may + /// not be normalized. This means that the first constructor of the contained ValueF is the first + /// constructor of the final Normal Form (NF). + WHNF, + /// Normal Form, i.e. completely normalized. + /// When all the Values in a ValueF are at least in WHNF, and recursively so, then the + /// ValueF is in NF. This is because WHNF ensures that we have the first constructor of the NF; so + /// if we have the first constructor of the NF at all levels, we actually have the NF. + NF, +} +use Form::{Unevaled, NF, WHNF}; - Var(AlphaVar), - Const(Const), - BoolLit(bool), - NaturalLit(Natural), - IntegerLit(Integer), - DoubleLit(NaiveDouble), - EmptyOptionalLit(TypeThunk), - NEOptionalLit(Thunk), - // EmptyListLit(t) means `[] : List t`, not `[] : t` - EmptyListLit(TypeThunk), - NEListLit(Vec<Thunk>), - RecordLit(HashMap<Label, Thunk>), - RecordType(HashMap<Label, TypeThunk>), - UnionType(HashMap<Label, Option<TypeThunk>>), - UnionConstructor(Label, HashMap<Label, Option<TypeThunk>>), - UnionLit(Label, Thunk, HashMap<Label, Option<TypeThunk>>), - // Invariant: this must not contain interpolations that are themselves TextLits, and - // contiguous text values must be merged. 
- TextLit(Vec<InterpolatedTextContents<Thunk>>), - Equivalence(TypeThunk, TypeThunk), - // Invariant: this must not contain a value captured by one of the variants above. - PartialExpr(ExprF<Thunk, X>), +/// Partially normalized value. +/// Invariant: if `form` is `WHNF`, `value` must be in Weak Head Normal Form +/// Invariant: if `form` is `NF`, `value` must be fully normalized +#[derive(Debug)] +struct ValueInternal { + form: Form, + value: ValueF, + /// This is None if and only if `value` is `Sort` (which doesn't have a type) + ty: Option<Value>, } -impl Value { - pub fn into_thunk(self) -> Thunk { - Thunk::from_value(self) +/// Stores a possibly unevaluated value. Gets (partially) normalized on-demand, +/// sharing computation automatically. Uses a RefCell to share computation. +/// Can optionally store a type from typechecking to preserve type information. +#[derive(Clone)] +pub(crate) struct Value(Rc<RefCell<ValueInternal>>); + +#[derive(Copy, Clone)] +/// Controls conversion from `Value` to `Expr` +pub(crate) struct ToExprOptions { + /// Whether to convert all variables to `_` + pub(crate) alpha: bool, + /// Whether to normalize before converting + pub(crate) normalize: bool, +} + +impl ValueInternal { + fn into_value(self) -> Value { + Value(Rc::new(RefCell::new(self))) + } + fn as_valuef(&self) -> &ValueF { + &self.value } - /// Convert the value to a fully normalized syntactic expression - pub fn normalize_to_expr(&self) -> OutputSubExpr { - self.normalize_to_expr_maybe_alpha(false) - } - /// Convert the value to a fully normalized syntactic expression. Also alpha-normalize - /// if alpha is `true` - pub fn normalize_to_expr_maybe_alpha(&self, alpha: bool) -> OutputSubExpr { - // Ad-hoc macro to help construct the unapplied closures - macro_rules! 
make_expr { - (Natural) => { rc(ExprF::Builtin(Builtin::Natural)) }; - (var($var:ident)) => { - rc(ExprF::Var(dhall_syntax::V(stringify!($var).into(), 0))) - }; - ($var:ident) => { $var }; - (List $($rest:tt)*) => { - rc(ExprF::App( - rc(ExprF::Builtin(Builtin::List)), - make_expr!($($rest)*) - )) - }; - (Some $($rest:tt)*) => { - rc(ExprF::SomeLit( - make_expr!($($rest)*) - )) - }; - (1 + $($rest:tt)*) => { - rc(ExprF::BinOp( - dhall_syntax::BinOp::NaturalPlus, - rc(ExprF::NaturalLit(1)), - make_expr!($($rest)*) - )) - }; - ([ $($head:tt)* ] # $($tail:tt)*) => { - rc(ExprF::BinOp( - dhall_syntax::BinOp::ListAppend, - rc(ExprF::NEListLit(vec![make_expr!($($head)*)])), - make_expr!($($tail)*) - )) - }; - (λ($var:ident : $($ty:tt)*) -> $($rest:tt)*) => { - rc(ExprF::Pi( - stringify!($var).into(), - make_expr!($($ty)*), - make_expr!($($rest)*) - )) - }; + fn normalize_whnf(&mut self) { + take_mut::take_or_recover( + self, + // Dummy value in case the other closure panics + || ValueInternal { + form: Unevaled, + value: ValueF::Const(Const::Type), + ty: None, + }, + |vint| match (&vint.form, &vint.ty) { + (Unevaled, Some(ty)) => ValueInternal { + form: WHNF, + value: normalize_whnf(vint.value, &ty), + ty: vint.ty, + }, + // `value` is `Sort` + (Unevaled, None) => ValueInternal { + form: NF, + value: ValueF::Const(Const::Sort), + ty: None, + }, + // Already in WHNF + (WHNF, _) | (NF, _) => vint, + }, + ) + } + fn normalize_nf(&mut self) { + match self.form { + Unevaled => { + self.normalize_whnf(); + self.normalize_nf(); + } + WHNF => { + self.value.normalize_mut(); + self.form = NF; + } + // Already in NF + NF => {} } + } - match self { - Value::Lam(x, t, e) => rc(ExprF::Lam( - x.to_label_maybe_alpha(alpha), - t.normalize_to_expr_maybe_alpha(alpha), - e.normalize_to_expr_maybe_alpha(alpha), - )), - Value::AppliedBuiltin(b, args) => { - let mut e = rc(ExprF::Builtin(*b)); - for v in args { - e = rc(ExprF::App( - e, - v.normalize_to_expr_maybe_alpha(alpha), - )); - } - e - } - Value::OptionalSomeClosure(n) => { - let a = n.normalize_to_expr_maybe_alpha(alpha); - make_expr!(λ(x: a) -> Some var(x)) - } - Value::ListConsClosure(a, None) => { - // Avoid accidental capture of the new `x` variable - let a1 = a.under_binder(Label::from("x")); - let a1 = a1.normalize_to_expr_maybe_alpha(alpha); - let a = a.normalize_to_expr_maybe_alpha(alpha); - make_expr!(λ(x : a) -> λ(xs : List a1) -> [ var(x) ] # var(xs)) - } - Value::ListConsClosure(n, Some(v)) => { - // Avoid accidental capture of the new `xs` variable - let v = v.under_binder(Label::from("xs")); - let v = v.normalize_to_expr_maybe_alpha(alpha); - let a = n.normalize_to_expr_maybe_alpha(alpha); - make_expr!(λ(xs : List a) -> [ v ] # var(xs)) - } - Value::NaturalSuccClosure => { - make_expr!(λ(x : Natural) -> 1 + var(x)) - } - Value::Pi(x, t, e) => rc(ExprF::Pi( - x.to_label_maybe_alpha(alpha), - t.normalize_to_expr_maybe_alpha(alpha), - e.normalize_to_expr_maybe_alpha(alpha), - )), - Value::Var(v) => rc(ExprF::Var(v.to_var(alpha))), - Value::Const(c) => rc(ExprF::Const(*c)), - Value::BoolLit(b) => rc(ExprF::BoolLit(*b)), - Value::NaturalLit(n) => rc(ExprF::NaturalLit(*n)), - Value::IntegerLit(n) => rc(ExprF::IntegerLit(*n)), - Value::DoubleLit(n) => rc(ExprF::DoubleLit(*n)), - Value::EmptyOptionalLit(n) => rc(ExprF::App( - rc(ExprF::Builtin(Builtin::OptionalNone)), - n.normalize_to_expr_maybe_alpha(alpha), - )), - Value::NEOptionalLit(n) => { - rc(ExprF::SomeLit(n.normalize_to_expr_maybe_alpha(alpha))) - } - Value::EmptyListLit(n) => 
rc(ExprF::EmptyListLit(rc(ExprF::App( - rc(ExprF::Builtin(Builtin::List)), - n.normalize_to_expr_maybe_alpha(alpha), - )))), - Value::NEListLit(elts) => rc(ExprF::NEListLit( - elts.iter() - .map(|n| n.normalize_to_expr_maybe_alpha(alpha)) - .collect(), - )), - Value::RecordLit(kvs) => rc(ExprF::RecordLit( - kvs.iter() - .map(|(k, v)| { - (k.clone(), v.normalize_to_expr_maybe_alpha(alpha)) - }) - .collect(), - )), - Value::RecordType(kts) => rc(ExprF::RecordType( - kts.iter() - .map(|(k, v)| { - (k.clone(), v.normalize_to_expr_maybe_alpha(alpha)) - }) - .collect(), - )), - Value::UnionType(kts) => rc(ExprF::UnionType( - kts.iter() - .map(|(k, v)| { - ( - k.clone(), - v.as_ref().map(|v| { - v.normalize_to_expr_maybe_alpha(alpha) - }), - ) - }) - .collect(), - )), - Value::UnionConstructor(l, kts) => { - let kts = kts - .iter() - .map(|(k, v)| { - ( - k.clone(), - v.as_ref().map(|v| { - v.normalize_to_expr_maybe_alpha(alpha) - }), - ) - }) - .collect(); - rc(ExprF::Field(rc(ExprF::UnionType(kts)), l.clone())) - } - Value::UnionLit(l, v, kts) => rc(ExprF::App( - Value::UnionConstructor(l.clone(), kts.clone()) - .normalize_to_expr_maybe_alpha(alpha), - v.normalize_to_expr_maybe_alpha(alpha), - )), - Value::TextLit(elts) => { - use InterpolatedTextContents::{Expr, Text}; - rc(ExprF::TextLit( - elts.iter() - .map(|contents| match contents { - Expr(e) => { - Expr(e.normalize_to_expr_maybe_alpha(alpha)) - } - Text(s) => Text(s.clone()), - }) - .collect(), - )) - } - Value::Equivalence(x, y) => rc(ExprF::BinOp( - dhall_syntax::BinOp::Equivalence, - x.normalize_to_expr_maybe_alpha(alpha), - y.normalize_to_expr_maybe_alpha(alpha), - )), - Value::PartialExpr(e) => { - rc(e.map_ref_simple(|v| v.normalize_to_expr_maybe_alpha(alpha))) + fn get_type(&self) -> Result<&Value, TypeError> { + match &self.ty { + Some(t) => Ok(t), + None => { + Err(TypeError::new(&TypecheckContext::new(), TypeMessage::Sort)) } } } +} - // Deprecated - pub fn normalize(&self) -> Value { - let mut v = self.clone(); - v.normalize_mut(); - v +impl Value { + fn new(value: ValueF, form: Form, ty: Value) -> Value { + ValueInternal { + form, + value, + ty: Some(ty), + } + .into_value() + } + pub(crate) fn const_sort() -> Value { + ValueInternal { + form: NF, + value: ValueF::Const(Const::Sort), + ty: None, + } + .into_value() + } + pub(crate) fn from_valuef_and_type(v: ValueF, t: Value) -> Value { + Value::new(v, Unevaled, t) + } + pub(crate) fn from_valuef_and_type_whnf(v: ValueF, t: Value) -> Value { + Value::new(v, WHNF, t) + } + pub(crate) fn from_const(c: Const) -> Self { + const_to_value(c) + } + pub(crate) fn from_builtin(b: Builtin) -> Self { + builtin_to_value(b) } - pub fn normalize_mut(&mut self) { - match self { - Value::NaturalSuccClosure - | Value::Var(_) - | Value::Const(_) - | Value::BoolLit(_) - | Value::NaturalLit(_) - | Value::IntegerLit(_) - | Value::DoubleLit(_) => {} + pub(crate) fn as_const(&self) -> Option<Const> { + match &*self.as_whnf() { + ValueF::Const(c) => Some(*c), + _ => None, + } + } - Value::EmptyOptionalLit(tth) - | Value::OptionalSomeClosure(tth) - | Value::EmptyListLit(tth) => { - tth.normalize_mut(); - } + fn as_internal(&self) -> Ref<ValueInternal> { + self.0.borrow() + } + fn as_internal_mut(&self) -> RefMut<ValueInternal> { + self.0.borrow_mut() + } + /// WARNING: The returned ValueF may be entirely unnormalized, in aprticular it may just be an + /// unevaled PartialExpr. You probably want to use `as_whnf`. 
+ fn as_valuef(&self) -> Ref<ValueF> { + Ref::map(self.as_internal(), ValueInternal::as_valuef) + } + /// This is what you want if you want to pattern-match on the value. + /// WARNING: drop this ref before normalizing the same value or you will run into BorrowMut + /// panics. + pub(crate) fn as_whnf(&self) -> Ref<ValueF> { + self.normalize_whnf(); + self.as_valuef() + } - Value::NEOptionalLit(th) => { - th.normalize_mut(); - } - Value::Lam(_, t, e) => { - t.normalize_mut(); - e.normalize_mut(); - } - Value::Pi(_, t, e) => { - t.normalize_mut(); - e.normalize_mut(); - } - Value::AppliedBuiltin(_, args) => { - for x in args.iter_mut() { - x.normalize_mut(); - } - } - Value::ListConsClosure(t, v) => { - t.normalize_mut(); - for x in v.iter_mut() { - x.normalize_mut(); - } - } - Value::NEListLit(elts) => { - for x in elts.iter_mut() { - x.normalize_mut(); - } - } - Value::RecordLit(kvs) => { - for x in kvs.values_mut() { - x.normalize_mut(); - } - } - Value::RecordType(kvs) => { - for x in kvs.values_mut() { - x.normalize_mut(); - } - } - Value::UnionType(kts) | Value::UnionConstructor(_, kts) => { - for x in kts.values_mut().flat_map(|opt| opt) { - x.normalize_mut(); - } - } - Value::UnionLit(_, v, kts) => { - v.normalize_mut(); - for x in kts.values_mut().flat_map(|opt| opt) { - x.normalize_mut(); - } - } - Value::TextLit(elts) => { - for x in elts.iter_mut() { - use InterpolatedTextContents::{Expr, Text}; - match x { - Expr(n) => n.normalize_mut(), - Text(_) => {} - } - } - } - Value::Equivalence(x, y) => { - x.normalize_mut(); - y.normalize_mut(); - } - Value::PartialExpr(e) => { - // TODO: need map_mut_simple - e.map_ref_simple(|v| { - v.normalize_nf(); - }); - } + pub(crate) fn to_expr(&self, opts: ToExprOptions) -> NormalizedExpr { + if opts.normalize { + self.normalize_whnf(); } + self.as_valuef().to_expr(opts) + } + pub(crate) fn to_whnf_ignore_type(&self) -> ValueF { + self.as_whnf().clone() + } + /// Before discarding type information, check that it matches the expected return type. + pub(crate) fn to_whnf_check_type(&self, ty: &Value) -> ValueF { + self.check_type(ty); + self.to_whnf_ignore_type() + } + pub(crate) fn into_typed(self) -> Typed { + Typed::from_value(self) } - /// Apply to a value - pub fn app(self, val: Value) -> Value { - self.app_val(val) + /// Mutates the contents. If no one else shares this, this avoids a RefCell lock. + fn mutate_internal(&mut self, f: impl FnOnce(&mut ValueInternal)) { + match Rc::get_mut(&mut self.0) { + // Mutate directly if sole owner + Some(refcell) => f(RefCell::get_mut(refcell)), + // Otherwise mutate through the refcell + None => f(&mut self.as_internal_mut()), + } + } + /// Normalizes contents to normal form; faster than `normalize_nf` if + /// no one else shares this. 
+ pub(crate) fn normalize_mut(&mut self) { + self.mutate_internal(|vint| vint.normalize_nf()) } - /// Apply to a value - pub fn app_val(self, val: Value) -> Value { - self.app_thunk(val.into_thunk()) + pub(crate) fn normalize_whnf(&self) { + let borrow = self.as_internal(); + match borrow.form { + Unevaled => { + drop(borrow); + self.as_internal_mut().normalize_whnf(); + } + // Already at least in WHNF + WHNF | NF => {} + } + } + pub(crate) fn normalize_nf(&self) { + let borrow = self.as_internal(); + match borrow.form { + Unevaled | WHNF => { + drop(borrow); + self.as_internal_mut().normalize_nf(); + } + // Already in NF + NF => {} + } } - /// Apply to a thunk - pub fn app_thunk(self, th: Thunk) -> Value { - Thunk::from_value(self).app_thunk(th) + pub(crate) fn app(&self, v: Value) -> Value { + let body_t = match &*self.get_type_not_sort().as_whnf() { + ValueF::Pi(x, t, e) => { + v.check_type(t); + e.subst_shift(&x.into(), &v) + } + _ => unreachable!("Internal type error"), + }; + Value::from_valuef_and_type_whnf( + apply_any(self.clone(), v, &body_t), + body_t, + ) } - pub fn from_builtin(b: Builtin) -> Value { - Value::AppliedBuiltin(b, vec![]) + /// In debug mode, panic if the provided type doesn't match the value's type. + /// Otherwise does nothing. + pub(crate) fn check_type(&self, ty: &Value) { + debug_assert_eq!( + Some(ty), + self.get_type().ok().as_ref(), + "Internal type error" + ); + } + pub(crate) fn get_type(&self) -> Result<Value, TypeError> { + Ok(self.as_internal().get_type()?.clone()) + } + /// When we know the value isn't `Sort`, this gets the type directly + pub(crate) fn get_type_not_sort(&self) -> Value { + self.get_type() + .expect("Internal type error: value is `Sort` but shouldn't be") } } impl Shift for Value { fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { - Some(match self { - Value::Lam(x, t, e) => Value::Lam( - x.clone(), - t.shift(delta, var)?, - e.shift(delta, &var.under_binder(x))?, - ), - Value::AppliedBuiltin(b, args) => Value::AppliedBuiltin( - *b, - args.iter() - .map(|v| Ok(v.shift(delta, var)?)) - .collect::<Result<_, _>>()?, - ), - Value::NaturalSuccClosure => Value::NaturalSuccClosure, - Value::OptionalSomeClosure(tth) => { - Value::OptionalSomeClosure(tth.shift(delta, var)?) - } - Value::ListConsClosure(t, v) => Value::ListConsClosure( - t.shift(delta, var)?, - v.as_ref().map(|v| Ok(v.shift(delta, var)?)).transpose()?, - ), - Value::Pi(x, t, e) => Value::Pi( - x.clone(), - t.shift(delta, var)?, - e.shift(delta, &var.under_binder(x))?, - ), - Value::Var(v) => Value::Var(v.shift(delta, var)?), - Value::Const(c) => Value::Const(*c), - Value::BoolLit(b) => Value::BoolLit(*b), - Value::NaturalLit(n) => Value::NaturalLit(*n), - Value::IntegerLit(n) => Value::IntegerLit(*n), - Value::DoubleLit(n) => Value::DoubleLit(*n), - Value::EmptyOptionalLit(tth) => { - Value::EmptyOptionalLit(tth.shift(delta, var)?) - } - Value::NEOptionalLit(th) => { - Value::NEOptionalLit(th.shift(delta, var)?) - } - Value::EmptyListLit(tth) => { - Value::EmptyListLit(tth.shift(delta, var)?) 
- } - Value::NEListLit(elts) => Value::NEListLit( - elts.iter() - .map(|v| Ok(v.shift(delta, var)?)) - .collect::<Result<_, _>>()?, - ), - Value::RecordLit(kvs) => Value::RecordLit( - kvs.iter() - .map(|(k, v)| Ok((k.clone(), v.shift(delta, var)?))) - .collect::<Result<_, _>>()?, - ), - Value::RecordType(kvs) => Value::RecordType( - kvs.iter() - .map(|(k, v)| Ok((k.clone(), v.shift(delta, var)?))) - .collect::<Result<_, _>>()?, - ), - Value::UnionType(kts) => Value::UnionType( - kts.iter() - .map(|(k, v)| { - Ok(( - k.clone(), - v.as_ref() - .map(|v| Ok(v.shift(delta, var)?)) - .transpose()?, - )) - }) - .collect::<Result<_, _>>()?, - ), - Value::UnionConstructor(x, kts) => Value::UnionConstructor( - x.clone(), - kts.iter() - .map(|(k, v)| { - Ok(( - k.clone(), - v.as_ref() - .map(|v| Ok(v.shift(delta, var)?)) - .transpose()?, - )) - }) - .collect::<Result<_, _>>()?, - ), - Value::UnionLit(x, v, kts) => Value::UnionLit( - x.clone(), - v.shift(delta, var)?, - kts.iter() - .map(|(k, v)| { - Ok(( - k.clone(), - v.as_ref() - .map(|v| Ok(v.shift(delta, var)?)) - .transpose()?, - )) - }) - .collect::<Result<_, _>>()?, - ), - Value::TextLit(elts) => Value::TextLit( - elts.iter() - .map(|contents| { - use InterpolatedTextContents::{Expr, Text}; - Ok(match contents { - Expr(th) => Expr(th.shift(delta, var)?), - Text(s) => Text(s.clone()), - }) - }) - .collect::<Result<_, _>>()?, - ), - Value::Equivalence(x, y) => { - Value::Equivalence(x.shift(delta, var)?, y.shift(delta, var)?) - } - Value::PartialExpr(e) => Value::PartialExpr( - e.traverse_ref_with_special_handling_of_binders( - |v| Ok(v.shift(delta, var)?), - |x, v| Ok(v.shift(delta, &var.under_binder(x))?), - |x| Ok(X::clone(x)), - )?, - ), + Some(Value(self.0.shift(delta, var)?)) + } +} + +impl Shift for ValueInternal { + fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { + Some(ValueInternal { + form: self.form, + value: self.value.shift(delta, var)?, + ty: self.ty.shift(delta, var)?, }) } } -impl Subst<Typed> for Value { - fn subst_shift(&self, var: &AlphaVar, val: &Typed) -> Self { - match self { - // Retry normalizing since substituting may allow progress - Value::AppliedBuiltin(b, args) => apply_builtin( - *b, - args.iter().map(|v| v.subst_shift(var, val)).collect(), - ), - // Retry normalizing since substituting may allow progress - Value::PartialExpr(e) => { - normalize_one_layer(e.map_ref_with_special_handling_of_binders( - |v| v.subst_shift(var, val), - |x, v| { - v.subst_shift( - &var.under_binder(x), - &val.under_binder(x), - ) - }, - X::clone, - )) - } - // Retry normalizing since substituting may allow progress - Value::TextLit(elts) => { - Value::TextLit(squash_textlit(elts.iter().map(|contents| { - use InterpolatedTextContents::{Expr, Text}; - match contents { - Expr(th) => Expr(th.subst_shift(var, val)), - Text(s) => Text(s.clone()), - } - }))) - } - Value::Lam(x, t, e) => Value::Lam( - x.clone(), - t.subst_shift(var, val), - e.subst_shift(&var.under_binder(x), &val.under_binder(x)), - ), - Value::NaturalSuccClosure => Value::NaturalSuccClosure, - Value::OptionalSomeClosure(tth) => { - Value::OptionalSomeClosure(tth.subst_shift(var, val)) - } - Value::ListConsClosure(t, v) => Value::ListConsClosure( - t.subst_shift(var, val), - v.as_ref().map(|v| v.subst_shift(var, val)), - ), - Value::Pi(x, t, e) => Value::Pi( - x.clone(), - t.subst_shift(var, val), - e.subst_shift(&var.under_binder(x), &val.under_binder(x)), - ), - Value::Var(v) => match v.shift(-1, var) { - None => val.to_value().clone(), - Some(newvar) => 
Value::Var(newvar),
- },
- Value::Const(c) => Value::Const(*c),
- Value::BoolLit(b) => Value::BoolLit(*b),
- Value::NaturalLit(n) => Value::NaturalLit(*n),
- Value::IntegerLit(n) => Value::IntegerLit(*n),
- Value::DoubleLit(n) => Value::DoubleLit(*n),
- Value::EmptyOptionalLit(tth) => {
- Value::EmptyOptionalLit(tth.subst_shift(var, val))
- }
- Value::NEOptionalLit(th) => {
- Value::NEOptionalLit(th.subst_shift(var, val))
- }
- Value::EmptyListLit(tth) => {
- Value::EmptyListLit(tth.subst_shift(var, val))
+impl Subst<Value> for Value {
+ fn subst_shift(&self, var: &AlphaVar, val: &Value) -> Self {
+ match &*self.as_valuef() {
+ // If the var matches, we can just reuse the provided value instead of copying it.
+ // We also check that the types match, if in debug mode.
+ ValueF::Var(v) if v == var => {
+ if let Ok(self_ty) = self.get_type() {
+ val.check_type(&self_ty.subst_shift(var, val));
+ }
+ val.clone()
 }
- Value::NEListLit(elts) => Value::NEListLit(
- elts.iter().map(|v| v.subst_shift(var, val)).collect(),
- ),
- Value::RecordLit(kvs) => Value::RecordLit(
- kvs.iter()
- .map(|(k, v)| (k.clone(), v.subst_shift(var, val)))
- .collect(),
- ),
- Value::RecordType(kvs) => Value::RecordType(
- kvs.iter()
- .map(|(k, v)| (k.clone(), v.subst_shift(var, val)))
- .collect(),
- ),
- Value::UnionType(kts) => Value::UnionType(
- kts.iter()
- .map(|(k, v)| {
- (k.clone(), v.as_ref().map(|v| v.subst_shift(var, val)))
- })
- .collect(),
- ),
- Value::UnionConstructor(x, kts) => Value::UnionConstructor(
- x.clone(),
- kts.iter()
- .map(|(k, v)| {
- (k.clone(), v.as_ref().map(|v| v.subst_shift(var, val)))
- })
- .collect(),
- ),
- Value::UnionLit(x, v, kts) => Value::UnionLit(
- x.clone(),
- v.subst_shift(var, val),
- kts.iter()
- .map(|(k, v)| {
- (k.clone(), v.as_ref().map(|v| v.subst_shift(var, val)))
- })
- .collect(),
- ),
- Value::Equivalence(x, y) => Value::Equivalence(
- x.subst_shift(var, val),
- y.subst_shift(var, val),
- ),
+ _ => Value(self.0.subst_shift(var, val)),
+ }
+ }
+}
+
+impl Subst<Value> for ValueInternal {
+ fn subst_shift(&self, var: &AlphaVar, val: &Value) -> Self {
+ ValueInternal {
+ // The resulting value may not stay in whnf after substitution
+ form: Unevaled,
+ value: self.value.subst_shift(var, val),
+ ty: self.ty.subst_shift(var, val),
+ }
+ }
+}
+
+// TODO: use Rc comparison to shortcut on identical pointers
+impl std::cmp::PartialEq for Value {
+ fn eq(&self, other: &Self) -> bool {
+ *self.as_whnf() == *other.as_whnf()
+ }
+}
+impl std::cmp::Eq for Value {}
+
+impl std::fmt::Debug for Value {
+ fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let vint: &ValueInternal = &self.as_internal();
+ if let ValueF::Const(c) = &vint.value {
+ write!(fmt, "{:?}", c)
+ } else {
+ let mut x = fmt.debug_struct(&format!("Value@{:?}", &vint.form));
+ x.field("value", &vint.value);
+ if let Some(ty) = vint.ty.as_ref() {
+ x.field("type", &ty);
+ } else {
+ x.field("type", &None::<()>);
+ }
+ x.finish()
 }
 }
 }
diff --git a/dhall/src/core/valuef.rs b/dhall/src/core/valuef.rs
new file mode 100644
index 0000000..7ecec86
--- /dev/null
+++ b/dhall/src/core/valuef.rs
@@ -0,0 +1,335 @@
+use std::collections::HashMap;
+
+use dhall_syntax::{
+ rc, Builtin, Const, ExprF, Integer, InterpolatedTextContents, Label,
+ NaiveDouble, Natural,
+};
+
+use crate::core::value::{ToExprOptions, Value};
+use crate::core::var::{AlphaLabel, AlphaVar, Shift, Subst};
+use crate::phase::{Normalized, NormalizedExpr};
+
+/// A semantic value.
Subexpressions are Values, which are partially evaluated expressions that are +/// normalized on-demand. +/// If you compare for equality two `ValueF`s in WHNF, then equality will be up to +/// alpha-equivalence (renaming of bound variables) and beta-equivalence (normalization). It will +/// recursively normalize as needed. +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) enum ValueF { + /// Closures + Lam(AlphaLabel, Value, Value), + Pi(AlphaLabel, Value, Value), + // Invariant: in whnf, the evaluation must not be able to progress further. + AppliedBuiltin(Builtin, Vec<Value>), + + Var(AlphaVar), + Const(Const), + BoolLit(bool), + NaturalLit(Natural), + IntegerLit(Integer), + DoubleLit(NaiveDouble), + EmptyOptionalLit(Value), + NEOptionalLit(Value), + // EmptyListLit(t) means `[] : List t`, not `[] : t` + EmptyListLit(Value), + NEListLit(Vec<Value>), + RecordType(HashMap<Label, Value>), + RecordLit(HashMap<Label, Value>), + UnionType(HashMap<Label, Option<Value>>), + UnionConstructor(Label, HashMap<Label, Option<Value>>), + UnionLit(Label, Value, HashMap<Label, Option<Value>>), + // Invariant: in whnf, this must not contain interpolations that are themselves TextLits, and + // contiguous text values must be merged. + TextLit(Vec<InterpolatedTextContents<Value>>), + Equivalence(Value, Value), + // Invariant: in whnf, this must not contain a value captured by one of the variants above. + PartialExpr(ExprF<Value, Normalized>), +} + +impl ValueF { + pub(crate) fn into_value_with_type(self, t: Value) -> Value { + Value::from_valuef_and_type(self, t) + } + + pub(crate) fn to_expr(&self, opts: ToExprOptions) -> NormalizedExpr { + match self { + ValueF::Lam(x, t, e) => rc(ExprF::Lam( + x.to_label_maybe_alpha(opts.alpha), + t.to_expr(opts), + e.to_expr(opts), + )), + ValueF::AppliedBuiltin(b, args) => args + .iter() + .map(|v| v.to_expr(opts)) + .fold(rc(ExprF::Builtin(*b)), |acc, v| rc(ExprF::App(acc, v))), + ValueF::Pi(x, t, e) => rc(ExprF::Pi( + x.to_label_maybe_alpha(opts.alpha), + t.to_expr(opts), + e.to_expr(opts), + )), + ValueF::Var(v) => rc(ExprF::Var(v.to_var(opts.alpha))), + ValueF::Const(c) => rc(ExprF::Const(*c)), + ValueF::BoolLit(b) => rc(ExprF::BoolLit(*b)), + ValueF::NaturalLit(n) => rc(ExprF::NaturalLit(*n)), + ValueF::IntegerLit(n) => rc(ExprF::IntegerLit(*n)), + ValueF::DoubleLit(n) => rc(ExprF::DoubleLit(*n)), + ValueF::EmptyOptionalLit(n) => rc(ExprF::App( + rc(ExprF::Builtin(Builtin::OptionalNone)), + n.to_expr(opts), + )), + ValueF::NEOptionalLit(n) => rc(ExprF::SomeLit(n.to_expr(opts))), + ValueF::EmptyListLit(n) => rc(ExprF::EmptyListLit(rc(ExprF::App( + rc(ExprF::Builtin(Builtin::List)), + n.to_expr(opts), + )))), + ValueF::NEListLit(elts) => rc(ExprF::NEListLit( + elts.iter().map(|n| n.to_expr(opts)).collect(), + )), + ValueF::RecordLit(kvs) => rc(ExprF::RecordLit( + kvs.iter() + .map(|(k, v)| (k.clone(), v.to_expr(opts))) + .collect(), + )), + ValueF::RecordType(kts) => rc(ExprF::RecordType( + kts.iter() + .map(|(k, v)| (k.clone(), v.to_expr(opts))) + .collect(), + )), + ValueF::UnionType(kts) => rc(ExprF::UnionType( + kts.iter() + .map(|(k, v)| { + (k.clone(), v.as_ref().map(|v| v.to_expr(opts))) + }) + .collect(), + )), + ValueF::UnionConstructor(l, kts) => rc(ExprF::Field( + ValueF::UnionType(kts.clone()).to_expr(opts), + l.clone(), + )), + ValueF::UnionLit(l, v, kts) => rc(ExprF::App( + ValueF::UnionConstructor(l.clone(), kts.clone()).to_expr(opts), + v.to_expr(opts), + )), + ValueF::TextLit(elts) => { + use InterpolatedTextContents::{Expr, Text}; + 
rc(ExprF::TextLit( + elts.iter() + .map(|contents| match contents { + Expr(e) => Expr(e.to_expr(opts)), + Text(s) => Text(s.clone()), + }) + .collect(), + )) + } + ValueF::Equivalence(x, y) => rc(ExprF::BinOp( + dhall_syntax::BinOp::Equivalence, + x.to_expr(opts), + y.to_expr(opts), + )), + ValueF::PartialExpr(e) => rc(e.map_ref(|v| v.to_expr(opts))), + } + } + + pub(crate) fn normalize_mut(&mut self) { + match self { + ValueF::Var(_) + | ValueF::Const(_) + | ValueF::BoolLit(_) + | ValueF::NaturalLit(_) + | ValueF::IntegerLit(_) + | ValueF::DoubleLit(_) => {} + + ValueF::EmptyOptionalLit(tth) | ValueF::EmptyListLit(tth) => { + tth.normalize_mut(); + } + + ValueF::NEOptionalLit(th) => { + th.normalize_mut(); + } + ValueF::Lam(_, t, e) => { + t.normalize_mut(); + e.normalize_mut(); + } + ValueF::Pi(_, t, e) => { + t.normalize_mut(); + e.normalize_mut(); + } + ValueF::AppliedBuiltin(_, args) => { + for x in args.iter_mut() { + x.normalize_mut(); + } + } + ValueF::NEListLit(elts) => { + for x in elts.iter_mut() { + x.normalize_mut(); + } + } + ValueF::RecordLit(kvs) => { + for x in kvs.values_mut() { + x.normalize_mut(); + } + } + ValueF::RecordType(kvs) => { + for x in kvs.values_mut() { + x.normalize_mut(); + } + } + ValueF::UnionType(kts) | ValueF::UnionConstructor(_, kts) => { + for x in kts.values_mut().flat_map(|opt| opt) { + x.normalize_mut(); + } + } + ValueF::UnionLit(_, v, kts) => { + v.normalize_mut(); + for x in kts.values_mut().flat_map(|opt| opt) { + x.normalize_mut(); + } + } + ValueF::TextLit(elts) => { + for x in elts.iter_mut() { + use InterpolatedTextContents::{Expr, Text}; + match x { + Expr(n) => n.normalize_mut(), + Text(_) => {} + } + } + } + ValueF::Equivalence(x, y) => { + x.normalize_mut(); + y.normalize_mut(); + } + ValueF::PartialExpr(e) => { + // TODO: need map_mut + e.map_ref(|v| { + v.normalize_nf(); + }); + } + } + } + + pub(crate) fn from_builtin(b: Builtin) -> ValueF { + ValueF::AppliedBuiltin(b, vec![]) + } +} + +impl Shift for ValueF { + fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { + Some(match self { + ValueF::Lam(x, t, e) => ValueF::Lam( + x.clone(), + t.shift(delta, var)?, + e.shift(delta, &var.under_binder(x))?, + ), + ValueF::AppliedBuiltin(b, args) => { + ValueF::AppliedBuiltin(*b, args.shift(delta, var)?) + } + ValueF::Pi(x, t, e) => ValueF::Pi( + x.clone(), + t.shift(delta, var)?, + e.shift(delta, &var.under_binder(x))?, + ), + ValueF::Var(v) => ValueF::Var(v.shift(delta, var)?), + ValueF::Const(c) => ValueF::Const(*c), + ValueF::BoolLit(b) => ValueF::BoolLit(*b), + ValueF::NaturalLit(n) => ValueF::NaturalLit(*n), + ValueF::IntegerLit(n) => ValueF::IntegerLit(*n), + ValueF::DoubleLit(n) => ValueF::DoubleLit(*n), + ValueF::EmptyOptionalLit(tth) => { + ValueF::EmptyOptionalLit(tth.shift(delta, var)?) + } + ValueF::NEOptionalLit(th) => { + ValueF::NEOptionalLit(th.shift(delta, var)?) + } + ValueF::EmptyListLit(tth) => { + ValueF::EmptyListLit(tth.shift(delta, var)?) + } + ValueF::NEListLit(elts) => { + ValueF::NEListLit(elts.shift(delta, var)?) + } + ValueF::RecordLit(kvs) => ValueF::RecordLit(kvs.shift(delta, var)?), + ValueF::RecordType(kvs) => { + ValueF::RecordType(kvs.shift(delta, var)?) + } + ValueF::UnionType(kts) => ValueF::UnionType(kts.shift(delta, var)?), + ValueF::UnionConstructor(x, kts) => { + ValueF::UnionConstructor(x.clone(), kts.shift(delta, var)?) 
+ } + ValueF::UnionLit(x, v, kts) => ValueF::UnionLit( + x.clone(), + v.shift(delta, var)?, + kts.shift(delta, var)?, + ), + ValueF::TextLit(elts) => ValueF::TextLit(elts.shift(delta, var)?), + ValueF::Equivalence(x, y) => { + ValueF::Equivalence(x.shift(delta, var)?, y.shift(delta, var)?) + } + ValueF::PartialExpr(e) => ValueF::PartialExpr(e.shift(delta, var)?), + }) + } +} + +impl Subst<Value> for ValueF { + fn subst_shift(&self, var: &AlphaVar, val: &Value) -> Self { + match self { + ValueF::AppliedBuiltin(b, args) => { + ValueF::AppliedBuiltin(*b, args.subst_shift(var, val)) + } + ValueF::PartialExpr(e) => { + ValueF::PartialExpr(e.subst_shift(var, val)) + } + ValueF::TextLit(elts) => { + ValueF::TextLit(elts.subst_shift(var, val)) + } + ValueF::Lam(x, t, e) => ValueF::Lam( + x.clone(), + t.subst_shift(var, val), + e.subst_shift(&var.under_binder(x), &val.under_binder(x)), + ), + ValueF::Pi(x, t, e) => ValueF::Pi( + x.clone(), + t.subst_shift(var, val), + e.subst_shift(&var.under_binder(x), &val.under_binder(x)), + ), + ValueF::Var(v) if v == var => val.to_whnf_ignore_type(), + ValueF::Var(v) => ValueF::Var(v.shift(-1, var).unwrap()), + ValueF::Const(c) => ValueF::Const(*c), + ValueF::BoolLit(b) => ValueF::BoolLit(*b), + ValueF::NaturalLit(n) => ValueF::NaturalLit(*n), + ValueF::IntegerLit(n) => ValueF::IntegerLit(*n), + ValueF::DoubleLit(n) => ValueF::DoubleLit(*n), + ValueF::EmptyOptionalLit(tth) => { + ValueF::EmptyOptionalLit(tth.subst_shift(var, val)) + } + ValueF::NEOptionalLit(th) => { + ValueF::NEOptionalLit(th.subst_shift(var, val)) + } + ValueF::EmptyListLit(tth) => { + ValueF::EmptyListLit(tth.subst_shift(var, val)) + } + ValueF::NEListLit(elts) => { + ValueF::NEListLit(elts.subst_shift(var, val)) + } + ValueF::RecordLit(kvs) => { + ValueF::RecordLit(kvs.subst_shift(var, val)) + } + ValueF::RecordType(kvs) => { + ValueF::RecordType(kvs.subst_shift(var, val)) + } + ValueF::UnionType(kts) => { + ValueF::UnionType(kts.subst_shift(var, val)) + } + ValueF::UnionConstructor(x, kts) => { + ValueF::UnionConstructor(x.clone(), kts.subst_shift(var, val)) + } + ValueF::UnionLit(x, v, kts) => ValueF::UnionLit( + x.clone(), + v.subst_shift(var, val), + kts.subst_shift(var, val), + ), + ValueF::Equivalence(x, y) => ValueF::Equivalence( + x.subst_shift(var, val), + y.subst_shift(var, val), + ), + } + } +} diff --git a/dhall/src/core/var.rs b/dhall/src/core/var.rs index 35bff80..ce4d137 100644 --- a/dhall/src/core/var.rs +++ b/dhall/src/core/var.rs @@ -2,21 +2,21 @@ use std::collections::HashMap; use dhall_syntax::{Label, V}; -/// Stores a pair of variables: a normal one and if relevant one +/// Stores a pair of variables: a normal one and one /// that corresponds to the alpha-normalized version of the first one. -/// Equality is up to alpha-equivalence. -#[derive(Debug, Clone, Eq)] +/// Equality is up to alpha-equivalence (compares on the second one only). +#[derive(Clone, Eq)] pub struct AlphaVar { normal: V<Label>, - alpha: Option<V<()>>, + alpha: V<()>, } // Exactly like a Label, but equality returns always true. -// This is so that Value equality is exactly alpha-equivalence. -#[derive(Debug, Clone, Eq)] +// This is so that ValueF equality is exactly alpha-equivalence. +#[derive(Clone, Eq)] pub struct AlphaLabel(Label); -pub trait Shift: Sized { +pub(crate) trait Shift: Sized { // Shift an expression to move it around binders without changing the meaning of its free // variables. Shift by 1 to move an expression under a binder. 
Shift by -1 to extract an // expression from under a binder, if the expression does not refer to that bound variable. @@ -50,34 +50,35 @@ pub trait Shift: Sized { } } -pub trait Subst<T> { - fn subst_shift(&self, var: &AlphaVar, val: &T) -> Self; +pub(crate) trait Subst<S> { + fn subst_shift(&self, var: &AlphaVar, val: &S) -> Self; } impl AlphaVar { - pub fn to_var(&self, alpha: bool) -> V<Label> { - match (alpha, &self.alpha) { - (true, Some(x)) => V("_".into(), x.1), - _ => self.normal.clone(), + pub(crate) fn to_var(&self, alpha: bool) -> V<Label> { + if alpha { + V("_".into(), self.alpha.1) + } else { + self.normal.clone() } } - pub fn from_var(normal: V<Label>) -> Self { + pub(crate) fn from_var_and_alpha(normal: V<Label>, alpha: usize) -> Self { AlphaVar { normal, - alpha: None, + alpha: V((), alpha), } } } impl AlphaLabel { - pub fn to_label_maybe_alpha(&self, alpha: bool) -> Label { + pub(crate) fn to_label_maybe_alpha(&self, alpha: bool) -> Label { if alpha { "_".into() } else { self.to_label() } } - pub fn to_label(&self) -> Label { + pub(crate) fn to_label(&self) -> Label { self.clone().into() } } @@ -86,31 +87,15 @@ impl Shift for AlphaVar { fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { Some(AlphaVar { normal: self.normal.shift(delta, &var.normal)?, - alpha: match (&self.alpha, &var.alpha) { - (Some(x), Some(v)) => Some(x.shift(delta, v)?), - _ => None, - }, + alpha: self.alpha.shift(delta, &var.alpha)?, }) } } -impl Shift for () { - fn shift(&self, _delta: isize, _var: &AlphaVar) -> Option<Self> { - Some(()) - } -} - -impl<T> Subst<T> for () { - fn subst_shift(&self, _var: &AlphaVar, _val: &T) -> Self {} -} - +/// Equality up to alpha-equivalence impl std::cmp::PartialEq for AlphaVar { fn eq(&self, other: &Self) -> bool { - match (&self.alpha, &other.alpha) { - (Some(x), Some(y)) => x == y, - (None, None) => self.normal == other.normal, - _ => false, - } + self.alpha == other.alpha } } impl std::cmp::PartialEq for AlphaLabel { @@ -119,11 +104,23 @@ impl std::cmp::PartialEq for AlphaLabel { } } +impl std::fmt::Debug for AlphaVar { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "AlphaVar({}, {})", self.normal, self.alpha.1) + } +} + +impl std::fmt::Debug for AlphaLabel { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "AlphaLabel({})", &self.0) + } +} + impl From<Label> for AlphaVar { fn from(x: Label) -> AlphaVar { AlphaVar { normal: V(x, 0), - alpha: Some(V((), 0)), + alpha: V((), 0), } } } @@ -154,3 +151,153 @@ impl From<AlphaLabel> for Label { x.0 } } +impl Shift for () { + fn shift(&self, _delta: isize, _var: &AlphaVar) -> Option<Self> { + Some(()) + } +} + +impl<A: Shift, B: Shift> Shift for (A, B) { + fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { + Some((self.0.shift(delta, var)?, self.1.shift(delta, var)?)) + } +} + +impl<T: Shift> Shift for Option<T> { + fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { + Some(match self { + None => None, + Some(x) => Some(x.shift(delta, var)?), + }) + } +} + +impl<T: Shift> Shift for Box<T> { + fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { + Some(Box::new(self.as_ref().shift(delta, var)?)) + } +} + +impl<T: Shift> Shift for std::rc::Rc<T> { + fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { + Some(std::rc::Rc::new(self.as_ref().shift(delta, var)?)) + } +} + +impl<T: Shift> Shift for std::cell::RefCell<T> { + fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { + 
Some(std::cell::RefCell::new(self.borrow().shift(delta, var)?)) + } +} + +impl<T: Shift, E: Clone> Shift for dhall_syntax::ExprF<T, E> { + fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { + Some(self.traverse_ref_with_special_handling_of_binders( + |v| Ok(v.shift(delta, var)?), + |x, v| Ok(v.shift(delta, &var.under_binder(x))?), + )?) + } +} + +impl<T: Shift> Shift for Vec<T> { + fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { + Some( + self.iter() + .map(|v| Ok(v.shift(delta, var)?)) + .collect::<Result<_, _>>()?, + ) + } +} + +impl<K, T: Shift> Shift for HashMap<K, T> +where + K: Clone + std::hash::Hash + Eq, +{ + fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { + Some( + self.iter() + .map(|(k, v)| Ok((k.clone(), v.shift(delta, var)?))) + .collect::<Result<_, _>>()?, + ) + } +} + +impl<T: Shift> Shift for dhall_syntax::InterpolatedTextContents<T> { + fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { + use dhall_syntax::InterpolatedTextContents::{Expr, Text}; + Some(match self { + Expr(x) => Expr(x.shift(delta, var)?), + Text(s) => Text(s.clone()), + }) + } +} + +impl<S> Subst<S> for () { + fn subst_shift(&self, _var: &AlphaVar, _val: &S) -> Self {} +} + +impl<S, A: Subst<S>, B: Subst<S>> Subst<S> for (A, B) { + fn subst_shift(&self, var: &AlphaVar, val: &S) -> Self { + (self.0.subst_shift(var, val), self.1.subst_shift(var, val)) + } +} + +impl<S, T: Subst<S>> Subst<S> for Option<T> { + fn subst_shift(&self, var: &AlphaVar, val: &S) -> Self { + self.as_ref().map(|x| x.subst_shift(var, val)) + } +} + +impl<S, T: Subst<S>> Subst<S> for Box<T> { + fn subst_shift(&self, var: &AlphaVar, val: &S) -> Self { + Box::new(self.as_ref().subst_shift(var, val)) + } +} + +impl<S, T: Subst<S>> Subst<S> for std::rc::Rc<T> { + fn subst_shift(&self, var: &AlphaVar, val: &S) -> Self { + std::rc::Rc::new(self.as_ref().subst_shift(var, val)) + } +} + +impl<S, T: Subst<S>> Subst<S> for std::cell::RefCell<T> { + fn subst_shift(&self, var: &AlphaVar, val: &S) -> Self { + std::cell::RefCell::new(self.borrow().subst_shift(var, val)) + } +} + +impl<S: Shift, T: Subst<S>, E: Clone> Subst<S> for dhall_syntax::ExprF<T, E> { + fn subst_shift(&self, var: &AlphaVar, val: &S) -> Self { + self.map_ref_with_special_handling_of_binders( + |v| v.subst_shift(var, val), + |x, v| v.subst_shift(&var.under_binder(x), &val.under_binder(x)), + ) + } +} + +impl<S, T: Subst<S>> Subst<S> for Vec<T> { + fn subst_shift(&self, var: &AlphaVar, val: &S) -> Self { + self.iter().map(|v| v.subst_shift(var, val)).collect() + } +} + +impl<S, T: Subst<S>> Subst<S> for dhall_syntax::InterpolatedTextContents<T> { + fn subst_shift(&self, var: &AlphaVar, val: &S) -> Self { + use dhall_syntax::InterpolatedTextContents::{Expr, Text}; + match self { + Expr(x) => Expr(x.subst_shift(var, val)), + Text(s) => Text(s.clone()), + } + } +} + +impl<S, K, T: Subst<S>> Subst<S> for HashMap<K, T> +where + K: Clone + std::hash::Hash + Eq, +{ + fn subst_shift(&self, var: &AlphaVar, val: &S) -> Self { + self.iter() + .map(|(k, v)| (k.clone(), v.subst_shift(var, val))) + .collect() + } +} diff --git a/dhall/src/error/mod.rs b/dhall/src/error/mod.rs index 3c00017..6d4e120 100644 --- a/dhall/src/error/mod.rs +++ b/dhall/src/error/mod.rs @@ -3,8 +3,9 @@ use std::io::Error as IOError; use dhall_syntax::{BinOp, Import, Label, ParseError, V}; use crate::core::context::TypecheckContext; +use crate::core::value::Value; use crate::phase::resolve::ImportStack; -use crate::phase::{Normalized, Type, Typed}; +use 
crate::phase::NormalizedExpr; pub type Result<T> = std::result::Result<T, Error>; @@ -17,14 +18,13 @@ pub enum Error { Encode(EncodeError), Resolve(ImportError), Typecheck(TypeError), - Deserialize(String), } #[derive(Debug)] pub enum ImportError { - Recursive(Import, Box<Error>), - UnexpectedImport(Import), - ImportCycle(ImportStack, Import), + Recursive(Import<NormalizedExpr>, Box<Error>), + UnexpectedImport(Import<NormalizedExpr>), + ImportCycle(ImportStack, Import<NormalizedExpr>), } #[derive(Debug)] @@ -49,28 +49,26 @@ pub struct TypeError { #[derive(Debug)] pub(crate) enum TypeMessage { UnboundVariable(V<Label>), - InvalidInputType(Normalized), - InvalidOutputType(Normalized), - NotAFunction(Typed), - TypeMismatch(Typed, Normalized, Typed), - AnnotMismatch(Typed, Normalized), - Untyped, - FieldCollision(Label), - InvalidListElement(usize, Normalized, Typed), - InvalidListType(Normalized), - InvalidOptionalType(Normalized), - InvalidPredicate(Typed), - IfBranchMismatch(Typed, Typed), - IfBranchMustBeTerm(bool, Typed), - InvalidFieldType(Label, Type), - NotARecord(Label, Normalized), - MustCombineRecord(Typed), - MissingRecordField(Label, Typed), - MissingUnionField(Label, Normalized), - BinOpTypeMismatch(BinOp, Typed), - InvalidTextInterpolation(Typed), - Merge1ArgMustBeRecord(Typed), - Merge2ArgMustBeUnion(Typed), + InvalidInputType(Value), + InvalidOutputType(Value), + NotAFunction(Value), + TypeMismatch(Value, Value, Value), + AnnotMismatch(Value, Value), + InvalidListElement(usize, Value, Value), + InvalidListType(Value), + InvalidOptionalType(Value), + InvalidPredicate(Value), + IfBranchMismatch(Value, Value), + IfBranchMustBeTerm(bool, Value), + InvalidFieldType(Label, Value), + NotARecord(Label, Value), + MustCombineRecord(Value), + MissingRecordField(Label, Value), + MissingUnionField(Label, Value), + BinOpTypeMismatch(BinOp, Value), + InvalidTextInterpolation(Value), + Merge1ArgMustBeRecord(Value), + Merge2ArgMustBeUnion(Value), MergeEmptyNeedsAnnotation, MergeHandlerMissingVariant(Label), MergeVariantMissingHandler(Label), @@ -80,14 +78,12 @@ pub(crate) enum TypeMessage { ProjectionMustBeRecord, ProjectionMissingEntry, Sort, - RecordMismatch(Typed, Typed), RecordTypeDuplicateField, - RecordTypeMergeRequiresRecordType(Type), - RecordTypeMismatch(Type, Type, Type, Type), + RecordTypeMergeRequiresRecordType(Value), UnionTypeDuplicateField, - EquivalenceArgumentMustBeTerm(bool, Typed), - EquivalenceTypeMismatch(Typed, Typed), - AssertMismatch(Typed, Typed), + EquivalenceArgumentMustBeTerm(bool, Value), + EquivalenceTypeMismatch(Value, Value), + AssertMismatch(Value, Value), AssertMustTakeEquivalence, } @@ -156,7 +152,6 @@ impl std::fmt::Display for Error { Error::Encode(err) => write!(f, "{:?}", err), Error::Resolve(err) => write!(f, "{:?}", err), Error::Typecheck(err) => write!(f, "{:?}", err), - Error::Deserialize(err) => write!(f, "{}", err), } } } diff --git a/dhall/src/lib.rs b/dhall/src/lib.rs index ea29869..0a430e4 100644 --- a/dhall/src/lib.rs +++ b/dhall/src/lib.rs @@ -1,13 +1,7 @@ #![feature(trace_macros)] -#![feature(proc_macro_hygiene)] #![feature(slice_patterns)] -#![feature(label_break_value)] #![feature(non_exhaustive)] -#![feature(bind_by_move_pattern_guards)] -#![feature(try_trait)] -#![feature(inner_deref)] #![feature(never_type)] -#![cfg_attr(test, feature(custom_inner_attributes))] #![allow( clippy::type_complexity, clippy::infallible_destructuring_match, @@ -17,118 +11,9 @@ clippy::ptr_arg )] -//! 
[Dhall][dhall] is a programmable configuration language that provides a non-repetitive -//! alternative to JSON and YAML. -//! -//! You can think of Dhall as: JSON + types + imports + functions -//! -//! For a description of the dhall language, examples, tutorials, and more, see the [language -//! website][dhall]. -//! -//! This crate provides support for consuming dhall files the same way you would consume JSON or -//! YAML. It uses the [Serde][serde] serialization library to provide drop-in support for dhall -//! for any datatype that supports serde (and that's a lot of them !). -//! -//! This library is limited to deserializing (reading) dhall values; serializing (writing) -//! values to dhall is not supported for now. -//! -//! # Examples -//! -//! ### Custom datatype -//! -//! If you have a custom datatype for which you derived [serde::Deserialize], chances are -//! you will be able to derive [StaticType][de::StaticType] for it as well. -//! This gives you access to a dhall representation of your datatype that can be outputted -//! to users, and allows easy type-safe deserializing. -//! -//! ```edition2018 -//! use serde::Deserialize; -//! use dhall::de::StaticType; -//! -//! #[derive(Debug, Deserialize, StaticType)] -//! struct Point { -//! x: u64, -//! y: u64, -//! } -//! -//! fn main() { -//! // Some dhall data -//! let data = "{ x = 1, y = 1 + 1 }"; -//! -//! // Convert the dhall string to a Point. -//! let point: Point = -//! dhall::de::from_str_auto_type(&data) -//! .expect("An error ocurred !"); -//! -//! // Prints "point = Point { x: 1, y: 2 }" -//! println!("point = {:?}", point); -//! } -//! ``` -//! -//! ### Loosely typed -//! -//! If you used to consume JSON or YAML in a loosely typed way, you can continue to do so -//! with dhall. You only need to replace [serde_json::from_str] or [serde_yaml::from_str] -//! with [dhall::de::from_str][de::from_str]. -//! More generally, if the [StaticType][de::StaticType] derive doesn't suit your -//! needs, you can still deserialize any valid dhall file that serde can handle. -//! -//! [serde_json::from_str]: https://docs.serde.rs/serde_json/de/fn.from_str.html -//! [serde_yaml::from_str]: https://docs.serde.rs/serde_yaml/fn.from_str.html -//! -//! ```edition2018 -//! use std::collections::BTreeMap; -//! -//! let mut map = BTreeMap::new(); -//! map.insert("x".to_string(), 1); -//! map.insert("y".to_string(), 2); -//! -//! // Some dhall data -//! let data = "{ x = 1, y = 1 + 1 } : { x: Natural, y: Natural }"; -//! -//! // Deserialize it to a Rust type. -//! let deserialized_map: BTreeMap<String, usize> = -//! dhall::de::from_str(&data, None) -//! .expect("Failed reading the data !"); -//! assert_eq!(map, deserialized_map); -//! ``` -//! -//! You can of course specify a dhall type that the input should match. -//! -//! ```edition2018 -//! use std::collections::BTreeMap; -//! -//! let mut map = BTreeMap::new(); -//! map.insert("x".to_string(), 1); -//! map.insert("y".to_string(), 2); -//! -//! // Some dhall data -//! let point_data = "{ x = 1, y = 1 + 1 }"; -//! let point_type_data = "{ x: Natural, y: Natural }"; -//! -//! // Construct a type -//! let point_type = -//! dhall::de::from_str(point_type_data, None) -//! .expect("Could not parse the Point type"); -//! -//! // Deserialize it to a Rust type. -//! let deserialized_map: BTreeMap<String, usize> = -//! dhall::de::from_str(&point_data, Some(&point_type)) -//! .expect("Failed reading the data !"); -//! assert_eq!(map, deserialized_map); -//! ``` -//! -//! 
[dhall]: https://dhall-lang.org/ -//! [serde]: https://docs.serde.rs/serde/ -//! [serde::Deserialize]: https://docs.serde.rs/serde/trait.Deserialize.html - -#[cfg(test)] #[macro_use] mod tests; -pub(crate) mod api; -pub(crate) mod core; +pub mod core; pub mod error; -pub(crate) mod phase; - -pub use api::*; +pub mod phase; diff --git a/dhall/src/phase/.resolve.rs.swp b/dhall/src/phase/.resolve.rs.swp Binary files differnew file mode 100644 index 0000000..5314300 --- /dev/null +++ b/dhall/src/phase/.resolve.rs.swp diff --git a/dhall/src/phase/binary.rs b/dhall/src/phase/binary.rs index 89b6db2..16e7ce9 100644 --- a/dhall/src/phase/binary.rs +++ b/dhall/src/phase/binary.rs @@ -5,30 +5,26 @@ use std::vec; use dhall_syntax::map::DupTreeMap; use dhall_syntax::{ - rc, ExprF, FilePrefix, Hash, Import, ImportHashed, ImportLocation, - ImportMode, Integer, InterpolatedText, Label, Natural, Scheme, SubExpr, - URL, V, File, + rc, Expr, ExprF, FilePrefix, Hash, Import, ImportLocation, ImportMode, + Integer, InterpolatedText, Label, Natural, Scheme, URL, V, File }; use crate::error::{DecodeError, EncodeError}; -use crate::phase::{DecodedSubExpr, ParsedSubExpr}; +use crate::phase::DecodedExpr; -pub fn decode(data: &[u8]) -> Result<DecodedSubExpr, DecodeError> { +pub(crate) fn decode(data: &[u8]) -> Result<DecodedExpr, DecodeError> { match serde_cbor::de::from_slice(data) { Ok(v) => cbor_value_to_dhall(&v), Err(e) => Err(DecodeError::CBORError(e)), } } -//TODO: encode normalized expression too -pub fn encode(expr: &ParsedSubExpr) -> Result<Vec<u8>, EncodeError> { +pub(crate) fn encode<E>(expr: &Expr<E>) -> Result<Vec<u8>, EncodeError> { serde_cbor::ser::to_vec(&Serialize::Expr(expr)) .map_err(|e| EncodeError::CBORError(e)) } -fn cbor_value_to_dhall( - data: &cbor::Value, -) -> Result<DecodedSubExpr, DecodeError> { +fn cbor_value_to_dhall(data: &cbor::Value) -> Result<DecodedExpr, DecodeError> { use cbor::Value::*; use dhall_syntax::{BinOp, Builtin, Const}; use ExprF::*; @@ -262,20 +258,12 @@ fn cbor_value_to_dhall( }; let headers = match rest.next() { Some(Null) => None, - // TODO - // Some(x) => { - // match cbor_value_to_dhall(&x)?.as_ref() { - // Embed(import) => Some(Box::new( - // import.location_hashed.clone(), - // )), - // _ => Err(DecodeError::WrongFormatError( - // "import/remote/headers".to_owned(), - // ))?, - // } - // } + Some(x) => { + let x = cbor_value_to_dhall(&x)?; + Some(x) + } _ => Err(DecodeError::WrongFormatError( - "import/remote/headers is unimplemented" - .to_owned(), + "import/remote/headers".to_owned(), ))?, }; let authority = match rest.next() { @@ -343,9 +331,10 @@ fn cbor_value_to_dhall( "import/type".to_owned(), ))?, }; - Embed(Import { + Import(dhall_syntax::Import { mode, - location_hashed: ImportHashed { hash, location }, + hash, + location, }) } [U64(25), bindings..] 
=> { @@ -380,6 +369,15 @@ fn cbor_value_to_dhall( let y = cbor_value_to_dhall(&y)?; Annot(x, y) } + [U64(27), x] => { + let x = cbor_value_to_dhall(&x)?; + ToMap(x, None) + } + [U64(27), x, y] => { + let x = cbor_value_to_dhall(&x)?; + let y = cbor_value_to_dhall(&y)?; + ToMap(x, Some(y)) + } [U64(28), x] => { let x = cbor_value_to_dhall(&x)?; EmptyListLit(x) @@ -394,7 +392,7 @@ fn cbor_map_to_dhall_map<'a, T>( map: impl IntoIterator<Item = (&'a cbor::ObjectKey, &'a cbor::Value)>, ) -> Result<T, DecodeError> where - T: FromIterator<(Label, DecodedSubExpr)>, + T: FromIterator<(Label, DecodedExpr)>, { map.into_iter() .map(|(k, v)| -> Result<(_, _), _> { @@ -411,7 +409,7 @@ fn cbor_map_to_dhall_opt_map<'a, T>( map: impl IntoIterator<Item = (&'a cbor::ObjectKey, &'a cbor::Value)>, ) -> Result<T, DecodeError> where - T: FromIterator<(Label, Option<DecodedSubExpr>)>, + T: FromIterator<(Label, Option<DecodedExpr>)>, { map.into_iter() .map(|(k, v)| -> Result<(_, _), _> { @@ -427,11 +425,11 @@ where .collect::<Result<_, _>>() } -enum Serialize<'a> { - Expr(&'a ParsedSubExpr), +enum Serialize<'a, E> { + Expr(&'a Expr<E>), CBOR(cbor::Value), - RecordMap(&'a DupTreeMap<Label, ParsedSubExpr>), - UnionMap(&'a DupTreeMap<Label, Option<ParsedSubExpr>>), + RecordMap(&'a DupTreeMap<Label, Expr<E>>), + UnionMap(&'a DupTreeMap<Label, Option<Expr<E>>>), } macro_rules! count { @@ -451,7 +449,7 @@ macro_rules! ser_seq { }}; } -fn serialize_subexpr<S>(ser: S, e: &ParsedSubExpr) -> Result<S::Ok, S::Error> +fn serialize_subexpr<S, E>(ser: S, e: &Expr<E>) -> Result<S::Ok, S::Error> where S: serde::ser::Serializer, { @@ -461,21 +459,14 @@ where use std::iter::once; use self::Serialize::{RecordMap, UnionMap}; - fn expr(x: &ParsedSubExpr) -> self::Serialize<'_> { + fn expr<E>(x: &Expr<E>) -> self::Serialize<'_, E> { self::Serialize::Expr(x) } - fn cbor<'a>(v: cbor::Value) -> self::Serialize<'a> { - self::Serialize::CBOR(v) - } - fn tag<'a>(x: u64) -> self::Serialize<'a> { - cbor(U64(x)) - } - fn null<'a>() -> self::Serialize<'a> { - cbor(cbor::Value::Null) - } - fn label<'a>(l: &Label) -> self::Serialize<'a> { - cbor(cbor::Value::String(l.into())) - } + let cbor = + |v: cbor::Value| -> self::Serialize<'_, E> { self::Serialize::CBOR(v) }; + let tag = |x: u64| cbor(U64(x)); + let null = || cbor(cbor::Value::Null); + let label = |l: &Label| cbor(cbor::Value::String(l.into())); match e.as_ref() { Const(c) => ser.serialize_str(&c.to_string()), @@ -571,25 +562,33 @@ where Merge(x, y, Some(z)) => { ser_seq!(ser; tag(6), expr(x), expr(y), expr(z)) } + ToMap(x, None) => ser_seq!(ser; tag(27), expr(x)), + ToMap(x, Some(y)) => ser_seq!(ser; tag(27), expr(x), expr(y)), Projection(x, ls) => ser.collect_seq( once(tag(10)) .chain(once(expr(x))) .chain(ls.iter().map(label)), ), - Embed(import) => serialize_import(ser, import), + Import(import) => serialize_import(ser, import), + Embed(_) => unimplemented!( + "An expression with resolved imports cannot be binary-encoded" + ), } } -fn serialize_import<S>(ser: S, import: &Import) -> Result<S::Ok, S::Error> +fn serialize_import<S, E>( + ser: S, + import: &Import<Expr<E>>, +) -> Result<S::Ok, S::Error> where S: serde::ser::Serializer, { use cbor::Value::{Bytes, Null, U64}; use serde::ser::SerializeSeq; - let count = 4 + match &import.location_hashed.location { - ImportLocation::Remote(url) => 3 + url.path.clone().into_iter().len(), - ImportLocation::Local(_, path) => path.clone().into_iter().len(), + let count = 4 + match &import.location { + ImportLocation::Remote(url) => 3 + 
url.path.file_path.len(), + ImportLocation::Local(_, path) => path.file_path.len(), ImportLocation::Env(_) => 1, ImportLocation::Missing => 0, }; @@ -597,7 +596,7 @@ where ser_seq.serialize_element(&U64(24))?; - let hash = match &import.location_hashed.hash { + let hash = match &import.hash { None => Null, Some(Hash::SHA256(h)) => { let mut bytes = vec![18, 32]; @@ -614,7 +613,7 @@ where }; ser_seq.serialize_element(&U64(mode))?; - let scheme = match &import.location_hashed.location { + let scheme = match &import.location { ImportLocation::Remote(url) => match url.scheme { Scheme::HTTP => 0, Scheme::HTTPS => 1, @@ -630,21 +629,16 @@ where }; ser_seq.serialize_element(&U64(scheme))?; - match &import.location_hashed.location { + match &import.location { ImportLocation::Remote(url) => { match &url.headers { None => ser_seq.serialize_element(&Null)?, - Some(location_hashed) => { - ser_seq.serialize_element(&self::Serialize::Expr( - &SubExpr::from_expr_no_note(ExprF::Embed(Import { - mode: ImportMode::Code, - location_hashed: location_hashed.as_ref().clone(), - })), - ))? + Some(e) => { + ser_seq.serialize_element(&self::Serialize::Expr(e))? } }; ser_seq.serialize_element(&url.authority)?; - for p in url.path.clone().into_iter() { + for p in url.path.file_path.iter() { ser_seq.serialize_element(&p)?; } match &url.query { @@ -653,7 +647,7 @@ where }; } ImportLocation::Local(_, path) => { - for p in path.clone().into_iter() { + for p in path.file_path.iter() { ser_seq.serialize_element(&p)?; } } @@ -666,7 +660,7 @@ where ser_seq.end() } -impl<'a> serde::ser::Serialize for Serialize<'a> { +impl<'a, E> serde::ser::Serialize for Serialize<'a, E> { fn serialize<S>(&self, ser: S) -> Result<S::Ok, S::Error> where S: serde::ser::Serializer, @@ -692,13 +686,10 @@ impl<'a> serde::ser::Serialize for Serialize<'a> { } } -fn collect_nested_applications<'a, N, E>( - e: &'a SubExpr<N, E>, -) -> (&'a SubExpr<N, E>, Vec<&'a SubExpr<N, E>>) { - fn go<'a, N, E>( - e: &'a SubExpr<N, E>, - vec: &mut Vec<&'a SubExpr<N, E>>, - ) -> &'a SubExpr<N, E> { +fn collect_nested_applications<'a, E>( + e: &'a Expr<E>, +) -> (&'a Expr<E>, Vec<&'a Expr<E>>) { + fn go<'a, E>(e: &'a Expr<E>, vec: &mut Vec<&'a Expr<E>>) -> &'a Expr<E> { match e.as_ref() { ExprF::App(f, a) => { vec.push(a); @@ -712,16 +703,15 @@ fn collect_nested_applications<'a, N, E>( (e, vec) } -type LetBinding<'a, N, E> = - (&'a Label, &'a Option<SubExpr<N, E>>, &'a SubExpr<N, E>); +type LetBinding<'a, E> = (&'a Label, &'a Option<Expr<E>>, &'a Expr<E>); -fn collect_nested_lets<'a, N, E>( - e: &'a SubExpr<N, E>, -) -> (&'a SubExpr<N, E>, Vec<LetBinding<'a, N, E>>) { - fn go<'a, N, E>( - e: &'a SubExpr<N, E>, - vec: &mut Vec<LetBinding<'a, N, E>>, - ) -> &'a SubExpr<N, E> { +fn collect_nested_lets<'a, E>( + e: &'a Expr<E>, +) -> (&'a Expr<E>, Vec<LetBinding<'a, E>>) { + fn go<'a, E>( + e: &'a Expr<E>, + vec: &mut Vec<LetBinding<'a, E>>, + ) -> &'a Expr<E> { match e.as_ref() { ExprF::Let(l, t, v, e) => { vec.push((l, t, v)); diff --git a/dhall/src/phase/mod.rs b/dhall/src/phase/mod.rs index ccedff2..2c5505c 100644 --- a/dhall/src/phase/mod.rs +++ b/dhall/src/phase/mod.rs @@ -1,17 +1,14 @@ -use std::borrow::Cow; use std::fmt::Display; use std::path::Path; -use dhall_syntax::{Const, Import, Span, SubExpr, X}; +use dhall_syntax::{Builtin, Const, Expr}; -use crate::core::context::TypecheckContext; -use crate::core::thunk::Thunk; -use crate::core::value::Value; +use crate::core::value::{ToExprOptions, Value}; +use crate::core::valuef::ValueF; use 
crate::core::var::{AlphaVar, Shift, Subst}; -use crate::error::{EncodeError, Error, ImportError, TypeError, TypeMessage}; +use crate::error::{EncodeError, Error, ImportError, TypeError}; use resolve::ImportRoot; -use typecheck::type_of_const; pub(crate) mod binary; pub(crate) mod normalize; @@ -19,30 +16,23 @@ pub(crate) mod parse; pub(crate) mod resolve; pub(crate) mod typecheck; -pub type ParsedSubExpr = SubExpr<Span, Import>; -pub type DecodedSubExpr = SubExpr<X, Import>; -pub type ResolvedSubExpr = SubExpr<Span, Normalized>; -pub type NormalizedSubExpr = SubExpr<X, X>; +pub type ParsedExpr = Expr<!>; +pub type DecodedExpr = Expr<!>; +pub type ResolvedExpr = Expr<Normalized>; +pub type NormalizedExpr = Expr<Normalized>; #[derive(Debug, Clone)] -pub struct Parsed(ParsedSubExpr, ImportRoot); +pub struct Parsed(ParsedExpr, ImportRoot); /// An expression where all imports have been resolved +/// +/// Invariant: there must be no `Import` nodes or `ImportAlt` operations left. #[derive(Debug, Clone)] -pub struct Resolved(ResolvedSubExpr); +pub struct Resolved(ResolvedExpr); /// A typed expression #[derive(Debug, Clone)] -pub enum Typed { - // Any value, along with (optionally) its type - Untyped(Thunk), - Typed(Thunk, Box<Type>), - // One of the base higher-kinded typed. - // Used to avoid storing the same tower ot Type->Kind->Sort - // over and over again. Also enables having Sort as a type - // even though it doesn't itself have a type. - Const(Const), -} +pub struct Typed(Value); /// A normalized expression. /// @@ -50,8 +40,6 @@ pub enum Typed { #[derive(Debug, Clone)] pub struct Normalized(Typed); -pub type Type = Typed; - impl Parsed { pub fn parse_file(f: &Path) -> Result<Parsed, Error> { parse::parse_file(f) @@ -59,11 +47,9 @@ impl Parsed { pub fn parse_str(s: &str) -> Result<Parsed, Error> { parse::parse_str(s) } - #[allow(dead_code)] pub fn parse_binary_file(f: &Path) -> Result<Parsed, Error> { parse::parse_binary_file(f) } - #[allow(dead_code)] pub fn parse_binary(data: &[u8]) -> Result<Parsed, Error> { parse::parse_binary(data) } @@ -71,12 +57,10 @@ impl Parsed { pub fn resolve(self) -> Result<Resolved, ImportError> { resolve::resolve(self) } - #[allow(dead_code)] pub fn skip_resolve(self) -> Result<Resolved, ImportError> { resolve::skip_resolve_expr(self) } - #[allow(dead_code)] pub fn encode(&self) -> Result<Vec<u8>, EncodeError> { crate::phase::binary::encode(&self.0) } @@ -84,15 +68,11 @@ impl Parsed { impl Resolved { pub fn typecheck(self) -> Result<Typed, TypeError> { - typecheck::typecheck(self) + Ok(typecheck::typecheck(self.0)?.into_typed()) } - pub fn typecheck_with(self, ty: &Type) -> Result<Typed, TypeError> { - typecheck::typecheck_with(self, ty) - } - /// Pretends this expression has been typechecked. Use with care. - #[allow(dead_code)] - pub fn skip_typecheck(self) -> Typed { - typecheck::skip_typecheck(self) + pub fn typecheck_with(self, ty: &Typed) -> Result<Typed, TypeError> { + Ok(typecheck::typecheck_with(self.0, ty.normalize_to_expr())? + .into_typed()) } } @@ -105,117 +85,110 @@ impl Typed { /// /// However, `normalize` will not fail if the expression is ill-typed and will /// leave ill-typed sub-expressions unevaluated. 
- pub fn normalize(self) -> Normalized { - match &self { - Typed::Const(_) => {} - Typed::Untyped(thunk) | Typed::Typed(thunk, _) => { - thunk.normalize_nf(); - } - } + pub fn normalize(mut self) -> Normalized { + self.normalize_mut(); Normalized(self) } - pub fn from_thunk_and_type(th: Thunk, t: Type) -> Self { - Typed::Typed(th, Box::new(t)) + pub(crate) fn from_const(c: Const) -> Self { + Typed(Value::from_const(c)) } - pub fn from_thunk_untyped(th: Thunk) -> Self { - Typed::Untyped(th) + pub(crate) fn from_valuef_and_type(v: ValueF, t: Typed) -> Self { + Typed(Value::from_valuef_and_type(v, t.into_value())) } - pub fn from_const(c: Const) -> Self { - Typed::Const(c) + pub(crate) fn from_value(th: Value) -> Self { + Typed(th) } - pub fn from_value_untyped(v: Value) -> Self { - Typed::from_thunk_untyped(Thunk::from_value(v)) + pub(crate) fn const_type() -> Self { + Typed::from_const(Const::Type) } - // TODO: Avoid cloning if possible - pub fn to_value(&self) -> Value { - match self { - Typed::Untyped(th) | Typed::Typed(th, _) => th.to_value(), - Typed::Const(c) => Value::Const(*c), - } - } - pub fn to_expr(&self) -> NormalizedSubExpr { - self.to_value().normalize_to_expr() - } - pub fn to_expr_alpha(&self) -> NormalizedSubExpr { - self.to_value().normalize_to_expr_maybe_alpha(true) + pub(crate) fn to_expr(&self) -> NormalizedExpr { + self.0.to_expr(ToExprOptions { + alpha: false, + normalize: false, + }) } - pub fn to_thunk(&self) -> Thunk { - match self { - Typed::Untyped(th) | Typed::Typed(th, _) => th.clone(), - Typed::Const(c) => Thunk::from_value(Value::Const(*c)), - } + pub fn normalize_to_expr(&self) -> NormalizedExpr { + self.0.to_expr(ToExprOptions { + alpha: false, + normalize: true, + }) } - // Deprecated - pub fn to_type(&self) -> Type { - self.clone().into_type() + pub(crate) fn normalize_to_expr_alpha(&self) -> NormalizedExpr { + self.0.to_expr(ToExprOptions { + alpha: true, + normalize: true, + }) } - // Deprecated - pub fn into_type(self) -> Type { - self + pub(crate) fn to_value(&self) -> Value { + self.0.clone() } - pub fn to_normalized(&self) -> Normalized { - self.clone().normalize() + pub(crate) fn into_value(self) -> Value { + self.0 } - pub fn as_const(&self) -> Option<Const> { - // TODO: avoid clone - match &self.to_value() { - Value::Const(c) => Some(*c), - _ => None, - } + + pub(crate) fn normalize_mut(&mut self) { + self.0.normalize_mut() } - pub fn normalize_mut(&mut self) { - match self { - Typed::Untyped(th) | Typed::Typed(th, _) => th.normalize_mut(), - Typed::Const(_) => {} - } + pub(crate) fn get_type(&self) -> Result<Typed, TypeError> { + Ok(self.0.get_type()?.into_typed()) } - pub fn get_type(&self) -> Result<Cow<'_, Type>, TypeError> { - match self { - Typed::Untyped(_) => Err(TypeError::new( - &TypecheckContext::new(), - TypeMessage::Untyped, - )), - Typed::Typed(_, t) => Ok(Cow::Borrowed(t)), - Typed::Const(c) => Ok(Cow::Owned(type_of_const(*c)?)), - } + pub fn make_builtin_type(b: Builtin) -> Self { + Typed::from_value(Value::from_builtin(b)) + } + pub fn make_optional_type(t: Typed) -> Self { + Typed::from_value( + Value::from_builtin(Builtin::Optional).app(t.to_value()), + ) + } + pub fn make_list_type(t: Typed) -> Self { + Typed::from_value(Value::from_builtin(Builtin::List).app(t.to_value())) + } + pub fn make_record_type( + kts: impl Iterator<Item = (String, Typed)>, + ) -> Self { + Typed::from_valuef_and_type( + ValueF::RecordType( + kts.map(|(k, t)| (k.into(), t.into_value())).collect(), + ), + Typed::const_type(), + ) + } + pub fn 
make_union_type( + kts: impl Iterator<Item = (String, Option<Typed>)>, + ) -> Self { + Typed::from_valuef_and_type( + ValueF::UnionType( + kts.map(|(k, t)| (k.into(), t.map(|t| t.into_value()))) + .collect(), + ), + Typed::const_type(), + ) } } impl Normalized { - #[allow(dead_code)] - pub fn to_expr(&self) -> NormalizedSubExpr { - self.0.to_expr() - } - #[allow(dead_code)] - pub fn to_expr_alpha(&self) -> NormalizedSubExpr { - self.0.to_expr_alpha() + pub fn encode(&self) -> Result<Vec<u8>, EncodeError> { + crate::phase::binary::encode(&self.to_expr()) } - #[allow(dead_code)] - pub fn to_type(&self) -> Type { - self.0.to_type() + + pub(crate) fn to_expr(&self) -> NormalizedExpr { + self.0.normalize_to_expr() } - pub fn to_value(&self) -> Value { - self.0.to_value() + pub(crate) fn to_expr_alpha(&self) -> NormalizedExpr { + self.0.normalize_to_expr_alpha() } - pub fn into_typed(self) -> Typed { + pub(crate) fn into_typed(self) -> Typed { self.0 } } impl Shift for Typed { fn shift(&self, delta: isize, var: &AlphaVar) -> Option<Self> { - Some(match self { - Typed::Untyped(th) => Typed::Untyped(th.shift(delta, var)?), - Typed::Typed(th, t) => Typed::Typed( - th.shift(delta, var)?, - Box::new(t.shift(delta, var)?), - ), - Typed::Const(c) => Typed::Const(*c), - }) + Some(Typed(self.0.shift(delta, var)?)) } } @@ -225,16 +198,9 @@ impl Shift for Normalized { } } -impl Subst<Typed> for Typed { - fn subst_shift(&self, var: &AlphaVar, val: &Typed) -> Self { - match self { - Typed::Untyped(th) => Typed::Untyped(th.subst_shift(var, val)), - Typed::Typed(th, t) => Typed::Typed( - th.subst_shift(var, val), - Box::new(t.subst_shift(var, val)), - ), - Typed::Const(c) => Typed::Const(*c), - } +impl Subst<Value> for Typed { + fn subst_shift(&self, var: &AlphaVar, val: &Value) -> Self { + Typed(self.0.subst_shift(var, val)) } } @@ -263,10 +229,21 @@ derive_traits_for_wrapper_struct!(Parsed); derive_traits_for_wrapper_struct!(Resolved); derive_traits_for_wrapper_struct!(Normalized); +impl std::hash::Hash for Normalized { + fn hash<H>(&self, state: &mut H) + where + H: std::hash::Hasher, + { + if let Ok(vec) = self.encode() { + vec.hash(state) + } + } +} + impl Eq for Typed {} impl PartialEq for Typed { fn eq(&self, other: &Self) -> bool { - self.to_value() == other.to_value() + self.0 == other.0 } } diff --git a/dhall/src/phase/normalize.rs b/dhall/src/phase/normalize.rs index 405677a..3f6e99c 100644 --- a/dhall/src/phase/normalize.rs +++ b/dhall/src/phase/normalize.rs @@ -1,348 +1,431 @@ use std::collections::HashMap; +use dhall_syntax::Const::Type; use dhall_syntax::{ - BinOp, Builtin, ExprF, InterpolatedText, InterpolatedTextContents, - NaiveDouble, X, + BinOp, Builtin, ExprF, InterpolatedText, InterpolatedTextContents, Label, + NaiveDouble, }; -use crate::core::context::NormalizationContext; -use crate::core::thunk::{Thunk, TypeThunk}; use crate::core::value::Value; -use crate::core::var::Subst; -use crate::phase::{NormalizedSubExpr, ResolvedSubExpr, Typed}; - -pub type InputSubExpr = ResolvedSubExpr; -pub type OutputSubExpr = NormalizedSubExpr; +use crate::core::valuef::ValueF; +use crate::core::var::{AlphaLabel, Shift, Subst}; +use crate::phase::Normalized; + +// Ad-hoc macro to help construct closures +macro_rules! 
make_closure { + (#$var:ident) => { $var.clone() }; + (var($var:ident, $n:expr, $($ty:tt)*)) => {{ + let var = crate::core::var::AlphaVar::from_var_and_alpha( + Label::from(stringify!($var)).into(), + $n + ); + ValueF::Var(var) + .into_value_with_type(make_closure!($($ty)*)) + }}; + // Warning: assumes that $ty, as a dhall value, has type `Type` + (λ($var:ident : $($ty:tt)*) -> $($body:tt)*) => {{ + let var: AlphaLabel = Label::from(stringify!($var)).into(); + let ty = make_closure!($($ty)*); + let body = make_closure!($($body)*); + let body_ty = body.get_type_not_sort(); + let lam_ty = ValueF::Pi(var.clone(), ty.clone(), body_ty) + .into_value_with_type(Value::from_const(Type)); + ValueF::Lam(var, ty, body).into_value_with_type(lam_ty) + }}; + (Natural) => { + Value::from_builtin(Builtin::Natural) + }; + (List $($rest:tt)*) => { + Value::from_builtin(Builtin::List) + .app(make_closure!($($rest)*)) + }; + (Some($($rest:tt)*)) => {{ + let v = make_closure!($($rest)*); + let v_type = v.get_type_not_sort(); + let opt_v_type = Value::from_builtin(Builtin::Optional).app(v_type); + ValueF::NEOptionalLit(v).into_value_with_type(opt_v_type) + }}; + (1 + $($rest:tt)*) => { + ValueF::PartialExpr(ExprF::BinOp( + dhall_syntax::BinOp::NaturalPlus, + make_closure!($($rest)*), + Value::from_valuef_and_type( + ValueF::NaturalLit(1), + make_closure!(Natural) + ), + )).into_value_with_type( + make_closure!(Natural) + ) + }; + ([ $($head:tt)* ] # $($tail:tt)*) => {{ + let head = make_closure!($($head)*); + let tail = make_closure!($($tail)*); + let list_type = tail.get_type_not_sort(); + ValueF::PartialExpr(ExprF::BinOp( + dhall_syntax::BinOp::ListAppend, + ValueF::NEListLit(vec![head]) + .into_value_with_type(list_type.clone()), + tail, + )).into_value_with_type(list_type) + }}; +} #[allow(clippy::cognitive_complexity)] -pub fn apply_builtin(b: Builtin, args: Vec<Thunk>) -> Value { +pub(crate) fn apply_builtin( + b: Builtin, + args: Vec<Value>, + ty: &Value, +) -> ValueF { use dhall_syntax::Builtin::*; - use Value::*; + use ValueF::*; + + // Small helper enum + enum Ret<'a> { + ValueF(ValueF), + Value(Value), + // For applications that can return a function, it's important to keep the remaining + // arguments to apply them to the resulting function. + ValueWithRemainingArgs(&'a [Value], Value), + DoneAsIs, + } - // Return Ok((unconsumed args, returned value)), or Err(()) if value could not be produced. 
let ret = match (b, args.as_slice()) { - (OptionalNone, [t, r..]) => { - Ok((r, EmptyOptionalLit(TypeThunk::from_thunk(t.clone())))) - } - (NaturalIsZero, [n, r..]) => match &*n.as_value() { - NaturalLit(n) => Ok((r, BoolLit(*n == 0))), - _ => Err(()), + (OptionalNone, [t]) => Ret::ValueF(EmptyOptionalLit(t.clone())), + (NaturalIsZero, [n]) => match &*n.as_whnf() { + NaturalLit(n) => Ret::ValueF(BoolLit(*n == 0)), + _ => Ret::DoneAsIs, }, - (NaturalEven, [n, r..]) => match &*n.as_value() { - NaturalLit(n) => Ok((r, BoolLit(*n % 2 == 0))), - _ => Err(()), + (NaturalEven, [n]) => match &*n.as_whnf() { + NaturalLit(n) => Ret::ValueF(BoolLit(*n % 2 == 0)), + _ => Ret::DoneAsIs, }, - (NaturalOdd, [n, r..]) => match &*n.as_value() { - NaturalLit(n) => Ok((r, BoolLit(*n % 2 != 0))), - _ => Err(()), + (NaturalOdd, [n]) => match &*n.as_whnf() { + NaturalLit(n) => Ret::ValueF(BoolLit(*n % 2 != 0)), + _ => Ret::DoneAsIs, }, - (NaturalToInteger, [n, r..]) => match &*n.as_value() { - NaturalLit(n) => Ok((r, IntegerLit(*n as isize))), - _ => Err(()), + (NaturalToInteger, [n]) => match &*n.as_whnf() { + NaturalLit(n) => Ret::ValueF(IntegerLit(*n as isize)), + _ => Ret::DoneAsIs, }, - (NaturalShow, [n, r..]) => match &*n.as_value() { - NaturalLit(n) => Ok(( - r, - TextLit(vec![InterpolatedTextContents::Text(n.to_string())]), - )), - _ => Err(()), + (NaturalShow, [n]) => match &*n.as_whnf() { + NaturalLit(n) => { + Ret::ValueF(TextLit(vec![InterpolatedTextContents::Text( + n.to_string(), + )])) + } + _ => Ret::DoneAsIs, }, - (NaturalSubtract, [a, b, r..]) => { - match (&*a.as_value(), &*b.as_value()) { - (NaturalLit(a), NaturalLit(b)) => { - Ok((r, NaturalLit(if b > a { b - a } else { 0 }))) - } - (NaturalLit(0), b) => Ok((r, b.clone())), - (_, NaturalLit(0)) => Ok((r, NaturalLit(0))), - _ if a == b => Ok((r, NaturalLit(0))), - _ => Err(()), + (NaturalSubtract, [a, b]) => match (&*a.as_whnf(), &*b.as_whnf()) { + (NaturalLit(a), NaturalLit(b)) => { + Ret::ValueF(NaturalLit(if b > a { b - a } else { 0 })) } - } - (IntegerShow, [n, r..]) => match &*n.as_value() { + (NaturalLit(0), _) => Ret::Value(b.clone()), + (_, NaturalLit(0)) => Ret::ValueF(NaturalLit(0)), + _ if a == b => Ret::ValueF(NaturalLit(0)), + _ => Ret::DoneAsIs, + }, + (IntegerShow, [n]) => match &*n.as_whnf() { IntegerLit(n) => { let s = if *n < 0 { n.to_string() } else { format!("+{}", n) }; - Ok((r, TextLit(vec![InterpolatedTextContents::Text(s)]))) + Ret::ValueF(TextLit(vec![InterpolatedTextContents::Text(s)])) } - _ => Err(()), + _ => Ret::DoneAsIs, }, - (IntegerToDouble, [n, r..]) => match &*n.as_value() { - IntegerLit(n) => Ok((r, DoubleLit(NaiveDouble::from(*n as f64)))), - _ => Err(()), + (IntegerToDouble, [n]) => match &*n.as_whnf() { + IntegerLit(n) => { + Ret::ValueF(DoubleLit(NaiveDouble::from(*n as f64))) + } + _ => Ret::DoneAsIs, }, - (DoubleShow, [n, r..]) => match &*n.as_value() { - DoubleLit(n) => Ok(( - r, - TextLit(vec![InterpolatedTextContents::Text(n.to_string())]), - )), - _ => Err(()), + (DoubleShow, [n]) => match &*n.as_whnf() { + DoubleLit(n) => { + Ret::ValueF(TextLit(vec![InterpolatedTextContents::Text( + n.to_string(), + )])) + } + _ => Ret::DoneAsIs, }, - (TextShow, [v, r..]) => match &*v.as_value() { + (TextShow, [v]) => match &*v.as_whnf() { TextLit(elts) => { match elts.as_slice() { // Empty string literal. 
[] => { // Printing InterpolatedText takes care of all the escaping - let txt: InterpolatedText<X> = + let txt: InterpolatedText<Normalized> = std::iter::empty().collect(); let s = txt.to_string(); - Ok(( - r, - TextLit(vec![InterpolatedTextContents::Text(s)]), - )) + Ret::ValueF(TextLit(vec![ + InterpolatedTextContents::Text(s), + ])) } // If there are no interpolations (invariants ensure that when there are no // interpolations, there is a single Text item) in the literal. [InterpolatedTextContents::Text(s)] => { // Printing InterpolatedText takes care of all the escaping - let txt: InterpolatedText<X> = std::iter::once( - InterpolatedTextContents::Text(s.clone()), - ) - .collect(); + let txt: InterpolatedText<Normalized> = + std::iter::once(InterpolatedTextContents::Text( + s.clone(), + )) + .collect(); let s = txt.to_string(); - Ok(( - r, - TextLit(vec![InterpolatedTextContents::Text(s)]), - )) + Ret::ValueF(TextLit(vec![ + InterpolatedTextContents::Text(s), + ])) } - _ => Err(()), + _ => Ret::DoneAsIs, } } - _ => Err(()), + _ => Ret::DoneAsIs, }, - (ListLength, [_, l, r..]) => match &*l.as_value() { - EmptyListLit(_) => Ok((r, NaturalLit(0))), - NEListLit(xs) => Ok((r, NaturalLit(xs.len()))), - _ => Err(()), + (ListLength, [_, l]) => match &*l.as_whnf() { + EmptyListLit(_) => Ret::ValueF(NaturalLit(0)), + NEListLit(xs) => Ret::ValueF(NaturalLit(xs.len())), + _ => Ret::DoneAsIs, }, - (ListHead, [_, l, r..]) => match &*l.as_value() { - EmptyListLit(n) => Ok((r, EmptyOptionalLit(n.clone()))), + (ListHead, [_, l]) => match &*l.as_whnf() { + EmptyListLit(n) => Ret::ValueF(EmptyOptionalLit(n.clone())), NEListLit(xs) => { - Ok((r, NEOptionalLit(xs.iter().next().unwrap().clone()))) + Ret::ValueF(NEOptionalLit(xs.iter().next().unwrap().clone())) } - _ => Err(()), + _ => Ret::DoneAsIs, }, - (ListLast, [_, l, r..]) => match &*l.as_value() { - EmptyListLit(n) => Ok((r, EmptyOptionalLit(n.clone()))), - NEListLit(xs) => { - Ok((r, NEOptionalLit(xs.iter().rev().next().unwrap().clone()))) - } - _ => Err(()), + (ListLast, [_, l]) => match &*l.as_whnf() { + EmptyListLit(n) => Ret::ValueF(EmptyOptionalLit(n.clone())), + NEListLit(xs) => Ret::ValueF(NEOptionalLit( + xs.iter().rev().next().unwrap().clone(), + )), + _ => Ret::DoneAsIs, }, - (ListReverse, [_, l, r..]) => match &*l.as_value() { - EmptyListLit(n) => Ok((r, EmptyListLit(n.clone()))), + (ListReverse, [_, l]) => match &*l.as_whnf() { + EmptyListLit(n) => Ret::ValueF(EmptyListLit(n.clone())), NEListLit(xs) => { - Ok((r, NEListLit(xs.iter().rev().cloned().collect()))) + Ret::ValueF(NEListLit(xs.iter().rev().cloned().collect())) } - _ => Err(()), + _ => Ret::DoneAsIs, }, - (ListIndexed, [_, l, r..]) => match &*l.as_value() { - EmptyListLit(t) => { - let mut kts = HashMap::new(); - kts.insert( - "index".into(), - TypeThunk::from_value(Value::from_builtin(Natural)), - ); - kts.insert("value".into(), t.clone()); - Ok((r, EmptyListLit(TypeThunk::from_value(RecordType(kts))))) - } - NEListLit(xs) => { - let xs = xs - .iter() - .enumerate() - .map(|(i, e)| { - let i = NaturalLit(i); - let mut kvs = HashMap::new(); - kvs.insert("index".into(), Thunk::from_value(i)); - kvs.insert("value".into(), e.clone()); - Thunk::from_value(RecordLit(kvs)) - }) - .collect(); - Ok((r, NEListLit(xs))) + (ListIndexed, [_, l]) => { + let l_whnf = l.as_whnf(); + match &*l_whnf { + EmptyListLit(_) | NEListLit(_) => { + // Extract the type of the list elements + let t = match &*l_whnf { + EmptyListLit(t) => t.clone(), + NEListLit(xs) => xs[0].get_type_not_sort(), + _ => 
unreachable!(), + }; + + // Construct the returned record type: { index: Natural, value: t } + let mut kts = HashMap::new(); + kts.insert("index".into(), Value::from_builtin(Natural)); + kts.insert("value".into(), t.clone()); + let t = Value::from_valuef_and_type( + RecordType(kts), + Value::from_const(Type), + ); + + // Construct the new list, with added indices + let list = match &*l_whnf { + EmptyListLit(_) => EmptyListLit(t), + NEListLit(xs) => NEListLit( + xs.iter() + .enumerate() + .map(|(i, e)| { + let mut kvs = HashMap::new(); + kvs.insert( + "index".into(), + Value::from_valuef_and_type( + NaturalLit(i), + Value::from_builtin( + Builtin::Natural, + ), + ), + ); + kvs.insert("value".into(), e.clone()); + Value::from_valuef_and_type( + RecordLit(kvs), + t.clone(), + ) + }) + .collect(), + ), + _ => unreachable!(), + }; + Ret::ValueF(list) + } + _ => Ret::DoneAsIs, } - _ => Err(()), - }, - (ListBuild, [t, f, r..]) => match &*f.as_value() { + } + (ListBuild, [t, f]) => match &*f.as_whnf() { // fold/build fusion - Value::AppliedBuiltin(ListFold, args) => { + ValueF::AppliedBuiltin(ListFold, args) => { if args.len() >= 2 { - Ok((r, args[1].to_value())) + Ret::Value(args[1].clone()) } else { // Do we really need to handle this case ? unimplemented!() } } - _ => Ok(( - r, - f.app_val(Value::from_builtin(List).app_thunk(t.clone())) - .app_val(ListConsClosure( - TypeThunk::from_thunk(t.clone()), - None, - )) - .app_val(EmptyListLit(TypeThunk::from_thunk(t.clone()))), - )), + _ => { + let list_t = Value::from_builtin(List).app(t.clone()); + Ret::Value( + f.app(list_t.clone()) + .app({ + // Move `t` under new variables + let t1 = t.under_binder(Label::from("x")); + let t2 = t1.under_binder(Label::from("xs")); + make_closure!( + λ(x : #t) -> + λ(xs : List #t1) -> + [ var(x, 1, #t2) ] # var(xs, 0, List #t2) + ) + }) + .app( + EmptyListLit(t.clone()) + .into_value_with_type(list_t), + ), + ) + } }, - (ListFold, [_, l, _, cons, nil, r..]) => match &*l.as_value() { - EmptyListLit(_) => Ok((r, nil.to_value())), + (ListFold, [_, l, _, cons, nil, r..]) => match &*l.as_whnf() { + EmptyListLit(_) => Ret::ValueWithRemainingArgs(r, nil.clone()), NEListLit(xs) => { let mut v = nil.clone(); - for x in xs.iter().rev() { - v = cons - .clone() - .app_thunk(x.clone()) - .app_thunk(v) - .into_thunk(); + for x in xs.iter().cloned().rev() { + v = cons.app(x).app(v); } - Ok((r, v.to_value())) + Ret::ValueWithRemainingArgs(r, v) } - _ => Err(()), + _ => Ret::DoneAsIs, }, - (OptionalBuild, [t, f, r..]) => match &*f.as_value() { + (OptionalBuild, [t, f]) => match &*f.as_whnf() { // fold/build fusion - Value::AppliedBuiltin(OptionalFold, args) => { + ValueF::AppliedBuiltin(OptionalFold, args) => { if args.len() >= 2 { - Ok((r, args[1].to_value())) + Ret::Value(args[1].clone()) } else { // Do we really need to handle this case ? 
unimplemented!() } } - _ => Ok(( - r, - f.app_val(Value::from_builtin(Optional).app_thunk(t.clone())) - .app_val(OptionalSomeClosure(TypeThunk::from_thunk( - t.clone(), - ))) - .app_val(EmptyOptionalLit(TypeThunk::from_thunk( - t.clone(), - ))), - )), + _ => { + let optional_t = Value::from_builtin(Optional).app(t.clone()); + Ret::Value( + f.app(optional_t.clone()) + .app({ + let t1 = t.under_binder(Label::from("x")); + make_closure!(λ(x: #t) -> Some(var(x, 0, #t1))) + }) + .app( + EmptyOptionalLit(t.clone()) + .into_value_with_type(optional_t), + ), + ) + } }, - (OptionalFold, [_, v, _, just, nothing, r..]) => match &*v.as_value() { - EmptyOptionalLit(_) => Ok((r, nothing.to_value())), - NEOptionalLit(x) => Ok((r, just.app_thunk(x.clone()))), - _ => Err(()), + (OptionalFold, [_, v, _, just, nothing, r..]) => match &*v.as_whnf() { + EmptyOptionalLit(_) => { + Ret::ValueWithRemainingArgs(r, nothing.clone()) + } + NEOptionalLit(x) => { + Ret::ValueWithRemainingArgs(r, just.app(x.clone())) + } + _ => Ret::DoneAsIs, }, - (NaturalBuild, [f, r..]) => match &*f.as_value() { + (NaturalBuild, [f]) => match &*f.as_whnf() { // fold/build fusion - Value::AppliedBuiltin(NaturalFold, args) => { + ValueF::AppliedBuiltin(NaturalFold, args) => { if !args.is_empty() { - Ok((r, args[0].to_value())) + Ret::Value(args[0].clone()) } else { // Do we really need to handle this case ? unimplemented!() } } - _ => Ok(( - r, - f.app_val(Value::from_builtin(Natural)) - .app_val(NaturalSuccClosure) - .app_val(NaturalLit(0)), - )), + _ => Ret::Value( + f.app(Value::from_builtin(Natural)) + .app(make_closure!( + λ(x : Natural) -> 1 + var(x, 0, Natural) + )) + .app( + NaturalLit(0) + .into_value_with_type(Value::from_builtin(Natural)), + ), + ), }, - (NaturalFold, [n, t, succ, zero, r..]) => match &*n.as_value() { - NaturalLit(0) => Ok((r, zero.to_value())), + (NaturalFold, [n, t, succ, zero, r..]) => match &*n.as_whnf() { + NaturalLit(0) => Ret::ValueWithRemainingArgs(r, zero.clone()), NaturalLit(n) => { let fold = Value::from_builtin(NaturalFold) - .app(NaturalLit(n - 1)) - .app_thunk(t.clone()) - .app_thunk(succ.clone()) - .app_thunk(zero.clone()); - Ok((r, succ.app_val(fold))) + .app( + NaturalLit(n - 1) + .into_value_with_type(Value::from_builtin(Natural)), + ) + .app(t.clone()) + .app(succ.clone()) + .app(zero.clone()); + Ret::ValueWithRemainingArgs(r, succ.app(fold)) } - _ => Err(()), + _ => Ret::DoneAsIs, }, - _ => Err(()), + _ => Ret::DoneAsIs, }; match ret { - Ok((unconsumed_args, mut v)) => { + Ret::ValueF(v) => v, + Ret::Value(v) => v.to_whnf_check_type(ty), + Ret::ValueWithRemainingArgs(unconsumed_args, mut v) => { let n_consumed_args = args.len() - unconsumed_args.len(); for x in args.into_iter().skip(n_consumed_args) { - v = v.app_thunk(x); + v = v.app(x); } - v + v.to_whnf_check_type(ty) } - Err(()) => AppliedBuiltin(b, args), + Ret::DoneAsIs => AppliedBuiltin(b, args), } } -pub fn apply_any(f: Thunk, a: Thunk) -> Value { - let fallback = |f: Thunk, a: Thunk| Value::PartialExpr(ExprF::App(f, a)); - - let f_borrow = f.as_value(); +pub(crate) fn apply_any(f: Value, a: Value, ty: &Value) -> ValueF { + let f_borrow = f.as_whnf(); match &*f_borrow { - Value::Lam(x, _, e) => { - let val = Typed::from_thunk_untyped(a); - e.subst_shift(&x.into(), &val).to_value() + ValueF::Lam(x, _, e) => { + e.subst_shift(&x.into(), &a).to_whnf_check_type(ty) } - Value::AppliedBuiltin(b, args) => { + ValueF::AppliedBuiltin(b, args) => { use std::iter::once; let args = args.iter().cloned().chain(once(a.clone())).collect(); - 
apply_builtin(*b, args) - } - Value::OptionalSomeClosure(_) => Value::NEOptionalLit(a), - Value::ListConsClosure(t, None) => { - Value::ListConsClosure(t.clone(), Some(a)) + apply_builtin(*b, args, ty) } - Value::ListConsClosure(_, Some(x)) => { - let a_borrow = a.as_value(); - match &*a_borrow { - Value::EmptyListLit(_) => Value::NEListLit(vec![x.clone()]), - Value::NEListLit(xs) => { - use std::iter::once; - let xs = - once(x.clone()).chain(xs.iter().cloned()).collect(); - Value::NEListLit(xs) - } - _ => { - drop(f_borrow); - drop(a_borrow); - fallback(f, a) - } - } - } - Value::NaturalSuccClosure => { - let a_borrow = a.as_value(); - match &*a_borrow { - Value::NaturalLit(n) => Value::NaturalLit(n + 1), - _ => { - drop(f_borrow); - drop(a_borrow); - fallback(f, a) - } - } - } - Value::UnionConstructor(l, kts) => { - Value::UnionLit(l.clone(), a, kts.clone()) + ValueF::UnionConstructor(l, kts) => { + ValueF::UnionLit(l.clone(), a, kts.clone()) } _ => { drop(f_borrow); - fallback(f, a) + ValueF::PartialExpr(ExprF::App(f, a)) } } } -pub fn squash_textlit( - elts: impl Iterator<Item = InterpolatedTextContents<Thunk>>, -) -> Vec<InterpolatedTextContents<Thunk>> { +pub(crate) fn squash_textlit( + elts: impl Iterator<Item = InterpolatedTextContents<Value>>, +) -> Vec<InterpolatedTextContents<Value>> { use std::mem::replace; use InterpolatedTextContents::{Expr, Text}; fn inner( - elts: impl Iterator<Item = InterpolatedTextContents<Thunk>>, + elts: impl Iterator<Item = InterpolatedTextContents<Value>>, crnt_str: &mut String, - ret: &mut Vec<InterpolatedTextContents<Thunk>>, + ret: &mut Vec<InterpolatedTextContents<Value>>, ) { for contents in elts { match contents { Text(s) => crnt_str.push_str(&s), Expr(e) => { - let e_borrow = e.as_value(); + let e_borrow = e.as_whnf(); match &*e_borrow { - Value::TextLit(elts2) => { + ValueF::TextLit(elts2) => { inner(elts2.iter().cloned(), crnt_str, ret) } _ => { @@ -367,134 +450,20 @@ pub fn squash_textlit( ret } -/// Reduces the imput expression to a Value. Evaluates as little as possible. -pub fn normalize_whnf(ctx: NormalizationContext, expr: InputSubExpr) -> Value { - match expr.as_ref() { - ExprF::Embed(e) => return e.to_value(), - ExprF::Var(v) => return ctx.lookup(v), - _ => {} - } - - // Thunk subexpressions - let expr: ExprF<Thunk, X> = - expr.as_ref().map_ref_with_special_handling_of_binders( - |e| Thunk::new(ctx.clone(), e.clone()), - |x, e| Thunk::new(ctx.skip(x), e.clone()), - |_| unreachable!(), - ); - - normalize_one_layer(expr) -} - -// Small helper enum to avoid repetition -enum Ret<'a> { - Value(Value), - Thunk(Thunk), - ThunkRef(&'a Thunk), - Expr(ExprF<Thunk, X>), -} - -/// Performs an intersection of two HashMaps. -/// -/// # Arguments -/// -/// * `f` - Will compute the final value from the intersecting -/// key and the values from both maps. -/// -/// # Description -/// -/// If the key is present in both maps then the final value for -/// that key is computed via the `f` function. -/// -/// The final map will contain the shared keys from the -/// two input maps with the final computed value from `f`. 
-pub(crate) fn intersection_with_key<K, T, U, V>( - mut f: impl FnMut(&K, &T, &U) -> V, - map1: &HashMap<K, T>, - map2: &HashMap<K, U>, -) -> HashMap<K, V> -where - K: std::hash::Hash + Eq + Clone, -{ - let mut kvs = HashMap::new(); - - for (k, t) in map1 { - // Only insert in the final map if the key exists in both - if let Some(u) = map2.get(k) { - kvs.insert(k.clone(), f(k, t, u)); - } - } - - kvs -} - -/// Performs an outer join of two HashMaps. -/// -/// # Arguments -/// -/// * `ft` - Will convert the values of the first map -/// into the target value. -/// -/// * `fu` - Will convert the values of the second map -/// into the target value. -/// -/// * `fktu` - Will convert the key and values from both maps -/// into the target type. -/// -/// # Description -/// -/// If the key is present in both maps then the final value for -/// that key is computed via the `fktu` function. Otherwise, the -/// final value will be calculated by either the `ft` or `fu` value -/// depending on which map the key is present in. -/// -/// The final map will contain all keys from the two input maps with -/// also values computed as per above. -pub(crate) fn outer_join<K, T, U, V>( - mut ft: impl FnMut(&T) -> V, - mut fu: impl FnMut(&U) -> V, - mut fktu: impl FnMut(&K, &T, &U) -> V, - map1: &HashMap<K, T>, - map2: &HashMap<K, U>, -) -> HashMap<K, V> -where - K: std::hash::Hash + Eq + Clone, -{ - let mut kvs = HashMap::new(); - - for (k1, t) in map1 { - let v = if let Some(u) = map2.get(k1) { - // The key exists in both maps - // so use all values for computation - fktu(k1, t, u) - } else { - // Key only exists in map1 - ft(t) - }; - kvs.insert(k1.clone(), v); - } - - for (k1, u) in map2 { - // Insert if key was missing in map1 - kvs.entry(k1.clone()).or_insert(fu(u)); - } - - kvs -} - -pub(crate) fn merge_maps<K, V>( +pub(crate) fn merge_maps<K, V, F, Err>( map1: &HashMap<K, V>, map2: &HashMap<K, V>, - mut f: impl FnMut(&V, &V) -> V, -) -> HashMap<K, V> + mut f: F, +) -> Result<HashMap<K, V>, Err> where + F: FnMut(&K, &V, &V) -> Result<V, Err>, K: std::hash::Hash + Eq + Clone, V: Clone, { let mut kvs = HashMap::new(); for (x, v2) in map2 { let newv = if let Some(v1) = map1.get(x) { - f(v1, v2) + f(x, v1, v2)? 
} else { v2.clone() }; @@ -504,85 +473,98 @@ where // Insert only if key not already present kvs.entry(x.clone()).or_insert_with(|| v1.clone()); } - kvs + Ok(kvs) } -fn apply_binop<'a>(o: BinOp, x: &'a Thunk, y: &'a Thunk) -> Option<Ret<'a>> { +// Small helper enum to avoid repetition +enum Ret<'a> { + ValueF(ValueF), + Value(Value), + ValueRef(&'a Value), + Expr(ExprF<Value, Normalized>), +} + +fn apply_binop<'a>( + o: BinOp, + x: &'a Value, + y: &'a Value, + ty: &Value, +) -> Option<Ret<'a>> { use BinOp::{ BoolAnd, BoolEQ, BoolNE, BoolOr, Equivalence, ListAppend, NaturalPlus, NaturalTimes, RecursiveRecordMerge, RecursiveRecordTypeMerge, RightBiasedRecordMerge, TextAppend, }; - use Value::{ + use ValueF::{ BoolLit, EmptyListLit, NEListLit, NaturalLit, RecordLit, RecordType, TextLit, }; - let x_borrow = x.as_value(); - let y_borrow = y.as_value(); + let x_borrow = x.as_whnf(); + let y_borrow = y.as_whnf(); Some(match (o, &*x_borrow, &*y_borrow) { - (BoolAnd, BoolLit(true), _) => Ret::ThunkRef(y), - (BoolAnd, _, BoolLit(true)) => Ret::ThunkRef(x), - (BoolAnd, BoolLit(false), _) => Ret::Value(BoolLit(false)), - (BoolAnd, _, BoolLit(false)) => Ret::Value(BoolLit(false)), - (BoolAnd, _, _) if x == y => Ret::ThunkRef(x), - (BoolOr, BoolLit(true), _) => Ret::Value(BoolLit(true)), - (BoolOr, _, BoolLit(true)) => Ret::Value(BoolLit(true)), - (BoolOr, BoolLit(false), _) => Ret::ThunkRef(y), - (BoolOr, _, BoolLit(false)) => Ret::ThunkRef(x), - (BoolOr, _, _) if x == y => Ret::ThunkRef(x), - (BoolEQ, BoolLit(true), _) => Ret::ThunkRef(y), - (BoolEQ, _, BoolLit(true)) => Ret::ThunkRef(x), - (BoolEQ, BoolLit(x), BoolLit(y)) => Ret::Value(BoolLit(x == y)), - (BoolEQ, _, _) if x == y => Ret::Value(BoolLit(true)), - (BoolNE, BoolLit(false), _) => Ret::ThunkRef(y), - (BoolNE, _, BoolLit(false)) => Ret::ThunkRef(x), - (BoolNE, BoolLit(x), BoolLit(y)) => Ret::Value(BoolLit(x != y)), - (BoolNE, _, _) if x == y => Ret::Value(BoolLit(false)), - - (NaturalPlus, NaturalLit(0), _) => Ret::ThunkRef(y), - (NaturalPlus, _, NaturalLit(0)) => Ret::ThunkRef(x), + (BoolAnd, BoolLit(true), _) => Ret::ValueRef(y), + (BoolAnd, _, BoolLit(true)) => Ret::ValueRef(x), + (BoolAnd, BoolLit(false), _) => Ret::ValueF(BoolLit(false)), + (BoolAnd, _, BoolLit(false)) => Ret::ValueF(BoolLit(false)), + (BoolAnd, _, _) if x == y => Ret::ValueRef(x), + (BoolOr, BoolLit(true), _) => Ret::ValueF(BoolLit(true)), + (BoolOr, _, BoolLit(true)) => Ret::ValueF(BoolLit(true)), + (BoolOr, BoolLit(false), _) => Ret::ValueRef(y), + (BoolOr, _, BoolLit(false)) => Ret::ValueRef(x), + (BoolOr, _, _) if x == y => Ret::ValueRef(x), + (BoolEQ, BoolLit(true), _) => Ret::ValueRef(y), + (BoolEQ, _, BoolLit(true)) => Ret::ValueRef(x), + (BoolEQ, BoolLit(x), BoolLit(y)) => Ret::ValueF(BoolLit(x == y)), + (BoolEQ, _, _) if x == y => Ret::ValueF(BoolLit(true)), + (BoolNE, BoolLit(false), _) => Ret::ValueRef(y), + (BoolNE, _, BoolLit(false)) => Ret::ValueRef(x), + (BoolNE, BoolLit(x), BoolLit(y)) => Ret::ValueF(BoolLit(x != y)), + (BoolNE, _, _) if x == y => Ret::ValueF(BoolLit(false)), + + (NaturalPlus, NaturalLit(0), _) => Ret::ValueRef(y), + (NaturalPlus, _, NaturalLit(0)) => Ret::ValueRef(x), (NaturalPlus, NaturalLit(x), NaturalLit(y)) => { - Ret::Value(NaturalLit(x + y)) + Ret::ValueF(NaturalLit(x + y)) } - (NaturalTimes, NaturalLit(0), _) => Ret::Value(NaturalLit(0)), - (NaturalTimes, _, NaturalLit(0)) => Ret::Value(NaturalLit(0)), - (NaturalTimes, NaturalLit(1), _) => Ret::ThunkRef(y), - (NaturalTimes, _, NaturalLit(1)) => Ret::ThunkRef(x), + 
(NaturalTimes, NaturalLit(0), _) => Ret::ValueF(NaturalLit(0)), + (NaturalTimes, _, NaturalLit(0)) => Ret::ValueF(NaturalLit(0)), + (NaturalTimes, NaturalLit(1), _) => Ret::ValueRef(y), + (NaturalTimes, _, NaturalLit(1)) => Ret::ValueRef(x), (NaturalTimes, NaturalLit(x), NaturalLit(y)) => { - Ret::Value(NaturalLit(x * y)) + Ret::ValueF(NaturalLit(x * y)) } - (ListAppend, EmptyListLit(_), _) => Ret::ThunkRef(y), - (ListAppend, _, EmptyListLit(_)) => Ret::ThunkRef(x), - (ListAppend, NEListLit(xs), NEListLit(ys)) => { - Ret::Value(NEListLit(xs.iter().chain(ys.iter()).cloned().collect())) - } + (ListAppend, EmptyListLit(_), _) => Ret::ValueRef(y), + (ListAppend, _, EmptyListLit(_)) => Ret::ValueRef(x), + (ListAppend, NEListLit(xs), NEListLit(ys)) => Ret::ValueF(NEListLit( + xs.iter().chain(ys.iter()).cloned().collect(), + )), - (TextAppend, TextLit(x), _) if x.is_empty() => Ret::ThunkRef(y), - (TextAppend, _, TextLit(y)) if y.is_empty() => Ret::ThunkRef(x), - (TextAppend, TextLit(x), TextLit(y)) => Ret::Value(TextLit( + (TextAppend, TextLit(x), _) if x.is_empty() => Ret::ValueRef(y), + (TextAppend, _, TextLit(y)) if y.is_empty() => Ret::ValueRef(x), + (TextAppend, TextLit(x), TextLit(y)) => Ret::ValueF(TextLit( squash_textlit(x.iter().chain(y.iter()).cloned()), )), (TextAppend, TextLit(x), _) => { use std::iter::once; let y = InterpolatedTextContents::Expr(y.clone()); - Ret::Value(TextLit(squash_textlit( + Ret::ValueF(TextLit(squash_textlit( x.iter().cloned().chain(once(y)), ))) } (TextAppend, _, TextLit(y)) => { use std::iter::once; let x = InterpolatedTextContents::Expr(x.clone()); - Ret::Value(TextLit(squash_textlit( + Ret::ValueF(TextLit(squash_textlit( once(x).chain(y.iter().cloned()), ))) } (RightBiasedRecordMerge, _, RecordLit(kvs)) if kvs.is_empty() => { - Ret::ThunkRef(x) + Ret::ValueRef(x) } (RightBiasedRecordMerge, RecordLit(kvs), _) if kvs.is_empty() => { - Ret::ThunkRef(y) + Ret::ValueRef(y) } (RightBiasedRecordMerge, RecordLit(kvs1), RecordLit(kvs2)) => { let mut kvs = kvs2.clone(); @@ -590,92 +572,83 @@ fn apply_binop<'a>(o: BinOp, x: &'a Thunk, y: &'a Thunk) -> Option<Ret<'a>> { // Insert only if key not already present kvs.entry(x.clone()).or_insert_with(|| v.clone()); } - Ret::Value(RecordLit(kvs)) + Ret::ValueF(RecordLit(kvs)) } (RecursiveRecordMerge, _, RecordLit(kvs)) if kvs.is_empty() => { - Ret::ThunkRef(x) + Ret::ValueRef(x) } (RecursiveRecordMerge, RecordLit(kvs), _) if kvs.is_empty() => { - Ret::ThunkRef(y) + Ret::ValueRef(y) } (RecursiveRecordMerge, RecordLit(kvs1), RecordLit(kvs2)) => { - let kvs = merge_maps(kvs1, kvs2, |v1, v2| { - Thunk::from_partial_expr(ExprF::BinOp( - RecursiveRecordMerge, - v1.clone(), - v2.clone(), + let ty_borrow = ty.as_whnf(); + let kts = match &*ty_borrow { + RecordType(kts) => kts, + _ => unreachable!("Internal type error"), + }; + let kvs = merge_maps::<_, _, _, !>(kvs1, kvs2, |k, v1, v2| { + Ok(Value::from_valuef_and_type( + ValueF::PartialExpr(ExprF::BinOp( + RecursiveRecordMerge, + v1.clone(), + v2.clone(), + )), + kts.get(k).expect("Internal type error").clone(), )) - }); - Ret::Value(RecordLit(kvs)) + })?; + Ret::ValueF(RecordLit(kvs)) } - (RecursiveRecordTypeMerge, _, RecordType(kvs)) if kvs.is_empty() => { - Ret::ThunkRef(x) - } - (RecursiveRecordTypeMerge, RecordType(kvs), _) if kvs.is_empty() => { - Ret::ThunkRef(y) + (RecursiveRecordTypeMerge, _, _) | (Equivalence, _, _) => { + unreachable!("This case should have been handled in typecheck") } - (RecursiveRecordTypeMerge, RecordType(kvs1), RecordType(kvs2)) => { - let kvs = 
merge_maps(kvs1, kvs2, |v1, v2| { - TypeThunk::from_thunk(Thunk::from_partial_expr(ExprF::BinOp( - RecursiveRecordTypeMerge, - v1.to_thunk(), - v2.to_thunk(), - ))) - }); - Ret::Value(RecordType(kvs)) - } - - (Equivalence, _, _) => Ret::Value(Value::Equivalence( - TypeThunk::from_thunk(x.clone()), - TypeThunk::from_thunk(y.clone()), - )), _ => return None, }) } -pub fn normalize_one_layer(expr: ExprF<Thunk, X>) -> Value { - use Value::{ - AppliedBuiltin, BoolLit, DoubleLit, EmptyListLit, IntegerLit, Lam, - NEListLit, NEOptionalLit, NaturalLit, Pi, RecordLit, RecordType, - TextLit, UnionConstructor, UnionLit, UnionType, +pub(crate) fn normalize_one_layer( + expr: ExprF<Value, Normalized>, + ty: &Value, +) -> ValueF { + use ValueF::{ + AppliedBuiltin, BoolLit, DoubleLit, EmptyListLit, IntegerLit, + NEListLit, NEOptionalLit, NaturalLit, RecordLit, TextLit, + UnionConstructor, UnionLit, UnionType, }; let ret = match expr { - ExprF::Embed(_) => unreachable!(), - ExprF::Var(_) => unreachable!(), - ExprF::Annot(x, _) => Ret::Thunk(x), - ExprF::Assert(_) => Ret::Expr(expr), - ExprF::Lam(x, t, e) => { - Ret::Value(Lam(x.into(), TypeThunk::from_thunk(t), e)) + ExprF::Import(_) => unreachable!( + "There should remain no imports in a resolved expression" + ), + // Those cases have already been completely handled in the typechecking phase (using + // `RetWhole`), so they won't appear here. + ExprF::Lam(_, _, _) + | ExprF::Pi(_, _, _) + | ExprF::Let(_, _, _, _) + | ExprF::Embed(_) + | ExprF::Const(_) + | ExprF::Builtin(_) + | ExprF::Var(_) + | ExprF::Annot(_, _) + | ExprF::RecordType(_) + | ExprF::UnionType(_) => { + unreachable!("This case should have been handled in typecheck") } - ExprF::Pi(x, t, e) => Ret::Value(Pi( - x.into(), - TypeThunk::from_thunk(t), - TypeThunk::from_thunk(e), - )), - ExprF::Let(x, _, v, b) => { - let v = Typed::from_thunk_untyped(v); - Ret::Thunk(b.subst_shift(&x.into(), &v)) - } - ExprF::App(v, a) => Ret::Value(v.app_thunk(a)), - ExprF::Builtin(b) => Ret::Value(Value::from_builtin(b)), - ExprF::Const(c) => Ret::Value(Value::Const(c)), - ExprF::BoolLit(b) => Ret::Value(BoolLit(b)), - ExprF::NaturalLit(n) => Ret::Value(NaturalLit(n)), - ExprF::IntegerLit(n) => Ret::Value(IntegerLit(n)), - ExprF::DoubleLit(n) => Ret::Value(DoubleLit(n)), - ExprF::SomeLit(e) => Ret::Value(NEOptionalLit(e)), + ExprF::Assert(_) => Ret::Expr(expr), + ExprF::App(v, a) => Ret::Value(v.app(a)), + ExprF::BoolLit(b) => Ret::ValueF(BoolLit(b)), + ExprF::NaturalLit(n) => Ret::ValueF(NaturalLit(n)), + ExprF::IntegerLit(n) => Ret::ValueF(IntegerLit(n)), + ExprF::DoubleLit(n) => Ret::ValueF(DoubleLit(n)), + ExprF::SomeLit(e) => Ret::ValueF(NEOptionalLit(e)), ExprF::EmptyListLit(ref t) => { // Check if the type is of the form `List x` - let t_borrow = t.as_value(); + let t_borrow = t.as_whnf(); match &*t_borrow { AppliedBuiltin(Builtin::List, args) if args.len() == 1 => { - Ret::Value(EmptyListLit(TypeThunk::from_thunk( - args[0].clone(), - ))) + Ret::ValueF(EmptyListLit(args[0].clone())) } _ => { drop(t_borrow); @@ -684,43 +657,33 @@ pub fn normalize_one_layer(expr: ExprF<Thunk, X>) -> Value { } } ExprF::NEListLit(elts) => { - Ret::Value(NEListLit(elts.into_iter().collect())) + Ret::ValueF(NEListLit(elts.into_iter().collect())) } ExprF::RecordLit(kvs) => { - Ret::Value(RecordLit(kvs.into_iter().collect())) + Ret::ValueF(RecordLit(kvs.into_iter().collect())) } - ExprF::RecordType(kts) => Ret::Value(RecordType( - kts.into_iter() - .map(|(k, t)| (k, TypeThunk::from_thunk(t))) - .collect(), - )), - 
ExprF::UnionType(kts) => Ret::Value(UnionType( - kts.into_iter() - .map(|(k, t)| (k, t.map(|t| TypeThunk::from_thunk(t)))) - .collect(), - )), ExprF::TextLit(elts) => { use InterpolatedTextContents::Expr; let elts: Vec<_> = squash_textlit(elts.into_iter()); // Simplify bare interpolation if let [Expr(th)] = elts.as_slice() { - Ret::Thunk(th.clone()) + Ret::Value(th.clone()) } else { - Ret::Value(TextLit(elts)) + Ret::ValueF(TextLit(elts)) } } ExprF::BoolIf(ref b, ref e1, ref e2) => { - let b_borrow = b.as_value(); + let b_borrow = b.as_whnf(); match &*b_borrow { - BoolLit(true) => Ret::ThunkRef(e1), - BoolLit(false) => Ret::ThunkRef(e2), + BoolLit(true) => Ret::ValueRef(e1), + BoolLit(false) => Ret::ValueRef(e2), _ => { - let e1_borrow = e1.as_value(); - let e2_borrow = e2.as_value(); + let e1_borrow = e1.as_whnf(); + let e2_borrow = e2.as_whnf(); match (&*e1_borrow, &*e2_borrow) { // Simplify `if b then True else False` - (BoolLit(true), BoolLit(false)) => Ret::ThunkRef(b), - _ if e1 == e2 => Ret::ThunkRef(e1), + (BoolLit(true), BoolLit(false)) => Ret::ValueRef(b), + _ if e1 == e2 => Ret::ValueRef(e1), _ => { drop(b_borrow); drop(e1_borrow); @@ -731,18 +694,18 @@ pub fn normalize_one_layer(expr: ExprF<Thunk, X>) -> Value { } } } - ExprF::BinOp(o, ref x, ref y) => match apply_binop(o, x, y) { + ExprF::BinOp(o, ref x, ref y) => match apply_binop(o, x, y, ty) { Some(ret) => ret, None => Ret::Expr(expr), }, - ExprF::Projection(_, ls) if ls.is_empty() => { - Ret::Value(RecordLit(HashMap::new())) + ExprF::Projection(_, ref ls) if ls.is_empty() => { + Ret::ValueF(RecordLit(HashMap::new())) } ExprF::Projection(ref v, ref ls) => { - let v_borrow = v.as_value(); + let v_borrow = v.as_whnf(); match &*v_borrow { - RecordLit(kvs) => Ret::Value(RecordLit( + RecordLit(kvs) => Ret::ValueF(RecordLit( ls.iter() .filter_map(|l| { kvs.get(l).map(|x| (l.clone(), x.clone())) @@ -756,17 +719,17 @@ pub fn normalize_one_layer(expr: ExprF<Thunk, X>) -> Value { } } ExprF::Field(ref v, ref l) => { - let v_borrow = v.as_value(); + let v_borrow = v.as_whnf(); match &*v_borrow { RecordLit(kvs) => match kvs.get(l) { - Some(r) => Ret::Thunk(r.clone()), + Some(r) => Ret::Value(r.clone()), None => { drop(v_borrow); Ret::Expr(expr) } }, UnionType(kts) => { - Ret::Value(UnionConstructor(l.clone(), kts.clone())) + Ret::ValueF(UnionConstructor(l.clone(), kts.clone())) } _ => { drop(v_borrow); @@ -776,11 +739,11 @@ pub fn normalize_one_layer(expr: ExprF<Thunk, X>) -> Value { } ExprF::Merge(ref handlers, ref variant, _) => { - let handlers_borrow = handlers.as_value(); - let variant_borrow = variant.as_value(); + let handlers_borrow = handlers.as_whnf(); + let variant_borrow = variant.as_whnf(); match (&*handlers_borrow, &*variant_borrow) { (RecordLit(kvs), UnionConstructor(l, _)) => match kvs.get(l) { - Some(h) => Ret::Thunk(h.clone()), + Some(h) => Ret::Value(h.clone()), None => { drop(handlers_borrow); drop(variant_borrow); @@ -788,7 +751,7 @@ pub fn normalize_one_layer(expr: ExprF<Thunk, X>) -> Value { } }, (RecordLit(kvs), UnionLit(l, v, _)) => match kvs.get(l) { - Some(h) => Ret::Value(h.app_thunk(v.clone())), + Some(h) => Ret::Value(h.app(v.clone())), None => { drop(handlers_borrow); drop(variant_borrow); @@ -802,12 +765,26 @@ pub fn normalize_one_layer(expr: ExprF<Thunk, X>) -> Value { } } } + ExprF::ToMap(_, _) => unimplemented!("toMap"), }; match ret { - Ret::Value(v) => v, - Ret::Thunk(th) => th.to_value(), - Ret::ThunkRef(th) => th.to_value(), - Ret::Expr(expr) => Value::PartialExpr(expr), + Ret::ValueF(v) => v, + 
Ret::Value(v) => v.to_whnf_check_type(ty), + Ret::ValueRef(v) => v.to_whnf_check_type(ty), + Ret::Expr(expr) => ValueF::PartialExpr(expr), + } +} + +/// Normalize a ValueF into WHNF +pub(crate) fn normalize_whnf(v: ValueF, ty: &Value) -> ValueF { + match v { + ValueF::AppliedBuiltin(b, args) => apply_builtin(b, args, ty), + ValueF::PartialExpr(e) => normalize_one_layer(e, ty), + ValueF::TextLit(elts) => { + ValueF::TextLit(squash_textlit(elts.into_iter())) + } + // All other cases are already in WHNF + v => v, } } diff --git a/dhall/src/phase/parse.rs b/dhall/src/phase/parse.rs index 734f6e1..540ceea 100644 --- a/dhall/src/phase/parse.rs +++ b/dhall/src/phase/parse.rs @@ -8,7 +8,7 @@ use crate::error::Error; use crate::phase::resolve::ImportRoot; use crate::phase::Parsed; -pub fn parse_file(f: &Path) -> Result<Parsed, Error> { +pub(crate) fn parse_file(f: &Path) -> Result<Parsed, Error> { let mut buffer = String::new(); File::open(f)?.read_to_string(&mut buffer)?; let expr = parse_expr(&*buffer)?; @@ -16,22 +16,22 @@ pub fn parse_file(f: &Path) -> Result<Parsed, Error> { Ok(Parsed(expr, root)) } -pub fn parse_str(s: &str) -> Result<Parsed, Error> { +pub(crate) fn parse_str(s: &str) -> Result<Parsed, Error> { let expr = parse_expr(s)?; let root = ImportRoot::LocalDir(std::env::current_dir()?); Ok(Parsed(expr, root)) } -pub fn parse_binary(data: &[u8]) -> Result<Parsed, Error> { +pub(crate) fn parse_binary(data: &[u8]) -> Result<Parsed, Error> { let expr = crate::phase::binary::decode(data)?; let root = ImportRoot::LocalDir(std::env::current_dir()?); - Ok(Parsed(expr.note_absurd(), root)) + Ok(Parsed(expr, root)) } -pub fn parse_binary_file(f: &Path) -> Result<Parsed, Error> { +pub(crate) fn parse_binary_file(f: &Path) -> Result<Parsed, Error> { let mut buffer = Vec::new(); File::open(f)?.read_to_end(&mut buffer)?; let expr = crate::phase::binary::decode(&buffer)?; let root = ImportRoot::LocalDir(f.parent().unwrap().to_owned()); - Ok(Parsed(expr.note_absurd(), root)) + Ok(Parsed(expr, root)) } diff --git a/dhall/src/phase/resolve.rs b/dhall/src/phase/resolve.rs index ac264e6..5bde68a 100644 --- a/dhall/src/phase/resolve.rs +++ b/dhall/src/phase/resolve.rs @@ -1,20 +1,20 @@ use std::collections::HashMap; use std::path::{Path, PathBuf}; -use dhall_syntax::Import; - use crate::error::{Error, ImportError}; -use crate::phase::{Normalized, Parsed, Resolved}; +use crate::phase::{Normalized, NormalizedExpr, Parsed, Resolved}; + +type Import = dhall_syntax::Import<NormalizedExpr>; /// A root from which to resolve relative imports. 
#[derive(Debug, Clone, PartialEq, Eq)] -pub enum ImportRoot { +pub(crate) enum ImportRoot { LocalDir(PathBuf), } type ImportCache = HashMap<Import, Normalized>; -pub type ImportStack = Vec<Import>; +pub(crate) type ImportStack = Vec<Import>; fn resolve_import( @@ -29,9 +29,9 @@ fn resolve_import( let cwd = match root { LocalDir(cwd) => cwd, }; - match &import.location_hashed.location { + match &import.location { Local(prefix, path) => { - let path_buf: PathBuf = path.clone().into_iter().collect(); + let path_buf: PathBuf = path.file_path.iter().collect(); let path_buf = match prefix { // TODO: fail gracefully Parent => cwd.parent().unwrap().join(path_buf), @@ -91,11 +91,11 @@ fn do_resolve_expr( Ok(Resolved(expr)) } -pub fn resolve(e: Parsed) -> Result<Resolved, ImportError> { +pub(crate) fn resolve(e: Parsed) -> Result<Resolved, ImportError> { do_resolve_expr(e, &mut HashMap::new(), &Vec::new()) } -pub fn skip_resolve_expr( +pub(crate) fn skip_resolve_expr( Parsed(expr, _root): Parsed, ) -> Result<Resolved, ImportError> { let resolve = |import: &Import| -> Result<Normalized, ImportError> { @@ -106,18 +106,26 @@ pub fn skip_resolve_expr( } #[cfg(test)] +#[rustfmt::skip] mod spec_tests { - #![rustfmt::skip] - macro_rules! import_success { ($name:ident, $path:expr) => { - make_spec_test!(Import, Success, $name, &("../dhall-lang/tests/import/success/".to_owned() + $path)); + make_spec_test!( + ImportSuccess( + &("../dhall-lang/tests/import/success/".to_owned() + $path + "A.dhall"), + &("../dhall-lang/tests/import/success/".to_owned() + $path + "B.dhall") + ), + $name + ); }; } // macro_rules! import_failure { // ($name:ident, $path:expr) => { - // make_spec_test!(Import, Failure, $name, &("../dhall-lang/tests/import/failure/".to_owned() + $path)); + // make_spec_test!( + // ImportFailure(&("../dhall-lang/tests/import/failure/".to_owned() + $path + ".dhall")), + // $name + // ); // }; // } diff --git a/dhall/src/phase/typecheck.rs b/dhall/src/phase/typecheck.rs index 299997a..9013c1f 100644 --- a/dhall/src/phase/typecheck.rs +++ b/dhall/src/phase/typecheck.rs @@ -1,34 +1,29 @@ +use std::cmp::max; use std::collections::HashMap; use dhall_syntax::{ - rc, Builtin, Const, Expr, ExprF, InterpolatedTextContents, Label, Span, - SubExpr, X, + rc, Builtin, Const, Expr, ExprF, InterpolatedTextContents, Label, }; -use crate::core::context::{NormalizationContext, TypecheckContext}; -use crate::core::thunk::{Thunk, TypeThunk}; +use crate::core::context::TypecheckContext; use crate::core::value::Value; +use crate::core::valuef::ValueF; use crate::core::var::{Shift, Subst}; use crate::error::{TypeError, TypeMessage}; -use crate::phase::{Normalized, Resolved, Type, Typed}; +use crate::phase::Normalized; fn tck_pi_type( ctx: &TypecheckContext, x: Label, - tx: Type, - te: Type, -) -> Result<Typed, TypeError> { + tx: Value, + te: Value, +) -> Result<Value, TypeError> { use crate::error::TypeMessage::*; let ctx2 = ctx.insert_type(&x, tx.clone()); let ka = match tx.get_type()?.as_const() { Some(k) => k, - _ => { - return Err(TypeError::new( - ctx, - InvalidInputType(tx.to_normalized()), - )) - } + _ => return Err(TypeError::new(ctx, InvalidInputType(tx))), }; let kb = match te.get_type()?.as_const() { @@ -36,64 +31,58 @@ fn tck_pi_type( _ => { return Err(TypeError::new( &ctx2, - InvalidOutputType(te.get_type()?.to_normalized()), + InvalidOutputType(te.get_type()?), )) } }; let k = function_check(ka, kb); - Ok(Typed::from_thunk_and_type( - Value::Pi(x.into(), TypeThunk::from_type(tx), TypeThunk::from_type(te)) - 
.into_thunk(), - Type::from_const(k), + Ok(Value::from_valuef_and_type( + ValueF::Pi(x.into(), tx, te), + Value::from_const(k), )) } fn tck_record_type( ctx: &TypecheckContext, - kts: impl IntoIterator<Item = Result<(Label, Type), TypeError>>, -) -> Result<Typed, TypeError> { + kts: impl IntoIterator<Item = Result<(Label, Value), TypeError>>, +) -> Result<Value, TypeError> { use crate::error::TypeMessage::*; use std::collections::hash_map::Entry; let mut new_kts = HashMap::new(); - // Check that all types are the same const - let mut k = None; + // An empty record type has type Type + let mut k = Const::Type; for e in kts { let (x, t) = e?; - match (k, t.get_type()?.as_const()) { - (None, Some(k2)) => k = Some(k2), - (Some(k1), Some(k2)) if k1 == k2 => {} - _ => { - return Err(TypeError::new( - ctx, - InvalidFieldType(x.clone(), t.clone()), - )) - } + // Construct the union of the contained `Const`s + match t.get_type()?.as_const() { + Some(k2) => k = max(k, k2), + None => return Err(TypeError::new(ctx, InvalidFieldType(x, t))), } - let entry = new_kts.entry(x.clone()); + // Check for duplicated entries + let entry = new_kts.entry(x); match &entry { Entry::Occupied(_) => { return Err(TypeError::new(ctx, RecordTypeDuplicateField)) } - Entry::Vacant(_) => { - entry.or_insert_with(|| TypeThunk::from_type(t.clone())) - } + Entry::Vacant(_) => entry.or_insert_with(|| t), }; } - // An empty record type has type Type - let k = k.unwrap_or(dhall_syntax::Const::Type); - Ok(Typed::from_thunk_and_type( - Value::RecordType(new_kts).into_thunk(), - Type::from_const(k), + Ok(Value::from_valuef_and_type( + ValueF::RecordType(new_kts), + Value::from_const(k), )) } -fn tck_union_type( +fn tck_union_type<Iter>( ctx: &TypecheckContext, - kts: impl IntoIterator<Item = Result<(Label, Option<Type>), TypeError>>, -) -> Result<Typed, TypeError> { + kts: Iter, +) -> Result<Value, TypeError> +where + Iter: IntoIterator<Item = Result<(Label, Option<Value>), TypeError>>, +{ use crate::error::TypeMessage::*; use std::collections::hash_map::Entry; let mut new_kts = HashMap::new(); @@ -108,49 +97,48 @@ fn tck_union_type( _ => { return Err(TypeError::new( ctx, - InvalidFieldType(x.clone(), t.clone()), + InvalidFieldType(x, t.clone()), )) } } } - let entry = new_kts.entry(x.clone()); + let entry = new_kts.entry(x); match &entry { Entry::Occupied(_) => { return Err(TypeError::new(ctx, UnionTypeDuplicateField)) } - Entry::Vacant(_) => entry.or_insert_with(|| { - t.as_ref().map(|t| TypeThunk::from_type(t.clone())) - }), + Entry::Vacant(_) => entry.or_insert_with(|| t), }; } // An empty union type has type Type; // an union type with only unary variants also has type Type - let k = k.unwrap_or(dhall_syntax::Const::Type); + let k = k.unwrap_or(Const::Type); - Ok(Typed::from_thunk_and_type( - Value::UnionType(new_kts).into_thunk(), - Type::from_const(k), + Ok(Value::from_valuef_and_type( + ValueF::UnionType(new_kts), + Value::from_const(k), )) } fn function_check(a: Const, b: Const) -> Const { - use dhall_syntax::Const::Type; - use std::cmp::max; - if b == Type { - Type + if b == Const::Type { + Const::Type } else { max(a, b) } } -pub fn type_of_const(c: Const) -> Result<Type, TypeError> { +pub(crate) fn const_to_value(c: Const) -> Value { + let v = ValueF::Const(c); match c { - Const::Type => Ok(Type::from_const(Const::Kind)), - Const::Kind => Ok(Type::from_const(Const::Sort)), - Const::Sort => { - Err(TypeError::new(&TypecheckContext::new(), TypeMessage::Sort)) + Const::Type => { + Value::from_valuef_and_type(v, 
const_to_value(Const::Kind)) } + Const::Kind => { + Value::from_valuef_and_type(v, const_to_value(Const::Sort)) + } + Const::Sort => Value::const_sort(), } } @@ -210,9 +198,9 @@ macro_rules! make_type { }; } -fn type_of_builtin(b: Builtin) -> Expr<X, X> { +fn type_of_builtin<E>(b: Builtin) -> Expr<E> { use dhall_syntax::Builtin::*; - match b { + rc(match b { Bool | Natural | Integer | Double | Text => make_type!(Type), List | Optional => make_type!( Type -> Type @@ -292,28 +280,15 @@ fn type_of_builtin(b: Builtin) -> Expr<X, X> { OptionalNone => make_type!( forall (a: Type) -> Optional a ), - } -} - -/// Takes an expression that is meant to contain a Type -/// and turn it into a type, typechecking it along the way. -pub fn mktype( - ctx: &TypecheckContext, - e: SubExpr<Span, Normalized>, -) -> Result<Type, TypeError> { - Ok(type_with(ctx, e)?.to_type()) -} - -pub fn builtin_to_type(b: Builtin) -> Result<Type, TypeError> { - mktype(&TypecheckContext::new(), SubExpr::from_builtin(b)) + }) } -/// Intermediary return type -enum Ret { - /// Returns the contained value as is - RetWhole(Typed), - /// Use the contained Type as the type of the input expression - RetTypeOnly(Type), +pub(crate) fn builtin_to_value(b: Builtin) -> Value { + let ctx = TypecheckContext::new(); + Value::from_valuef_and_type( + ValueF::from_builtin(b), + type_with(&ctx, type_of_builtin(b)).unwrap(), + ) } /// Type-check an expression and return the expression alongside its type if type-checking @@ -322,29 +297,25 @@ enum Ret { /// normalized as well. fn type_with( ctx: &TypecheckContext, - e: SubExpr<Span, Normalized>, -) -> Result<Typed, TypeError> { + e: Expr<Normalized>, +) -> Result<Value, TypeError> { use dhall_syntax::ExprF::{Annot, Embed, Lam, Let, Pi, Var}; - use Ret::*; Ok(match e.as_ref() { - Lam(x, t, b) => { - let tx = mktype(ctx, t.clone())?; - let ctx2 = ctx.insert_type(x, tx.clone()); - let b = type_with(&ctx2, b.clone())?; - let v = Value::Lam( - x.clone().into(), - TypeThunk::from_type(tx.clone()), - b.to_thunk(), - ); - let tb = b.get_type()?.into_owned(); - let t = tck_pi_type(ctx, x.clone(), tx, tb)?.to_type(); - Typed::from_thunk_and_type(Thunk::from_value(v), t) + Lam(var, annot, body) => { + let annot = type_with(ctx, annot.clone())?; + let ctx2 = ctx.insert_type(var, annot.clone()); + let body = type_with(&ctx2, body.clone())?; + let body_type = body.get_type()?; + Value::from_valuef_and_type( + ValueF::Lam(var.clone().into(), annot.clone(), body), + tck_pi_type(ctx, var.clone(), annot, body_type)?, + ) } Pi(x, ta, tb) => { - let ta = mktype(ctx, ta.clone())?; + let ta = type_with(ctx, ta.clone())?; let ctx2 = ctx.insert_type(x, ta.clone()); - let tb = mktype(&ctx2, tb.clone())?; + let tb = type_with(&ctx2, tb.clone())?; return tck_pi_type(ctx, x.clone(), ta, tb); } Let(x, t, v, e) => { @@ -357,9 +328,9 @@ fn type_with( let v = type_with(ctx, v)?; return type_with(&ctx.insert_value(x, v.clone())?, e.clone()); } - Embed(p) => p.clone().into_typed(), + Embed(p) => p.clone().into_typed().into_value(), Var(var) => match ctx.lookup(&var) { - Some(typed) => typed, + Some(typed) => typed.clone(), None => { return Err(TypeError::new( ctx, @@ -367,25 +338,13 @@ fn type_with( )) } }, - _ => { + e => { // Typecheck recursively all subexpressions - let expr = - e.as_ref().traverse_ref_with_special_handling_of_binders( - |e| type_with(ctx, e.clone()), - |_, _| unreachable!(), - |_| unreachable!(), - )?; - let ret = type_last_layer(ctx, &expr)?; - match ret { - RetTypeOnly(typ) => { - let expr = 
expr.map_ref_simple(|typed| typed.to_thunk()); - Typed::from_thunk_and_type( - Thunk::from_partial_expr(expr), - typ, - ) - } - RetWhole(tt) => tt, - } + let expr = e.traverse_ref_with_special_handling_of_binders( + |e| type_with(ctx, e.clone()), + |_, _| unreachable!(), + )?; + type_last_layer(ctx, expr)? } }) } @@ -394,473 +353,305 @@ fn type_with( /// layer. fn type_last_layer( ctx: &TypecheckContext, - e: &ExprF<Typed, X>, -) -> Result<Ret, TypeError> { + e: ExprF<Value, Normalized>, +) -> Result<Value, TypeError> { use crate::error::TypeMessage::*; use dhall_syntax::BinOp::*; use dhall_syntax::Builtin::*; use dhall_syntax::Const::Type; use dhall_syntax::ExprF::*; + let mkerr = |msg: TypeMessage| Err(TypeError::new(ctx, msg)); + + /// Intermediary return type + enum Ret { + /// Returns the contained value as is + RetWhole(Value), + /// Returns the input expression `e` with the contained value as its type + RetTypeOnly(Value), + } use Ret::*; - let mkerr = |msg: TypeMessage| TypeError::new(ctx, msg); - match e { + let ret = match &e { + Import(_) => unreachable!( + "There should remain no imports in a resolved expression" + ), Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => { unreachable!() } App(f, a) => { let tf = f.get_type()?; - let (x, tx, tb) = match &tf.to_value() { - Value::Pi(x, tx, tb) => (x.clone(), tx.to_type(), tb.to_type()), - _ => return Err(mkerr(NotAFunction(f.clone()))), + let tf_borrow = tf.as_whnf(); + let (x, tx, tb) = match &*tf_borrow { + ValueF::Pi(x, tx, tb) => (x, tx, tb), + _ => return mkerr(NotAFunction(f.clone())), }; - if a.get_type()?.as_ref() != &tx { - return Err(mkerr(TypeMismatch( - f.clone(), - tx.to_normalized(), - a.clone(), - ))); + if &a.get_type()? != tx { + return mkerr(TypeMismatch(f.clone(), tx.clone(), a.clone())); } - Ok(RetTypeOnly(tb.subst_shift(&x.into(), &a))) + RetTypeOnly(tb.subst_shift(&x.into(), a)) } Annot(x, t) => { - let t = t.to_type(); - if &t != x.get_type()?.as_ref() { - return Err(mkerr(AnnotMismatch(x.clone(), t.to_normalized()))); + if &x.get_type()? != t { + return mkerr(AnnotMismatch(x.clone(), t.clone())); } - Ok(RetTypeOnly(x.get_type()?.into_owned())) + RetWhole(x.clone()) } Assert(t) => { - match t.to_value() { - Value::Equivalence(x, y) if x == y => {} - Value::Equivalence(x, y) => { - return Err(mkerr(AssertMismatch( - x.to_typed(), - y.to_typed(), - ))) + match &*t.as_whnf() { + ValueF::Equivalence(x, y) if x == y => {} + ValueF::Equivalence(x, y) => { + return mkerr(AssertMismatch(x.clone(), y.clone())) } - _ => return Err(mkerr(AssertMustTakeEquivalence)), + _ => return mkerr(AssertMustTakeEquivalence), } - Ok(RetTypeOnly(t.to_type())) + RetTypeOnly(t.clone()) } BoolIf(x, y, z) => { - if x.get_type()?.as_ref() != &builtin_to_type(Bool)? { - return Err(mkerr(InvalidPredicate(x.clone()))); + if *x.get_type()?.as_whnf() != ValueF::from_builtin(Bool) { + return mkerr(InvalidPredicate(x.clone())); } if y.get_type()?.get_type()?.as_const() != Some(Type) { - return Err(mkerr(IfBranchMustBeTerm(true, y.clone()))); + return mkerr(IfBranchMustBeTerm(true, y.clone())); } if z.get_type()?.get_type()?.as_const() != Some(Type) { - return Err(mkerr(IfBranchMustBeTerm(false, z.clone()))); + return mkerr(IfBranchMustBeTerm(false, z.clone())); } if y.get_type()? != z.get_type()? { - return Err(mkerr(IfBranchMismatch(y.clone(), z.clone()))); + return mkerr(IfBranchMismatch(y.clone(), z.clone())); } - Ok(RetTypeOnly(y.get_type()?.into_owned())) + RetTypeOnly(y.get_type()?) 
} EmptyListLit(t) => { - let t = t.to_type(); - match &t.to_value() { - Value::AppliedBuiltin(dhall_syntax::Builtin::List, args) + match &*t.as_whnf() { + ValueF::AppliedBuiltin(dhall_syntax::Builtin::List, args) if args.len() == 1 => {} - _ => { - return Err(TypeError::new( - ctx, - InvalidListType(t.to_normalized()), - )) - } + _ => return mkerr(InvalidListType(t.clone())), } - Ok(RetTypeOnly(t)) + RetTypeOnly(t.clone()) } NEListLit(xs) => { let mut iter = xs.iter().enumerate(); let (_, x) = iter.next().unwrap(); for (i, y) in iter { if x.get_type()? != y.get_type()? { - return Err(mkerr(InvalidListElement( + return mkerr(InvalidListElement( i, - x.get_type()?.to_normalized(), + x.get_type()?, y.clone(), - ))); + )); } } let t = x.get_type()?; if t.get_type()?.as_const() != Some(Type) { - return Err(TypeError::new( - ctx, - InvalidListType(t.to_normalized()), - )); + return mkerr(InvalidListType(t)); } - Ok(RetTypeOnly( - Typed::from_thunk_and_type( - Value::from_builtin(dhall_syntax::Builtin::List) - .app(t.to_value()) - .into_thunk(), - Typed::from_const(Type), - ) - .to_type(), - )) + RetTypeOnly(Value::from_builtin(dhall_syntax::Builtin::List).app(t)) } SomeLit(x) => { - let t = x.get_type()?.into_owned(); + let t = x.get_type()?; if t.get_type()?.as_const() != Some(Type) { - return Err(TypeError::new( - ctx, - InvalidOptionalType(t.to_normalized()), - )); + return mkerr(InvalidOptionalType(t)); } - Ok(RetTypeOnly( - Typed::from_thunk_and_type( - Value::from_builtin(dhall_syntax::Builtin::Optional) - .app(t.to_value()) - .into_thunk(), - Typed::from_const(Type).into_type(), - ) - .to_type(), - )) + RetTypeOnly( + Value::from_builtin(dhall_syntax::Builtin::Optional).app(t), + ) } - RecordType(kts) => Ok(RetWhole(tck_record_type( + RecordType(kts) => RetWhole(tck_record_type( ctx, - kts.iter().map(|(x, t)| Ok((x.clone(), t.to_type()))), - )?)), - UnionType(kts) => Ok(RetWhole(tck_union_type( + kts.iter().map(|(x, t)| Ok((x.clone(), t.clone()))), + )?), + UnionType(kts) => RetWhole(tck_union_type( ctx, - kts.iter() - .map(|(x, t)| Ok((x.clone(), t.as_ref().map(|t| t.to_type())))), - )?)), - RecordLit(kvs) => Ok(RetTypeOnly( - tck_record_type( - ctx, - kvs.iter() - .map(|(x, v)| Ok((x.clone(), v.get_type()?.into_owned()))), - )? - .into_type(), - )), + kts.iter().map(|(x, t)| Ok((x.clone(), t.clone()))), + )?), + RecordLit(kvs) => RetTypeOnly(tck_record_type( + ctx, + kvs.iter().map(|(x, v)| Ok((x.clone(), v.get_type()?))), + )?), Field(r, x) => { - match &r.get_type()?.to_value() { - Value::RecordType(kts) => match kts.get(&x) { + match &*r.get_type()?.as_whnf() { + ValueF::RecordType(kts) => match kts.get(&x) { Some(tth) => { - Ok(RetTypeOnly(tth.to_type())) + RetTypeOnly(tth.clone()) }, - None => Err(mkerr(MissingRecordField(x.clone(), - r.clone()))), + None => return mkerr(MissingRecordField(x.clone(), + r.clone())), }, // TODO: branch here only when r.get_type() is a Const _ => { - let r = r.to_type(); - match &r.to_value() { - Value::UnionType(kts) => match kts.get(&x) { + match &*r.as_whnf() { + ValueF::UnionType(kts) => match kts.get(&x) { // Constructor has type T -> < x: T, ... > Some(Some(t)) => { - // TODO: avoid capture - Ok(RetTypeOnly( + RetTypeOnly( tck_pi_type( ctx, "_".into(), - t.to_type(), - r.clone(), - )?.to_type() - )) + t.clone(), + r.under_binder(Label::from("_")), + )? 
+ ) }, Some(None) => { - Ok(RetTypeOnly(r.clone())) + RetTypeOnly(r.clone()) }, None => { - Err(mkerr(MissingUnionField( + return mkerr(MissingUnionField( x.clone(), - r.to_normalized(), - ))) + r.clone(), + )) }, }, _ => { - Err(mkerr(NotARecord( + return mkerr(NotARecord( x.clone(), - r.to_normalized() - ))) + r.clone() + )) }, } } - // _ => Err(mkerr(NotARecord( + // _ => mkerr(NotARecord( // x, - // r.to_type()?.to_normalized(), - // ))), + // r?, + // )), } } - Const(c) => Ok(RetWhole(Typed::from_const(*c))), - Builtin(b) => { - Ok(RetTypeOnly(mktype(ctx, rc(type_of_builtin(*b)).absurd())?)) - } - BoolLit(_) => Ok(RetTypeOnly(builtin_to_type(Bool)?)), - NaturalLit(_) => Ok(RetTypeOnly(builtin_to_type(Natural)?)), - IntegerLit(_) => Ok(RetTypeOnly(builtin_to_type(Integer)?)), - DoubleLit(_) => Ok(RetTypeOnly(builtin_to_type(Double)?)), + Const(c) => RetWhole(const_to_value(*c)), + Builtin(b) => RetWhole(builtin_to_value(*b)), + BoolLit(_) => RetTypeOnly(builtin_to_value(Bool)), + NaturalLit(_) => RetTypeOnly(builtin_to_value(Natural)), + IntegerLit(_) => RetTypeOnly(builtin_to_value(Integer)), + DoubleLit(_) => RetTypeOnly(builtin_to_value(Double)), TextLit(interpolated) => { - let text_type = builtin_to_type(Text)?; + let text_type = builtin_to_value(Text); for contents in interpolated.iter() { use InterpolatedTextContents::Expr; if let Expr(x) = contents { - if x.get_type()?.as_ref() != &text_type { - return Err(mkerr(InvalidTextInterpolation(x.clone()))); + if x.get_type()? != text_type { + return mkerr(InvalidTextInterpolation(x.clone())); } } } - Ok(RetTypeOnly(text_type)) + RetTypeOnly(text_type) } BinOp(RightBiasedRecordMerge, l, r) => { use crate::phase::normalize::merge_maps; let l_type = l.get_type()?; - let l_kind = l_type.get_type()?; let r_type = r.get_type()?; - let r_kind = r_type.get_type()?; - - // Check the equality of kinds. - // This is to disallow expression such as: - // "{ x = Text } // { y = 1 }" - if l_kind != r_kind { - return Err(mkerr(RecordMismatch(l.clone(), r.clone()))); - } // Extract the LHS record type - let kts_x = match l_type.to_value() { - Value::RecordType(kts) => kts, - _ => return Err(mkerr(MustCombineRecord(l.clone()))), + let l_type_borrow = l_type.as_whnf(); + let kts_x = match &*l_type_borrow { + ValueF::RecordType(kts) => kts, + _ => return mkerr(MustCombineRecord(l.clone())), }; // Extract the RHS record type - let kts_y = match r_type.to_value() { - Value::RecordType(kts) => kts, - _ => return Err(mkerr(MustCombineRecord(r.clone()))), + let r_type_borrow = r_type.as_whnf(); + let kts_y = match &*r_type_borrow { + ValueF::RecordType(kts) => kts, + _ => return mkerr(MustCombineRecord(r.clone())), }; // Union the two records, prefering // the values found in the RHS. - let kts = merge_maps(&kts_x, &kts_y, |_, r_t| r_t.clone()); + let kts = merge_maps::<_, _, _, !>(kts_x, kts_y, |_, _, r_t| { + Ok(r_t.clone()) + })?; // Construct the final record type from the union - Ok(RetTypeOnly( - tck_record_type( - ctx, - kts.iter().map(|(x, v)| Ok((x.clone(), v.to_type()))), - )? - .into_type(), - )) - } - BinOp(RecursiveRecordMerge, l, r) => { - // A recursive function to dig down into - // records of records when merging. - fn combine_record_types( - ctx: &TypecheckContext, - kts_l: HashMap<Label, TypeThunk>, - kts_r: HashMap<Label, TypeThunk>, - ) -> Result<Typed, TypeError> { - use crate::phase::normalize::outer_join; - - // If the Label exists for both records and Type for the values - // are records themselves, then we hit the recursive case. 
- // Otherwise we have a field collision. - let combine = |k: &Label, - inner_l: &TypeThunk, - inner_r: &TypeThunk| - -> Result<Typed, TypeError> { - match (inner_l.to_value(), inner_r.to_value()) { - ( - Value::RecordType(inner_l_kvs), - Value::RecordType(inner_r_kvs), - ) => { - combine_record_types(ctx, inner_l_kvs, inner_r_kvs) - } - (_, _) => { - Err(TypeError::new(ctx, FieldCollision(k.clone()))) - } - } - }; - - let kts: HashMap<Label, Result<Typed, TypeError>> = outer_join( - |l| Ok(l.to_type()), - |r| Ok(r.to_type()), - |k: &Label, l: &TypeThunk, r: &TypeThunk| combine(k, l, r), - &kts_l, - &kts_r, - ); - - Ok(tck_record_type( - ctx, - kts.into_iter().map(|(x, v)| v.map(|r| (x.clone(), r))), - )? - .into_type()) - }; - - let l_type = l.get_type()?; - let l_kind = l_type.get_type()?; - let r_type = r.get_type()?; - let r_kind = r_type.get_type()?; - - // Check the equality of kinds. - // This is to disallow expression such as: - // "{ x = Text } // { y = 1 }" - if l_kind != r_kind { - return Err(mkerr(RecordMismatch(l.clone(), r.clone()))); - } - - // Extract the LHS record type - let kts_x = match l_type.to_value() { - Value::RecordType(kts) => kts, - _ => return Err(mkerr(MustCombineRecord(l.clone()))), - }; - - // Extract the RHS record type - let kts_y = match r_type.to_value() { - Value::RecordType(kts) => kts, - _ => return Err(mkerr(MustCombineRecord(r.clone()))), - }; - - combine_record_types(ctx, kts_x, kts_y).map(|r| RetTypeOnly(r)) + RetTypeOnly(tck_record_type( + ctx, + kts.into_iter().map(|(x, v)| Ok((x.clone(), v))), + )?) } + BinOp(RecursiveRecordMerge, l, r) => RetTypeOnly(type_last_layer( + ctx, + ExprF::BinOp( + RecursiveRecordTypeMerge, + l.get_type()?, + r.get_type()?, + ), + )?), BinOp(RecursiveRecordTypeMerge, l, r) => { - // A recursive function to dig down into - // records of records when merging. - fn combine_record_types( - ctx: &TypecheckContext, - kts_l: HashMap<Label, TypeThunk>, - kts_r: HashMap<Label, TypeThunk>, - ) -> Result<Typed, TypeError> { - use crate::phase::normalize::intersection_with_key; - - // If the Label exists for both records and Type for the values - // are records themselves, then we hit the recursive case. - // Otherwise we have a field collision. - let combine = |k: &Label, - kts_l_inner: &TypeThunk, - kts_r_inner: &TypeThunk| - -> Result<Typed, TypeError> { - match (kts_l_inner.to_value(), kts_r_inner.to_value()) { - ( - Value::RecordType(kvs_l_inner), - Value::RecordType(kvs_r_inner), - ) => { - combine_record_types(ctx, kvs_l_inner, kvs_r_inner) - } - (_, _) => { - Err(TypeError::new(ctx, FieldCollision(k.clone()))) - } - } - }; - - let kts = intersection_with_key( - |k: &Label, l: &TypeThunk, r: &TypeThunk| combine(k, l, r), - &kts_l, - &kts_r, - ); - - Ok(tck_record_type( - ctx, - kts.into_iter().map(|(x, v)| v.map(|r| (x.clone(), r))), - )? 
- .into_type()) - }; - - // Extract the Const of the LHS - let k_l = match l.get_type()?.to_value() { - Value::Const(k) => k, - _ => { - return Err(mkerr(RecordTypeMergeRequiresRecordType( - l.clone(), - ))) - } - }; - - // Extract the Const of the RHS - let k_r = match r.get_type()?.to_value() { - Value::Const(k) => k, - _ => { - return Err(mkerr(RecordTypeMergeRequiresRecordType( - r.clone(), - ))) - } - }; - - // Const values must match for the Records - let k = if k_l == k_r { - k_l - } else { - return Err(mkerr(RecordTypeMismatch( - Typed::from_const(k_l), - Typed::from_const(k_r), - l.clone(), - r.clone(), - ))); - }; + use crate::phase::normalize::merge_maps; // Extract the LHS record type - let kts_x = match l.to_value() { - Value::RecordType(kts) => kts, + let borrow_l = l.as_whnf(); + let kts_x = match &*borrow_l { + ValueF::RecordType(kts) => kts, _ => { - return Err(mkerr(RecordTypeMergeRequiresRecordType( - l.clone(), - ))) + return mkerr(RecordTypeMergeRequiresRecordType(l.clone())) } }; // Extract the RHS record type - let kts_y = match r.to_value() { - Value::RecordType(kts) => kts, + let borrow_r = r.as_whnf(); + let kts_y = match &*borrow_r { + ValueF::RecordType(kts) => kts, _ => { - return Err(mkerr(RecordTypeMergeRequiresRecordType( - r.clone(), - ))) + return mkerr(RecordTypeMergeRequiresRecordType(r.clone())) } }; // Ensure that the records combine without a type error - // and if not output the final Const value. - combine_record_types(ctx, kts_x, kts_y) - .and(Ok(RetTypeOnly(Typed::from_const(k)))) + let kts = merge_maps( + kts_x, + kts_y, + // If the Label exists for both records, then we hit the recursive case. + |_, l: &Value, r: &Value| { + type_last_layer( + ctx, + ExprF::BinOp( + RecursiveRecordTypeMerge, + l.clone(), + r.clone(), + ), + ) + }, + )?; + + RetWhole(tck_record_type(ctx, kts.into_iter().map(Ok))?) } BinOp(o @ ListAppend, l, r) => { - match l.get_type()?.to_value() { - Value::AppliedBuiltin(List, _) => {} - _ => return Err(mkerr(BinOpTypeMismatch(*o, l.clone()))), + match &*l.get_type()?.as_whnf() { + ValueF::AppliedBuiltin(List, _) => {} + _ => return mkerr(BinOpTypeMismatch(*o, l.clone())), } if l.get_type()? != r.get_type()? { - return Err(mkerr(BinOpTypeMismatch(*o, r.clone()))); + return mkerr(BinOpTypeMismatch(*o, r.clone())); } - Ok(RetTypeOnly(l.get_type()?.into_owned())) + RetTypeOnly(l.get_type()?) } BinOp(Equivalence, l, r) => { if l.get_type()?.get_type()?.as_const() != Some(Type) { - return Err(mkerr(EquivalenceArgumentMustBeTerm( - true, - l.clone(), - ))); + return mkerr(EquivalenceArgumentMustBeTerm(true, l.clone())); } if r.get_type()?.get_type()?.as_const() != Some(Type) { - return Err(mkerr(EquivalenceArgumentMustBeTerm( - false, - r.clone(), - ))); + return mkerr(EquivalenceArgumentMustBeTerm(false, r.clone())); } if l.get_type()? != r.get_type()? 
{ - return Err(mkerr(EquivalenceTypeMismatch( - r.clone(), - l.clone(), - ))); + return mkerr(EquivalenceTypeMismatch(r.clone(), l.clone())); } - Ok(RetTypeOnly(Typed::from_const(Type).into_type())) + RetWhole(Value::from_valuef_and_type( + ValueF::Equivalence(l.clone(), r.clone()), + Value::from_const(Type), + )) } BinOp(o, l, r) => { - let t = builtin_to_type(match o { + let t = builtin_to_value(match o { BoolAnd => Bool, BoolOr => Bool, BoolEQ => Bool, @@ -874,143 +665,140 @@ fn type_last_layer( RecursiveRecordTypeMerge => unreachable!(), ImportAlt => unreachable!("There should remain no import alternatives in a resolved expression"), Equivalence => unreachable!(), - })?; + }); - if l.get_type()?.as_ref() != &t { - return Err(mkerr(BinOpTypeMismatch(*o, l.clone()))); + if l.get_type()? != t { + return mkerr(BinOpTypeMismatch(*o, l.clone())); } - if r.get_type()?.as_ref() != &t { - return Err(mkerr(BinOpTypeMismatch(*o, r.clone()))); + if r.get_type()? != t { + return mkerr(BinOpTypeMismatch(*o, r.clone())); } - Ok(RetTypeOnly(t)) + RetTypeOnly(t) } Merge(record, union, type_annot) => { - let handlers = match record.get_type()?.to_value() { - Value::RecordType(kts) => kts, - _ => return Err(mkerr(Merge1ArgMustBeRecord(record.clone()))), + let record_type = record.get_type()?; + let record_borrow = record_type.as_whnf(); + let handlers = match &*record_borrow { + ValueF::RecordType(kts) => kts, + _ => return mkerr(Merge1ArgMustBeRecord(record.clone())), }; - let variants = match union.get_type()?.to_value() { - Value::UnionType(kts) => kts, - _ => return Err(mkerr(Merge2ArgMustBeUnion(union.clone()))), + let union_type = union.get_type()?; + let union_borrow = union_type.as_whnf(); + let variants = match &*union_borrow { + ValueF::UnionType(kts) => kts, + _ => return mkerr(Merge2ArgMustBeUnion(union.clone())), }; let mut inferred_type = None; - for (x, handler) in handlers.iter() { - let handler_return_type = match variants.get(x) { - // Union alternative with type - Some(Some(variant_type)) => { - let variant_type = variant_type.to_type(); - let handler_type = handler.to_type(); - let (x, tx, tb) = match &handler_type.to_value() { - Value::Pi(x, tx, tb) => { - (x.clone(), tx.to_type(), tb.to_type()) + for (x, handler_type) in handlers { + let handler_return_type = + match variants.get(x) { + // Union alternative with type + Some(Some(variant_type)) => { + let handler_type_borrow = handler_type.as_whnf(); + let (x, tx, tb) = match &*handler_type_borrow { + ValueF::Pi(x, tx, tb) => (x, tx, tb), + _ => { + return mkerr(NotAFunction( + handler_type.clone(), + )) + } + }; + + if variant_type != tx { + return mkerr(TypeMismatch( + handler_type.clone(), + tx.clone(), + variant_type.clone(), + )); } - _ => return Err(mkerr(NotAFunction(handler_type))), - }; - - if &variant_type != &tx { - return Err(mkerr(TypeMismatch( - handler_type, - tx.to_normalized(), - variant_type, - ))); - } - // Extract `tb` from under the `x` binder. Fails is `x` was free in `tb`. - match tb.over_binder(x) { - Some(x) => x, - None => { - return Err(mkerr( + // Extract `tb` from under the `x` binder. Fails is `x` was free in `tb`. 
+ match tb.over_binder(x) { + Some(x) => x, + None => return mkerr( MergeHandlerReturnTypeMustNotBeDependent, - )) + ), } } - } - // Union alternative without type - Some(None) => handler.to_type(), - None => { - return Err(mkerr(MergeHandlerMissingVariant( - x.clone(), - ))) - } - }; + // Union alternative without type + Some(None) => handler_type.clone(), + None => { + return mkerr(MergeHandlerMissingVariant(x.clone())) + } + }; match &inferred_type { None => inferred_type = Some(handler_return_type), Some(t) => { if t != &handler_return_type { - return Err(mkerr(MergeHandlerTypeMismatch)); + return mkerr(MergeHandlerTypeMismatch); } } } } for x in variants.keys() { if !handlers.contains_key(x) { - return Err(mkerr(MergeVariantMissingHandler(x.clone()))); + return mkerr(MergeVariantMissingHandler(x.clone())); } } match (inferred_type, type_annot) { (Some(ref t1), Some(t2)) => { - let t2 = t2.to_type(); - if t1 != &t2 { - return Err(mkerr(MergeAnnotMismatch)); + if t1 != t2 { + return mkerr(MergeAnnotMismatch); } - Ok(RetTypeOnly(t2)) + RetTypeOnly(t2.clone()) } - (Some(t), None) => Ok(RetTypeOnly(t)), - (None, Some(t)) => Ok(RetTypeOnly(t.to_type())), - (None, None) => Err(mkerr(MergeEmptyNeedsAnnotation)), + (Some(t), None) => RetTypeOnly(t), + (None, Some(t)) => RetTypeOnly(t.clone()), + (None, None) => return mkerr(MergeEmptyNeedsAnnotation), } } + ToMap(_, _) => unimplemented!("toMap"), Projection(record, labels) => { - let trecord = record.get_type()?; - let kts = match trecord.to_value() { - Value::RecordType(kts) => kts, - _ => return Err(mkerr(ProjectionMustBeRecord)), + let record_type = record.get_type()?; + let record_borrow = record_type.as_whnf(); + let kts = match &*record_borrow { + ValueF::RecordType(kts) => kts, + _ => return mkerr(ProjectionMustBeRecord), }; let mut new_kts = HashMap::new(); for l in labels { match kts.get(l) { - None => return Err(mkerr(ProjectionMissingEntry)), + None => return mkerr(ProjectionMissingEntry), Some(t) => new_kts.insert(l.clone(), t.clone()), }; } - Ok(RetTypeOnly( - Typed::from_thunk_and_type( - Value::RecordType(new_kts).into_thunk(), - trecord.get_type()?.into_owned(), - ) - .to_type(), + RetTypeOnly(Value::from_valuef_and_type( + ValueF::RecordType(new_kts), + record_type.get_type()?, )) } - } + }; + + Ok(match ret { + RetTypeOnly(typ) => { + Value::from_valuef_and_type(ValueF::PartialExpr(e), typ) + } + RetWhole(v) => v, + }) } -/// `typeOf` is the same as `type_with` with an empty context, meaning that the +/// `type_of` is the same as `type_with` with an empty context, meaning that the /// expression must be closed (i.e. no free variables), otherwise type-checking /// will fail. -fn type_of(e: SubExpr<Span, Normalized>) -> Result<Typed, TypeError> { - let ctx = TypecheckContext::new(); - let e = type_with(&ctx, e)?; - // Ensure `e` has a type (i.e. 
`e` is not `Sort`) - e.get_type()?; - Ok(e) +pub(crate) fn typecheck(e: Expr<Normalized>) -> Result<Value, TypeError> { + type_with(&TypecheckContext::new(), e) } -pub fn typecheck(e: Resolved) -> Result<Typed, TypeError> { - type_of(e.0) -} - -pub fn typecheck_with(e: Resolved, ty: &Type) -> Result<Typed, TypeError> { - let expr: SubExpr<_, _> = e.0; - let ty: SubExpr<_, _> = ty.to_expr().absurd(); - type_of(expr.rewrap(ExprF::Annot(expr.clone(), ty))) -} -pub fn skip_typecheck(e: Resolved) -> Typed { - Typed::from_thunk_untyped(Thunk::new(NormalizationContext::new(), e.0)) +pub(crate) fn typecheck_with( + expr: Expr<Normalized>, + ty: Expr<Normalized>, +) -> Result<Value, TypeError> { + typecheck(expr.rewrap(ExprF::Annot(expr.clone(), ty))) } diff --git a/dhall/src/tests.rs b/dhall/src/tests.rs index 15bc97a..ae41038 100644 --- a/dhall/src/tests.rs +++ b/dhall/src/tests.rs @@ -1,3 +1,6 @@ +#[cfg(not(test))] +use assert_eq as assert_eq_pretty; +#[cfg(test)] use pretty_assertions::assert_eq as assert_eq_pretty; macro_rules! assert_eq_display { @@ -19,13 +22,13 @@ right: `{}`"#, #[macro_export] macro_rules! make_spec_test { - ($type:ident, $status:ident, $name:ident, $path:expr) => { + ($type:expr, $name:ident) => { #[test] #[allow(non_snake_case)] fn $name() { + use crate::tests::Test::*; use crate::tests::*; - match run_test_stringy_error($path, Feature::$type, Status::$status) - { + match run_test_stringy_error($type) { Ok(_) => {} Err(s) => panic!(s), } @@ -40,203 +43,183 @@ use std::path::PathBuf; use crate::error::{Error, Result}; use crate::phase::Parsed; -#[derive(Copy, Clone)] -pub enum Feature { - Parser, - Printer, - BinaryEncoding, - BinaryDecoding, - Import, - Normalization, - AlphaNormalization, - Typecheck, - TypeInference, -} - -#[derive(Copy, Clone)] -pub enum Status { - Success, - Failure, +#[allow(dead_code)] +#[derive(Clone)] +pub enum Test<'a> { + ParserSuccess(&'a str, &'a str), + ParserFailure(&'a str), + Printer(&'a str, &'a str), + BinaryEncoding(&'a str, &'a str), + BinaryDecodingSuccess(&'a str, &'a str), + BinaryDecodingFailure(&'a str), + ImportSuccess(&'a str, &'a str), + ImportFailure(&'a str), + TypecheckSuccess(&'a str, &'a str), + TypecheckFailure(&'a str), + TypeInferenceSuccess(&'a str, &'a str), + TypeInferenceFailure(&'a str), + Normalization(&'a str, &'a str), + AlphaNormalization(&'a str, &'a str), } -fn parse_file_str<'i>(file_path: &str) -> Result<Parsed> { +fn parse_file_str(file_path: &str) -> Result<Parsed> { Parsed::parse_file(&PathBuf::from(file_path)) } +#[allow(dead_code)] pub fn run_test_stringy_error( - base_path: &str, - feature: Feature, - status: Status, + test: Test<'_>, ) -> std::result::Result<(), String> { - let base_path: String = base_path.to_string(); - run_test(&base_path, feature, status) - .map_err(|e| e.to_string()) - .map(|_| ()) + run_test(test).map_err(|e| e.to_string()).map(|_| ()) } -pub fn run_test( - base_path: &str, - feature: Feature, - status: Status, -) -> Result<()> { - use self::Feature::*; - use self::Status::*; - let base_path = base_path.to_owned(); - match status { - Success => { - match feature { - BinaryDecoding => { - let expr_file_path = base_path.clone() + "A.dhallb"; - let expr_file_path = PathBuf::from(&expr_file_path); - let mut expr_data = Vec::new(); - { - File::open(&expr_file_path)? 
- .read_to_end(&mut expr_data)?; - } - let expr = Parsed::parse_binary(&expr_data)?; - let expected_file_path = base_path + "B.dhall"; - let expected = parse_file_str(&expected_file_path)?; - assert_eq_pretty!(expr, expected); - - return Ok(()); +pub fn run_test(test: Test<'_>) -> Result<()> { + use self::Test::*; + match test { + ParserSuccess(expr_file_path, expected_file_path) => { + let expr = parse_file_str(&expr_file_path)?; + // This exercices both parsing and binary decoding + // Compare parse/decoded + let expected = + Parsed::parse_binary_file(&PathBuf::from(expected_file_path))?; + assert_eq_pretty!(expr, expected); + } + ParserFailure(file_path) => { + let err = parse_file_str(&file_path).unwrap_err(); + match &err { + Error::Parse(_) => {} + Error::IO(e) if e.kind() == std::io::ErrorKind::InvalidData => { } - _ => {} + e => panic!("Expected parse error, got: {:?}", e), } - let expr_file_path = base_path.clone() + "A.dhall"; + } + BinaryEncoding(expr_file_path, expected_file_path) => { let expr = parse_file_str(&expr_file_path)?; - - match feature { - Parser => { - // This exercices both parsing and binary decoding - // Compare parse/decoded - let expected_file_path = base_path + "B.dhallb"; - let expected_file_path = PathBuf::from(&expected_file_path); - let mut expected_data = Vec::new(); - { - File::open(&expected_file_path)? - .read_to_end(&mut expected_data)?; - } - let expected = Parsed::parse_binary(&expected_data)?; - assert_eq_pretty!(expr, expected); - - return Ok(()); - } - Printer => { - // Round-trip pretty-printer - let expr_string = expr.to_string(); - let expected = expr; - let expr: Parsed = Parsed::parse_str(&expr_string)?; - assert_eq!(expr, expected); - - return Ok(()); - } - BinaryEncoding => { - let expected_file_path = base_path + "B.dhallb"; - let expected_file_path = PathBuf::from(&expected_file_path); - let mut expected_data = Vec::new(); - { - File::open(&expected_file_path)? - .read_to_end(&mut expected_data)?; - } - let expr_data = expr.encode()?; - - // Compare bit-by-bit - if expr_data != expected_data { - // use std::io::Write; - // File::create(&expected_file_path)?.write_all(&expr_data)?; - // Pretty-print difference - assert_eq_pretty!( - serde_cbor::de::from_slice::< - serde_cbor::value::Value, - >(&expr_data) - .unwrap(), - serde_cbor::de::from_slice::< - serde_cbor::value::Value, - >(&expected_data) - .unwrap() - ); - // If difference was not visible in the cbor::Value - assert_eq!(expr_data, expected_data); - } - - return Ok(()); - } - _ => {} + let mut expected_data = Vec::new(); + { + File::open(&PathBuf::from(&expected_file_path))? 
+ .read_to_end(&mut expected_data)?; } + let expr_data = expr.encode()?; + + // Compare bit-by-bit + if expr_data != expected_data { + // use std::io::Write; + // File::create(&expected_file_path)?.write_all(&expr_data)?; + // Pretty-print difference + assert_eq_pretty!( + serde_cbor::de::from_slice::<serde_cbor::value::Value>( + &expr_data + ) + .unwrap(), + serde_cbor::de::from_slice::<serde_cbor::value::Value>( + &expected_data + ) + .unwrap() + ); + // If difference was not visible in the cbor::Value + assert_eq!(expr_data, expected_data); + } + } + BinaryDecodingSuccess(expr_file_path, expected_file_path) => { + let expr = + Parsed::parse_binary_file(&PathBuf::from(expr_file_path))?; + let expected = parse_file_str(&expected_file_path)?; + assert_eq_pretty!(expr, expected); + } + BinaryDecodingFailure(file_path) => { + Parsed::parse_binary_file(&PathBuf::from(file_path)).unwrap_err(); + } + Printer(expr_file_path, _) => { + let expected = parse_file_str(&expr_file_path)?; + // Round-trip pretty-printer + let expr: Parsed = Parsed::parse_str(&expected.to_string())?; + assert_eq!(expr, expected); + } + ImportSuccess(expr_file_path, expected_file_path) => { + let expr = parse_file_str(&expr_file_path)? + .resolve()? + .typecheck()? + .normalize(); + let expected = parse_file_str(&expected_file_path)? + .resolve()? + .typecheck()? + .normalize(); - let expr = expr.resolve()?; - - let expected_file_path = base_path + "B.dhall"; + assert_eq_display!(expr, expected); + } + ImportFailure(file_path) => { + parse_file_str(&file_path)?.resolve().unwrap_err(); + } + TypecheckSuccess(expr_file_path, expected_file_path) => { + let expr = parse_file_str(&expr_file_path)?.resolve()?; let expected = parse_file_str(&expected_file_path)? .resolve()? - .skip_typecheck() + .typecheck()? .normalize(); - match feature { - Parser | Printer | BinaryEncoding | BinaryDecoding => { - unreachable!() - } - Import => { - let expr = expr.skip_typecheck().normalize(); - assert_eq_display!(expr, expected); - } - Typecheck => { - expr.typecheck_with(&expected.to_type())?; - } - TypeInference => { - let expr = expr.typecheck()?; - let ty = expr.get_type()?.into_owned(); - assert_eq_display!(ty.to_normalized(), expected); - } - Normalization => { - let expr = expr.skip_typecheck().normalize(); - assert_eq_display!(expr, expected); - } - AlphaNormalization => { - let expr = - expr.skip_typecheck().normalize().to_expr_alpha(); - assert_eq_display!(expr, expected.to_expr()); + expr.typecheck_with(&expected.into_typed())?.get_type()?; + } + TypecheckFailure(file_path) => { + let res = parse_file_str(&file_path)?.skip_resolve()?.typecheck(); + match res { + Err(_) => {} + // If e did typecheck, check that it doesn't have a type + Ok(e) => { + e.get_type().unwrap_err(); } } } - Failure => { - let file_path = base_path + ".dhall"; - match feature { - Parser => { - let err = parse_file_str(&file_path).unwrap_err(); - match err { - Error::Parse(_) => {} - Error::IO(e) - if e.kind() == std::io::ErrorKind::InvalidData => {} - e => panic!("Expected parse error, got: {:?}", e), - } - } - Printer | BinaryEncoding => unreachable!(), - BinaryDecoding => { - let expr_file_path = file_path + "b"; - let mut expr_data = Vec::new(); - { - File::open(&PathBuf::from(&expr_file_path))? 
- .read_to_end(&mut expr_data)?; - } - Parsed::parse_binary(&expr_data).unwrap_err(); - } - Import => { - parse_file_str(&file_path)?.resolve().unwrap_err(); - } - Normalization | AlphaNormalization => unreachable!(), - Typecheck | TypeInference => { - parse_file_str(&file_path)? - .skip_resolve()? - .typecheck() - .unwrap_err(); + TypeInferenceSuccess(expr_file_path, expected_file_path) => { + let expr = + parse_file_str(&expr_file_path)?.resolve()?.typecheck()?; + let ty = expr.get_type()?.normalize(); + let expected = parse_file_str(&expected_file_path)? + .resolve()? + .typecheck()? + .normalize(); + assert_eq_display!(ty, expected); + } + TypeInferenceFailure(file_path) => { + let res = parse_file_str(&file_path)?.skip_resolve()?.typecheck(); + match res { + Err(_) => {} + // If e did typecheck, check that it doesn't have a type + Ok(e) => { + e.get_type().unwrap_err(); } } } + Normalization(expr_file_path, expected_file_path) => { + let expr = parse_file_str(&expr_file_path)? + .resolve()? + .typecheck()? + .normalize(); + let expected = parse_file_str(&expected_file_path)? + .resolve()? + .typecheck()? + .normalize(); + + assert_eq_display!(expr, expected); + } + AlphaNormalization(expr_file_path, expected_file_path) => { + let expr = parse_file_str(&expr_file_path)? + .resolve()? + .typecheck()? + .normalize() + .to_expr_alpha(); + let expected = parse_file_str(&expected_file_path)? + .resolve()? + .typecheck()? + .normalize(); + + assert_eq_display!(expr, expected.to_expr()); + } } Ok(()) } +#[cfg(test)] mod spec { // See build.rs include!(concat!(env!("OUT_DIR"), "/spec_tests.rs")); diff --git a/dhall/tests/traits.rs b/dhall/tests/traits.rs deleted file mode 100644 index 0f75553..0000000 --- a/dhall/tests/traits.rs +++ /dev/null @@ -1,68 +0,0 @@ -#![feature(proc_macro_hygiene)] -use dhall::de::{from_str, StaticType, Type}; - -#[test] -fn test_static_type() { - fn parse(s: &str) -> Type { - from_str(s, None).unwrap() - } - - assert_eq!(bool::static_type(), parse("Bool")); - assert_eq!(String::static_type(), parse("Text")); - assert_eq!(<Option<bool>>::static_type(), parse("Optional Bool")); - assert_eq!( - <(bool, Vec<String>)>::static_type(), - parse("{ _1: Bool, _2: List Text }") - ); - - #[derive(dhall::de::StaticType)] - #[allow(dead_code)] - struct A { - field1: bool, - field2: Option<bool>, - } - assert_eq!( - <A as dhall::de::StaticType>::static_type(), - parse("{ field1: Bool, field2: Optional Bool }") - ); - - #[derive(StaticType)] - #[allow(dead_code)] - struct B<'a, T: 'a> { - field1: &'a T, - field2: Option<T>, - } - assert_eq!(<B<'static, bool>>::static_type(), A::static_type()); - - #[derive(StaticType)] - #[allow(dead_code)] - struct C<T>(T, Option<String>); - assert_eq!( - <C<bool>>::static_type(), - <(bool, Option<String>)>::static_type() - ); - - #[derive(StaticType)] - #[allow(dead_code)] - struct D(); - assert_eq!( - <C<D>>::static_type(), - parse("{ _1: {}, _2: Optional Text }") - ); - - #[derive(StaticType)] - #[allow(dead_code)] - enum E<T> { - A(T), - B(String), - }; - assert_eq!(<E<bool>>::static_type(), parse("< A: Bool | B: Text >")); - - #[derive(StaticType)] - #[allow(dead_code)] - enum F { - A, - B(bool), - }; - assert_eq!(F::static_type(), parse("< A | B: Bool >")); -} |
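Two illustrative sketches follow. First, the typechecking arms for RightBiasedRecordMerge and RecursiveRecordTypeMerge above both lean on crate::phase::normalize::merge_maps with a fallible combining closure, instantiated with the nightly never type `!` when the merge cannot fail. The snippet below is a minimal, self-contained sketch of that shape, not the crate's actual definition: the signature is assumed from the call sites in this diff, and std::convert::Infallible stands in for `!` so it compiles on stable.

// A minimal sketch of the assumed merge_maps shape: an outer join of two maps
// where keys present on both sides are combined by a fallible closure.
use std::collections::HashMap;
use std::convert::Infallible;

fn merge_maps<K, V, F, E>(
    map1: &HashMap<K, V>,
    map2: &HashMap<K, V>,
    mut combine: F,
) -> Result<HashMap<K, V>, E>
where
    K: std::hash::Hash + Eq + Clone,
    V: Clone,
    F: FnMut(&K, &V, &V) -> Result<V, E>,
{
    // Start from the left map, then fold the right map in.
    let mut out = map1.clone();
    for (k, v2) in map2 {
        let v = match map1.get(k) {
            // Key present on both sides: ask the closure how to combine.
            Some(v1) => combine(k, v1, v2)?,
            // Key only on the right: keep it as-is.
            None => v2.clone(),
        };
        out.insert(k.clone(), v);
    }
    Ok(out)
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let l: HashMap<&str, i32> = [("x", 1), ("y", 2)].iter().cloned().collect();
    let r: HashMap<&str, i32> = [("y", 20), ("z", 3)].iter().cloned().collect();
    // Right-biased union, mirroring the typing rule for `//`: the RHS value wins,
    // and the error type records that this particular merge cannot fail.
    let merged = merge_maps::<_, _, _, Infallible>(&l, &r, |_, _, r_v| Ok(*r_v))?;
    assert_eq!(merged["x"], 1);
    assert_eq!(merged["y"], 20);
    assert_eq!(merged["z"], 3);
    Ok(())
}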
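Second, the reworked test harness in dhall/src/tests.rs replaces the old (feature, status, path) arguments with a single Test value that carries its input paths. Assuming the spec_tests.rs generated by build.rs expands to calls of the following shape (the test name and file paths here are invented for illustration, and the snippet only compiles inside the crate where make_spec_test! and Test are defined):

// Hypothetical generated spec test: parses the .dhall input and compares it
// against the CBOR-decoded .dhallb expectation, per the ParserSuccess arm of run_test.
make_spec_test!(
    ParserSuccess(
        "../dhall-lang/tests/parser/success/unit/SomeTestA.dhall",
        "../dhall-lang/tests/parser/success/unit/SomeTestB.dhallb"
    ),
    spec_parser_success_unit_SomeTest
);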