From b68f2b6aead6224c14902c80fc00c27705eece6c Mon Sep 17 00:00:00 2001 From: Eduardo Julian Date: Wed, 16 Feb 2022 02:32:09 -0400 Subject: FIXED generating artifact IDs in the context of "lux in-module". --- stdlib/source/library/lux.lux | 222 +++++----- stdlib/source/library/lux/data/binary.lux | 21 +- stdlib/source/library/lux/math/number/frac.lux | 95 ++--- stdlib/source/library/lux/target/jvm/loader.lux | 38 +- .../lux/tool/compiler/language/lux/analysis.lux | 18 +- .../compiler/language/lux/analysis/coverage.lux | 420 +++++++++++++++++++ .../compiler/language/lux/analysis/evaluation.lux | 48 ++- .../compiler/language/lux/analysis/pattern.lux | 4 +- .../compiler/language/lux/phase/analysis/case.lux | 14 +- .../language/lux/phase/analysis/case/coverage.lux | 419 ------------------- stdlib/source/test/lux/data/binary.lux | 14 +- stdlib/source/test/lux/math/number/int.lux | 49 ++- .../lux/tool/compiler/language/lux/analysis.lux | 2 + .../compiler/language/lux/analysis/coverage.lux | 453 +++++++++++++++++++++ stdlib/source/unsafe/lux/data/binary.lux | 243 +++++------ 15 files changed, 1297 insertions(+), 763 deletions(-) create mode 100644 stdlib/source/library/lux/tool/compiler/language/lux/analysis/coverage.lux delete mode 100644 stdlib/source/library/lux/tool/compiler/language/lux/phase/analysis/case/coverage.lux create mode 100644 stdlib/source/test/lux/tool/compiler/language/lux/analysis/coverage.lux diff --git a/stdlib/source/library/lux.lux b/stdlib/source/library/lux.lux index 75575ebe4..69f555fee 100644 --- a/stdlib/source/library/lux.lux +++ b/stdlib/source/library/lux.lux @@ -676,7 +676,7 @@ .public) ... Base functions & macros -("lux def" in_meta +("lux def" meta#in ("lux type check" {#UnivQ {#End} {#Function {#Parameter 1} @@ -706,7 +706,7 @@ ("lux macro" ([_ tokens] ({{#Item lhs {#Item rhs {#Item body {#End}}}} - (in_meta {#Item (form$ {#Item (variant$ {#Item lhs {#Item body {#End}}}) + (meta#in {#Item (form$ {#Item (variant$ {#Item lhs {#Item body {#End}}}) {#Item rhs {#End}}}) {#End}}) @@ -719,7 +719,7 @@ ("lux macro" ([_ tokens] ({{#Item [_ {#Tuple {#Item arg args'}}] {#Item body {#End}}} - (in_meta {#Item (_ann {#Form {#Item (_ann {#Tuple {#Item (_ann {#Symbol ["" ""]}) + (meta#in {#Item (_ann {#Form {#Item (_ann {#Tuple {#Item (_ann {#Symbol ["" ""]}) {#Item arg {#End}}}}) {#Item ({{#End} body @@ -733,7 +733,7 @@ {#End}}) {#Item [_ {#Symbol ["" self]}] {#Item [_ {#Tuple {#Item arg args'}}] {#Item body {#End}}}} - (in_meta {#Item (_ann {#Form {#Item (_ann {#Tuple {#Item (_ann {#Symbol ["" self]}) + (meta#in {#Item (_ann {#Form {#Item (_ann {#Tuple {#Item (_ann {#Symbol ["" self]}) {#Item arg {#End}}}}) {#Item ({{#End} body @@ -786,13 +786,13 @@ ({{#Item [export_policy {#Item [[_ {#Form {#Item [name args]}}] {#Item [type {#Item [body {#End}]}]}]}]} - (in_meta {#Item [(as_def name + (meta#in {#Item [(as_def name (as_checked type (as_function name args body)) export_policy) {#End}]}) {#Item [export_policy {#Item [name {#Item [type {#Item [body {#End}]}]}]}]} - (in_meta {#Item [(as_def name (as_checked type body) + (meta#in {#Item [(as_def name (as_checked type body) export_policy) {#End}]}) @@ -805,7 +805,7 @@ ("lux macro" (function'' [tokens] ({{#Item export_policy {#Item [_ {#Form {#Item name args}}] {#Item body {#End}}}} - (in_meta {#Item (as_def name (as_macro (as_function name args body)) + (meta#in {#Item (as_def name (as_macro (as_function name args body)) export_policy) {#End}}) @@ -815,14 +815,14 @@ #0) (macro:' .public (comment tokens) - (in_meta {#End})) + 
(meta#in {#End})) (macro:' .private ($' tokens) ({{#Item x {#End}} - (in_meta tokens) + (meta#in tokens) {#Item x {#Item y xs}} - (in_meta {#Item (form$ {#Item (symbol$ [..prelude_module "$'"]) + (meta#in {#Item (form$ {#Item (symbol$ [..prelude_module "$'"]) {#Item (variant$ {#Item (symbol$ [..prelude_module "#Apply"]) {#Item y {#Item x {#End}}}}) xs}}) @@ -1147,7 +1147,7 @@ (macro:' .public (-> tokens) ({{#Item output inputs} - (in_meta {#Item (list#mix ("lux type check" {#Function Code {#Function Code Code}} + (meta#in {#Item (list#mix ("lux type check" {#Function Code {#Function Code Code}} (function'' [i o] (variant$ {#Item (symbol$ [..prelude_module "#Function"]) {#Item i {#Item o {#End}}}}))) output inputs) @@ -1158,12 +1158,12 @@ (list#reversed tokens))) (macro:' .public (list xs) - (in_meta {#Item (list#mix |#Item| |#End| (list#reversed xs)) + (meta#in {#Item (list#mix |#Item| |#End| (list#reversed xs)) {#End}})) (macro:' .public (list& xs) ({{#Item last init} - (in_meta (list (list#mix |#Item| last init))) + (meta#in (list (list#mix |#Item| last init))) _ (failure "Wrong syntax for list&")} @@ -1171,20 +1171,20 @@ (macro:' .public (Union tokens) ({{#End} - (in_meta (list (symbol$ [..prelude_module "Nothing"]))) + (meta#in (list (symbol$ [..prelude_module "Nothing"]))) {#Item last prevs} - (in_meta (list (list#mix (function'' [left right] (variant$ (list (symbol$ [..prelude_module "#Sum"]) left right))) + (meta#in (list (list#mix (function'' [left right] (variant$ (list (symbol$ [..prelude_module "#Sum"]) left right))) last prevs)))} (list#reversed tokens))) (macro:' .public (Tuple tokens) ({{#End} - (in_meta (list (symbol$ [..prelude_module "Any"]))) + (meta#in (list (symbol$ [..prelude_module "Any"]))) {#Item last prevs} - (in_meta (list (list#mix (function'' [left right] (variant$ (list (symbol$ [..prelude_module "#Product"]) left right))) + (meta#in (list (list#mix (function'' [left right] (variant$ (list (symbol$ [..prelude_module "#Product"]) left right))) last prevs)))} (list#reversed tokens))) @@ -1201,7 +1201,7 @@ (failure "function' requires a non-empty arguments tuple.") {#Item [harg targs]} - (in_meta (list (form$ (list (tuple$ (list (local_symbol$ name) + (meta#in (list (form$ (list (tuple$ (list (local_symbol$ name) harg)) (list#mix (function'' [arg body'] (form$ (list (tuple$ (list (local_symbol$ "") @@ -1219,7 +1219,7 @@ ({{#Item [export_policy {#Item [[_ {#Form {#Item [name args]}}] {#Item [type {#Item [body {#End}]}]}]}]} - (in_meta (list (form$ (list (text$ "lux def") + (meta#in (list (form$ (list (text$ "lux def") name (form$ (list (text$ "lux type check") type @@ -1230,7 +1230,7 @@ export_policy)))) {#Item [export_policy {#Item [name {#Item [type {#Item [body {#End}]}]}]}]} - (in_meta (list (form$ (list (text$ "lux def") + (meta#in (list (form$ (list (text$ "lux def") name (form$ (list (text$ "lux type check") type @@ -1269,7 +1269,7 @@ (macro:' .private (let' tokens) ({{#Item [_ {#Tuple bindings}] {#Item body {#End}}} ({{#Some bindings} - (in_meta (list (list#mix ("lux type check" (-> (Tuple Code Code) Code + (meta#in (list (list#mix ("lux type check" (-> (Tuple Code Code) Code Code) (function' [binding body] ({[label value] @@ -1336,7 +1336,7 @@ (macro:' .public (_$ tokens) ({{#Item op tokens'} ({{#Item first nexts} - (in_meta (list (list#mix (function#flipped (right_associativity op)) first nexts))) + (meta#in (list (list#mix (function#flipped (right_associativity op)) first nexts))) _ (failure "Wrong syntax for _$")} @@ -1349,7 +1349,7 @@ (macro:' .public 
($_ tokens) ({{#Item op tokens'} ({{#Item last prevs} - (in_meta (list (list#mix (right_associativity op) last prevs))) + (meta#in (list (list#mix (right_associativity op) last prevs))) _ (failure "Wrong syntax for $_")} @@ -1431,7 +1431,7 @@ var)))) body (list#reversed bindings))] - (in_meta (list (form$ (list (variant$ (list (tuple$ (list (symbol$ [..prelude_module "#in"]) g!in + (meta#in (list (form$ (list (variant$ (list (tuple$ (list (symbol$ [..prelude_module "#in"]) g!in (symbol$ [..prelude_module "#then"]) g!then)) body')) monad))))) @@ -1480,7 +1480,7 @@ (macro:' .public (if tokens) ({{#Item test {#Item then {#Item else {#End}}}} - (in_meta (list (form$ (list (variant$ (list (bit$ #1) then + (meta#in (list (form$ (list (variant$ (list (bit$ #1) then (bit$ #0) else)) test)))) @@ -1567,7 +1567,7 @@ (def:''' .private (spliced replace? untemplated elems) (-> Bit (-> Code ($' Meta Code)) ($' List Code) ($' Meta Code)) ({[#1] ({{#End} - (in_meta |#End|) + (meta#in |#End|) {#Item lastI inits} (do meta_monad @@ -1607,22 +1607,22 @@ (def:''' .private (untemplated replace? subst token) (-> Bit Text Code ($' Meta Code)) ({[_ [_ {#Bit value}]] - (in_meta (with_location (variant$ (list (symbol$ [..prelude_module "#Bit"]) (bit$ value))))) + (meta#in (with_location (variant$ (list (symbol$ [..prelude_module "#Bit"]) (bit$ value))))) [_ [_ {#Nat value}]] - (in_meta (with_location (variant$ (list (symbol$ [..prelude_module "#Nat"]) (nat$ value))))) + (meta#in (with_location (variant$ (list (symbol$ [..prelude_module "#Nat"]) (nat$ value))))) [_ [_ {#Int value}]] - (in_meta (with_location (variant$ (list (symbol$ [..prelude_module "#Int"]) (int$ value))))) + (meta#in (with_location (variant$ (list (symbol$ [..prelude_module "#Int"]) (int$ value))))) [_ [_ {#Rev value}]] - (in_meta (with_location (variant$ (list (symbol$ [..prelude_module "#Rev"]) (rev$ value))))) + (meta#in (with_location (variant$ (list (symbol$ [..prelude_module "#Rev"]) (rev$ value))))) [_ [_ {#Frac value}]] - (in_meta (with_location (variant$ (list (symbol$ [..prelude_module "#Frac"]) (frac$ value))))) + (meta#in (with_location (variant$ (list (symbol$ [..prelude_module "#Frac"]) (frac$ value))))) [_ [_ {#Text value}]] - (in_meta (untemplated_text value)) + (meta#in (untemplated_text value)) [#1 [_ {#Symbol [module name]}]] (do meta_monad @@ -1635,13 +1635,13 @@ (in [module name])} module) .let' [[module name] real_name]] - (in_meta (with_location (variant$ (list (symbol$ [..prelude_module "#Symbol"]) (tuple$ (list (text$ module) (text$ name)))))))) + (meta#in (with_location (variant$ (list (symbol$ [..prelude_module "#Symbol"]) (tuple$ (list (text$ module) (text$ name)))))))) [#0 [_ {#Symbol [module name]}]] - (in_meta (with_location (variant$ (list (symbol$ [..prelude_module "#Symbol"]) (tuple$ (list (text$ module) (text$ name))))))) + (meta#in (with_location (variant$ (list (symbol$ [..prelude_module "#Symbol"]) (tuple$ (list (text$ module) (text$ name))))))) [#1 [_ {#Form {#Item [[_ {#Symbol ["" "~"]}] {#Item [unquoted {#End}]}]}}]] - (in_meta (form$ (list (text$ "lux type check") + (meta#in (form$ (list (text$ "lux type check") (symbol$ [..prelude_module "Code"]) unquoted))) @@ -1677,10 +1677,10 @@ (macro:' .public (Primitive tokens) ({{#Item [_ {#Text class_name}] {#End}} - (in_meta (list (variant$ (list (symbol$ [..prelude_module "#Primitive"]) (text$ class_name) |#End|)))) + (meta#in (list (variant$ (list (symbol$ [..prelude_module "#Primitive"]) (text$ class_name) |#End|)))) {#Item [_ {#Text class_name}] {#Item [_ 
{#Tuple params}] {#End}}} - (in_meta (list (variant$ (list (symbol$ [..prelude_module "#Primitive"]) (text$ class_name) (untemplated_list params))))) + (meta#in (list (variant$ (list (symbol$ [..prelude_module "#Primitive"]) (text$ class_name) (untemplated_list params))))) _ (failure "Wrong syntax for Primitive")} @@ -1735,7 +1735,7 @@ (macro:' .public (|> tokens) ({{#Item [init apps]} - (in_meta (list (list#mix ("lux type check" (-> Code Code Code) + (meta#in (list (list#mix ("lux type check" (-> Code Code Code) (function' [app acc] ({[_ {#Variant parts}] (variant$ (list#composite parts (list acc))) @@ -1758,7 +1758,7 @@ (macro:' .public (<| tokens) ({{#Item [init apps]} - (in_meta (list (list#mix ("lux type check" (-> Code Code Code) + (meta#in (list (list#mix ("lux type check" (-> Code Code Code) (function' [app acc] ({[_ {#Variant parts}] (variant$ (list#composite parts (list acc))) @@ -1879,7 +1879,7 @@ (|> data' (list#each (function#composite apply (replacement_environment bindings'))) list#conjoint - in_meta) + meta#in) (failure "Irregular arguments tuples for template."))) _ @@ -2026,7 +2026,7 @@ (in [module_name name])) _ - (in_meta name)} + (meta#in name)} name)) (def:''' .private (macro' full_name) @@ -2079,7 +2079,7 @@ ?macro)) _ - (in_meta (list token))} + (meta#in (list token))} token)) (def:''' .private (expansion token) @@ -2099,7 +2099,7 @@ ?macro)) _ - (in_meta (list token))} + (meta#in (list token))} token)) (def:''' .private (full_expansion' full_expansion name args) @@ -2192,7 +2192,7 @@ (in (list (tuple$ (list#conjoint members'))))) _ - (in_meta (list syntax))} + (meta#in (list syntax))} syntax))) (def:''' .private (text#encoded original) @@ -2310,7 +2310,7 @@ (macro:' .public (: tokens) ({{#Item type {#Item value {#End}}} - (in_meta (list (` ("lux type check" + (meta#in (list (` ("lux type check" (..type (~ type)) (~ value))))) @@ -2320,7 +2320,7 @@ (macro:' .public (:as tokens) ({{#Item type {#Item value {#End}}} - (in_meta (list (` ("lux type as" + (meta#in (list (` ("lux type as" (..type (~ type)) (~ value))))) @@ -2363,7 +2363,7 @@ (macro:' .public (exec tokens) ({{#Item value actions} (let' [dummy (local_symbol$ "")] - (in_meta (list (list#mix ("lux type check" (-> Code Code Code) + (meta#in (list (list#mix ("lux type check" (-> Code Code Code) (function' [pre post] (` ({(~ dummy) (~ post)} (~ pre))))) value @@ -2403,7 +2403,7 @@ {#None} body'} ?type)] - (in_meta (list (` ("lux def" (~ name) + (meta#in (list (` ("lux def" (~ name) (~ body'') (~ export_policy)))))) @@ -2480,7 +2480,7 @@ (let' [pairs (|> patterns (list#each (function' [pattern] (list pattern body))) (list#conjoint))] - (in_meta (list#composite pairs branches)))) + (meta#in (list#composite pairs branches)))) _ (failure "Wrong syntax for ^or"))) @@ -2508,7 +2508,7 @@ (` (case (~ r) (~ l) (~ body'))))))) body) list - in_meta) + meta#in) {#None} (failure "let requires an even number of parts")) @@ -2533,7 +2533,7 @@ (` ([(~ g!name) (~ arg)] (~ body'))) (` ([(~ g!name) (~ g!blank)] (.case (~ g!blank) (~ arg) (~ body'))))))))] - (in_meta (list (nest (..local_symbol$ g!name) head + (meta#in (list (nest (..local_symbol$ g!name) head (list#mix (nest g!blank) body (list#reversed tail)))))) {#None} @@ -2741,7 +2741,7 @@ {#None} body)] - (in_meta (list (` ("lux def" (~ (..local_symbol$ name)) + (meta#in (list (` ("lux def" (~ (..local_symbol$ name)) (~ body) (~ export_policy)))))) @@ -2769,7 +2769,7 @@ _ (` ("lux macro" (function ((~ name) (~+ (list#each local_symbol$ args))) (~ body)))))] - (in_meta (list (` 
("lux def" (~ name) + (meta#in (list (` ("lux def" (~ name) (~ body) (~ export_policy)))))) @@ -2795,7 +2795,7 @@ [(macro: .public ( tokens) (case (list#reversed tokens) (^ (list& last init)) - (in_meta (list (list#mix (: (-> Code Code Code) + (meta#in (list (list#mix (: (-> Code Code Code) (function (_ pre post) (`
))) last init))) @@ -2999,7 +2999,7 @@ ..#module_state _] =module]] (case (plist#value name definitions) {#Some {#Slot [exported type group index]}} - (in_meta [index + (meta#in [index (list#each (function (_ slot) [module slot]) group) @@ -3033,18 +3033,18 @@ {#Some {#Type [exported? {#Named _ _type} {#Right slots}]}} (case (interface_methods _type) {#Some members} - (in_meta {#Some [(list#each (function (_ slot) [module slot]) + (meta#in {#Some [(list#each (function (_ slot) [module slot]) {#Item slots}) members]}) _ - (in_meta {#None})) + (meta#in {#None})) _ (record_slots unnamed))) _ - (in_meta {#None}))) + (meta#in {#None}))) (def: expected_type (Meta Type) @@ -3114,7 +3114,7 @@ tags (: (Meta (List Symbol)) (case tags+type {#Some [tags _]} - (in_meta tags) + (meta#in tags) _ (failure ($_ text#composite @@ -3182,7 +3182,7 @@ _ (` ((~ (local_symbol$ name)) (~+ args))))] - (in_meta (list (` (..def: (~ export_policy) (~ usage) + (meta#in (list (` (..def: (~ export_policy) (~ usage) (~ type) (..implementation (~+ definitions))))))) @@ -3230,7 +3230,7 @@ (macro: .public (Variant tokens) (case (everyP caseP tokens) {#Some cases} - (in_meta (list (` (..Union (~+ (list#each product#right cases)))) + (meta#in (list (` (..Union (~+ (list#each product#right cases)))) (variant$ (list#each (function (_ case) (text$ (product#left case))) cases)))) @@ -3252,7 +3252,7 @@ (^ (list [_ {#Tuple record}])) (case (everyP slotP record) {#Some slots} - (in_meta (list (` (..Tuple (~+ (list#each product#right slots)))) + (meta#in (list (` (..Tuple (~+ (list#each product#right slots)))) (tuple$ (list#each (function (_ slot) (text$ (product#left slot))) slots)))) @@ -3291,7 +3291,7 @@ (^ (list type [_ {#Variant tags}])) (case (everyP textP tags) {#Some tags} - (in_meta [type {#Some {#Left tags}}]) + (meta#in [type {#Some {#Left tags}}]) {#None} (failure "Improper type-definition syntax")) @@ -3299,19 +3299,19 @@ (^ (list type [_ {#Tuple slots}])) (case (everyP textP slots) {#Some slots} - (in_meta [type {#Some {#Right slots}}]) + (meta#in [type {#Some {#Right slots}}]) {#None} (failure "Improper type-definition syntax")) (^ (list type)) - (in_meta [it {#None}]) + (meta#in [it {#None}]) _ (failure "Improper type-definition syntax"))) type - (in_meta [type {#None}])} + (meta#in [type {#None}])} it)) (macro: .public (type: tokens) @@ -3335,7 +3335,7 @@ (let [typeC (` {.#Named [(~ (text$ module_name)) (~ (text$ name))] (.type (~ type''))})] - (in_meta (list (case labels?? + (meta#in (list (case labels?? 
{#Some labels} (` ("lux def type tagged" (~ type_name) (~ typeC) @@ -3399,7 +3399,7 @@ (function (_ def) (case def [_ {#Symbol ["" name]}] - (in_meta name) + (meta#in name) _ (failure "only/+ and exclude/- require symbols.")))) @@ -3422,20 +3422,20 @@ (^or (^ (list& [_ {#Text "*"}] tokens')) (^ (list& [_ {#Text "all"}] tokens'))) - (in_meta [{#All} tokens']) + (meta#in [{#All} tokens']) (^or (^ (list& [_ {#Text "_"}] tokens')) (^ (list& [_ {#Text "ignore"}] tokens'))) - (in_meta [{#Ignore} tokens']) + (meta#in [{#Ignore} tokens']) _ - (in_meta [{#Nothing} tokens]))) + (meta#in [{#Nothing} tokens]))) (def: (openings_parser parts) (-> (List Code) (Meta [(List Openings) (List Code)])) (case parts {#End} - (in_meta [{#End} {#End}]) + (meta#in [{#End} {#End}]) (^ (list& [_ {#Form (list& [_ {#Text prefix}] structs)}] parts')) (do meta_monad @@ -3443,18 +3443,18 @@ (function (_ struct) (case struct [_ {#Symbol ["" struct_name]}] - (in_meta struct_name) + (meta#in struct_name) _ (failure "Expected all implementations of opening form to be symbols."))) structs) next+remainder (openings_parser parts')] (let [[next remainder] next+remainder] - (in_meta [{#Item [prefix structs'] next} + (meta#in [{#Item [prefix structs'] next} remainder]))) _ - (in_meta [{#End} parts]))) + (meta#in [{#End} parts]))) (def: (text#split_at' at x) (-> Nat Text [Text Text]) @@ -3546,7 +3546,7 @@ (-> Bit Text Text (Meta Text)) (case (relative_ups 0 module) 0 - (in_meta (if nested? + (meta#in (if nested? ($_ "lux text concat" relative_root ..module_separator module) module)) @@ -3563,7 +3563,7 @@ output (case ("lux text size" clean) 0 prefix _ ($_ text#composite prefix ..module_separator clean))] - (in_meta output)) + (meta#in output)) (failure ($_ "lux text concat" "Cannot climb the module hierarchy..." \n "Importing module: " module \n @@ -3939,10 +3939,10 @@ (^ (list& else branches')) (case (pairs branches') {#Some branches'} - (in_meta (list (list#mix (: (-> [Code Code] Code Code) + (meta#in (list (list#mix (: (-> [Code Code] Code Code) (function (_ branch else) - (let [[right left] branch] - (` (if (~ left) (~ right) (~ else)))))) + (let [[then ?] branch] + (` (if (~ ?) 
(~ then) (~ else)))))) else branches'))) @@ -3987,13 +3987,13 @@ g!_))))) list#conjoint tuple$)] - (in_meta (list (` ({(~ pattern) (~ g!output)} (~ record)))))) + (meta#in (list (` ({(~ pattern) (~ g!output)} (~ record)))))) _ (failure "value@ can only use records."))) (^ (list [_ {#Tuple slots}] record)) - (in_meta (list (list#mix (: (-> Code Code Code) + (meta#in (list (list#mix (: (-> Code Code Code) (function (_ slot inner) (` (..value@ (~ slot) (~ inner))))) record @@ -4030,10 +4030,10 @@ (function (_ [sub_tag_index sname stype]) (open_declaration alias tags' sub_tag_index sname source+ stype))) (enumeration (zipped/2 tags' members')))] - (in_meta (list#conjoint decls'))) + (meta#in (list#conjoint decls'))) _ - (in_meta (list (` ("lux def" (~ (local_symbol$ (..module_alias (list short) alias))) + (meta#in (list (` ("lux def" (~ (local_symbol$ (..module_alias (list short) alias))) (~ source+) #0))))))) @@ -4053,7 +4053,7 @@ (function (_ [tag_index sname stype]) (open_declaration alias tags tag_index sname source stype))) (enumeration (zipped/2 tags members)))] - (in_meta (list#conjoint decls'))) + (meta#in (list#conjoint decls'))) _ (failure (text#composite "Can only 'open:' structs: " (type#encoded struct_type))))) @@ -4061,7 +4061,7 @@ _ (do meta_monad [g!struct (..generated_symbol "struct")] - (in_meta (list (` ("lux def" (~ g!struct) (~ struct) #0)) + (meta#in (list (` ("lux def" (~ g!struct) (~ struct) #0)) (` (..open: (~ (text$ alias)) (~ g!struct))))))) _ @@ -4071,13 +4071,13 @@ (do meta_monad [g!_ (..generated_symbol "_") g!arg (..generated_symbol "arg")] - (in_meta (list (` (function ((~ g!_) (~ g!arg)) (|> (~ g!arg) (~+ tokens)))))))) + (meta#in (list (` (function ((~ g!_) (~ g!arg)) (|> (~ g!arg) (~+ tokens)))))))) (macro: .public (<<| tokens) (do meta_monad [g!_ (..generated_symbol "_") g!arg (..generated_symbol "arg")] - (in_meta (list (` (function ((~ g!_) (~ g!arg)) (<| (~+ tokens) (~ g!arg)))))))) + (meta#in (list (` (function ((~ g!_) (~ g!arg)) (<| (~+ tokens) (~ g!arg)))))))) (def: (imported_by? import_name module_name) (-> Text Text (Meta Bit)) @@ -4116,7 +4116,7 @@ (: (-> Text (Meta Any)) (function (_ _def) (if (is_member? 
all_defs _def) - (in_meta []) + (meta#in []) (failure ($_ text#composite _def " is not defined in module " module_name " @ " current_module))))) referred_defs)))] defs' (case r_defs @@ -4193,11 +4193,11 @@ (macro: .public (# tokens) (case tokens (^ (list struct [_ {#Symbol member}])) - (in_meta (list (` (let [(^open (~ (text$ (alias_stand_in 0)))) (~ struct)] + (meta#in (list (` (let [(^open (~ (text$ (alias_stand_in 0)))) (~ struct)] (~ (symbol$ member)))))) (^ (list& struct member args)) - (in_meta (list (` ((..# (~ struct) (~ member)) (~+ args))))) + (meta#in (list (` ((..# (~ struct) (~ member)) (~+ args))))) _ (failure "Wrong syntax for #"))) @@ -4217,7 +4217,7 @@ (function (_ [r_slot_name [r_idx r_type]]) (do meta_monad [g!slot (..generated_symbol "")] - (in_meta [r_slot_name r_idx g!slot])))) + (meta#in [r_slot_name r_idx g!slot])))) (zipped/2 tags (enumeration members)))] (let [pattern (|> pattern' (list#each (: (-> [Symbol Nat Code] (List Code)) @@ -4235,7 +4235,7 @@ r_var))))) list#conjoint tuple$)] - (in_meta (list (` ({(~ pattern) (~ output)} (~ record))))))) + (meta#in (list (` ({(~ pattern) (~ output)} (~ record))))))) _ (failure "with@ can only use records."))) @@ -4300,7 +4300,7 @@ (function (_ [r_slot_name [r_idx r_type]]) (do meta_monad [g!slot (..generated_symbol "")] - (in_meta [r_slot_name r_idx g!slot])))) + (meta#in [r_slot_name r_idx g!slot])))) (zipped/2 tags (enumeration members)))] (let [pattern (|> pattern' (list#each (: (-> [Symbol Nat Code] (List Code)) @@ -4318,7 +4318,7 @@ r_var))))) list#conjoint tuple$)] - (in_meta (list (` ({(~ pattern) (~ output)} (~ record))))))) + (meta#in (list (` ({(~ pattern) (~ output)} (~ record))))))) _ (failure "revised@ can only use records."))) @@ -4375,7 +4375,7 @@ in)) {#None})))) {#Some output} - (in_meta (list#composite output branches)) + (meta#in (list#composite output branches)) {#None} (failure "Wrong syntax for ^template")) @@ -4459,11 +4459,11 @@ (do meta_monad [inits' (: (Meta (List Symbol)) (case (monad#each maybe_monad symbol_name inits) - {#Some inits'} (in_meta inits') + {#Some inits'} (meta#in inits') {#None} (failure "Wrong syntax for loop"))) init_types (monad#each meta_monad type_definition inits') expected ..expected_type] - (in_meta (list (` (("lux type check" + (meta#in (list (` (("lux type check" (-> (~+ (list#each type_code init_types)) (~ (type_code expected))) (function ((~ name) (~+ vars)) @@ -4474,7 +4474,7 @@ (: (-> Code (Meta Code)) (function (_ _) (..generated_symbol ""))) inits)] - (in_meta (list (` (let [(~+ (..interleaved aliases inits))] + (meta#in (list (` (let [(~+ (..interleaved aliases inits))] (.loop (~ name) [(~+ (..interleaved vars aliases))] (~ body))))))))) @@ -4594,7 +4594,7 @@ [#Tuple]) _ - (in_meta token) + (meta#in token) ... TODO: Figure out why this doesn't work: ... 
(# meta_monad in token) )) @@ -4616,10 +4616,10 @@ (-> Code (Meta [Code Code])) (case level (^ [_ {#Tuple (list expr binding)}]) - (in_meta [expr binding]) + (meta#in [expr binding]) _ - (in_meta [level (` #1)]) + (meta#in [level (` #1)]) )) (def: (multi_level_case^ levels) @@ -4696,7 +4696,7 @@ (macro: .public (symbol tokens) (case tokens (^ (list [_ {#Symbol [module name]}])) - (in_meta (list (` [(~ (text$ module)) (~ (text$ name))]))) + (meta#in (list (` [(~ (text$ module)) (~ (text$ name))]))) _ (failure (..wrong_syntax_error [..prelude_module "symbol"])))) @@ -4710,7 +4710,7 @@ (case tokens (^ (list& [_meta {#Form (list [_ {#Symbol ["" name]}] pattern)}] body branches)) (let [g!whole (local_symbol$ name)] - (in_meta (list& g!whole + (meta#in (list& g!whole (` (case (~ g!whole) (~ pattern) (~ body))) branches))) @@ -4721,7 +4721,7 @@ (case tokens (^ (list& [_meta {#Form (list [_ {#Symbol ["" name]}] [_ {#Tuple steps}])}] body branches)) (let [g!name (local_symbol$ name)] - (in_meta (list& g!name + (meta#in (list& g!name (` (let [(~ g!name) (|> (~ g!name) (~+ steps))] (~ body))) branches))) @@ -4833,7 +4833,7 @@ (-> Code (Meta Text)) (case choice [_ {#Text platform}] - (..in_meta platform) + (..meta#in platform) [_ {#Symbol symbol}] (do meta_monad @@ -4864,13 +4864,13 @@ (failure ($_ text#composite "No code for target platform: " target)) {#Some default} - (in_meta (list default))) + (meta#in (list default))) {#Item [key pick] options'} (do meta_monad [platform (..platform_name key)] (if (text#= target platform) - (in_meta (list pick)) + (meta#in (list pick)) (target_pick target options' default))))) (macro: .public (for tokens) @@ -4966,7 +4966,7 @@ [#Tuple]) _ - (in_meta [(list) code]))) + (meta#in [(list) code]))) (macro: .public (`` tokens) (case tokens @@ -5033,7 +5033,7 @@ [.#Symbol name$]) [_ {#Form {#Item [[_ {#Symbol ["" "~"]}] {#Item [unquoted {#End}]}]}}] - (in_meta unquoted) + (meta#in unquoted) [_ {#Form {#Item [[_ {#Symbol ["" "~+"]}] {#Item [spliced {#End}]}]}}] (failure "Cannot use (~+) inside of ^code unless it is the last element in a form or a tuple.") @@ -5074,7 +5074,7 @@ (^ (list [_ {#Tuple bindings}] bodyT)) (case (..pairs bindings) {#Some bindings} - (in_meta (list (` (..with_expansions [(~+ (|> bindings + (meta#in (list (` (..with_expansions [(~+ (|> bindings (list#each (function (_ [localT valueT]) (list localT (` (..as_is (~ valueT)))))) (list#mix list#composite (list))))] diff --git a/stdlib/source/library/lux/data/binary.lux b/stdlib/source/library/lux/data/binary.lux index 479c7d7a1..8df174ce5 100644 --- a/stdlib/source/library/lux/data/binary.lux +++ b/stdlib/source/library/lux/data/binary.lux @@ -78,9 +78,9 @@ (def: (= reference sample) (/.= reference sample))) -(exception: .public (cannot_copy_bytes [bytes Nat - source_input Nat - target_output Nat]) +(exception: .public (cannot_copy [bytes Nat + source_input Nat + target_output Nat]) (exception.report ["Bytes" (%.nat bytes)] ["Source input space" (%.nat source_input)] @@ -88,10 +88,11 @@ (def: .public (copy bytes source_offset source target_offset target) (-> Nat Nat Binary Nat Binary (Try Binary)) - (let [source_input (n.- source_offset (/.size source))] - (if (n.< bytes source_input) - (let [target_output (n.- target_offset (/.size target))] - (exception.except ..cannot_copy_bytes [bytes source_input target_output])) + (let [source_input (n.- source_offset (/.size source)) + target_output (n.- target_offset (/.size target))] + (if (or (n.< bytes source_input) + (n.< bytes target_output)) + 
(exception.except ..cannot_copy [bytes source_input target_output]) {try.#Success (/.copy! bytes source_offset source target_offset target)}))) (exception: .public (slice_out_of_bounds [size Nat @@ -115,11 +116,11 @@ (cond (n.= 0 bytes) binary - (n.< bytes (/.size binary)) - (/.empty 0) + (n.< (/.size binary) bytes) + (/.slice bytes (n.- bytes (/.size binary)) binary) ... else - (/.slice bytes (n.- bytes (/.size binary)) binary))) + (/.empty 0))) (implementation: .public monoid (Monoid Binary) diff --git a/stdlib/source/library/lux/math/number/frac.lux b/stdlib/source/library/lux/math/number/frac.lux index bf73a6491..ac528ad6d 100644 --- a/stdlib/source/library/lux/math/number/frac.lux +++ b/stdlib/source/library/lux/math/number/frac.lux @@ -1,26 +1,26 @@ (.using - [library - [lux {"-" nat int rev} - ["@" target] - [abstract - [hash {"+" Hash}] - [monoid {"+" Monoid}] - [equivalence {"+" Equivalence}] - [codec {"+" Codec}] - [predicate {"+" Predicate}] - [order {"+" Order}] - [monad {"+" do}]] - [control - ["[0]" maybe] - ["[0]" try {"+" Try}]] - [data - ["[0]" text]]]] - ["[0]" // "_" - ["[1][0]" i64] - ["[1][0]" nat] - ["[1][0]" int] - ["[1][0]" rev] - ["/[1]" //]]) + [library + [lux {"-" nat int rev} + ["@" target] + [abstract + [hash {"+" Hash}] + [monoid {"+" Monoid}] + [equivalence {"+" Equivalence}] + [codec {"+" Codec}] + [predicate {"+" Predicate}] + [order {"+" Order}] + [monad {"+" do}]] + [control + ["[0]" maybe] + ["[0]" try {"+" Try}]] + [data + ["[0]" text]]]] + ["[0]" // "_" + ["[1][0]" i64] + ["[1][0]" nat] + ["[1][0]" int] + ["[1][0]" rev] + ["/[1]" //]]) (def: .public (= reference sample) (-> Frac Frac Bit) @@ -126,10 +126,36 @@ "lux f64 i64" ("lux i64 left-shift" ..exponent_size))) +(template [ ] + [(def: .public + Frac + (../ +0.0 ))] + + [not_a_number +0.0] + [positive_infinity +1.0] + ) + +(def: .public negative_infinity + Frac + (..* -1.0 ..positive_infinity)) + +(def: .public (not_a_number? it) + (-> Frac Bit) + (not (..= it it))) + +(def: .public (number? it) + (-> Frac Bit) + (not (or (..not_a_number? it) + (..= ..positive_infinity it) + (..= ..negative_infinity it)))) + (implementation: .public equivalence (Equivalence Frac) - (def: = ..=)) + (def: (= left right) + (or (..= left right) + (and (..not_a_number? left) + (..not_a_number? right))))) (implementation: .public order (Order Frac) @@ -163,29 +189,6 @@ [maximum ..max (..* -1.0 ..biggest)] ) -(template [ ] - [(def: .public - Frac - (../ +0.0 ))] - - [not_a_number +0.0] - [positive_infinity +1.0] - ) - -(def: .public negative_infinity - Frac - (..* -1.0 ..positive_infinity)) - -(def: .public (not_a_number? it) - (-> Frac Bit) - (not (..= it it))) - -(def: .public (number? it) - (-> Frac Bit) - (not (or (..not_a_number? 
it) - (..= ..positive_infinity it) - (..= ..negative_infinity it)))) - (implementation: .public decimal (Codec Text Frac) diff --git a/stdlib/source/library/lux/target/jvm/loader.lux b/stdlib/source/library/lux/target/jvm/loader.lux index 9e391c554..99a4573bc 100644 --- a/stdlib/source/library/lux/target/jvm/loader.lux +++ b/stdlib/source/library/lux/target/jvm/loader.lux @@ -1,23 +1,23 @@ (.using - [library - [lux "*" - ["@" target] - ["[0]" ffi {"+" import: object do_to}] - [abstract - [monad {"+" do}]] - [control - ["[0]" try {"+" Try}] - ["[0]" exception {"+" exception:}] - ["[0]" io {"+" IO}] - [concurrency - ["[0]" atom {"+" Atom}]]] - [data - ["[0]" binary {"+" Binary}] - ["[0]" text - ["%" format {"+" format}]] - [collection - ["[0]" array] - ["[0]" dictionary {"+" Dictionary}]]]]]) + [library + [lux "*" + ["@" target] + ["[0]" ffi {"+" import: object do_to}] + [abstract + [monad {"+" do}]] + [control + ["[0]" try {"+" Try}] + ["[0]" exception {"+" exception:}] + ["[0]" io {"+" IO}] + [concurrency + ["[0]" atom {"+" Atom}]]] + [data + ["[0]" binary {"+" Binary}] + ["[0]" text + ["%" format {"+" format}]] + [collection + ["[0]" array] + ["[0]" dictionary {"+" Dictionary}]]]]]) (type: .public Library (Atom (Dictionary Text Binary))) diff --git a/stdlib/source/library/lux/tool/compiler/language/lux/analysis.lux b/stdlib/source/library/lux/tool/compiler/language/lux/analysis.lux index 65b191979..5ac2fda49 100644 --- a/stdlib/source/library/lux/tool/compiler/language/lux/analysis.lux +++ b/stdlib/source/library/lux/tool/compiler/language/lux/analysis.lux @@ -8,7 +8,7 @@ [control ["[0]" function] ["[0]" maybe] - ["[0]" try] + ["[0]" try {"+" Try}] ["[0]" exception {"+" Exception}] [parser ["<[0]>" code]]] @@ -280,7 +280,7 @@ failure failure))))) -(def: (locate_error location error) +(def: (located location error) (-> Location Text Text) (%.format (%.location location) text.new_line error)) @@ -288,7 +288,17 @@ (def: .public (failure error) (-> Text Operation) (function (_ [bundle state]) - {try.#Failure (locate_error (value@ .#location state) error)})) + {try.#Failure (located (value@ .#location state) error)})) + +(def: .public (of_try it) + (All (_ a) (-> (Try a) (Operation a))) + (function (_ [bundle state]) + (.case it + {try.#Failure error} + {try.#Failure (located (value@ .#location state) error)} + + {try.#Success it} + {try.#Success [[bundle state] it]}))) (def: .public (except exception parameters) (All (_ e) (-> (Exception e) e Operation)) @@ -307,7 +317,7 @@ (action bundle,state)) {try.#Failure error} (let [[bundle state] bundle,state] - {try.#Failure (locate_error (value@ .#location state) error)}) + {try.#Failure (located (value@ .#location state) error)}) success success))) diff --git a/stdlib/source/library/lux/tool/compiler/language/lux/analysis/coverage.lux b/stdlib/source/library/lux/tool/compiler/language/lux/analysis/coverage.lux new file mode 100644 index 000000000..71bc09f77 --- /dev/null +++ b/stdlib/source/library/lux/tool/compiler/language/lux/analysis/coverage.lux @@ -0,0 +1,420 @@ +(.using + [library + [lux {"-" Variant} + [abstract + equivalence + ["[0]" monad {"+" do}]] + [control + ["[0]" maybe ("[1]#[0]" monoid monad)] + ["[0]" try {"+" Try}] + ["[0]" exception {"+" exception:}]] + [data + ["[0]" bit ("[1]#[0]" equivalence)] + ["[0]" text + ["%" format]] + [collection + ["[0]" list ("[1]#[0]" functor mix)] + ["[0]" dictionary {"+" Dictionary}] + ["[0]" set {"+" Set} ("[1]#[0]" equivalence)]]] + [macro + ["[0]" template]] + [math + [number + ["n" nat 
("[1]#[0]" interval)] + ["i" int] + ["r" rev] + ["f" frac]]]]] + ["[0]" // "_" + ["[1][0]" simple] + ["[1][0]" complex] + ["[1][0]" pattern {"+" Pattern}]]) + +... The coverage of a pattern-matching expression summarizes how well +... all the possible values of an input are being covered by the +... different patterns involved. +... Ideally, the pattern-matching has "exhaustive" coverage, which just +... means that every possible value can be matched by at least 1 +... pattern. +... Every other coverage is considered partial, and it would be valued +... as insuficient (since it could lead to runtime errors due to values +... not being handled by any pattern). +(template.let [(Variant' @) + [[(Maybe Nat) (Dictionary Nat @)]]] + (as_is (type: .public Coverage + (Rec @ + (.Variant + {#Exhaustive} + {#Bit Bit} + {#Nat (Set Nat)} + {#Int (Set Int)} + {#Rev (Set Rev)} + {#Frac (Set Frac)} + {#Text (Set Text)} + {#Variant (Variant' @)} + {#Seq @ @} + {#Alt @ @}))) + + (type: .public Variant + (Variant' Coverage)))) + +(def: .public (minimum [max cases]) + (-> Variant Nat) + (maybe.else (|> cases + dictionary.keys + (list#mix n.max 0) + ++) + max)) + +(def: .public (maximum [max cases]) + (-> Variant Nat) + (maybe.else n#top max)) + +(def: (alternatives coverage) + (-> Coverage (List Coverage)) + (case coverage + {#Alt left right} + (list& left (alternatives right)) + + _ + (list coverage))) + +(implementation: .public equivalence + (Equivalence Coverage) + + (def: (= reference sample) + (case [reference sample] + [{#Exhaustive} {#Exhaustive}] + #1 + + [{#Bit sideR} {#Bit sideS}] + (bit#= sideR sideS) + + (^template [] + [[{ partialR} { partialS}] + (set#= partialR partialS)]) + ([#Nat] + [#Int] + [#Rev] + [#Frac] + [#Text]) + + [{#Variant allR casesR} {#Variant allS casesS}] + (and (# (maybe.equivalence n.equivalence) = allR allS) + (# (dictionary.equivalence =) = casesR casesS)) + + [{#Seq leftR rightR} {#Seq leftS rightS}] + (and (= leftR leftS) + (= rightR rightS)) + + [{#Alt _} {#Alt _}] + (let [flatR (alternatives reference) + flatS (alternatives sample)] + (and (n.= (list.size flatR) (list.size flatS)) + (list.every? (function (_ [coverageR coverageS]) + (= coverageR coverageS)) + (list.zipped/2 flatR flatS)))) + + _ + #0))) + +(open: "/#[0]" ..equivalence) + +(def: .public (format value) + (%.Format Coverage) + (case value + {#Bit it} + (%.bit it) + + (^template [ ] + [{ it} + (|> it + set.list + (list#each ) + (text.interposed " ") + (text.enclosed ["[" "]"]))]) + ([#Nat %.nat] + [#Int %.int] + [#Rev %.rev] + [#Frac %.frac] + [#Text %.text]) + + {#Variant ?max_cases cases} + (|> cases + dictionary.entries + (list#each (function (_ [tag it]) + (%.format (%.nat tag) " " (format it)))) + (text.interposed " ") + (%.format (maybe.else "?" (maybe#each %.nat ?max_cases)) " ") + (text.enclosed ["{" "}"])) + + {#Seq left right} + (%.format "(& " (format left) " " (format right) ")") + + {#Alt left right} + (%.format "(| " (format left) " " (format right) ")") + + {#Exhaustive} + "*")) + +(exception: .public (invalid_tuple [size Nat]) + (exception.report + ["Expected size" ">= 2"] + ["Actual size" (%.nat size)])) + +(def: .public (coverage pattern) + (-> Pattern (Try Coverage)) + (case pattern + (^or {//pattern.#Simple {//simple.#Unit}} + {//pattern.#Bind _}) + {try.#Success {#Exhaustive}} + + ... Simple patterns (other than unit/[]) always have partial coverage because there + ... are too many possibilities as far as values go. 
+ (^template [ ] + [{//pattern.#Simple { it}} + {try.#Success { (set.of_list (list it))}}]) + ([//simple.#Nat #Nat n.hash] + [//simple.#Int #Int i.hash] + [//simple.#Rev #Rev r.hash] + [//simple.#Frac #Frac f.hash] + [//simple.#Text #Text text.hash]) + + ... Bits are the exception, since there is only "#1" and + ... "#0", which means it is possible for bit + ... pattern-matching to become exhaustive if complementary parts meet. + {//pattern.#Simple {//simple.#Bit value}} + {try.#Success {#Bit value}} + + ... Tuple patterns can be exhaustive if there is exhaustiveness for all of + ... their sub-patterns. + {//pattern.#Complex {//complex.#Tuple membersP+}} + (case (list.reversed membersP+) + (^or (^ (list)) (^ (list _))) + (exception.except ..invalid_tuple [(list.size membersP+)]) + + {.#Item lastP prevsP+} + (do [! try.monad] + [lastC (coverage lastP)] + (monad.mix ! + (function (_ leftP rightC) + (do ! + [leftC (coverage leftP)] + (case rightC + {#Exhaustive} + (in leftC) + + _ + (in {#Seq leftC rightC})))) + lastC prevsP+))) + + ... Variant patterns can be shown to be exhaustive if all the possible + ... cases are handled exhaustively. + {//pattern.#Complex {//complex.#Variant [lefts right? value]}} + (do try.monad + [value_coverage (coverage value) + .let [idx (if right? + (++ lefts) + lefts)]] + (in {#Variant (if right? + {.#Some (++ idx)} + {.#None}) + (|> (dictionary.empty n.hash) + (dictionary.has idx value_coverage))})))) + +(def: (xor left right) + (-> Bit Bit Bit) + (or (and left (not right)) + (and (not left) right))) + +... The coverage checker not only verifies that pattern-matching is +... exhaustive, but also that there are no redundant patterns. +... Redundant patterns will never be executed, since there will +... always be a pattern prior to them that would match the input. +... Because of that, the presence of redundant patterns is assumed to +... be a bug, likely due to programmer carelessness. +(exception: .public (redundancy [so_far Coverage + addition Coverage]) + (exception.report + ["Coverage so-far" (format so_far)] + ["Additional coverage" (format addition)])) + +(exception: .public (variant_mismatch [expected Nat + mismatched Nat]) + (exception.report + ["Expected cases" (%.nat expected)] + ["Mismatched cases" (%.nat mismatched)])) + +(def: .public (exhaustive? coverage) + (-> Coverage Bit) + (case coverage + {#Exhaustive} + #1 + + _ + #0)) + +... After determining the coverage of each individual pattern, it is +... necessary to merge them all to figure out if the entire +... pattern-matching expression is exhaustive and whether it contains +... redundant patterns. +(def: .public (composite addition so_far) + (-> Coverage Coverage (Try Coverage)) + (with_expansions [ (exception.except ..redundancy [so_far addition]) + {try.#Success {#Alt addition so_far}} + (if (/#= so_far addition) + ... The addition cannot possibly improve the coverage. + + ... There are now 2 alternative paths. + )] + (case [addition so_far] + ... 2 bit coverages are exhaustive if they complement one another. + [{#Bit sideA} {#Bit sideSF}] + (if (xor sideA sideSF) + {try.#Success {#Exhaustive}} + ) + + (^template [] + [[{ partialA} { partialSF}] + (if (set.empty? 
(set.intersection partialA partialSF)) + {try.#Success { (set.union partialA partialSF)}} + )]) + ([#Nat] + [#Int] + [#Rev] + [#Frac] + [#Text]) + + [{#Variant addition'} {#Variant so_far'}] + (let [[allA casesA] addition' + [allSF casesSF] so_far' + addition_cases (..maximum addition') + so_far_cases (..maximum so_far')] + (cond (template.let [(known_cases? it) + [(n.< n#top it)]] + (and (known_cases? so_far_cases) + (if (known_cases? addition_cases) + (not (n.= so_far_cases addition_cases)) + (n.> so_far_cases (..minimum addition'))))) + (exception.except ..variant_mismatch [so_far_cases addition_cases]) + + (# (dictionary.equivalence ..equivalence) = casesSF casesA) + + + ... else + (do [! try.monad] + [casesM (monad.mix ! + (function (_ [tagA coverageA] casesSF') + (case (dictionary.value tagA casesSF') + {.#Some coverageSF} + (do ! + [coverageM (composite coverageA coverageSF)] + (in (dictionary.has tagA coverageM casesSF'))) + + {.#None} + (in (dictionary.has tagA coverageA casesSF')))) + casesSF + (dictionary.entries casesA))] + (in (if (and (n.= (n.min addition_cases so_far_cases) + (dictionary.size casesM)) + (list.every? ..exhaustive? (dictionary.values casesM))) + {#Exhaustive} + {#Variant (maybe#composite allA allSF) casesM}))))) + + [{#Seq leftA rightA} {#Seq leftSF rightSF}] + (case [(/#= leftSF leftA) (/#= rightSF rightA)] + ... Same prefix + [#1 #0] + (do try.monad + [rightM (composite rightA rightSF)] + (in (if (..exhaustive? rightM) + ... If all that follows is exhaustive, then it can be safely dropped + ... (since only the "left" part would influence whether the + ... composite coverage is exhaustive or not). + leftSF + {#Seq leftSF rightM}))) + + ... Same suffix + [#0 #1] + (do try.monad + [leftM (composite leftA leftSF)] + (in {#Seq leftM rightA})) + + ... The 2 sequences cannot possibly be merged. + [#0 #0] + + + ... There is nothing the addition adds to the coverage. + [#1 #1] + ) + + ... The addition cannot possibly improve the coverage. + [_ {#Exhaustive}] + + + ... The addition completes the coverage. + [{#Exhaustive} _] + {try.#Success {#Exhaustive}} + + ... When merging a new coverage against one based on Alt, it may be + ... that one of the many coverages in the Alt is complementary to + ... the new one, so effort must be made to fuse carefully, to match + ... the right coverages together. + ... If one of the Alt sub-coverages matches the new one, the cycle + ... must be repeated, in case the resulting coverage can now match + ... other ones in the original Alt. + ... This process must be repeated until no further productive + ... merges can be done. + [_ {#Alt leftS rightS}] + (do [! try.monad] + [.let [fuse_once (: (-> Coverage (List Coverage) + (Try [(Maybe Coverage) + (List Coverage)])) + (function (_ coverageA possibilitiesSF) + (loop [altsSF possibilitiesSF] + (case altsSF + {.#End} + (in [{.#None} (list coverageA)]) + + {.#Item altSF altsSF'} + (do ! + [altMSF (composite coverageA altSF)] + (case altMSF + {#Alt _} + (do ! + [[success altsSF+] (again altsSF')] + (in [success {.#Item altSF altsSF+}])) + + _ + (in [{.#Some altMSF} altsSF'])))))))]] + (loop [addition addition + possibilitiesSF (alternatives so_far)] + (do ! + [[addition' possibilitiesSF'] (fuse_once addition possibilitiesSF)] + (case addition' + {.#Some addition'} + (again addition' possibilitiesSF') + + {.#None} + (case (list.reversed possibilitiesSF') + {.#Item last prevs} + (in (list#mix (function (_ left right) {#Alt left right}) + last + prevs)) + + {.#End} + (undefined)))))) + + ... 
The left part will always match, so the addition is redundant. + [{#Seq left right} single] + (if (/#= left single) + + ) + + ... The right part is not necessary, since it can always match the left. + [single {#Seq left right}] + (if (/#= left single) + {try.#Success single} + ) + + _ + ))) diff --git a/stdlib/source/library/lux/tool/compiler/language/lux/analysis/evaluation.lux b/stdlib/source/library/lux/tool/compiler/language/lux/analysis/evaluation.lux index d27d54fe7..7b8181b9c 100644 --- a/stdlib/source/library/lux/tool/compiler/language/lux/analysis/evaluation.lux +++ b/stdlib/source/library/lux/tool/compiler/language/lux/analysis/evaluation.lux @@ -1,18 +1,22 @@ (.using [library [lux "*" + [type {"+" :sharing}] ["[0]" meta] [abstract [monad {"+" do}]] [control - ["[0]" try {"+" Try}]] + ["[0]" maybe] + ["[0]" try] + ["[0]" io] + [concurrency + ["[0]" atom {"+" Atom}]]] [data - [text - ["%" format]]] + [collection + ["[0]" dictionary {"+" Dictionary}]]] [math - [number {"+" hex} - ["n" nat] - ["[0]" i64]]]]] + [number + ["n" nat]]]]] ["[0]" // {"+" Operation} [macro {"+" Expander}] ["[1][0]" type] @@ -28,11 +32,16 @@ [/// ["[0]" phase] [meta - ["[0]" archive {"+" Archive}]]]]]]]) + ["[0]" archive {"+" Archive} + ["[0]" module]]]]]]]]) (type: .public Eval (-> Archive Type Code (Operation Any))) +(def: evals + (Atom (Dictionary module.ID Nat)) + (atom.atom (dictionary.empty n.hash))) + (def: .public (evaluator expander synthesis_state generation_state generate) (All (_ anchor expression artifact) (-> Expander @@ -43,9 +52,7 @@ (let [analyze (analysisP.phase expander)] (function (eval archive type exprC) (do phase.monad - [count (extensionP.lifted - meta.seed) - exprA (<| (//type.expecting type) + [exprA (<| (//type.expecting type) //scope.reset (analyze archive exprC)) module (extensionP.lifted @@ -54,11 +61,18 @@ (do try.monad [exprS (|> exprA (synthesisP.phase archive) (phase.result synthesis_state))]) (phase.result generation_state) - (let [shift (|> count - (i64.left_shifted 32) - (i64.or (hex "FF,FF,FF,FF")))]) (do phase.monad - [exprO (generation.with_registry_shift shift - (generate archive exprS)) - module_id (generation.module_id module archive)] - (generation.evaluate! [module_id count] exprO))))))) + [@module (:sharing [anchor expression artifact] + (generation.Phase anchor expression artifact) + generate + + (generation.Operation anchor expression artifact module.ID) + (generation.module_id module archive)) + .let [[evals _] (io.run! (atom.update! (dictionary.revised' @module 0 ++) ..evals)) + @eval (maybe.else 0 (dictionary.value @module evals))] + exprO (<| (generation.with_registry_shift (|> @module + ("lux i64 left-shift" 16) + ("lux i64 or" @eval) + ("lux i64 left-shift" 32))) + (generate archive exprS))] + (generation.evaluate! 
[@module @eval] exprO))))))) diff --git a/stdlib/source/library/lux/tool/compiler/language/lux/analysis/pattern.lux b/stdlib/source/library/lux/tool/compiler/language/lux/analysis/pattern.lux index 21b6218ba..cfce834d0 100644 --- a/stdlib/source/library/lux/tool/compiler/language/lux/analysis/pattern.lux +++ b/stdlib/source/library/lux/tool/compiler/language/lux/analysis/pattern.lux @@ -5,7 +5,7 @@ [equivalence {"+" Equivalence}]] [data [text - ["%" format {"+" Format}]]] + ["%" format]]] [math [number ["n" nat]]]]] @@ -41,7 +41,7 @@ false))) (def: .public (format it) - (Format Pattern) + (%.Format Pattern) (case it {#Simple it} (//simple.format it) diff --git a/stdlib/source/library/lux/tool/compiler/language/lux/phase/analysis/case.lux b/stdlib/source/library/lux/tool/compiler/language/lux/phase/analysis/case.lux index e1b1a8c07..3e81c08b8 100644 --- a/stdlib/source/library/lux/tool/compiler/language/lux/phase/analysis/case.lux +++ b/stdlib/source/library/lux/tool/compiler/language/lux/phase/analysis/case.lux @@ -22,7 +22,6 @@ ["[0]" type ["[0]" check]]]] ["[0]" / "_" - ["[1][0]" coverage {"+" Coverage}] ["/[1]" // "_" ["[1][0]" complex] ["/[1]" // "_" @@ -33,7 +32,8 @@ ["[1][0]" complex] ["[1][0]" pattern {"+" Pattern}] ["[1][0]" type] - ["[1][0]" scope]] + ["[1][0]" scope] + ["[1][0]" coverage {"+" Coverage}]] [/// ["[1]" phase]]]]]]) @@ -64,7 +64,7 @@ (list#each (function (_ [slot value]) (list slot value))) list#conjoint)))] - ["Coverage" (/coverage.%coverage coverage)])) + ["Coverage" (/coverage.format coverage)])) (exception: .public (cannot_have_empty_branches [message Text]) message) @@ -337,11 +337,11 @@ (function (_ [patternT bodyT]) (analyse_pattern {.#None} inputT patternT (analyse archive bodyT))) branchesT) - outputHC (|> outputH product.left /coverage.determine) - outputTC (monad.each ! (|>> product.left /coverage.determine) outputT) - _ (.case (monad.mix try.monad /coverage.merged outputHC outputTC) + outputHC (|> outputH product.left /coverage.coverage /.of_try) + outputTC (monad.each ! (|>> product.left /coverage.coverage /.of_try) outputT) + _ (.case (monad.mix try.monad /coverage.composite outputHC outputTC) {try.#Success coverage} - (///.assertion non_exhaustive_pattern_matching [inputC branches coverage] + (///.assertion ..non_exhaustive_pattern_matching [inputC branches coverage] (/coverage.exhaustive? 
coverage)) {try.#Failure error} diff --git a/stdlib/source/library/lux/tool/compiler/language/lux/phase/analysis/case/coverage.lux b/stdlib/source/library/lux/tool/compiler/language/lux/phase/analysis/case/coverage.lux deleted file mode 100644 index 7ec92c76b..000000000 --- a/stdlib/source/library/lux/tool/compiler/language/lux/phase/analysis/case/coverage.lux +++ /dev/null @@ -1,419 +0,0 @@ -(.using - [library - [lux "*" - [abstract - equivalence - ["[0]" monad {"+" do}]] - [control - ["[0]" maybe] - ["[0]" try {"+" Try} ("[1]#[0]" monad)] - ["[0]" exception {"+" exception:}]] - [data - ["[0]" bit ("[1]#[0]" equivalence)] - ["[0]" text - ["%" format {"+" Format format}]] - [collection - ["[0]" list ("[1]#[0]" functor mix)] - ["[0]" dictionary {"+" Dictionary}] - ["[0]" set {"+" Set}]]] - [math - [number - ["n" nat] - ["i" int] - ["r" rev] - ["f" frac]]]]] - ["[0]" //// "_" - [// - ["/" analysis {"+" Operation} - ["[1][0]" simple] - ["[1][0]" complex] - ["[1][0]" pattern {"+" Pattern}]] - [/// - ["[1]" phase ("[1]#[0]" monad)]]]]) - -(exception: .public invalid_tuple_pattern - "Tuple size must be >= 2") - -(def: cases - (-> (Maybe Nat) Nat) - (|>> (maybe.else 0))) - -(def: known_cases? - (-> Nat Bit) - (n.> 0)) - -... The coverage of a pattern-matching expression summarizes how well -... all the possible values of an input are being covered by the -... different patterns involved. -... Ideally, the pattern-matching has "exhaustive" coverage, which just -... means that every possible value can be matched by at least 1 -... pattern. -... Every other coverage is considered partial, and it would be valued -... as insuficient (since it could lead to runtime errors due to values -... not being handled by any pattern). -(type: .public Coverage - (Rec Coverage - (Variant - {#Bit Bit} - {#Nat (Set Nat)} - {#Int (Set Int)} - {#Rev (Set Rev)} - {#Frac (Set Frac)} - {#Text (Set Text)} - {#Variant (Maybe Nat) (Dictionary Nat Coverage)} - {#Seq Coverage Coverage} - {#Alt Coverage Coverage} - {#Exhaustive}))) - -(def: .public (exhaustive? coverage) - (-> Coverage Bit) - (case coverage - {#Exhaustive _} - #1 - - _ - #0)) - -(def: .public (%coverage value) - (Format Coverage) - (case value - {#Bit value'} - (|> value' - %.bit - (text.enclosed ["{#Bit " "}"])) - - (^template [ ] - [{ it} - (|> it - set.list - (list#each ) - (text.interposed " ") - (text.enclosed [(format "{" (%.symbol (symbol )) " ") "}"]))]) - ([#Nat %.nat] - [#Int %.int] - [#Rev %.rev] - [#Frac %.frac] - [#Text %.text]) - - {#Variant ?max_cases cases} - (|> cases - dictionary.entries - (list#each (function (_ [idx coverage]) - (format (%.nat idx) " " (%coverage coverage)))) - (text.interposed " ") - (text.enclosed ["{" "}"]) - (format (%.nat (..cases ?max_cases)) " ") - (text.enclosed ["{#Variant " "}"])) - - {#Seq left right} - (format "{#Seq " (%coverage left) " " (%coverage right) "}") - - {#Alt left right} - (format "{#Alt " (%coverage left) " " (%coverage right) "}") - - {#Exhaustive} - "#Exhaustive")) - -(def: .public (determine pattern) - (-> Pattern (Operation Coverage)) - (case pattern - (^or {/pattern.#Simple {/simple.#Unit}} - {/pattern.#Bind _}) - (////#in {#Exhaustive}) - - ... Simple patterns always have partial coverage because there - ... are too many possibilities as far as values go. 
- (^template [ ] - [{/pattern.#Simple { it}} - (////#in { (set.of_list (list it))})]) - ([/simple.#Nat #Nat n.hash] - [/simple.#Int #Int i.hash] - [/simple.#Rev #Rev r.hash] - [/simple.#Frac #Frac f.hash] - [/simple.#Text #Text text.hash]) - - ... Bits are the exception, since there is only "#1" and - ... "#0", which means it is possible for bit - ... pattern-matching to become exhaustive if complementary parts meet. - {/pattern.#Simple {/simple.#Bit value}} - (////#in {#Bit value}) - - ... Tuple patterns can be exhaustive if there is exhaustiveness for all of - ... their sub-patterns. - {/pattern.#Complex {/complex.#Tuple membersP+}} - (case (list.reversed membersP+) - (^or {.#End} {.#Item _ {.#End}}) - (/.except ..invalid_tuple_pattern []) - - {.#Item lastP prevsP+} - (do ////.monad - [lastC (determine lastP)] - (monad.mix ////.monad - (function (_ leftP rightC) - (do ////.monad - [leftC (determine leftP)] - (case rightC - {#Exhaustive} - (in leftC) - - _ - (in {#Seq leftC rightC})))) - lastC prevsP+))) - - ... Variant patterns can be shown to be exhaustive if all the possible - ... cases are handled exhaustively. - {/pattern.#Complex {/complex.#Variant [lefts right? value]}} - (do ////.monad - [value_coverage (determine value) - .let [idx (if right? - (++ lefts) - lefts)]] - (in {#Variant (if right? - {.#Some idx} - {.#None}) - (|> (dictionary.empty n.hash) - (dictionary.has idx value_coverage))})))) - -(def: (xor left right) - (-> Bit Bit Bit) - (or (and left (not right)) - (and (not left) right))) - -... The coverage checker not only verifies that pattern-matching is -... exhaustive, but also that there are no redundant patterns. -... Redundant patterns will never be executed, since there will -... always be a pattern prior to them that would match the input. -... Because of that, the presence of redundant patterns is assumed to -... be a bug, likely due to programmer carelessness. -(exception: .public (redundant_pattern [so_far Coverage - addition Coverage]) - (exception.report - ["Coverage so-far" (%coverage so_far)] - ["Coverage addition" (%coverage addition)])) - -(def: (flat_alt coverage) - (-> Coverage (List Coverage)) - (case coverage - {#Alt left right} - (list& left (flat_alt right)) - - _ - (list coverage))) - -(implementation: equivalence - (Equivalence Coverage) - - (def: (= reference sample) - (case [reference sample] - [{#Exhaustive} {#Exhaustive}] - #1 - - [{#Bit sideR} {#Bit sideS}] - (bit#= sideR sideS) - - (^template [] - [[{ partialR} { partialS}] - (# set.equivalence = partialR partialS)]) - ([#Nat] - [#Int] - [#Rev] - [#Frac] - [#Text]) - - [{#Variant allR casesR} {#Variant allS casesS}] - (and (n.= (cases allR) - (cases allS)) - (# (dictionary.equivalence =) = casesR casesS)) - - [{#Seq leftR rightR} {#Seq leftS rightS}] - (and (= leftR leftS) - (= rightR rightS)) - - [{#Alt _} {#Alt _}] - (let [flatR (flat_alt reference) - flatS (flat_alt sample)] - (and (n.= (list.size flatR) (list.size flatS)) - (list.every? (function (_ [coverageR coverageS]) - (= coverageR coverageS)) - (list.zipped/2 flatR flatS)))) - - _ - #0))) - -(open: "coverage#[0]" ..equivalence) - -(exception: .public (variants_do_not_match [addition_cases Nat - so_far_cases Nat]) - (exception.report - ["So-far Cases" (%.nat so_far_cases)] - ["Addition Cases" (%.nat addition_cases)])) - -... After determining the coverage of each individual pattern, it is -... necessary to merge them all to figure out if the entire -... pattern-matching expression is exhaustive and whether it contains -... 
redundant patterns. -(def: .public (merged addition so_far) - (-> Coverage Coverage (Try Coverage)) - (with_expansions [ (exception.except ..redundant_pattern [so_far addition])] - (case [addition so_far] - ... 2 bit coverages are exhaustive if they complement one another. - [{#Bit sideA} {#Bit sideSF}] - (if (xor sideA sideSF) - (try#in {#Exhaustive}) - ) - - (^template [] - [[{ partialA} { partialSF}] - (let [common (set.intersection partialA partialSF)] - (if (set.empty? common) - (try#in { (set.union partialA partialSF)}) - ))]) - ([#Nat] - [#Int] - [#Rev] - [#Frac] - [#Text]) - - [{#Variant allA casesA} {#Variant allSF casesSF}] - (let [addition_cases (cases allSF) - so_far_cases (cases allA)] - (cond (and (known_cases? addition_cases) - (known_cases? so_far_cases) - (not (n.= addition_cases so_far_cases))) - (exception.except ..variants_do_not_match [addition_cases so_far_cases]) - - (# (dictionary.equivalence ..equivalence) = casesSF casesA) - - - ... else - (do [! try.monad] - [casesM (monad.mix ! - (function (_ [tagA coverageA] casesSF') - (case (dictionary.value tagA casesSF') - {.#Some coverageSF} - (do ! - [coverageM (merged coverageA coverageSF)] - (in (dictionary.has tagA coverageM casesSF'))) - - {.#None} - (in (dictionary.has tagA coverageA casesSF')))) - casesSF (dictionary.entries casesA))] - (in (if (and (or (known_cases? addition_cases) - (known_cases? so_far_cases)) - (n.= (++ (n.max addition_cases so_far_cases)) - (dictionary.size casesM)) - (list.every? exhaustive? (dictionary.values casesM))) - {#Exhaustive} - {#Variant (case allSF - {.#Some _} - allSF - - _ - allA) - casesM}))))) - - [{#Seq leftA rightA} {#Seq leftSF rightSF}] - (case [(coverage#= leftSF leftA) (coverage#= rightSF rightA)] - ... Same prefix - [#1 #0] - (do try.monad - [rightM (merged rightA rightSF)] - (if (exhaustive? rightM) - ... If all that follows is exhaustive, then it can be safely dropped - ... (since only the "left" part would influence whether the - ... merged coverage is exhaustive or not). - (in leftSF) - (in {#Seq leftSF rightM}))) - - ... Same suffix - [#0 #1] - (do try.monad - [leftM (merged leftA leftSF)] - (in {#Seq leftM rightA})) - - ... The 2 sequences cannot possibly be merged. - [#0 #0] - (try#in {#Alt so_far addition}) - - ... There is nothing the addition adds to the coverage. - [#1 #1] - ) - - ... The addition cannot possibly improve the coverage. - [_ {#Exhaustive}] - - - ... The addition completes the coverage. - [{#Exhaustive} _] - (try#in {#Exhaustive}) - - ... The left part will always match, so the addition is redundant. - (^multi [{#Seq left right} single] - (coverage#= left single)) - - - ... The right part is not necessary, since it can always match the left. - (^multi [single {#Seq left right}] - (coverage#= left single)) - (try#in single) - - ... When merging a new coverage against one based on Alt, it may be - ... that one of the many coverages in the Alt is complementary to - ... the new one, so effort must be made to fuse carefully, to match - ... the right coverages together. - ... If one of the Alt sub-coverages matches the new one, the cycle - ... must be repeated, in case the resulting coverage can now match - ... other ones in the original Alt. - ... This process must be repeated until no further productive - ... merges can be done. - [_ {#Alt leftS rightS}] - (do [! 
try.monad] - [.let [fuse_once (: (-> Coverage (List Coverage) - (Try [(Maybe Coverage) - (List Coverage)])) - (function (_ coverageA possibilitiesSF) - (loop [altsSF possibilitiesSF] - (case altsSF - {.#End} - (in [{.#None} (list coverageA)]) - - {.#Item altSF altsSF'} - (case (merged coverageA altSF) - {try.#Success altMSF} - (case altMSF - {#Alt _} - (do ! - [[success altsSF+] (again altsSF')] - (in [success {.#Item altSF altsSF+}])) - - _ - (in [{.#Some altMSF} altsSF'])) - - {try.#Failure error} - {try.#Failure error}) - ))))] - [successA possibilitiesSF] (fuse_once addition (flat_alt so_far))] - (loop [successA successA - possibilitiesSF possibilitiesSF] - (case successA - {.#Some coverageA'} - (do ! - [[successA' possibilitiesSF'] (fuse_once coverageA' possibilitiesSF)] - (again successA' possibilitiesSF')) - - {.#None} - (case (list.reversed possibilitiesSF) - {.#Item last prevs} - (in (list#mix (function (_ left right) {#Alt left right}) - last - prevs)) - - {.#End} - (undefined))))) - - _ - (if (coverage#= so_far addition) - ... The addition cannot possibly improve the coverage. - - ... There are now 2 alternative paths. - (try#in {#Alt so_far addition}))))) diff --git a/stdlib/source/test/lux/data/binary.lux b/stdlib/source/test/lux/data/binary.lux index 0a354092d..c9e821229 100644 --- a/stdlib/source/test/lux/data/binary.lux +++ b/stdlib/source/test/lux/data/binary.lux @@ -19,8 +19,8 @@ [math ["[0]" random {"+" Random}] [number - ["[0]" i64] - ["n" nat]]]]] + ["n" nat] + ["[0]" i64]]]]] [\\library ["[0]" / ["!" \\unsafe]]]) @@ -204,6 +204,7 @@ (_.cover [/.after] (and (# /.equivalence = sample (/.after 0 sample)) (# /.equivalence = (/.empty 0) (/.after size sample)) + (n.= (n.- offset size) (/.size (/.after offset sample))) (case (list.reversed (..as_list sample)) {.#End} false @@ -227,6 +228,15 @@ copy/1 (/.read/8! 1 copy)] (in (and (n.= sample/0 copy/0) (n.= 0 copy/1))))))) + (_.cover [/.cannot_copy] + (and (not (throws? /.cannot_copy + (/.copy size 0 sample 0 (/.empty size)))) + (throws? /.cannot_copy + (/.copy (n.+ offset size) 0 sample 0 (/.empty size))) + (throws? /.cannot_copy + (/.copy size offset sample 0 (/.empty size))) + (throws? 
/.cannot_copy + (/.copy size 0 sample offset (/.empty size))))) ..test|unsafe )))) diff --git a/stdlib/source/test/lux/math/number/int.lux b/stdlib/source/test/lux/math/number/int.lux index 394c34c15..2c47ee6d1 100644 --- a/stdlib/source/test/lux/math/number/int.lux +++ b/stdlib/source/test/lux/math/number/int.lux @@ -36,15 +36,15 @@ ($enum.spec /.enum random.int)) (_.for [/.interval] ($interval.spec /.interval random.int)) - (~~ (template [ ] - [(_.for [ ] + (~~ (template [] + [(_.for [] ($monoid.spec /.equivalence random.int))] - [/.+ /.addition] - [/.* /.multiplication] + [/.addition] + [/.multiplication] - [/.min /.minimum] - [/.max /.maximum] + [/.minimum] + [/.maximum] )) (~~ (template [] [(_.for [] @@ -77,24 +77,55 @@ Test (<| (_.covering /._) (_.for [.Int]) + (let [(^open "/#[0]") /.interval]) ($_ _.and (do random.monad - [sample random.int] + [sample random.int + left random.int + right random.int] ($_ _.and + (_.cover [/.+] + (and (/.= (/.+ left right) + (/.+ right left)) + (/.= sample (/.+ +0 sample)))) (_.cover [/.-] (and (/.= +0 (/.- sample sample)) (/.= sample (/.- +0 sample)) (/.= (/.opposite sample) - (/.- sample +0)))) + (/.- sample +0)) + (/.= /#bottom + (/.- /#bottom +0)))) + (_.cover [/.*] + (and (/.= (/.* left right) + (/.* right left)) + (/.= sample (/.* +1 sample)) + (/.= /#bottom + (/.* -1 /#bottom)))) (_.cover [/./] (and (/.= +1 (/./ sample sample)) - (/.= sample (/./ +1 sample)))) + (/.= sample (/./ +1 sample)) + (/.= /#bottom + (/./ -1 /#bottom)))) (_.cover [/.abs] (bit#= (/.> sample (/.abs sample)) (/.negative? sample))) (_.cover [/.signum] (/.= (/.abs sample) (/.* (/.signum sample) sample))) + (_.cover [/.min] + (and (/.= (/.min left right) + (/.min right left)) + (/.= sample + (/.min /#top sample)) + (/.= /#bottom + (/.min /#bottom sample)))) + (_.cover [/.max] + (and (/.= (/.max left right) + (/.max right left)) + (/.= /#top + (/.max /#top sample)) + (/.= sample + (/.max /#bottom sample)))) )) (do random.monad [left random.int diff --git a/stdlib/source/test/lux/tool/compiler/language/lux/analysis.lux b/stdlib/source/test/lux/tool/compiler/language/lux/analysis.lux index 858a294ae..75f0d5d1c 100644 --- a/stdlib/source/test/lux/tool/compiler/language/lux/analysis.lux +++ b/stdlib/source/test/lux/tool/compiler/language/lux/analysis.lux @@ -33,6 +33,7 @@ ["[1][0]" scope] ["[1][0]" simple] ["[1][0]" type] + ["[1][0]" coverage] [//// ["[1][0]" reference ["[2][0]" variable]] @@ -474,4 +475,5 @@ /scope.test /simple.test /type.test + /coverage.test )))) diff --git a/stdlib/source/test/lux/tool/compiler/language/lux/analysis/coverage.lux b/stdlib/source/test/lux/tool/compiler/language/lux/analysis/coverage.lux new file mode 100644 index 000000000..ab856f9a1 --- /dev/null +++ b/stdlib/source/test/lux/tool/compiler/language/lux/analysis/coverage.lux @@ -0,0 +1,453 @@ +(.using + [library + [lux "*" + ["_" test {"+" Test}] + [abstract + ["[0]" monad {"+" do}] + ["[0]" predicate] + [\\specification + ["$[0]" equivalence]]] + [control + [pipe {"+" case>}] + ["[0]" try {"+" Try} ("[1]#[0]" functor)] + ["[0]" exception {"+" Exception}]] + [data + ["[0]" product] + ["[0]" bit ("[1]#[0]" equivalence)] + ["[0]" text ("[1]#[0]" equivalence) + ["%" format]] + [collection + ["[0]" set] + ["[0]" dictionary] + ["[0]" list ("[1]#[0]" functor mix)]]] + [math + ["[0]" random {"+" Random} ("[1]#[0]" monad)] + [number + ["n" nat ("[1]#[0]" interval)] + ["i" int] + ["r" rev] + ["f" frac]]]]] + [\\library + ["[0]" / + ["/[1]" // "_" + ["[1][0]" simple] + ["[1][0]" complex] + ["[1][0]" 
pattern {"+" Pattern}]]]]) + +(def: spread 16) + +(def: random_tag + (Random Nat) + (random#each (n.% ..spread) random.nat)) + +(def: .public random + (Random /.Coverage) + (<| random.rec + (function (_ again)) + ($_ random.or + (random#in []) + random.bit + (random.set n.hash ..spread random.nat) + (random.set i.hash ..spread random.int) + (random.set r.hash ..spread random.rev) + (random.set f.hash ..spread random.frac) + (random.set text.hash ..spread (random.unicode 1)) + ($_ random.and + (random.maybe (random#in ..spread)) + (do [! random.monad] + [cases ..random_tag + cases (random.set n.hash cases ..random_tag)] + (|> cases + set.list + (monad.each ! (function (_ case) (# ! each (|>> [case]) again))) + (# ! each (dictionary.of_list n.hash)))) + ) + (random.and again again) + (random.and again again) + ))) + +(def: (ranged min range) + (-> Nat Nat (Random Nat)) + (random#each (|>> (n.% (++ range)) (n.+ min)) + random.nat)) + +(def: random_pattern + (Random [/.Coverage Pattern]) + (<| random.rec + (function (_ again)) + (`` ($_ random.either + (random#in [{/.#Exhaustive} + {//pattern.#Simple {//simple.#Unit}}]) + (do random.monad + [it random.bit] + (in [{/.#Bit it} + {//pattern.#Simple {//simple.#Bit it}}])) + (~~ (template [ ] + [(do random.monad + [it ] + (in [{ (set.of_list (list it))} + {//pattern.#Simple { it}}]))] + + [random.nat n.hash /.#Nat //simple.#Nat] + [random.int i.hash /.#Int //simple.#Int] + [random.rev r.hash /.#Rev //simple.#Rev] + [random.frac f.hash /.#Frac //simple.#Frac] + [(random.unicode 1) text.hash /.#Text //simple.#Text] + )) + + (do [! random.monad] + [tag (# ! each ++ ..random_tag) + right? random.bit + .let [lefts (//complex.lefts right? tag)] + [sub_coverage sub_pattern] again] + (in [{/.#Variant (if right? {.#Some tag} {.#None}) + (dictionary.of_list n.hash (list [tag sub_coverage]))} + {//pattern.#Complex + {//complex.#Variant + [//complex.#lefts lefts + //complex.#right? right? + //complex.#value sub_pattern]}}])) + + (do [! random.monad] + [arity (..ranged 2 (n.- 2 ..spread)) + it (random.list arity again) + .let [coverages (list#each product.left it) + patterns (list#each product.right it)]] + (in [(|> coverages + (list.only (|>> /.exhaustive? not)) + list.reversed + (case> {.#End} + {/.#Exhaustive} + + {.#Item last prevs} + (list#mix (function (_ left right) + {/.#Seq left right}) + last + prevs))) + {//pattern.#Complex {//complex.#Tuple patterns}}])) + + (do random.monad + [register random.nat] + (in [{/.#Exhaustive} + {//pattern.#Bind register}])) + )))) + +(def: (failure? exception it) + (All (_ a) (-> (Exception a) (Try /.Coverage) Bit)) + (case it + {try.#Failure error} + (exception.match? exception error) + + _ + false)) + +(def: test|value + Test + (<| (let [(^open "/#[0]") /.equivalence]) + (do [! random.monad] + [left ..random + right ..random] + ($_ _.and + (_.for [/.equivalence] + ($equivalence.spec /.equivalence ..random)) + + (_.cover [/.exhaustive?] + (bit#= (/#= {/.#Exhaustive} left) + (/.exhaustive? left))) + (_.cover [/.format] + (bit#= (/#= left right) + (text#= (/.format left) (/.format right)))) + )))) + +(def: test|coverage + Test + (<| (let [(^open "/#[0]") /.equivalence]) + (do [! random.monad] + [[expected pattern] ..random_pattern] + ($_ _.and + (_.cover [/.coverage] + (|> pattern + /.coverage + (try#each (/#= expected)) + (try.else false))) + (_.cover [/.invalid_tuple] + (let [invalid? (..failure? /.invalid_tuple)] + (and (|> (list) + {//complex.#Tuple} + {//pattern.#Complex} + /.coverage + invalid?) 
+ (|> (list pattern) + {//complex.#Tuple} + {//pattern.#Complex} + /.coverage + invalid?) + (|> (list pattern pattern) + {//complex.#Tuple} + {//pattern.#Complex} + /.coverage + invalid? + not)))) + )))) + +(def: random_partial_pattern + (Random [/.Coverage Pattern]) + (random.only (|>> product.left /.exhaustive? not) + ..random_pattern)) + +(def: test|variant + Test + (<| (let [(^open "/#[0]") /.equivalence]) + (do [! random.monad] + [[expected/0 pattern/0] ..random_partial_pattern + [expected/1 pattern/1] (random.only (|>> product.left (/#= expected/0) not) + ..random_partial_pattern) + expected_maximum (# ! each (n.+ 2) ..random_tag) + .let [random_tag (random#each (n.% expected_maximum) random.nat)] + tag/0 random_tag + tag/1 (random.only (|>> (n.= tag/0) not) random_tag) + .let [cases (dictionary.of_list n.hash (list [tag/0 expected/0] + [tag/1 expected/1])) + expected_minimum (++ (n.max tag/0 tag/1))]] + ($_ _.and + (_.cover [/.minimum] + (and (n.= expected_minimum (/.minimum [{.#None} cases])) + (n.= expected_maximum (/.minimum [{.#Some expected_maximum} cases])))) + (_.cover [/.maximum] + (and (n.= n#top (/.maximum [{.#None} cases])) + (n.= expected_maximum (/.maximum [{.#Some expected_maximum} cases])))) + )))) + +(def: test|composite + Test + (<| (let [(^open "/#[0]") /.equivalence]) + (do [! random.monad] + [[expected/0 pattern/0] ..random_partial_pattern + [expected/1 pattern/1] (random.only (|>> product.left (/#= expected/0) not) + ..random_partial_pattern) + [expected/2 pattern/2] (random.only ($_ predicate.and + (|>> product.left (/#= expected/0) not) + (|>> product.left (/#= expected/1) not) + (|>> product.left (case> {/.#Variant _} false _ true))) + ..random_partial_pattern) + + bit random.bit + nat random.nat + int random.int + rev random.rev + frac random.frac + text (random.unicode 1) + + arity (# ! each (n.+ 2) ..random_tag) + .let [random_tag (random#each (n.% arity) random.nat)] + tag/0 random_tag + tag/1 (random.only (|>> (n.= tag/0) not) random_tag)] + ($_ _.and + (_.cover [/.composite] + (let [composes_simples! + (`` (and (|> (/.composite {/.#Bit bit} {/.#Bit (not bit)}) + (try#each (/#= {/.#Exhaustive})) + (try.else false)) + (|> {/.#Bit bit} + (/.composite {/.#Exhaustive}) + (try#each (/#= {/.#Exhaustive})) + (try.else false)) + (~~ (template [ ] + [(|> (/.composite { (set.of_list (list ))} + { (set.of_list (list (|> )))}) + (try#each (/#= { (set.of_list (list (|> )))})) + (try.else false)) + (|> { (set.of_list (list ))} + (/.composite {/.#Exhaustive}) + (try#each (/#= {/.#Exhaustive})) + (try.else false))] + + [/.#Nat n.hash nat ++] + [/.#Int i.hash int ++] + [/.#Rev r.hash rev ++] + [/.#Frac f.hash frac (f.+ frac)] + [/.#Text text.hash text (%.format text)] + )))) + + composes_variants! + (let [composes_different_variants! + (let [composes? (: (-> (Maybe Nat) (Maybe Nat) (Maybe Nat) Bit) + (function (_ left right both) + (|> (/.composite {/.#Variant left (dictionary.of_list n.hash (list [tag/0 expected/0]))} + {/.#Variant right (dictionary.of_list n.hash (list [tag/1 expected/1]))}) + (try#each (/#= {/.#Variant both (dictionary.of_list n.hash (list [tag/0 expected/0] + [tag/1 expected/1]))})) + (try.else false))))] + (and (composes? {.#None} {.#None} {.#None}) + (composes? {.#Some arity} {.#None} {.#Some arity}) + (composes? {.#None} {.#Some arity} {.#Some arity}) + (composes? {.#Some arity} {.#Some arity} {.#Some arity}))) + + composes_same_variants! + (let [composes? 
(: (-> (Maybe Nat) (Maybe Nat) (Maybe Nat) Bit) + (function (_ left right both) + (|> (do try.monad + [variant (/.composite {/.#Variant left (dictionary.of_list n.hash (list [tag/0 expected/0]))} + {/.#Variant right (dictionary.of_list n.hash (list [tag/0 expected/1]))}) + expected (/.composite expected/0 expected/1)] + (in (/#= {/.#Variant both (dictionary.of_list n.hash (list [tag/0 expected]))} + variant))) + (try.else false))))] + (and (composes? {.#None} {.#None} {.#None}) + (composes? {.#Some arity} {.#None} {.#Some arity}) + (composes? {.#None} {.#Some arity} {.#Some arity}) + (composes? {.#Some arity} {.#Some arity} {.#Some arity})))] + (and composes_different_variants! + composes_same_variants! + (and (|> {/.#Variant {.#None} (dictionary.of_list n.hash (list [tag/0 expected/0]))} + (/.composite {/.#Exhaustive}) + (try#each (/#= {/.#Exhaustive})) + (try.else false)) + (|> {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 expected/0]))} + (/.composite {/.#Exhaustive}) + (try#each (/#= {/.#Exhaustive})) + (try.else false))))) + + composes_sequences! + (and (|> (/.composite {/.#Seq expected/0 expected/1} + {/.#Seq expected/1 expected/0}) + (try#each (/#= {/.#Alt {/.#Seq expected/0 expected/1} + {/.#Seq expected/1 expected/0}})) + (try.else false)) + (|> (do try.monad + [seq (/.composite {/.#Seq expected/0 expected/0} + {/.#Seq expected/0 expected/1}) + expected (/.composite expected/0 expected/1)] + (in (/#= (if (/.exhaustive? expected) + expected/0 + {/.#Seq expected/0 expected}) + seq))) + (try.else false)) + (|> (do try.monad + [seq (/.composite {/.#Seq expected/0 expected/0} + {/.#Seq expected/1 expected/0}) + expected (/.composite expected/0 expected/1)] + (in (/#= {/.#Seq expected expected/0} + seq))) + (try.else false)) + (|> (/.composite {/.#Seq expected/0 expected/1} + expected/1) + (try#each (/#= {/.#Alt {/.#Seq expected/0 expected/1} + expected/1})) + (try.else false)) + (|> (/.composite expected/1 + {/.#Seq expected/0 expected/1}) + (try#each (/#= {/.#Alt expected/1 + {/.#Seq expected/0 expected/1}})) + (try.else false)) + (|> (/.composite expected/0 + {/.#Seq expected/0 expected/1}) + (try#each (/#= expected/0)) + (try.else false))) + + composes_alts! 
+ (and (|> (do try.monad + [alt (/.composite {/.#Exhaustive} + {/.#Alt expected/0 + expected/1})] + (in (/#= {/.#Exhaustive} + alt))) + (try.else false)) + (|> (do try.monad + [alt (/.composite {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 {/.#Exhaustive}]))} + {/.#Alt {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 expected/0]))} + {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 expected/1]))}})] + (in (/#= {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 {/.#Exhaustive}]))} + alt))) + (try.else false)) + (|> (do try.monad + [alt (/.composite {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 {/.#Exhaustive}]))} + {/.#Alt {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 expected/0]))} + {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/1 expected/1]))}})] + (in (/#= {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 {/.#Exhaustive}] + [tag/1 expected/1]))} + alt))) + (try.else false)) + (|> (do try.monad + [alt (/.composite {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 expected/2]))} + {/.#Alt {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 expected/0]))} + {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/1 expected/1]))}}) + expected (/.composite expected/2 expected/0)] + (in (/#= {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 expected] + [tag/1 expected/1]))} + alt))) + (try.else false)) + (|> (do try.monad + [alt (/.composite {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/1 expected/2]))} + {/.#Alt {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 expected/0]))} + {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/1 expected/1]))}}) + expected (/.composite expected/2 expected/1)] + (in (/#= {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 expected/0] + [tag/1 expected]))} + alt))) + (try.else false)) + (|> (do try.monad + [alt (/.composite {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/1 expected/1]))} + {/.#Alt {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 expected/0]))} + expected/2})] + (in (/#= {/.#Alt expected/2 + {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 expected/0] + [tag/1 expected/1]))}} + alt))) + (try.else false)))] + (and composes_simples! + composes_variants! + composes_sequences! + composes_alts!))) + (_.cover [/.redundancy] + (let [redundant? (..failure? /.redundancy)] + (`` (and (redundant? (/.composite {/.#Exhaustive} {/.#Exhaustive})) + (~~ (template [] + [(redundant? (/.composite )) + (redundant? (/.composite {/.#Exhaustive}))] + + [{/.#Bit bit}] + [{/.#Nat (set.of_list n.hash (list nat))}] + [{/.#Int (set.of_list i.hash (list int))}] + [{/.#Rev (set.of_list r.hash (list rev))}] + [{/.#Frac (set.of_list f.hash (list frac))}] + [{/.#Text (set.of_list text.hash (list text))}] + [{/.#Variant {.#None} (dictionary.of_list n.hash (list [tag/0 expected/0]))}] + [{/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 expected/0]))}] + [{/.#Seq expected/0 expected/1}] + )) + (redundant? (/.composite {/.#Seq expected/0 expected/1} expected/0)))))) + (_.cover [/.variant_mismatch] + (let [mismatch? (..failure? /.variant_mismatch)] + (and (not (mismatch? 
(/.composite {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 expected/0]))} + {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/1 expected/1]))}))) + + (mismatch? (/.composite {/.#Variant {.#Some (++ arity)} (dictionary.of_list n.hash (list [tag/0 expected/0]))} + {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/1 expected/1]))})) + (mismatch? (/.composite {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 expected/0]))} + {/.#Variant {.#Some (++ arity)} (dictionary.of_list n.hash (list [tag/1 expected/1]))})) + + (mismatch? (/.composite {/.#Variant {.#Some (-- arity)} (dictionary.of_list n.hash (list [tag/0 expected/0]))} + {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/1 expected/1]))})) + (mismatch? (/.composite {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/0 expected/0]))} + {/.#Variant {.#Some (-- arity)} (dictionary.of_list n.hash (list [tag/1 expected/1]))})) + + (not (mismatch? (/.composite {/.#Variant {.#None} (dictionary.of_list n.hash (list [tag/0 expected/0]))} + {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/1 expected/1]))}))) + (mismatch? (/.composite {/.#Variant {.#None} (dictionary.of_list n.hash (list [arity expected/0]))} + {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/1 expected/1]))})) + (not (mismatch? (/.composite {/.#Variant {.#None} (dictionary.of_list n.hash (list [(-- arity) expected/0]))} + {/.#Variant {.#Some arity} (dictionary.of_list n.hash (list [tag/1 expected/1]))})))))) + )))) + +(def: .public test + Test + (<| (_.covering /._) + (_.for [/.Coverage]) + ($_ _.and + ..test|value + ..test|coverage + (_.for [/.Variant] + ..test|variant) + ..test|composite + ))) diff --git a/stdlib/source/unsafe/lux/data/binary.lux b/stdlib/source/unsafe/lux/data/binary.lux index 868dd82f3..d91893c38 100644 --- a/stdlib/source/unsafe/lux/data/binary.lux +++ b/stdlib/source/unsafe/lux/data/binary.lux @@ -59,12 +59,11 @@ (type: .public Binary (array.Array (I64 Any))))) -(template: .public (empty size) - [(with_expansions [ (: Nat size) - (|> - (ffi.array byte) - (: ..Binary))] - (: ..Binary +(with_expansions [ (: Nat size) + (ffi.array byte ) + (: ..Binary )] + (template: .public (empty size) + [(: ..Binary (for [@.old @.jvm @@ -84,12 +83,12 @@ (..make-bytevector )] ... Default - (array.empty ))))]) + (array.empty )))])) -(template: .public (size it) - [(with_expansions [ (: ..Binary it) - (ffi.length )] - (: Nat +(with_expansions [ (: ..Binary it) + (ffi.length )] + (template: .public (size it) + [(: Nat (for [@.old @.jvm @@ -109,47 +108,47 @@ (..bytevector-length [])] ... Default - (array.size ))))]) + (array.size )))])) (def: byte_mask Nat (i64.mask i64.bits_per_byte)) -(with_expansions [ (.static ..byte_mask)] +(with_expansions [ (.static ..byte_mask) + (: ..Binary it) + (: Nat index) + (ffi.read! ) + (ffi.byte_to_long ) + (|> + (:as I64) + ("lux i64 and" ))] (template: .public (bytes/1 index it) - [(with_expansions [ (: ..Binary it) - (: Nat index) - (|> - (ffi.read! ) - ffi.byte_to_long - (:as I64) - ("lux i64 and" ))] - (: I64 - (`` (for [@.old (~~ ) - @.jvm (~~ ) - - @.js - (|> - (:as (array.Array .Frac)) - ("js array read" ) - "lux f64 i64" - .i64) - - @.python - (|> - (:as (array.Array .I64)) - ("python array read" )) - - @.scheme - (..bytevector-u8-ref [ ])] - - ... Default - (.case (array.read! 
) - {.#Some it} - it - - {.#None} - (.i64 (: (I64 Any) 0)))))))])) + [(: I64 + (`` (for [@.old (~~ ) + @.jvm (~~ ) + + @.js + (|> + (:as (array.Array .Frac)) + ("js array read" ) + "lux f64 i64" + .i64) + + @.python + (|> + (:as (array.Array .I64)) + ("python array read" )) + + @.scheme + (..bytevector-u8-ref [ ])] + + ... Default + (.case (array.read! ) + {.#Some it} + it + + {.#None} + (.i64 (: (I64 Any) 0))))))])) (template: .public (bytes/2 index' it') [(let [index (: Nat index') @@ -183,48 +182,49 @@ ("lux i64 left-shift" 8 (..bytes/1 ("lux i64 +" 6 index) it)) (..bytes/1 ("lux i64 +" 7 index) it))))]) -(with_expansions [ (hex "FF")] +(with_expansions [ (hex "FF") + (: ..Binary it) + (: Nat index) + (: (I64 Any) value) + (for [@.old + (:as Int ) + + @.jvm + (:as (Primitive "java.lang.Long") )] + ) + (ffi.long_to_byte ) + (ffi.write! )] (template: .public (with/1! index value it) - [(with_expansions [ (: ..Binary it) - (: Nat index) - (: (I64 Any) value) - (for [@.old - (|> (:as Int) ffi.long_to_byte) - - @.jvm - (|> (:as (Primitive "java.lang.Long")) ffi.long_to_byte)] - ) - (ffi.write! )] - (: ..Binary - (for [@.old - @.jvm - - @.js - (|> - (: ..Binary) - (:as (array.Array .Frac)) - ("js array write" - (|> - .int - ("lux i64 and" (.int )) - "lux i64 f64")) - (:as ..Binary)) - - @.python - (|> - (: ..Binary) - (:as (array.Array (I64 Any))) - ("python array write" (|> ("lux i64 and" ) (: (I64 Any)))) - (:as ..Binary)) - - @.scheme - (let [it' ] - (exec - (..bytevector-u8-set! [it' ]) - it'))] - - ... Default - (array.write! (|> .int ("lux i64 and" (.int ))) ))))])) + [(: ..Binary + (for [@.old + @.jvm + + @.js + (|> + (: ..Binary) + (:as (array.Array .Frac)) + ("js array write" + (|> + .int + ("lux i64 and" (.int )) + "lux i64 f64")) + (:as ..Binary)) + + @.python + (|> + (: ..Binary) + (:as (array.Array (I64 Any))) + ("python array write" (|> ("lux i64 and" ) (: (I64 Any)))) + (:as ..Binary)) + + @.scheme + (let [it' ] + (exec + (..bytevector-u8-set! [it' ]) + it'))] + + ... Default + (array.write! (|> .int ("lux i64 and" (.int ))) )))])) (template: .public (with/2! index' value' it) [(let [index (: Nat index') @@ -268,28 +268,33 @@ (..with/1! ("lux i64 +" 6 index) ("lux i64 right-shift" 8 value)) (..with/1! ("lux i64 +" 7 index) value))))]) -(def: .public (= reference sample) - (-> ..Binary ..Binary Bit) - (with_expansions [ (java/util/Arrays::equals reference sample)] - (for [@.old - @.jvm ] - (let [limit (..size reference)] - (and ("lux i64 =" limit (..size sample)) - (loop [index 0] - (if ("lux i64 =" limit index) - (and ("lux i64 =" - (..bytes/1 index reference) - (..bytes/1 index sample)) - (again (++ index))) - true))))))) - -(def: .public (copy! bytes source_offset source target_offset target) +(with_expansions [ (: ..Binary reference') + (: ..Binary sample') + (java/util/Arrays::equals )] + (template: .public (= reference' sample') + [(for [@.old + @.jvm ] + (let [reference + sample + limit (..size reference)] + (and ("lux i64 =" limit (..size sample)) + (loop [index 0] + (if ("lux i64 =" limit index) + (and ("lux i64 =" + (..bytes/1 index reference) + (..bytes/1 index sample)) + (again (++ index))) + true)))))])) + +... TODO: Turn into a template ASAP. +(inline: .public (copy! 
bytes source_offset source target_offset target)
+  (-> Nat Nat ..Binary Nat ..Binary ..Binary)
+  (with_expansions [<copy> (java/lang/System::arraycopy source (.int source_offset)
+                                                        target (.int target_offset)
+                                                        (.int bytes))
+                    <jvm> (exec
+                            <copy>
+                            target)]
   (for [@.old <jvm>
         @.jvm <jvm>]
 
@@ -303,12 +308,16 @@
               (again (++ index)))
             target)))))
-(def: .public (slice offset size binary)
-  (-> Nat Nat ..Binary ..Binary)
-  (let [limit ("lux i64 +" size offset)]
-    (with_expansions [<jvm> (as_is (java/util/Arrays::copyOfRange binary (.int offset) (.int limit)))]
-      (for [@.old <jvm>
-            @.jvm <jvm>]
-
-           ... Default
-           (..copy! size offset binary 0 (..empty size))))))
+... TODO: Turn into a template ASAP.
+(with_expansions [<copy> (java/util/Arrays::copyOfRange binary
+                                                        (.int offset)
+                                                        (.int limit))
+                  <jvm> (let [limit ("lux i64 +" size offset)]
+                          <copy>)]
+  (inline: .public (slice offset size binary)
+    (-> Nat Nat ..Binary ..Binary)
+    (for [@.old <jvm>
+          @.jvm <jvm>]
+
+         ... Default
+         (..copy! size offset binary 0 (..empty size)))))
-- 
cgit v1.2.3
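The coverage checker this patch relocates into analysis/coverage.lux composes the coverage of each pattern to decide whether a case expression is exhaustive and whether any branch is redundant: complementary bit patterns add up to an exhaustive match, literal patterns (naturals, texts, and so on) merge by set union as long as they do not overlap, and an addition that covers nothing new is reported as a redundant pattern. Below is a minimal sketch of that composition rule in Haskell rather than Lux; the simplified Coverage type, the composite function, and the error strings are illustrative stand-ins under those assumptions and are not names from the library.

import qualified Data.Set as Set

-- Simplified stand-in for the library's Coverage type (variants, sequences
-- and alternatives are omitted to keep the sketch short).
data Coverage
  = Exhaustive
  | Bit Bool
  | Nat (Set.Set Integer)
  deriving (Eq, Show)

-- Compose a new pattern's coverage ("addition") with the coverage accumulated
-- from the previous branches ("so far").
composite :: Coverage -> Coverage -> Either String Coverage
composite addition soFar =
  case (addition, soFar) of
    -- Everything is already covered, so the new branch can never match.
    (_, Exhaustive) -> Left "redundant pattern"
    -- The new branch matches anything that was still uncovered.
    (Exhaustive, _) -> Right Exhaustive
    -- #0 and #1 complement one another.
    (Bit a, Bit b)
      | a /= b    -> Right Exhaustive
      | otherwise -> Left "redundant pattern"
    -- Disjoint literal sets merge by union; overlap means a redundant branch.
    (Nat a, Nat b)
      | Set.null (Set.intersection a b) -> Right (Nat (Set.union a b))
      | otherwise -> Left "redundant pattern"
    _ -> Left "mixed coverages are not handled in this sketch"

main :: IO ()
main = do
  print (composite (Bit True) (Bit False))                            -- Right Exhaustive
  print (composite (Nat (Set.fromList [1])) (Nat (Set.fromList [2]))) -- Right (Nat (fromList [1,2]))
  print (composite (Nat (Set.fromList [1])) (Nat (Set.fromList [1]))) -- Left "redundant pattern"

The real checker additionally tracks variants (with an optional known arity), tuple sequences, and alternatives, which is where the variant_mismatch and redundancy exceptions exercised by the new test module come in.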