From 60db3c210aeaf66a4fe312544c6e5d4662681de7 Mon Sep 17 00:00:00 2001 From: Son Ho Date: Fri, 4 Aug 2023 22:33:05 +0200 Subject: Generate the array test files for Coq and F* --- tests/coq/array/Array_Funs.v | 300 +++++++++++++++ tests/coq/array/Array_Types.v | 11 + tests/coq/array/Makefile | 23 ++ tests/coq/array/Primitives.v | 523 +++++++++++++++++++++++++++ tests/coq/array/_CoqProject | 8 + tests/fstar/.gitignore | 1 + tests/fstar/array/Array.Clauses.Template.fst | 18 + tests/fstar/array/Array.Clauses.fst | 19 + tests/fstar/array/Array.Funs.fst | 226 ++++++++++++ tests/fstar/array/Array.Types.fst | 7 + tests/fstar/array/Makefile | 49 +++ tests/fstar/array/Primitives.fst | 375 +++++++++++++++++++ 12 files changed, 1560 insertions(+) create mode 100644 tests/coq/array/Array_Funs.v create mode 100644 tests/coq/array/Array_Types.v create mode 100644 tests/coq/array/Makefile create mode 100644 tests/coq/array/Primitives.v create mode 100644 tests/coq/array/_CoqProject create mode 100644 tests/fstar/.gitignore create mode 100644 tests/fstar/array/Array.Clauses.Template.fst create mode 100644 tests/fstar/array/Array.Clauses.fst create mode 100644 tests/fstar/array/Array.Funs.fst create mode 100644 tests/fstar/array/Array.Types.fst create mode 100644 tests/fstar/array/Makefile create mode 100644 tests/fstar/array/Primitives.fst diff --git a/tests/coq/array/Array_Funs.v b/tests/coq/array/Array_Funs.v new file mode 100644 index 00000000..4a862d1b --- /dev/null +++ b/tests/coq/array/Array_Funs.v @@ -0,0 +1,300 @@ +(** THIS FILE WAS AUTOMATICALLY GENERATED BY AENEAS *) +(** [array]: function definitions *) +Require Import Primitives. +Import Primitives. +Require Import Coq.ZArith.ZArith. +Require Import List. +Import ListNotations. +Local Open Scope Primitives_scope. +Require Export Array_Types. +Import Array_Types. +Module Array_Funs. + +(** [array::array_to_shared_slice_]: forward function *) +Definition array_to_shared_slice__fwd + (T : Type) (s : array T 32%usize) : result (slice T) := + array_to_slice_shared T 32%usize s +. + +(** [array::array_to_mut_slice_]: forward function *) +Definition array_to_mut_slice__fwd + (T : Type) (s : array T 32%usize) : result (slice T) := + array_to_slice_mut_fwd T 32%usize s +. + +(** [array::array_to_mut_slice_]: backward function 0 *) +Definition array_to_mut_slice__back + (T : Type) (s : array T 32%usize) (ret : slice T) : + result (array T 32%usize) + := + array_to_slice_mut_back T 32%usize s ret +. + +(** [array::array_len]: forward function *) +Definition array_len_fwd (T : Type) (s : array T 32%usize) : result usize := + s0 <- array_to_slice_shared T 32%usize s; let i := slice_len T s0 in Return i +. + +(** [array::shared_array_len]: forward function *) +Definition shared_array_len_fwd + (T : Type) (s : array T 32%usize) : result usize := + s0 <- array_to_slice_shared T 32%usize s; let i := slice_len T s0 in Return i +. + +(** [array::shared_slice_len]: forward function *) +Definition shared_slice_len_fwd (T : Type) (s : slice T) : result usize := + let i := slice_len T s in Return i +. + +(** [array::index_array_shared]: forward function *) +Definition index_array_shared_fwd + (T : Type) (s : array T 32%usize) (i : usize) : result T := + array_index_shared T 32%usize s i +. + +(** [array::index_array_u32]: forward function *) +Definition index_array_u32_fwd + (s : array u32 32%usize) (i : usize) : result u32 := + array_index_shared u32 32%usize s i +. 
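(* A hand-written sketch, not part of the generated file (the name
   [sum_two_cells] is illustrative): the forward functions above compose in
   the [result] monad using the [x <- ...; ...] notation from Primitives. *)
Definition sum_two_cells (s : array u32 32%usize) : result u32 :=
  x <- array_index_shared u32 32%usize s 0%usize;
  y <- array_index_shared u32 32%usize s 1%usize;
  u32_add x y
.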
+ +(** [array::index_array_generic]: forward function *) +Definition index_array_generic_fwd + (N : usize) (s : array u32 N) (i : usize) : result u32 := + array_index_shared u32 N s i +. + +(** [array::index_array_generic_call]: forward function *) +Definition index_array_generic_call_fwd + (N : usize) (s : array u32 N) (i : usize) : result u32 := + index_array_generic_fwd N s i +. + +(** [array::index_array_copy]: forward function *) +Definition index_array_copy_fwd (x : array u32 32%usize) : result u32 := + array_index_shared u32 32%usize x 0%usize +. + +(** [array::index_mut_array]: forward function *) +Definition index_mut_array_fwd + (T : Type) (s : array T 32%usize) (i : usize) : result T := + array_index_mut_fwd T 32%usize s i +. + +(** [array::index_mut_array]: backward function 0 *) +Definition index_mut_array_back + (T : Type) (s : array T 32%usize) (i : usize) (ret : T) : + result (array T 32%usize) + := + array_index_mut_back T 32%usize s i ret +. + +(** [array::index_slice]: forward function *) +Definition index_slice_fwd (T : Type) (s : slice T) (i : usize) : result T := + slice_index_shared T s i +. + +(** [array::index_mut_slice]: forward function *) +Definition index_mut_slice_fwd + (T : Type) (s : slice T) (i : usize) : result T := + slice_index_mut_fwd T s i +. + +(** [array::index_mut_slice]: backward function 0 *) +Definition index_mut_slice_back + (T : Type) (s : slice T) (i : usize) (ret : T) : result (slice T) := + slice_index_mut_back T s i ret +. + +(** [array::slice_subslice_shared_]: forward function *) +Definition slice_subslice_shared__fwd + (x : slice u32) (y : usize) (z : usize) : result (slice u32) := + slice_subslice_shared u32 x (mk_range y z) +. + +(** [array::slice_subslice_mut_]: forward function *) +Definition slice_subslice_mut__fwd + (x : slice u32) (y : usize) (z : usize) : result (slice u32) := + slice_subslice_mut_fwd u32 x (mk_range y z) +. + +(** [array::slice_subslice_mut_]: backward function 0 *) +Definition slice_subslice_mut__back + (x : slice u32) (y : usize) (z : usize) (ret : slice u32) : + result (slice u32) + := + slice_subslice_mut_back u32 x (mk_range y z) ret +. + +(** [array::array_to_slice_shared_]: forward function *) +Definition array_to_slice_shared__fwd + (x : array u32 32%usize) : result (slice u32) := + array_to_slice_shared u32 32%usize x +. + +(** [array::array_to_slice_mut_]: forward function *) +Definition array_to_slice_mut__fwd + (x : array u32 32%usize) : result (slice u32) := + array_to_slice_mut_fwd u32 32%usize x +. + +(** [array::array_to_slice_mut_]: backward function 0 *) +Definition array_to_slice_mut__back + (x : array u32 32%usize) (ret : slice u32) : result (array u32 32%usize) := + array_to_slice_mut_back u32 32%usize x ret +. + +(** [array::array_subslice_shared_]: forward function *) +Definition array_subslice_shared__fwd + (x : array u32 32%usize) (y : usize) (z : usize) : result (slice u32) := + array_subslice_shared u32 32%usize x (mk_range y z) +. + +(** [array::array_subslice_mut_]: forward function *) +Definition array_subslice_mut__fwd + (x : array u32 32%usize) (y : usize) (z : usize) : result (slice u32) := + array_subslice_mut_fwd u32 32%usize x (mk_range y z) +. + +(** [array::array_subslice_mut_]: backward function 0 *) +Definition array_subslice_mut__back + (x : array u32 32%usize) (y : usize) (z : usize) (ret : slice u32) : + result (array u32 32%usize) + := + array_subslice_mut_back u32 32%usize x (mk_range y z) ret +. 
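(* A hand-written sketch, not part of the generated file (the name [set_cell]
   is illustrative): a Rust in-place update like [x[i] = v] is modelled by
   calling the forward function (read the borrowed cell) and then the
   backward function (write the new value back into the array). *)
Definition set_cell
  (s : array u32 32%usize) (i : usize) (v : u32) :
  result (array u32 32%usize)
  :=
  _ <- array_index_mut_fwd u32 32%usize s i;
  array_index_mut_back u32 32%usize s i v
.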
+ +(** [array::index_slice_0]: forward function *) +Definition index_slice_0_fwd (T : Type) (s : slice T) : result T := + slice_index_shared T s 0%usize +. + +(** [array::index_array_0]: forward function *) +Definition index_array_0_fwd (T : Type) (s : array T 32%usize) : result T := + array_index_shared T 32%usize s 0%usize +. + +(** [array::index_index_array]: forward function *) +Definition index_index_array_fwd + (s : array (array u32 32%usize) 32%usize) (i : usize) (j : usize) : + result u32 + := + a <- array_index_shared (array u32 32%usize) 32%usize s i; + array_index_shared u32 32%usize a j +. + +(** [array::update_update_array]: forward function *) +Definition update_update_array_fwd + (s : array (array u32 32%usize) 32%usize) (i : usize) (j : usize) : + result unit + := + a <- array_index_mut_fwd (array u32 32%usize) 32%usize s i; + a0 <- array_index_mut_back u32 32%usize a j 0%u32; + _ <- array_index_mut_back (array u32 32%usize) 32%usize s i a0; + Return tt +. + +(** [array::array_local_deep_copy]: forward function *) +Definition array_local_deep_copy_fwd (x : array u32 32%usize) : result unit := + Return tt +. + +(** [array::f0]: forward function *) +Definition f0_fwd : result unit := + s <- + array_to_slice_mut_fwd u32 2%usize (mk_array u32 2%usize [ 1%u32; 2%u32 ]); + s0 <- slice_index_mut_back u32 s 0%usize 1%u32; + _ <- + array_to_slice_mut_back u32 2%usize (mk_array u32 2%usize [ 1%u32; 2%u32 ]) + s0; + Return tt +. + +(** [array::f1]: forward function *) +Definition f1_fwd : result unit := + _ <- + array_index_mut_back u32 2%usize (mk_array u32 2%usize [ 1%u32; 2%u32 ]) + 0%usize 1%u32; + Return tt +. + +(** [array::sum]: loop 0: forward function *) +Fixpoint sum_loop_fwd + (n : nat) (s : slice u32) (sum : u32) (i : usize) : result u32 := + match n with + | O => Fail_ OutOfFuel + | S n0 => + let i0 := slice_len u32 s in + if i s< i0 + then ( + i1 <- slice_index_shared u32 s i; + sum0 <- u32_add sum i1; + i2 <- usize_add i 1%usize; + sum_loop_fwd n0 s sum0 i2) + else Return sum + end +. + +(** [array::sum]: forward function *) +Definition sum_fwd (n : nat) (s : slice u32) : result u32 := + sum_loop_fwd n s 0%u32 0%usize +. + +(** [array::sum2]: loop 0: forward function *) +Fixpoint sum2_loop_fwd + (n : nat) (s : slice u32) (s2 : slice u32) (sum : u32) (i : usize) : + result u32 + := + match n with + | O => Fail_ OutOfFuel + | S n0 => + let i0 := slice_len u32 s in + if i s< i0 + then ( + i1 <- slice_index_shared u32 s i; + i2 <- slice_index_shared u32 s2 i; + i3 <- u32_add i1 i2; + sum0 <- u32_add sum i3; + i4 <- usize_add i 1%usize; + sum2_loop_fwd n0 s s2 sum0 i4) + else Return sum + end +. + +(** [array::sum2]: forward function *) +Definition sum2_fwd (n : nat) (s : slice u32) (s2 : slice u32) : result u32 := + let i := slice_len u32 s in + let i0 := slice_len u32 s2 in + if negb (i s= i0) then Fail_ Failure else sum2_loop_fwd n s s2 0%u32 0%usize +. + +(** [array::f2]: forward function *) +Definition f2_fwd (i : u32) : result unit := + Return tt. + +(** [array::f4]: forward function *) +Definition f4_fwd + (x : array u32 32%usize) (y : usize) (z : usize) : result (slice u32) := + array_subslice_shared u32 32%usize x (mk_range y z) +. 
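(* A hand-written sketch, not part of the generated file (the name
   [sum_example] and the fuel value are arbitrary): loops are extracted to
   fuel-indexed fixpoints, so callers of [sum_fwd] pass an extra [nat] and
   get [Fail_ OutOfFuel] back when it is exhausted; any sufficiently large
   fuel yields the intended result. *)
Definition sum_example (s : slice u32) : result u32 :=
  sum_fwd 1000%nat s
.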
+ +(** [array::f3]: forward function *) +Definition f3_fwd (n : nat) : result u32 := + i <- + array_index_shared u32 2%usize (mk_array u32 2%usize [ 1%u32; 2%u32 ]) + 0%usize; + _ <- f2_fwd i; + s <- + array_to_slice_shared u32 2%usize (mk_array u32 2%usize [ 1%u32; 2%u32 ]); + s0 <- + f4_fwd + (mk_array u32 32%usize [ + 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; + 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; + 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; + 0%u32; 0%u32 + ]) 16%usize 18%usize; + sum2_fwd n s s0 +. + +End Array_Funs . diff --git a/tests/coq/array/Array_Types.v b/tests/coq/array/Array_Types.v new file mode 100644 index 00000000..7e1f2a07 --- /dev/null +++ b/tests/coq/array/Array_Types.v @@ -0,0 +1,11 @@ +(** THIS FILE WAS AUTOMATICALLY GENERATED BY AENEAS *) +(** [array]: type definitions *) +Require Import Primitives. +Import Primitives. +Require Import Coq.ZArith.ZArith. +Require Import List. +Import ListNotations. +Local Open Scope Primitives_scope. +Module Array_Types. + +End Array_Types . diff --git a/tests/coq/array/Makefile b/tests/coq/array/Makefile new file mode 100644 index 00000000..1a5aee4a --- /dev/null +++ b/tests/coq/array/Makefile @@ -0,0 +1,23 @@ +# This file was automatically generated - modify ../Makefile.template instead +# Makefile originally taken from coq-club + +%: Makefile.coq phony + +make -f Makefile.coq $@ + +all: Makefile.coq + +make -f Makefile.coq all + +clean: Makefile.coq + +make -f Makefile.coq clean + rm -f Makefile.coq + +Makefile.coq: _CoqProject Makefile + coq_makefile -f _CoqProject | sed 's/$$(COQCHK) $$(COQCHKFLAGS) $$(COQLIBS)/$$(COQCHK) $$(COQCHKFLAGS) $$(subst -Q,-R,$$(COQLIBS))/' > Makefile.coq + +_CoqProject: ; + +Makefile: ; + +phony: ; + +.PHONY: all clean phony diff --git a/tests/coq/array/Primitives.v b/tests/coq/array/Primitives.v new file mode 100644 index 00000000..71a2d9c3 --- /dev/null +++ b/tests/coq/array/Primitives.v @@ -0,0 +1,523 @@ +Require Import Lia. +Require Coq.Strings.Ascii. +Require Coq.Strings.String. +Require Import Coq.Program.Equality. +Require Import Coq.ZArith.ZArith. +Require Import Coq.ZArith.Znat. +Require Import List. +Import ListNotations. + +Module Primitives. + + (* TODO: use more *) +Declare Scope Primitives_scope. + +(*** Result *) + +Inductive error := + | Failure + | OutOfFuel. + +Inductive result A := + | Return : A -> result A + | Fail_ : error -> result A. + +Arguments Return {_} a. +Arguments Fail_ {_}. + +Definition bind {A B} (m: result A) (f: A -> result B) : result B := + match m with + | Fail_ e => Fail_ e + | Return x => f x + end. + +Definition return_ {A: Type} (x: A) : result A := Return x. +Definition fail_ {A: Type} (e: error) : result A := Fail_ e. + +Notation "x <- c1 ; c2" := (bind c1 (fun x => c2)) + (at level 61, c1 at next level, right associativity). + +(** Monadic assert *) +Definition massert (b: bool) : result unit := + if b then Return tt else Fail_ Failure. + +(** Normalize and unwrap a successful result (used for globals) *) +Definition eval_result_refl {A} {x} (a: result A) (p: a = Return x) : A := + match a as r return (r = Return x -> A) with + | Return a' => fun _ => a' + | Fail_ e => fun p' => + False_rect _ (eq_ind (Fail_ e) + (fun e : result A => + match e with + | Return _ => False + | Fail_ e => True + end) + I (Return x) p') + end p. + +Notation "x %global" := (eval_result_refl x eq_refl) (at level 40). +Notation "x %return" := (eval_result_refl x eq_refl) (at level 40). 
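(* A hand-written sketch, not part of the generated interface (the name
   [checked_pred] is illustrative): the monadic notations above chain
   [result] computations, and [massert] turns a boolean check into a
   potential [Fail_ Failure]. *)
Definition checked_pred (x : Z) : result Z :=
  _ <- massert (0 <? x)%Z;
  Return (x - 1)%Z
.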
+ +(* Sanity check *) +Check (if true then Return (1 + 2) else Fail_ Failure)%global = 3. + +(*** Misc *) + + +Definition string := Coq.Strings.String.string. +Definition char := Coq.Strings.Ascii.ascii. +Definition char_of_byte := Coq.Strings.Ascii.ascii_of_byte. + +Definition mem_replace_fwd (a : Type) (x : a) (y : a) : a := x . +Definition mem_replace_back (a : Type) (x : a) (y : a) : a := y . + +(*** Scalars *) + +Definition i8_min : Z := -128%Z. +Definition i8_max : Z := 127%Z. +Definition i16_min : Z := -32768%Z. +Definition i16_max : Z := 32767%Z. +Definition i32_min : Z := -2147483648%Z. +Definition i32_max : Z := 2147483647%Z. +Definition i64_min : Z := -9223372036854775808%Z. +Definition i64_max : Z := 9223372036854775807%Z. +Definition i128_min : Z := -170141183460469231731687303715884105728%Z. +Definition i128_max : Z := 170141183460469231731687303715884105727%Z. +Definition u8_min : Z := 0%Z. +Definition u8_max : Z := 255%Z. +Definition u16_min : Z := 0%Z. +Definition u16_max : Z := 65535%Z. +Definition u32_min : Z := 0%Z. +Definition u32_max : Z := 4294967295%Z. +Definition u64_min : Z := 0%Z. +Definition u64_max : Z := 18446744073709551615%Z. +Definition u128_min : Z := 0%Z. +Definition u128_max : Z := 340282366920938463463374607431768211455%Z. + +(** The bounds of [isize] and [usize] vary with the architecture. *) +Axiom isize_min : Z. +Axiom isize_max : Z. +Definition usize_min : Z := 0%Z. +Axiom usize_max : Z. + +Open Scope Z_scope. + +(** We provide those lemmas to reason about the bounds of [isize] and [usize] *) +Axiom isize_min_bound : isize_min <= i32_min. +Axiom isize_max_bound : i32_max <= isize_max. +Axiom usize_max_bound : u32_max <= usize_max. + +Inductive scalar_ty := + | Isize + | I8 + | I16 + | I32 + | I64 + | I128 + | Usize + | U8 + | U16 + | U32 + | U64 + | U128 +. + +Definition scalar_min (ty: scalar_ty) : Z := + match ty with + | Isize => isize_min + | I8 => i8_min + | I16 => i16_min + | I32 => i32_min + | I64 => i64_min + | I128 => i128_min + | Usize => usize_min + | U8 => u8_min + | U16 => u16_min + | U32 => u32_min + | U64 => u64_min + | U128 => u128_min +end. + +Definition scalar_max (ty: scalar_ty) : Z := + match ty with + | Isize => isize_max + | I8 => i8_max + | I16 => i16_max + | I32 => i32_max + | I64 => i64_max + | I128 => i128_max + | Usize => usize_max + | U8 => u8_max + | U16 => u16_max + | U32 => u32_max + | U64 => u64_max + | U128 => u128_max +end. + +(** We use the following conservative bounds to make sure we can compute bound + checks in most situations *) +Definition scalar_min_cons (ty: scalar_ty) : Z := + match ty with + | Isize => i32_min + | Usize => u32_min + | _ => scalar_min ty +end. + +Definition scalar_max_cons (ty: scalar_ty) : Z := + match ty with + | Isize => i32_max + | Usize => u32_max + | _ => scalar_max ty +end. + +Lemma scalar_min_cons_valid : forall ty, scalar_min ty <= scalar_min_cons ty . +Proof. + destruct ty; unfold scalar_min_cons, scalar_min; try lia. + - pose isize_min_bound; lia. + - apply Z.le_refl. +Qed. + +Lemma scalar_max_cons_valid : forall ty, scalar_max ty >= scalar_max_cons ty . +Proof. + destruct ty; unfold scalar_max_cons, scalar_max; try lia. + - pose isize_max_bound; lia. + - pose usize_max_bound. lia. +Qed. + +Definition scalar (ty: scalar_ty) : Type := + { x: Z | scalar_min ty <= x <= scalar_max ty }. + +Definition to_Z {ty} (x: scalar ty) : Z := proj1_sig x. 
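(* A hand-written sketch, not part of the generated interface: a [scalar ty]
   is a [Z] integer packaged with a proof that it lies within the bounds of
   [ty], so a value can be built directly with [exist] and a small
   arithmetic proof. *)
Example u32_one : scalar U32.
Proof.
  exists 1%Z.
  unfold scalar_min, scalar_max, u32_min, u32_max; simpl; lia.
Qed.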
+ +(** Bounds checks: we start by using the conservative bounds, to make sure we + can compute in most situations, then we use the real bounds (for [isize] + and [usize]). *) +Definition scalar_ge_min (ty: scalar_ty) (x: Z) : bool := + Z.leb (scalar_min_cons ty) x || Z.leb (scalar_min ty) x. + +Definition scalar_le_max (ty: scalar_ty) (x: Z) : bool := + Z.leb x (scalar_max_cons ty) || Z.leb x (scalar_max ty). + +Lemma scalar_ge_min_valid (ty: scalar_ty) (x: Z) : + scalar_ge_min ty x = true -> scalar_min ty <= x . +Proof. + unfold scalar_ge_min. + pose (scalar_min_cons_valid ty). + lia. +Qed. + +Lemma scalar_le_max_valid (ty: scalar_ty) (x: Z) : + scalar_le_max ty x = true -> x <= scalar_max ty . +Proof. + unfold scalar_le_max. + pose (scalar_max_cons_valid ty). + lia. +Qed. + +Definition scalar_in_bounds (ty: scalar_ty) (x: Z) : bool := + scalar_ge_min ty x && scalar_le_max ty x . + +Lemma scalar_in_bounds_valid (ty: scalar_ty) (x: Z) : + scalar_in_bounds ty x = true -> scalar_min ty <= x <= scalar_max ty . +Proof. + unfold scalar_in_bounds. + intros H. + destruct (scalar_ge_min ty x) eqn:Hmin. + - destruct (scalar_le_max ty x) eqn:Hmax. + + pose (scalar_ge_min_valid ty x Hmin). + pose (scalar_le_max_valid ty x Hmax). + lia. + + inversion H. + - inversion H. +Qed. + +Import Sumbool. + +Definition mk_scalar (ty: scalar_ty) (x: Z) : result (scalar ty) := + match sumbool_of_bool (scalar_in_bounds ty x) with + | left H => Return (exist _ x (scalar_in_bounds_valid _ _ H)) + | right _ => Fail_ Failure + end. + +Definition scalar_add {ty} (x y: scalar ty) : result (scalar ty) := mk_scalar ty (to_Z x + to_Z y). + +Definition scalar_sub {ty} (x y: scalar ty) : result (scalar ty) := mk_scalar ty (to_Z x - to_Z y). + +Definition scalar_mul {ty} (x y: scalar ty) : result (scalar ty) := mk_scalar ty (to_Z x * to_Z y). + +Definition scalar_div {ty} (x y: scalar ty) : result (scalar ty) := + if to_Z y =? 0 then Fail_ Failure else + mk_scalar ty (to_Z x / to_Z y). + +Definition scalar_rem {ty} (x y: scalar ty) : result (scalar ty) := mk_scalar ty (Z.rem (to_Z x) (to_Z y)). + +Definition scalar_neg {ty} (x: scalar ty) : result (scalar ty) := mk_scalar ty (-(to_Z x)). + +(** Cast an integer from a [src_ty] to a [tgt_ty] *) +(* TODO: check the semantics of casts in Rust *) +Definition scalar_cast (src_ty tgt_ty : scalar_ty) (x : scalar src_ty) : result (scalar tgt_ty) := + mk_scalar tgt_ty (to_Z x). + +(** Comparisons *) +Definition scalar_leb {ty : scalar_ty} (x : scalar ty) (y : scalar ty) : bool := + Z.leb (to_Z x) (to_Z y) . + +Definition scalar_ltb {ty : scalar_ty} (x : scalar ty) (y : scalar ty) : bool := + Z.ltb (to_Z x) (to_Z y) . + +Definition scalar_geb {ty : scalar_ty} (x : scalar ty) (y : scalar ty) : bool := + Z.geb (to_Z x) (to_Z y) . + +Definition scalar_gtb {ty : scalar_ty} (x : scalar ty) (y : scalar ty) : bool := + Z.gtb (to_Z x) (to_Z y) . + +Definition scalar_eqb {ty : scalar_ty} (x : scalar ty) (y : scalar ty) : bool := + Z.eqb (to_Z x) (to_Z y) . + +Definition scalar_neqb {ty : scalar_ty} (x : scalar ty) (y : scalar ty) : bool := + negb (Z.eqb (to_Z x) (to_Z y)) . + + +(** The scalar types *) +Definition isize := scalar Isize. +Definition i8 := scalar I8. +Definition i16 := scalar I16. +Definition i32 := scalar I32. +Definition i64 := scalar I64. +Definition i128 := scalar I128. +Definition usize := scalar Usize. +Definition u8 := scalar U8. +Definition u16 := scalar U16. +Definition u32 := scalar U32. +Definition u64 := scalar U64. +Definition u128 := scalar U128. 
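(* A hand-written sketch, not part of the generated interface: [mk_scalar]
   performs the bound check at construction time, so in-range constants
   reduce to [Return _] while out-of-range ones reduce to [Fail_ Failure]. *)
Example mk_scalar_u8_ok : exists x, mk_scalar U8 255%Z = Return x.
Proof. eexists; reflexivity. Qed.

Example mk_scalar_u8_overflow : mk_scalar U8 256%Z = Fail_ Failure.
Proof. reflexivity. Qed.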
+ +(** Negaion *) +Definition isize_neg := @scalar_neg Isize. +Definition i8_neg := @scalar_neg I8. +Definition i16_neg := @scalar_neg I16. +Definition i32_neg := @scalar_neg I32. +Definition i64_neg := @scalar_neg I64. +Definition i128_neg := @scalar_neg I128. + +(** Division *) +Definition isize_div := @scalar_div Isize. +Definition i8_div := @scalar_div I8. +Definition i16_div := @scalar_div I16. +Definition i32_div := @scalar_div I32. +Definition i64_div := @scalar_div I64. +Definition i128_div := @scalar_div I128. +Definition usize_div := @scalar_div Usize. +Definition u8_div := @scalar_div U8. +Definition u16_div := @scalar_div U16. +Definition u32_div := @scalar_div U32. +Definition u64_div := @scalar_div U64. +Definition u128_div := @scalar_div U128. + +(** Remainder *) +Definition isize_rem := @scalar_rem Isize. +Definition i8_rem := @scalar_rem I8. +Definition i16_rem := @scalar_rem I16. +Definition i32_rem := @scalar_rem I32. +Definition i64_rem := @scalar_rem I64. +Definition i128_rem := @scalar_rem I128. +Definition usize_rem := @scalar_rem Usize. +Definition u8_rem := @scalar_rem U8. +Definition u16_rem := @scalar_rem U16. +Definition u32_rem := @scalar_rem U32. +Definition u64_rem := @scalar_rem U64. +Definition u128_rem := @scalar_rem U128. + +(** Addition *) +Definition isize_add := @scalar_add Isize. +Definition i8_add := @scalar_add I8. +Definition i16_add := @scalar_add I16. +Definition i32_add := @scalar_add I32. +Definition i64_add := @scalar_add I64. +Definition i128_add := @scalar_add I128. +Definition usize_add := @scalar_add Usize. +Definition u8_add := @scalar_add U8. +Definition u16_add := @scalar_add U16. +Definition u32_add := @scalar_add U32. +Definition u64_add := @scalar_add U64. +Definition u128_add := @scalar_add U128. + +(** Substraction *) +Definition isize_sub := @scalar_sub Isize. +Definition i8_sub := @scalar_sub I8. +Definition i16_sub := @scalar_sub I16. +Definition i32_sub := @scalar_sub I32. +Definition i64_sub := @scalar_sub I64. +Definition i128_sub := @scalar_sub I128. +Definition usize_sub := @scalar_sub Usize. +Definition u8_sub := @scalar_sub U8. +Definition u16_sub := @scalar_sub U16. +Definition u32_sub := @scalar_sub U32. +Definition u64_sub := @scalar_sub U64. +Definition u128_sub := @scalar_sub U128. + +(** Multiplication *) +Definition isize_mul := @scalar_mul Isize. +Definition i8_mul := @scalar_mul I8. +Definition i16_mul := @scalar_mul I16. +Definition i32_mul := @scalar_mul I32. +Definition i64_mul := @scalar_mul I64. +Definition i128_mul := @scalar_mul I128. +Definition usize_mul := @scalar_mul Usize. +Definition u8_mul := @scalar_mul U8. +Definition u16_mul := @scalar_mul U16. +Definition u32_mul := @scalar_mul U32. +Definition u64_mul := @scalar_mul U64. +Definition u128_mul := @scalar_mul U128. + +(** Small utility *) +Definition usize_to_nat (x: usize) : nat := Z.to_nat (to_Z x). + +(** Notations *) +Notation "x %isize" := ((mk_scalar Isize x)%return) (at level 9). +Notation "x %i8" := ((mk_scalar I8 x)%return) (at level 9). +Notation "x %i16" := ((mk_scalar I16 x)%return) (at level 9). +Notation "x %i32" := ((mk_scalar I32 x)%return) (at level 9). +Notation "x %i64" := ((mk_scalar I64 x)%return) (at level 9). +Notation "x %i128" := ((mk_scalar I128 x)%return) (at level 9). +Notation "x %usize" := ((mk_scalar Usize x)%return) (at level 9). +Notation "x %u8" := ((mk_scalar U8 x)%return) (at level 9). +Notation "x %u16" := ((mk_scalar U16 x)%return) (at level 9). +Notation "x %u32" := ((mk_scalar U32 x)%return) (at level 9). 
+Notation "x %u64" := ((mk_scalar U64 x)%return) (at level 9). +Notation "x %u128" := ((mk_scalar U128 x)%return) (at level 9). + +Notation "x s= y" := (scalar_eqb x y) (at level 80) : Primitives_scope. +Notation "x s<> y" := (scalar_neqb x y) (at level 80) : Primitives_scope. +Notation "x s<= y" := (scalar_leb x y) (at level 80) : Primitives_scope. +Notation "x s< y" := (scalar_ltb x y) (at level 80) : Primitives_scope. +Notation "x s>= y" := (scalar_geb x y) (at level 80) : Primitives_scope. +Notation "x s> y" := (scalar_gtb x y) (at level 80) : Primitives_scope. + +(*** Range *) +Record range (T : Type) := mk_range { + start: T; + end_: T; +}. +Arguments mk_range {_}. + +(*** Arrays *) +Definition array T (n : usize) := { l: list T | Z.of_nat (length l) = to_Z n}. + +Lemma le_0_usize_max : 0 <= usize_max. +Proof. + pose (H := usize_max_bound). + unfold u32_max in H. + lia. +Qed. + +Lemma eqb_imp_eq (x y : Z) : Z.eqb x y = true -> x = y. +Proof. + lia. +Qed. + +(* TODO: finish the definitions *) +Axiom mk_array : forall (T : Type) (n : usize) (l : list T), array T n. + +Axiom array_index_shared : forall (T : Type) (n : usize) (x : array T n) (i : usize), result T. +Axiom array_index_mut_fwd : forall (T : Type) (n : usize) (x : array T n) (i : usize), result T. +Axiom array_index_mut_back : forall (T : Type) (n : usize) (x : array T n) (i : usize) (nx : T), result (array T n). + +(*** Slice *) +Definition slice T := { l: list T | Z.of_nat (length l) <= usize_max}. + +Axiom slice_len : forall (T : Type) (s : slice T), usize. +Axiom slice_index_shared : forall (T : Type) (x : slice T) (i : usize), result T. +Axiom slice_index_mut_fwd : forall (T : Type) (x : slice T) (i : usize), result T. +Axiom slice_index_mut_back : forall (T : Type) (x : slice T) (i : usize) (nx : T), result (slice T). + +(*** Subslices *) + +Axiom array_to_slice_shared : forall (T : Type) (n : usize) (x : array T n), result (slice T). +Axiom array_to_slice_mut_fwd : forall (T : Type) (n : usize) (x : array T n), result (slice T). +Axiom array_to_slice_mut_back : forall (T : Type) (n : usize) (x : array T n) (s : slice T), result (array T n). + +Axiom array_subslice_shared: forall (T : Type) (n : usize) (x : array T n) (r : range usize), result (slice T). +Axiom array_subslice_mut_fwd: forall (T : Type) (n : usize) (x : array T n) (r : range usize), result (slice T). +Axiom array_subslice_mut_back: forall (T : Type) (n : usize) (x : array T n) (r : range usize) (ns : slice T), result (array T n). +Axiom slice_subslice_shared: forall (T : Type) (x : slice T) (r : range usize), result (slice T). +Axiom slice_subslice_mut_fwd: forall (T : Type) (x : slice T) (r : range usize), result (slice T). +Axiom slice_subslice_mut_back: forall (T : Type) (x : slice T) (r : range usize) (ns : slice T), result (slice T). + +(*** Vectors *) + +Definition vec T := { l: list T | Z.of_nat (length l) <= usize_max }. + +Definition vec_to_list {T: Type} (v: vec T) : list T := proj1_sig v. + +Definition vec_length {T: Type} (v: vec T) : Z := Z.of_nat (length (vec_to_list v)). + +Definition vec_new (T: Type) : vec T := (exist _ [] le_0_usize_max). + +Lemma vec_len_in_usize {T} (v: vec T) : usize_min <= vec_length v <= usize_max. +Proof. + unfold vec_length, usize_min. + split. + - lia. + - apply (proj2_sig v). +Qed. + +Definition vec_len (T: Type) (v: vec T) : usize := + exist _ (vec_length v) (vec_len_in_usize v). 
+
+Fixpoint list_update {A} (l: list A) (n: nat) (a: A)
+  : list A :=
+  match l with
+  | [] => []
+  | x :: t => match n with
+    | 0%nat => a :: t
+    | S m => x :: (list_update t m a)
+end end.
+
+Definition vec_bind {A B} (v: vec A) (f: list A -> result (list B)) : result (vec B) :=
+  l <- f (vec_to_list v) ;
+  match sumbool_of_bool (scalar_le_max Usize (Z.of_nat (length l))) with
+  | left H => Return (exist _ l (scalar_le_max_valid _ _ H))
+  | right _ => Fail_ Failure
+  end.
+
+(* The **forward** function shouldn't be used *)
+Definition vec_push_fwd (T: Type) (v: vec T) (x: T) : unit := tt.
+
+Definition vec_push_back (T: Type) (v: vec T) (x: T) : result (vec T) :=
+  vec_bind v (fun l => Return (l ++ [x])).
+
+(* The **forward** function shouldn't be used *)
+Definition vec_insert_fwd (T: Type) (v: vec T) (i: usize) (x: T) : result unit :=
+  if to_Z i <? vec_length v then Return tt else Fail_ Failure.
+
+Definition vec_insert_back (T: Type) (v: vec T) (i: usize) (x: T) : result (vec T) :=
+  vec_bind v (fun l =>
+    if to_Z i <? Z.of_nat (length l)
+    then Return (list_update l (usize_to_nat i) x)
+    else Fail_ Failure).
+
+(* The **backward** function shouldn't be used *)
+Definition vec_index_fwd (T: Type) (v: vec T) (i: usize) : result T :=
+  match nth_error (vec_to_list v) (usize_to_nat i) with
+  | Some n => Return n
+  | None => Fail_ Failure
+  end.
+
+Definition vec_index_back (T: Type) (v: vec T) (i: usize) (x: T) : result unit :=
+  if to_Z i <? vec_length v then Return tt else Fail_ Failure.
+
+Definition vec_index_mut_fwd (T: Type) (v: vec T) (i: usize) : result T :=
+  match nth_error (vec_to_list v) (usize_to_nat i) with
+  | Some n => Return n
+  | None => Fail_ Failure
+  end.
+
+Definition vec_index_mut_back (T: Type) (v: vec T) (i: usize) (x: T) : result (vec T) :=
+  vec_bind v (fun l =>
+    if to_Z i <? Z.of_nat (length l)
+    then Return (list_update l (usize_to_nat i) x)
+    else Fail_ Failure).
+
+End Primitives.
+ $@
+
+.PHONY: .FORCE
+.FORCE:
+endif
+endif
+
+include .depend
+endif
+
+# For the interactive mode
+%.fst-in %.fsti-in:
+	@echo $(FSTAR_OPTIONS)
+
+# Generate the .checked files in batch mode
+%.checked:
+	$(FSTAR) $(FSTAR_OPTIONS) $< && \
+	touch -c $@
+
+.PHONY: clean
+clean:
+	rm -f obj/*
diff --git a/tests/fstar/array/Primitives.fst b/tests/fstar/array/Primitives.fst
new file mode 100644
index 00000000..9db82069
--- /dev/null
+++ b/tests/fstar/array/Primitives.fst
@@ -0,0 +1,375 @@
+/// This file lists primitive and assumed functions and types
+module Primitives
+open FStar.Mul
+open FStar.List.Tot
+
+#set-options "--z3rlimit 15 --fuel 0 --ifuel 1"
+
+(*** Utilities *)
+val list_update (#a : Type0) (ls : list a) (i : nat{i < length ls}) (x : a) :
+  ls':list a{
+    length ls' = length ls /\
+    index ls' i == x
+  }
+#push-options "--fuel 1"
+let rec list_update #a ls i x =
+  match ls with
+  | x' :: ls -> if i = 0 then x :: ls else x' :: list_update ls (i-1) x
+#pop-options
+
+(*** Result *)
+type error : Type0 =
+| Failure
+| OutOfFuel
+
+type result (a : Type0) : Type0 =
+| Return : v:a -> result a
+| Fail : e:error -> result a
+
+// Monadic return operator
+unfold let return (#a : Type0) (x : a) : result a = Return x
+
+// Monadic bind operator.
+// Allows to use the notation:
+// ```
+// let* x = y in
+// ...
+// ```
+unfold let (let*) (#a #b : Type0) (m: result a)
+  (f: (x:a) -> Pure (result b) (requires (m == Return x)) (ensures fun _ -> True)) :
+  result b =
+  match m with
+  | Return x -> f x
+  | Fail e -> Fail e
+
+// Monadic assert(...)
+let massert (b:bool) : result unit = if b then Return () else Fail Failure
+
+// Normalize and unwrap a successful result (used for globals).
+let eval_global (#a : Type0) (x : result a{Return?
(normalize_term x)}) : a = Return?.v x + +(*** Misc *) +type char = FStar.Char.char +type string = string + +let is_zero (n: nat) : bool = n = 0 +let decrease (n: nat{n > 0}) : nat = n - 1 + +let mem_replace_fwd (a : Type0) (x : a) (y : a) : a = x +let mem_replace_back (a : Type0) (x : a) (y : a) : a = y + +(*** Scalars *) +/// Rem.: most of the following code was partially generated + +let isize_min : int = -9223372036854775808 // TODO: should be opaque +let isize_max : int = 9223372036854775807 // TODO: should be opaque +let i8_min : int = -128 +let i8_max : int = 127 +let i16_min : int = -32768 +let i16_max : int = 32767 +let i32_min : int = -2147483648 +let i32_max : int = 2147483647 +let i64_min : int = -9223372036854775808 +let i64_max : int = 9223372036854775807 +let i128_min : int = -170141183460469231731687303715884105728 +let i128_max : int = 170141183460469231731687303715884105727 +let usize_min : int = 0 +let usize_max : int = 4294967295 // TODO: should be opaque +let u8_min : int = 0 +let u8_max : int = 255 +let u16_min : int = 0 +let u16_max : int = 65535 +let u32_min : int = 0 +let u32_max : int = 4294967295 +let u64_min : int = 0 +let u64_max : int = 18446744073709551615 +let u128_min : int = 0 +let u128_max : int = 340282366920938463463374607431768211455 + +type scalar_ty = +| Isize +| I8 +| I16 +| I32 +| I64 +| I128 +| Usize +| U8 +| U16 +| U32 +| U64 +| U128 + +let scalar_min (ty : scalar_ty) : int = + match ty with + | Isize -> isize_min + | I8 -> i8_min + | I16 -> i16_min + | I32 -> i32_min + | I64 -> i64_min + | I128 -> i128_min + | Usize -> usize_min + | U8 -> u8_min + | U16 -> u16_min + | U32 -> u32_min + | U64 -> u64_min + | U128 -> u128_min + +let scalar_max (ty : scalar_ty) : int = + match ty with + | Isize -> isize_max + | I8 -> i8_max + | I16 -> i16_max + | I32 -> i32_max + | I64 -> i64_max + | I128 -> i128_max + | Usize -> usize_max + | U8 -> u8_max + | U16 -> u16_max + | U32 -> u32_max + | U64 -> u64_max + | U128 -> u128_max + +type scalar (ty : scalar_ty) : eqtype = x:int{scalar_min ty <= x && x <= scalar_max ty} + +let mk_scalar (ty : scalar_ty) (x : int) : result (scalar ty) = + if scalar_min ty <= x && scalar_max ty >= x then Return x else Fail Failure + +let scalar_neg (#ty : scalar_ty) (x : scalar ty) : result (scalar ty) = mk_scalar ty (-x) + +let scalar_div (#ty : scalar_ty) (x : scalar ty) (y : scalar ty) : result (scalar ty) = + if y <> 0 then mk_scalar ty (x / y) else Fail Failure + +/// The remainder operation +let int_rem (x : int) (y : int{y <> 0}) : int = + if x >= 0 then (x % y) else -(x % y) + +(* Checking consistency with Rust *) +let _ = assert_norm(int_rem 1 2 = 1) +let _ = assert_norm(int_rem (-1) 2 = -1) +let _ = assert_norm(int_rem 1 (-2) = 1) +let _ = assert_norm(int_rem (-1) (-2) = -1) + +let scalar_rem (#ty : scalar_ty) (x : scalar ty) (y : scalar ty) : result (scalar ty) = + if y <> 0 then mk_scalar ty (int_rem x y) else Fail Failure + +let scalar_add (#ty : scalar_ty) (x : scalar ty) (y : scalar ty) : result (scalar ty) = + mk_scalar ty (x + y) + +let scalar_sub (#ty : scalar_ty) (x : scalar ty) (y : scalar ty) : result (scalar ty) = + mk_scalar ty (x - y) + +let scalar_mul (#ty : scalar_ty) (x : scalar ty) (y : scalar ty) : result (scalar ty) = + mk_scalar ty (x * y) + +(** Cast an integer from a [src_ty] to a [tgt_ty] *) +// TODO: check the semantics of casts in Rust +let scalar_cast (src_ty : scalar_ty) (tgt_ty : scalar_ty) (x : scalar src_ty) : result (scalar tgt_ty) = + mk_scalar tgt_ty x + +/// The scalar types +type isize : 
eqtype = scalar Isize +type i8 : eqtype = scalar I8 +type i16 : eqtype = scalar I16 +type i32 : eqtype = scalar I32 +type i64 : eqtype = scalar I64 +type i128 : eqtype = scalar I128 +type usize : eqtype = scalar Usize +type u8 : eqtype = scalar U8 +type u16 : eqtype = scalar U16 +type u32 : eqtype = scalar U32 +type u64 : eqtype = scalar U64 +type u128 : eqtype = scalar U128 + +/// Negation +let isize_neg = scalar_neg #Isize +let i8_neg = scalar_neg #I8 +let i16_neg = scalar_neg #I16 +let i32_neg = scalar_neg #I32 +let i64_neg = scalar_neg #I64 +let i128_neg = scalar_neg #I128 + +/// Division +let isize_div = scalar_div #Isize +let i8_div = scalar_div #I8 +let i16_div = scalar_div #I16 +let i32_div = scalar_div #I32 +let i64_div = scalar_div #I64 +let i128_div = scalar_div #I128 +let usize_div = scalar_div #Usize +let u8_div = scalar_div #U8 +let u16_div = scalar_div #U16 +let u32_div = scalar_div #U32 +let u64_div = scalar_div #U64 +let u128_div = scalar_div #U128 + +/// Remainder +let isize_rem = scalar_rem #Isize +let i8_rem = scalar_rem #I8 +let i16_rem = scalar_rem #I16 +let i32_rem = scalar_rem #I32 +let i64_rem = scalar_rem #I64 +let i128_rem = scalar_rem #I128 +let usize_rem = scalar_rem #Usize +let u8_rem = scalar_rem #U8 +let u16_rem = scalar_rem #U16 +let u32_rem = scalar_rem #U32 +let u64_rem = scalar_rem #U64 +let u128_rem = scalar_rem #U128 + +/// Addition +let isize_add = scalar_add #Isize +let i8_add = scalar_add #I8 +let i16_add = scalar_add #I16 +let i32_add = scalar_add #I32 +let i64_add = scalar_add #I64 +let i128_add = scalar_add #I128 +let usize_add = scalar_add #Usize +let u8_add = scalar_add #U8 +let u16_add = scalar_add #U16 +let u32_add = scalar_add #U32 +let u64_add = scalar_add #U64 +let u128_add = scalar_add #U128 + +/// Substraction +let isize_sub = scalar_sub #Isize +let i8_sub = scalar_sub #I8 +let i16_sub = scalar_sub #I16 +let i32_sub = scalar_sub #I32 +let i64_sub = scalar_sub #I64 +let i128_sub = scalar_sub #I128 +let usize_sub = scalar_sub #Usize +let u8_sub = scalar_sub #U8 +let u16_sub = scalar_sub #U16 +let u32_sub = scalar_sub #U32 +let u64_sub = scalar_sub #U64 +let u128_sub = scalar_sub #U128 + +/// Multiplication +let isize_mul = scalar_mul #Isize +let i8_mul = scalar_mul #I8 +let i16_mul = scalar_mul #I16 +let i32_mul = scalar_mul #I32 +let i64_mul = scalar_mul #I64 +let i128_mul = scalar_mul #I128 +let usize_mul = scalar_mul #Usize +let u8_mul = scalar_mul #U8 +let u16_mul = scalar_mul #U16 +let u32_mul = scalar_mul #U32 +let u64_mul = scalar_mul #U64 +let u128_mul = scalar_mul #U128 + +(*** Range *) +type range (a : Type0) = { + start : a; + end_ : a; +} + +(*** Array *) +type array (a : Type0) (n : usize) = s:list a{length s = n} + +// We tried putting the normalize_term condition as a refinement on the list +// but it didn't work. It works with the requires clause. 
+let mk_array (a : Type0) (n : usize) + (l : list a) : + Pure (array a n) + (requires (normalize_term(FStar.List.Tot.length l) = n)) + (ensures (fun _ -> True)) = + normalize_term_spec (FStar.List.Tot.length l); + l + +let array_index_shared (a : Type0) (n : usize) (x : array a n) (i : usize) : result a = + if i < length x then Return (index x i) + else Fail Failure + +let array_index_mut_fwd (a : Type0) (n : usize) (x : array a n) (i : usize) : result a = + if i < length x then Return (index x i) + else Fail Failure + +let array_index_mut_back (a : Type0) (n : usize) (x : array a n) (i : usize) (nx : a) : result (array a n) = + if i < length x then Return (list_update x i nx) + else Fail Failure + +(*** Slice *) +type slice (a : Type0) = s:list a{length s <= usize_max} + +let slice_len (a : Type0) (s : slice a) : usize = length s + +let slice_index_shared (a : Type0) (x : slice a) (i : usize) : result a = + if i < length x then Return (index x i) + else Fail Failure + +let slice_index_mut_fwd (a : Type0) (x : slice a) (i : usize) : result a = + if i < length x then Return (index x i) + else Fail Failure + +let slice_index_mut_back (a : Type0) (x : slice a) (i : usize) (nx : a) : result (slice a) = + if i < length x then Return (list_update x i nx) + else Fail Failure + +(*** Subslices *) + +let array_to_slice_shared (a : Type0) (n : usize) (x : array a n) : result (slice a) = Return x +let array_to_slice_mut_fwd (a : Type0) (n : usize) (x : array a n) : result (slice a) = Return x +let array_to_slice_mut_back (a : Type0) (n : usize) (x : array a n) (s : slice a) : result (array a n) = + if length s = n then Return s + else Fail Failure + +// TODO: finish the definitions below (there lacks [List.drop] and [List.take] in the standard library *) +let array_subslice_shared (a : Type0) (n : usize) (x : array a n) (r : range usize) : result (slice a) = + admit() + +let array_subslice_mut_fwd (a : Type0) (n : usize) (x : array a n) (r : range usize) : result (slice a) = + admit() + +let array_subslice_mut_back (a : Type0) (n : usize) (x : array a n) (r : range usize) (ns : slice a) : result (array a n) = + admit() + +let slice_subslice_shared (a : Type0) (x : slice a) (r : range usize) : result (slice a) = + admit() + +let slice_subslice_mut_fwd (a : Type0) (x : slice a) (r : range usize) : result (slice a) = + admit() + +let slice_subslice_mut_back (a : Type0) (x : slice a) (r : range usize) (ns : slice a) : result (slice a) = + admit() + +(*** Vector *) +type vec (a : Type0) = v:list a{length v <= usize_max} + +let vec_new (a : Type0) : vec a = assert_norm(length #a [] == 0); [] +let vec_len (a : Type0) (v : vec a) : usize = length v + +// The **forward** function shouldn't be used +let vec_push_fwd (a : Type0) (v : vec a) (x : a) : unit = () +let vec_push_back (a : Type0) (v : vec a) (x : a) : + Pure (result (vec a)) + (requires True) + (ensures (fun res -> + match res with + | Fail e -> e == Failure + | Return v' -> length v' = length v + 1)) = + if length v < usize_max then begin + (**) assert_norm(length [x] == 1); + (**) append_length v [x]; + (**) assert(length (append v [x]) = length v + 1); + Return (append v [x]) + end + else Fail Failure + +// The **forward** function shouldn't be used +let vec_insert_fwd (a : Type0) (v : vec a) (i : usize) (x : a) : result unit = + if i < length v then Return () else Fail Failure +let vec_insert_back (a : Type0) (v : vec a) (i : usize) (x : a) : result (vec a) = + if i < length v then Return (list_update v i x) else Fail Failure + +// The 
**backward** function shouldn't be used +let vec_index_fwd (a : Type0) (v : vec a) (i : usize) : result a = + if i < length v then Return (index v i) else Fail Failure +let vec_index_back (a : Type0) (v : vec a) (i : usize) (x : a) : result unit = + if i < length v then Return () else Fail Failure + +let vec_index_mut_fwd (a : Type0) (v : vec a) (i : usize) : result a = + if i < length v then Return (index v i) else Fail Failure +let vec_index_mut_back (a : Type0) (v : vec a) (i : usize) (nx : a) : result (vec a) = + if i < length v then Return (list_update v i nx) else Fail Failure -- cgit v1.2.3