author     Son Ho    2023-08-04 22:33:05 +0200
committer  Son Ho    2023-08-04 22:33:05 +0200
commit     60db3c210aeaf66a4fe312544c6e5d4662681de7 (patch)
tree       7ecc73ed9a8406db05f3723511a6772c48e4d96f
parent     5e38184af1b99a307271f738329cd96cb364fc1d (diff)
Generate the array test files for Coq and F*
-rw-r--r--   tests/coq/array/Array_Funs.v                   | 300
-rw-r--r--   tests/coq/array/Array_Types.v                  |  11
-rw-r--r--   tests/coq/array/Makefile                       |  23
-rw-r--r--   tests/coq/array/Primitives.v                   | 523
-rw-r--r--   tests/coq/array/_CoqProject                    |   8
-rw-r--r--   tests/fstar/.gitignore                         |   1
-rw-r--r--   tests/fstar/array/Array.Clauses.Template.fst   |  18
-rw-r--r--   tests/fstar/array/Array.Clauses.fst            |  19
-rw-r--r--   tests/fstar/array/Array.Funs.fst               | 226
-rw-r--r--   tests/fstar/array/Array.Types.fst              |   7
-rw-r--r--   tests/fstar/array/Makefile                     |  49
-rw-r--r--   tests/fstar/array/Primitives.fst               | 375
12 files changed, 1560 insertions, 0 deletions
diff --git a/tests/coq/array/Array_Funs.v b/tests/coq/array/Array_Funs.v
new file mode 100644
index 00000000..4a862d1b
--- /dev/null
+++ b/tests/coq/array/Array_Funs.v
@@ -0,0 +1,300 @@
+(** THIS FILE WAS AUTOMATICALLY GENERATED BY AENEAS *)
+(** [array]: function definitions *)
+Require Import Primitives.
+Import Primitives.
+Require Import Coq.ZArith.ZArith.
+Require Import List.
+Import ListNotations.
+Local Open Scope Primitives_scope.
+Require Export Array_Types.
+Import Array_Types.
+Module Array_Funs.
+
+(** [array::array_to_shared_slice_]: forward function *)
+Definition array_to_shared_slice__fwd
+ (T : Type) (s : array T 32%usize) : result (slice T) :=
+ array_to_slice_shared T 32%usize s
+.
+
+(** [array::array_to_mut_slice_]: forward function *)
+Definition array_to_mut_slice__fwd
+ (T : Type) (s : array T 32%usize) : result (slice T) :=
+ array_to_slice_mut_fwd T 32%usize s
+.
+
+(** [array::array_to_mut_slice_]: backward function 0 *)
+Definition array_to_mut_slice__back
+ (T : Type) (s : array T 32%usize) (ret : slice T) :
+ result (array T 32%usize)
+ :=
+ array_to_slice_mut_back T 32%usize s ret
+.
+
+(** [array::array_len]: forward function *)
+Definition array_len_fwd (T : Type) (s : array T 32%usize) : result usize :=
+ s0 <- array_to_slice_shared T 32%usize s; let i := slice_len T s0 in Return i
+.
+
+(** [array::shared_array_len]: forward function *)
+Definition shared_array_len_fwd
+ (T : Type) (s : array T 32%usize) : result usize :=
+ s0 <- array_to_slice_shared T 32%usize s; let i := slice_len T s0 in Return i
+.
+
+(** [array::shared_slice_len]: forward function *)
+Definition shared_slice_len_fwd (T : Type) (s : slice T) : result usize :=
+ let i := slice_len T s in Return i
+.
+
+(** [array::index_array_shared]: forward function *)
+Definition index_array_shared_fwd
+ (T : Type) (s : array T 32%usize) (i : usize) : result T :=
+ array_index_shared T 32%usize s i
+.
+
+(** [array::index_array_u32]: forward function *)
+Definition index_array_u32_fwd
+ (s : array u32 32%usize) (i : usize) : result u32 :=
+ array_index_shared u32 32%usize s i
+.
+
+(** [array::index_array_generic]: forward function *)
+Definition index_array_generic_fwd
+ (N : usize) (s : array u32 N) (i : usize) : result u32 :=
+ array_index_shared u32 N s i
+.
+
+(** [array::index_array_generic_call]: forward function *)
+Definition index_array_generic_call_fwd
+ (N : usize) (s : array u32 N) (i : usize) : result u32 :=
+ index_array_generic_fwd N s i
+.
+
+(** [array::index_array_copy]: forward function *)
+Definition index_array_copy_fwd (x : array u32 32%usize) : result u32 :=
+ array_index_shared u32 32%usize x 0%usize
+.
+
+(** [array::index_mut_array]: forward function *)
+Definition index_mut_array_fwd
+ (T : Type) (s : array T 32%usize) (i : usize) : result T :=
+ array_index_mut_fwd T 32%usize s i
+.
+
+(** [array::index_mut_array]: backward function 0 *)
+Definition index_mut_array_back
+ (T : Type) (s : array T 32%usize) (i : usize) (ret : T) :
+ result (array T 32%usize)
+ :=
+ array_index_mut_back T 32%usize s i ret
+.
+
+(** [array::index_slice]: forward function *)
+Definition index_slice_fwd (T : Type) (s : slice T) (i : usize) : result T :=
+ slice_index_shared T s i
+.
+
+(** [array::index_mut_slice]: forward function *)
+Definition index_mut_slice_fwd
+ (T : Type) (s : slice T) (i : usize) : result T :=
+ slice_index_mut_fwd T s i
+.
+
+(** [array::index_mut_slice]: backward function 0 *)
+Definition index_mut_slice_back
+ (T : Type) (s : slice T) (i : usize) (ret : T) : result (slice T) :=
+ slice_index_mut_back T s i ret
+.
+
+(** [array::slice_subslice_shared_]: forward function *)
+Definition slice_subslice_shared__fwd
+ (x : slice u32) (y : usize) (z : usize) : result (slice u32) :=
+ slice_subslice_shared u32 x (mk_range y z)
+.
+
+(** [array::slice_subslice_mut_]: forward function *)
+Definition slice_subslice_mut__fwd
+ (x : slice u32) (y : usize) (z : usize) : result (slice u32) :=
+ slice_subslice_mut_fwd u32 x (mk_range y z)
+.
+
+(** [array::slice_subslice_mut_]: backward function 0 *)
+Definition slice_subslice_mut__back
+ (x : slice u32) (y : usize) (z : usize) (ret : slice u32) :
+ result (slice u32)
+ :=
+ slice_subslice_mut_back u32 x (mk_range y z) ret
+.
+
+(** [array::array_to_slice_shared_]: forward function *)
+Definition array_to_slice_shared__fwd
+ (x : array u32 32%usize) : result (slice u32) :=
+ array_to_slice_shared u32 32%usize x
+.
+
+(** [array::array_to_slice_mut_]: forward function *)
+Definition array_to_slice_mut__fwd
+ (x : array u32 32%usize) : result (slice u32) :=
+ array_to_slice_mut_fwd u32 32%usize x
+.
+
+(** [array::array_to_slice_mut_]: backward function 0 *)
+Definition array_to_slice_mut__back
+ (x : array u32 32%usize) (ret : slice u32) : result (array u32 32%usize) :=
+ array_to_slice_mut_back u32 32%usize x ret
+.
+
+(** [array::array_subslice_shared_]: forward function *)
+Definition array_subslice_shared__fwd
+ (x : array u32 32%usize) (y : usize) (z : usize) : result (slice u32) :=
+ array_subslice_shared u32 32%usize x (mk_range y z)
+.
+
+(** [array::array_subslice_mut_]: forward function *)
+Definition array_subslice_mut__fwd
+ (x : array u32 32%usize) (y : usize) (z : usize) : result (slice u32) :=
+ array_subslice_mut_fwd u32 32%usize x (mk_range y z)
+.
+
+(** [array::array_subslice_mut_]: backward function 0 *)
+Definition array_subslice_mut__back
+ (x : array u32 32%usize) (y : usize) (z : usize) (ret : slice u32) :
+ result (array u32 32%usize)
+ :=
+ array_subslice_mut_back u32 32%usize x (mk_range y z) ret
+.
+
+(** [array::index_slice_0]: forward function *)
+Definition index_slice_0_fwd (T : Type) (s : slice T) : result T :=
+ slice_index_shared T s 0%usize
+.
+
+(** [array::index_array_0]: forward function *)
+Definition index_array_0_fwd (T : Type) (s : array T 32%usize) : result T :=
+ array_index_shared T 32%usize s 0%usize
+.
+
+(** [array::index_index_array]: forward function *)
+Definition index_index_array_fwd
+ (s : array (array u32 32%usize) 32%usize) (i : usize) (j : usize) :
+ result u32
+ :=
+ a <- array_index_shared (array u32 32%usize) 32%usize s i;
+ array_index_shared u32 32%usize a j
+.
+
+(** [array::update_update_array]: forward function *)
+Definition update_update_array_fwd
+ (s : array (array u32 32%usize) 32%usize) (i : usize) (j : usize) :
+ result unit
+ :=
+ a <- array_index_mut_fwd (array u32 32%usize) 32%usize s i;
+ a0 <- array_index_mut_back u32 32%usize a j 0%u32;
+ _ <- array_index_mut_back (array u32 32%usize) 32%usize s i a0;
+ Return tt
+.
+
+(** [array::array_local_deep_copy]: forward function *)
+Definition array_local_deep_copy_fwd (x : array u32 32%usize) : result unit :=
+ Return tt
+.
+
+(** [array::f0]: forward function *)
+Definition f0_fwd : result unit :=
+ s <-
+ array_to_slice_mut_fwd u32 2%usize (mk_array u32 2%usize [ 1%u32; 2%u32 ]);
+ s0 <- slice_index_mut_back u32 s 0%usize 1%u32;
+ _ <-
+ array_to_slice_mut_back u32 2%usize (mk_array u32 2%usize [ 1%u32; 2%u32 ])
+ s0;
+ Return tt
+.
+
+(** [array::f1]: forward function *)
+Definition f1_fwd : result unit :=
+ _ <-
+ array_index_mut_back u32 2%usize (mk_array u32 2%usize [ 1%u32; 2%u32 ])
+ 0%usize 1%u32;
+ Return tt
+.
+
+(** [array::sum]: loop 0: forward function *)
+Fixpoint sum_loop_fwd
+ (n : nat) (s : slice u32) (sum : u32) (i : usize) : result u32 :=
+ match n with
+ | O => Fail_ OutOfFuel
+ | S n0 =>
+ let i0 := slice_len u32 s in
+ if i s< i0
+ then (
+ i1 <- slice_index_shared u32 s i;
+ sum0 <- u32_add sum i1;
+ i2 <- usize_add i 1%usize;
+ sum_loop_fwd n0 s sum0 i2)
+ else Return sum
+ end
+.
+
+(** [array::sum]: forward function *)
+Definition sum_fwd (n : nat) (s : slice u32) : result u32 :=
+ sum_loop_fwd n s 0%u32 0%usize
+.
+
+(** [array::sum2]: loop 0: forward function *)
+Fixpoint sum2_loop_fwd
+ (n : nat) (s : slice u32) (s2 : slice u32) (sum : u32) (i : usize) :
+ result u32
+ :=
+ match n with
+ | O => Fail_ OutOfFuel
+ | S n0 =>
+ let i0 := slice_len u32 s in
+ if i s< i0
+ then (
+ i1 <- slice_index_shared u32 s i;
+ i2 <- slice_index_shared u32 s2 i;
+ i3 <- u32_add i1 i2;
+ sum0 <- u32_add sum i3;
+ i4 <- usize_add i 1%usize;
+ sum2_loop_fwd n0 s s2 sum0 i4)
+ else Return sum
+ end
+.
+
+(** [array::sum2]: forward function *)
+Definition sum2_fwd (n : nat) (s : slice u32) (s2 : slice u32) : result u32 :=
+ let i := slice_len u32 s in
+ let i0 := slice_len u32 s2 in
+ if negb (i s= i0) then Fail_ Failure else sum2_loop_fwd n s s2 0%u32 0%usize
+.
+
+(** [array::f2]: forward function *)
+Definition f2_fwd (i : u32) : result unit :=
+ Return tt.
+
+(** [array::f4]: forward function *)
+Definition f4_fwd
+ (x : array u32 32%usize) (y : usize) (z : usize) : result (slice u32) :=
+ array_subslice_shared u32 32%usize x (mk_range y z)
+.
+
+(** [array::f3]: forward function *)
+Definition f3_fwd (n : nat) : result u32 :=
+ i <-
+ array_index_shared u32 2%usize (mk_array u32 2%usize [ 1%u32; 2%u32 ])
+ 0%usize;
+ _ <- f2_fwd i;
+ s <-
+ array_to_slice_shared u32 2%usize (mk_array u32 2%usize [ 1%u32; 2%u32 ]);
+ s0 <-
+ f4_fwd
+ (mk_array u32 32%usize [
+ 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32;
+ 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32;
+ 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32; 0%u32;
+ 0%u32; 0%u32
+ ]) 16%usize 18%usize;
+ sum2_fwd n s s0
+.
+
+End Array_Funs .
diff --git a/tests/coq/array/Array_Types.v b/tests/coq/array/Array_Types.v
new file mode 100644
index 00000000..7e1f2a07
--- /dev/null
+++ b/tests/coq/array/Array_Types.v
@@ -0,0 +1,11 @@
+(** THIS FILE WAS AUTOMATICALLY GENERATED BY AENEAS *)
+(** [array]: type definitions *)
+Require Import Primitives.
+Import Primitives.
+Require Import Coq.ZArith.ZArith.
+Require Import List.
+Import ListNotations.
+Local Open Scope Primitives_scope.
+Module Array_Types.
+
+End Array_Types .
diff --git a/tests/coq/array/Makefile b/tests/coq/array/Makefile
new file mode 100644
index 00000000..1a5aee4a
--- /dev/null
+++ b/tests/coq/array/Makefile
@@ -0,0 +1,23 @@
+# This file was automatically generated - modify ../Makefile.template instead
+# Makefile originally taken from coq-club
+
+%: Makefile.coq phony
+ +make -f Makefile.coq $@
+
+all: Makefile.coq
+ +make -f Makefile.coq all
+
+clean: Makefile.coq
+ +make -f Makefile.coq clean
+ rm -f Makefile.coq
+
+Makefile.coq: _CoqProject Makefile
+ coq_makefile -f _CoqProject | sed 's/$$(COQCHK) $$(COQCHKFLAGS) $$(COQLIBS)/$$(COQCHK) $$(COQCHKFLAGS) $$(subst -Q,-R,$$(COQLIBS))/' > Makefile.coq
+
+_CoqProject: ;
+
+Makefile: ;
+
+phony: ;
+
+.PHONY: all clean phony
diff --git a/tests/coq/array/Primitives.v b/tests/coq/array/Primitives.v
new file mode 100644
index 00000000..71a2d9c3
--- /dev/null
+++ b/tests/coq/array/Primitives.v
@@ -0,0 +1,523 @@
+Require Import Lia.
+Require Coq.Strings.Ascii.
+Require Coq.Strings.String.
+Require Import Coq.Program.Equality.
+Require Import Coq.ZArith.ZArith.
+Require Import Coq.ZArith.Znat.
+Require Import List.
+Import ListNotations.
+
+Module Primitives.
+
+ (* TODO: use more *)
+Declare Scope Primitives_scope.
+
+(*** Result *)
+
+Inductive error :=
+ | Failure
+ | OutOfFuel.
+
+Inductive result A :=
+ | Return : A -> result A
+ | Fail_ : error -> result A.
+
+Arguments Return {_} a.
+Arguments Fail_ {_}.
+
+Definition bind {A B} (m: result A) (f: A -> result B) : result B :=
+ match m with
+ | Fail_ e => Fail_ e
+ | Return x => f x
+ end.
+
+Definition return_ {A: Type} (x: A) : result A := Return x.
+Definition fail_ {A: Type} (e: error) : result A := Fail_ e.
+
+Notation "x <- c1 ; c2" := (bind c1 (fun x => c2))
+ (at level 61, c1 at next level, right associativity).
+
+(** Monadic assert *)
+Definition massert (b: bool) : result unit :=
+ if b then Return tt else Fail_ Failure.
+
+(** Normalize and unwrap a successful result (used for globals) *)
+Definition eval_result_refl {A} {x} (a: result A) (p: a = Return x) : A :=
+ match a as r return (r = Return x -> A) with
+ | Return a' => fun _ => a'
+ | Fail_ e => fun p' =>
+ False_rect _ (eq_ind (Fail_ e)
+ (fun e : result A =>
+ match e with
+ | Return _ => False
+ | Fail_ e => True
+ end)
+ I (Return x) p')
+ end p.
+
+Notation "x %global" := (eval_result_refl x eq_refl) (at level 40).
+Notation "x %return" := (eval_result_refl x eq_refl) (at level 40).
+
+(* Sanity check *)
+Check (if true then Return (1 + 2) else Fail_ Failure)%global = 3.
+
+(*** Misc *)
+
+
+Definition string := Coq.Strings.String.string.
+Definition char := Coq.Strings.Ascii.ascii.
+Definition char_of_byte := Coq.Strings.Ascii.ascii_of_byte.
+
+Definition mem_replace_fwd (a : Type) (x : a) (y : a) : a := x .
+Definition mem_replace_back (a : Type) (x : a) (y : a) : a := y .
+
+(*** Scalars *)
+
+Definition i8_min : Z := -128%Z.
+Definition i8_max : Z := 127%Z.
+Definition i16_min : Z := -32768%Z.
+Definition i16_max : Z := 32767%Z.
+Definition i32_min : Z := -2147483648%Z.
+Definition i32_max : Z := 2147483647%Z.
+Definition i64_min : Z := -9223372036854775808%Z.
+Definition i64_max : Z := 9223372036854775807%Z.
+Definition i128_min : Z := -170141183460469231731687303715884105728%Z.
+Definition i128_max : Z := 170141183460469231731687303715884105727%Z.
+Definition u8_min : Z := 0%Z.
+Definition u8_max : Z := 255%Z.
+Definition u16_min : Z := 0%Z.
+Definition u16_max : Z := 65535%Z.
+Definition u32_min : Z := 0%Z.
+Definition u32_max : Z := 4294967295%Z.
+Definition u64_min : Z := 0%Z.
+Definition u64_max : Z := 18446744073709551615%Z.
+Definition u128_min : Z := 0%Z.
+Definition u128_max : Z := 340282366920938463463374607431768211455%Z.
+
+(** The bounds of [isize] and [usize] vary with the architecture. *)
+Axiom isize_min : Z.
+Axiom isize_max : Z.
+Definition usize_min : Z := 0%Z.
+Axiom usize_max : Z.
+
+Open Scope Z_scope.
+
+(** We provide those lemmas to reason about the bounds of [isize] and [usize] *)
+Axiom isize_min_bound : isize_min <= i32_min.
+Axiom isize_max_bound : i32_max <= isize_max.
+Axiom usize_max_bound : u32_max <= usize_max.
+
+Inductive scalar_ty :=
+ | Isize
+ | I8
+ | I16
+ | I32
+ | I64
+ | I128
+ | Usize
+ | U8
+ | U16
+ | U32
+ | U64
+ | U128
+.
+
+Definition scalar_min (ty: scalar_ty) : Z :=
+ match ty with
+ | Isize => isize_min
+ | I8 => i8_min
+ | I16 => i16_min
+ | I32 => i32_min
+ | I64 => i64_min
+ | I128 => i128_min
+ | Usize => usize_min
+ | U8 => u8_min
+ | U16 => u16_min
+ | U32 => u32_min
+ | U64 => u64_min
+ | U128 => u128_min
+end.
+
+Definition scalar_max (ty: scalar_ty) : Z :=
+ match ty with
+ | Isize => isize_max
+ | I8 => i8_max
+ | I16 => i16_max
+ | I32 => i32_max
+ | I64 => i64_max
+ | I128 => i128_max
+ | Usize => usize_max
+ | U8 => u8_max
+ | U16 => u16_max
+ | U32 => u32_max
+ | U64 => u64_max
+ | U128 => u128_max
+end.
+
+(** We use the following conservative bounds to make sure we can compute bound
+ checks in most situations *)
+Definition scalar_min_cons (ty: scalar_ty) : Z :=
+ match ty with
+ | Isize => i32_min
+ | Usize => u32_min
+ | _ => scalar_min ty
+end.
+
+Definition scalar_max_cons (ty: scalar_ty) : Z :=
+ match ty with
+ | Isize => i32_max
+ | Usize => u32_max
+ | _ => scalar_max ty
+end.
+
+Lemma scalar_min_cons_valid : forall ty, scalar_min ty <= scalar_min_cons ty .
+Proof.
+ destruct ty; unfold scalar_min_cons, scalar_min; try lia.
+ - pose isize_min_bound; lia.
+ - apply Z.le_refl.
+Qed.
+
+Lemma scalar_max_cons_valid : forall ty, scalar_max ty >= scalar_max_cons ty .
+Proof.
+ destruct ty; unfold scalar_max_cons, scalar_max; try lia.
+ - pose isize_max_bound; lia.
+ - pose usize_max_bound. lia.
+Qed.
+
+Definition scalar (ty: scalar_ty) : Type :=
+ { x: Z | scalar_min ty <= x <= scalar_max ty }.
+
+Definition to_Z {ty} (x: scalar ty) : Z := proj1_sig x.
+
+(** Bounds checks: we start by using the conservative bounds, to make sure we
+ can compute in most situations, then we use the real bounds (for [isize]
+ and [usize]). *)
+Definition scalar_ge_min (ty: scalar_ty) (x: Z) : bool :=
+ Z.leb (scalar_min_cons ty) x || Z.leb (scalar_min ty) x.
+
+Definition scalar_le_max (ty: scalar_ty) (x: Z) : bool :=
+ Z.leb x (scalar_max_cons ty) || Z.leb x (scalar_max ty).
+
+Lemma scalar_ge_min_valid (ty: scalar_ty) (x: Z) :
+ scalar_ge_min ty x = true -> scalar_min ty <= x .
+Proof.
+ unfold scalar_ge_min.
+ pose (scalar_min_cons_valid ty).
+ lia.
+Qed.
+
+Lemma scalar_le_max_valid (ty: scalar_ty) (x: Z) :
+ scalar_le_max ty x = true -> x <= scalar_max ty .
+Proof.
+ unfold scalar_le_max.
+ pose (scalar_max_cons_valid ty).
+ lia.
+Qed.
+
+Definition scalar_in_bounds (ty: scalar_ty) (x: Z) : bool :=
+ scalar_ge_min ty x && scalar_le_max ty x .
+
+Lemma scalar_in_bounds_valid (ty: scalar_ty) (x: Z) :
+ scalar_in_bounds ty x = true -> scalar_min ty <= x <= scalar_max ty .
+Proof.
+ unfold scalar_in_bounds.
+ intros H.
+ destruct (scalar_ge_min ty x) eqn:Hmin.
+ - destruct (scalar_le_max ty x) eqn:Hmax.
+ + pose (scalar_ge_min_valid ty x Hmin).
+ pose (scalar_le_max_valid ty x Hmax).
+ lia.
+ + inversion H.
+ - inversion H.
+Qed.
+
+Import Sumbool.
+
+Definition mk_scalar (ty: scalar_ty) (x: Z) : result (scalar ty) :=
+ match sumbool_of_bool (scalar_in_bounds ty x) with
+ | left H => Return (exist _ x (scalar_in_bounds_valid _ _ H))
+ | right _ => Fail_ Failure
+ end.
+
+Definition scalar_add {ty} (x y: scalar ty) : result (scalar ty) := mk_scalar ty (to_Z x + to_Z y).
+
+Definition scalar_sub {ty} (x y: scalar ty) : result (scalar ty) := mk_scalar ty (to_Z x - to_Z y).
+
+Definition scalar_mul {ty} (x y: scalar ty) : result (scalar ty) := mk_scalar ty (to_Z x * to_Z y).
+
+Definition scalar_div {ty} (x y: scalar ty) : result (scalar ty) :=
+ if to_Z y =? 0 then Fail_ Failure else
+ mk_scalar ty (to_Z x / to_Z y).
+
+Definition scalar_rem {ty} (x y: scalar ty) : result (scalar ty) := mk_scalar ty (Z.rem (to_Z x) (to_Z y)).
+
+Definition scalar_neg {ty} (x: scalar ty) : result (scalar ty) := mk_scalar ty (-(to_Z x)).
+
+(** Cast an integer from a [src_ty] to a [tgt_ty] *)
+(* TODO: check the semantics of casts in Rust *)
+Definition scalar_cast (src_ty tgt_ty : scalar_ty) (x : scalar src_ty) : result (scalar tgt_ty) :=
+ mk_scalar tgt_ty (to_Z x).
+
+(** Comparisons *)
+Definition scalar_leb {ty : scalar_ty} (x : scalar ty) (y : scalar ty) : bool :=
+ Z.leb (to_Z x) (to_Z y) .
+
+Definition scalar_ltb {ty : scalar_ty} (x : scalar ty) (y : scalar ty) : bool :=
+ Z.ltb (to_Z x) (to_Z y) .
+
+Definition scalar_geb {ty : scalar_ty} (x : scalar ty) (y : scalar ty) : bool :=
+ Z.geb (to_Z x) (to_Z y) .
+
+Definition scalar_gtb {ty : scalar_ty} (x : scalar ty) (y : scalar ty) : bool :=
+ Z.gtb (to_Z x) (to_Z y) .
+
+Definition scalar_eqb {ty : scalar_ty} (x : scalar ty) (y : scalar ty) : bool :=
+ Z.eqb (to_Z x) (to_Z y) .
+
+Definition scalar_neqb {ty : scalar_ty} (x : scalar ty) (y : scalar ty) : bool :=
+ negb (Z.eqb (to_Z x) (to_Z y)) .
+
+
+(** The scalar types *)
+Definition isize := scalar Isize.
+Definition i8 := scalar I8.
+Definition i16 := scalar I16.
+Definition i32 := scalar I32.
+Definition i64 := scalar I64.
+Definition i128 := scalar I128.
+Definition usize := scalar Usize.
+Definition u8 := scalar U8.
+Definition u16 := scalar U16.
+Definition u32 := scalar U32.
+Definition u64 := scalar U64.
+Definition u128 := scalar U128.
+
+(** Negation *)
+Definition isize_neg := @scalar_neg Isize.
+Definition i8_neg := @scalar_neg I8.
+Definition i16_neg := @scalar_neg I16.
+Definition i32_neg := @scalar_neg I32.
+Definition i64_neg := @scalar_neg I64.
+Definition i128_neg := @scalar_neg I128.
+
+(** Division *)
+Definition isize_div := @scalar_div Isize.
+Definition i8_div := @scalar_div I8.
+Definition i16_div := @scalar_div I16.
+Definition i32_div := @scalar_div I32.
+Definition i64_div := @scalar_div I64.
+Definition i128_div := @scalar_div I128.
+Definition usize_div := @scalar_div Usize.
+Definition u8_div := @scalar_div U8.
+Definition u16_div := @scalar_div U16.
+Definition u32_div := @scalar_div U32.
+Definition u64_div := @scalar_div U64.
+Definition u128_div := @scalar_div U128.
+
+(** Remainder *)
+Definition isize_rem := @scalar_rem Isize.
+Definition i8_rem := @scalar_rem I8.
+Definition i16_rem := @scalar_rem I16.
+Definition i32_rem := @scalar_rem I32.
+Definition i64_rem := @scalar_rem I64.
+Definition i128_rem := @scalar_rem I128.
+Definition usize_rem := @scalar_rem Usize.
+Definition u8_rem := @scalar_rem U8.
+Definition u16_rem := @scalar_rem U16.
+Definition u32_rem := @scalar_rem U32.
+Definition u64_rem := @scalar_rem U64.
+Definition u128_rem := @scalar_rem U128.
+
+(** Addition *)
+Definition isize_add := @scalar_add Isize.
+Definition i8_add := @scalar_add I8.
+Definition i16_add := @scalar_add I16.
+Definition i32_add := @scalar_add I32.
+Definition i64_add := @scalar_add I64.
+Definition i128_add := @scalar_add I128.
+Definition usize_add := @scalar_add Usize.
+Definition u8_add := @scalar_add U8.
+Definition u16_add := @scalar_add U16.
+Definition u32_add := @scalar_add U32.
+Definition u64_add := @scalar_add U64.
+Definition u128_add := @scalar_add U128.
+
+(** Subtraction *)
+Definition isize_sub := @scalar_sub Isize.
+Definition i8_sub := @scalar_sub I8.
+Definition i16_sub := @scalar_sub I16.
+Definition i32_sub := @scalar_sub I32.
+Definition i64_sub := @scalar_sub I64.
+Definition i128_sub := @scalar_sub I128.
+Definition usize_sub := @scalar_sub Usize.
+Definition u8_sub := @scalar_sub U8.
+Definition u16_sub := @scalar_sub U16.
+Definition u32_sub := @scalar_sub U32.
+Definition u64_sub := @scalar_sub U64.
+Definition u128_sub := @scalar_sub U128.
+
+(** Multiplication *)
+Definition isize_mul := @scalar_mul Isize.
+Definition i8_mul := @scalar_mul I8.
+Definition i16_mul := @scalar_mul I16.
+Definition i32_mul := @scalar_mul I32.
+Definition i64_mul := @scalar_mul I64.
+Definition i128_mul := @scalar_mul I128.
+Definition usize_mul := @scalar_mul Usize.
+Definition u8_mul := @scalar_mul U8.
+Definition u16_mul := @scalar_mul U16.
+Definition u32_mul := @scalar_mul U32.
+Definition u64_mul := @scalar_mul U64.
+Definition u128_mul := @scalar_mul U128.
+
+(** Small utility *)
+Definition usize_to_nat (x: usize) : nat := Z.to_nat (to_Z x).
+
+(** Notations *)
+Notation "x %isize" := ((mk_scalar Isize x)%return) (at level 9).
+Notation "x %i8" := ((mk_scalar I8 x)%return) (at level 9).
+Notation "x %i16" := ((mk_scalar I16 x)%return) (at level 9).
+Notation "x %i32" := ((mk_scalar I32 x)%return) (at level 9).
+Notation "x %i64" := ((mk_scalar I64 x)%return) (at level 9).
+Notation "x %i128" := ((mk_scalar I128 x)%return) (at level 9).
+Notation "x %usize" := ((mk_scalar Usize x)%return) (at level 9).
+Notation "x %u8" := ((mk_scalar U8 x)%return) (at level 9).
+Notation "x %u16" := ((mk_scalar U16 x)%return) (at level 9).
+Notation "x %u32" := ((mk_scalar U32 x)%return) (at level 9).
+Notation "x %u64" := ((mk_scalar U64 x)%return) (at level 9).
+Notation "x %u128" := ((mk_scalar U128 x)%return) (at level 9).
+
+Notation "x s= y" := (scalar_eqb x y) (at level 80) : Primitives_scope.
+Notation "x s<> y" := (scalar_neqb x y) (at level 80) : Primitives_scope.
+Notation "x s<= y" := (scalar_leb x y) (at level 80) : Primitives_scope.
+Notation "x s< y" := (scalar_ltb x y) (at level 80) : Primitives_scope.
+Notation "x s>= y" := (scalar_geb x y) (at level 80) : Primitives_scope.
+Notation "x s> y" := (scalar_gtb x y) (at level 80) : Primitives_scope.
+
+(*** Range *)
+Record range (T : Type) := mk_range {
+ start: T;
+ end_: T;
+}.
+Arguments mk_range {_}.
+
+(*** Arrays *)
+Definition array T (n : usize) := { l: list T | Z.of_nat (length l) = to_Z n}.
+
+Lemma le_0_usize_max : 0 <= usize_max.
+Proof.
+ pose (H := usize_max_bound).
+ unfold u32_max in H.
+ lia.
+Qed.
+
+Lemma eqb_imp_eq (x y : Z) : Z.eqb x y = true -> x = y.
+Proof.
+ lia.
+Qed.
+
+(* TODO: finish the definitions *)
+Axiom mk_array : forall (T : Type) (n : usize) (l : list T), array T n.
+
+Axiom array_index_shared : forall (T : Type) (n : usize) (x : array T n) (i : usize), result T.
+Axiom array_index_mut_fwd : forall (T : Type) (n : usize) (x : array T n) (i : usize), result T.
+Axiom array_index_mut_back : forall (T : Type) (n : usize) (x : array T n) (i : usize) (nx : T), result (array T n).
+
+(*** Slice *)
+Definition slice T := { l: list T | Z.of_nat (length l) <= usize_max}.
+
+Axiom slice_len : forall (T : Type) (s : slice T), usize.
+Axiom slice_index_shared : forall (T : Type) (x : slice T) (i : usize), result T.
+Axiom slice_index_mut_fwd : forall (T : Type) (x : slice T) (i : usize), result T.
+Axiom slice_index_mut_back : forall (T : Type) (x : slice T) (i : usize) (nx : T), result (slice T).
+
+(*** Subslices *)
+
+Axiom array_to_slice_shared : forall (T : Type) (n : usize) (x : array T n), result (slice T).
+Axiom array_to_slice_mut_fwd : forall (T : Type) (n : usize) (x : array T n), result (slice T).
+Axiom array_to_slice_mut_back : forall (T : Type) (n : usize) (x : array T n) (s : slice T), result (array T n).
+
+Axiom array_subslice_shared: forall (T : Type) (n : usize) (x : array T n) (r : range usize), result (slice T).
+Axiom array_subslice_mut_fwd: forall (T : Type) (n : usize) (x : array T n) (r : range usize), result (slice T).
+Axiom array_subslice_mut_back: forall (T : Type) (n : usize) (x : array T n) (r : range usize) (ns : slice T), result (array T n).
+Axiom slice_subslice_shared: forall (T : Type) (x : slice T) (r : range usize), result (slice T).
+Axiom slice_subslice_mut_fwd: forall (T : Type) (x : slice T) (r : range usize), result (slice T).
+Axiom slice_subslice_mut_back: forall (T : Type) (x : slice T) (r : range usize) (ns : slice T), result (slice T).
+
+(*** Vectors *)
+
+Definition vec T := { l: list T | Z.of_nat (length l) <= usize_max }.
+
+Definition vec_to_list {T: Type} (v: vec T) : list T := proj1_sig v.
+
+Definition vec_length {T: Type} (v: vec T) : Z := Z.of_nat (length (vec_to_list v)).
+
+Definition vec_new (T: Type) : vec T := (exist _ [] le_0_usize_max).
+
+Lemma vec_len_in_usize {T} (v: vec T) : usize_min <= vec_length v <= usize_max.
+Proof.
+ unfold vec_length, usize_min.
+ split.
+ - lia.
+ - apply (proj2_sig v).
+Qed.
+
+Definition vec_len (T: Type) (v: vec T) : usize :=
+ exist _ (vec_length v) (vec_len_in_usize v).
+
+Fixpoint list_update {A} (l: list A) (n: nat) (a: A)
+ : list A :=
+ match l with
+ | [] => []
+ | x :: t => match n with
+ | 0%nat => a :: t
+ | S m => x :: (list_update t m a)
+end end.
+
+Definition vec_bind {A B} (v: vec A) (f: list A -> result (list B)) : result (vec B) :=
+ l <- f (vec_to_list v) ;
+ match sumbool_of_bool (scalar_le_max Usize (Z.of_nat (length l))) with
+ | left H => Return (exist _ l (scalar_le_max_valid _ _ H))
+ | right _ => Fail_ Failure
+ end.
+
+(* The **forward** function shouldn't be used *)
+Definition vec_push_fwd (T: Type) (v: vec T) (x: T) : unit := tt.
+
+Definition vec_push_back (T: Type) (v: vec T) (x: T) : result (vec T) :=
+ vec_bind v (fun l => Return (l ++ [x])).
+
+(* The **forward** function shouldn't be used *)
+Definition vec_insert_fwd (T: Type) (v: vec T) (i: usize) (x: T) : result unit :=
+ if to_Z i <? vec_length v then Return tt else Fail_ Failure.
+
+Definition vec_insert_back (T: Type) (v: vec T) (i: usize) (x: T) : result (vec T) :=
+ vec_bind v (fun l =>
+ if to_Z i <? Z.of_nat (length l)
+ then Return (list_update l (usize_to_nat i) x)
+ else Fail_ Failure).
+
+(* The **backward** function shouldn't be used *)
+Definition vec_index_fwd (T: Type) (v: vec T) (i: usize) : result T :=
+ match nth_error (vec_to_list v) (usize_to_nat i) with
+ | Some n => Return n
+ | None => Fail_ Failure
+ end.
+
+Definition vec_index_back (T: Type) (v: vec T) (i: usize) (x: T) : result unit :=
+ if to_Z i <? vec_length v then Return tt else Fail_ Failure.
+
+(* The **backward** function shouldn't be used *)
+Definition vec_index_mut_fwd (T: Type) (v: vec T) (i: usize) : result T :=
+ match nth_error (vec_to_list v) (usize_to_nat i) with
+ | Some n => Return n
+ | None => Fail_ Failure
+ end.
+
+Definition vec_index_mut_back (T: Type) (v: vec T) (i: usize) (x: T) : result (vec T) :=
+ vec_bind v (fun l =>
+ if to_Z i <? Z.of_nat (length l)
+ then Return (list_update l (usize_to_nat i) x)
+ else Fail_ Failure).
+
+End Primitives.
diff --git a/tests/coq/array/_CoqProject b/tests/coq/array/_CoqProject
new file mode 100644
index 00000000..f33cefe6
--- /dev/null
+++ b/tests/coq/array/_CoqProject
@@ -0,0 +1,8 @@
+# This file was automatically generated - see ../Makefile
+-R . Lib
+-arg -w
+-arg all
+
+Array_Funs.v
+Primitives.v
+Array_Types.v
diff --git a/tests/fstar/.gitignore b/tests/fstar/.gitignore
new file mode 100644
index 00000000..28a11147
--- /dev/null
+++ b/tests/fstar/.gitignore
@@ -0,0 +1 @@
+*/obj
\ No newline at end of file
diff --git a/tests/fstar/array/Array.Clauses.Template.fst b/tests/fstar/array/Array.Clauses.Template.fst
new file mode 100644
index 00000000..8a15e230
--- /dev/null
+++ b/tests/fstar/array/Array.Clauses.Template.fst
@@ -0,0 +1,18 @@
+(** THIS FILE WAS AUTOMATICALLY GENERATED BY AENEAS *)
+(** [array]: templates for the decreases clauses *)
+module Array.Clauses.Template
+open Primitives
+open Array.Types
+
+#set-options "--z3rlimit 50 --fuel 1 --ifuel 1"
+
+(** [array::sum]: decreases clause *)
+unfold
+let sum_loop_decreases (s : slice u32) (sum : u32) (i : usize) : nat = admit ()
+
+(** [array::sum2]: decreases clause *)
+unfold
+let sum2_loop_decreases (s : slice u32) (s2 : slice u32) (sum : u32)
+ (i : usize) : nat =
+ admit ()
+
diff --git a/tests/fstar/array/Array.Clauses.fst b/tests/fstar/array/Array.Clauses.fst
new file mode 100644
index 00000000..68cbf216
--- /dev/null
+++ b/tests/fstar/array/Array.Clauses.fst
@@ -0,0 +1,19 @@
+(** [array]: decreases clauses *)
+module Array.Clauses
+open Primitives
+open Array.Types
+open FStar.List.Tot
+
+#set-options "--z3rlimit 50 --fuel 1 --ifuel 1"
+
+(** [array::sum]: decreases clause *)
+unfold
+let sum_loop_decreases (s : slice u32) (sum : u32) (i : usize) : nat =
+ if i < length s then length s - i else 0
+
+(** [array::sum2]: decreases clause *)
+unfold
+let sum2_loop_decreases (s : slice u32) (s2 : slice u32) (sum : u32)
+ (i : usize) : nat =
+ if i < length s then length s - i else 0
+
diff --git a/tests/fstar/array/Array.Funs.fst b/tests/fstar/array/Array.Funs.fst
new file mode 100644
index 00000000..985633bc
--- /dev/null
+++ b/tests/fstar/array/Array.Funs.fst
@@ -0,0 +1,226 @@
+(** THIS FILE WAS AUTOMATICALLY GENERATED BY AENEAS *)
+(** [array]: function definitions *)
+module Array.Funs
+open Primitives
+include Array.Types
+include Array.Clauses
+
+#set-options "--z3rlimit 50 --fuel 1 --ifuel 1"
+
+(** [array::array_to_shared_slice_]: forward function *)
+let array_to_shared_slice__fwd
+ (t : Type0) (s : array t 32) : result (slice t) =
+ array_to_slice_shared t 32 s
+
+(** [array::array_to_mut_slice_]: forward function *)
+let array_to_mut_slice__fwd (t : Type0) (s : array t 32) : result (slice t) =
+ array_to_slice_mut_fwd t 32 s
+
+(** [array::array_to_mut_slice_]: backward function 0 *)
+let array_to_mut_slice__back
+ (t : Type0) (s : array t 32) (ret : slice t) : result (array t 32) =
+ array_to_slice_mut_back t 32 s ret
+
+(** [array::array_len]: forward function *)
+let array_len_fwd (t : Type0) (s : array t 32) : result usize =
+ let* s0 = array_to_slice_shared t 32 s in let i = slice_len t s0 in Return i
+
+(** [array::shared_array_len]: forward function *)
+let shared_array_len_fwd (t : Type0) (s : array t 32) : result usize =
+ let* s0 = array_to_slice_shared t 32 s in let i = slice_len t s0 in Return i
+
+(** [array::shared_slice_len]: forward function *)
+let shared_slice_len_fwd (t : Type0) (s : slice t) : result usize =
+ let i = slice_len t s in Return i
+
+(** [array::index_array_shared]: forward function *)
+let index_array_shared_fwd
+ (t : Type0) (s : array t 32) (i : usize) : result t =
+ array_index_shared t 32 s i
+
+(** [array::index_array_u32]: forward function *)
+let index_array_u32_fwd (s : array u32 32) (i : usize) : result u32 =
+ array_index_shared u32 32 s i
+
+(** [array::index_array_generic]: forward function *)
+let index_array_generic_fwd
+ (n : usize) (s : array u32 n) (i : usize) : result u32 =
+ array_index_shared u32 n s i
+
+(** [array::index_array_generic_call]: forward function *)
+let index_array_generic_call_fwd
+ (n : usize) (s : array u32 n) (i : usize) : result u32 =
+ index_array_generic_fwd n s i
+
+(** [array::index_array_copy]: forward function *)
+let index_array_copy_fwd (x : array u32 32) : result u32 =
+ array_index_shared u32 32 x 0
+
+(** [array::index_mut_array]: forward function *)
+let index_mut_array_fwd (t : Type0) (s : array t 32) (i : usize) : result t =
+ array_index_mut_fwd t 32 s i
+
+(** [array::index_mut_array]: backward function 0 *)
+let index_mut_array_back
+ (t : Type0) (s : array t 32) (i : usize) (ret : t) : result (array t 32) =
+ array_index_mut_back t 32 s i ret
+
+(** [array::index_slice]: forward function *)
+let index_slice_fwd (t : Type0) (s : slice t) (i : usize) : result t =
+ slice_index_shared t s i
+
+(** [array::index_mut_slice]: forward function *)
+let index_mut_slice_fwd (t : Type0) (s : slice t) (i : usize) : result t =
+ slice_index_mut_fwd t s i
+
+(** [array::index_mut_slice]: backward function 0 *)
+let index_mut_slice_back
+ (t : Type0) (s : slice t) (i : usize) (ret : t) : result (slice t) =
+ slice_index_mut_back t s i ret
+
+(** [array::slice_subslice_shared_]: forward function *)
+let slice_subslice_shared__fwd
+ (x : slice u32) (y : usize) (z : usize) : result (slice u32) =
+ slice_subslice_shared u32 x (Mkrange y z)
+
+(** [array::slice_subslice_mut_]: forward function *)
+let slice_subslice_mut__fwd
+ (x : slice u32) (y : usize) (z : usize) : result (slice u32) =
+ slice_subslice_mut_fwd u32 x (Mkrange y z)
+
+(** [array::slice_subslice_mut_]: backward function 0 *)
+let slice_subslice_mut__back
+ (x : slice u32) (y : usize) (z : usize) (ret : slice u32) :
+ result (slice u32)
+ =
+ slice_subslice_mut_back u32 x (Mkrange y z) ret
+
+(** [array::array_to_slice_shared_]: forward function *)
+let array_to_slice_shared__fwd (x : array u32 32) : result (slice u32) =
+ array_to_slice_shared u32 32 x
+
+(** [array::array_to_slice_mut_]: forward function *)
+let array_to_slice_mut__fwd (x : array u32 32) : result (slice u32) =
+ array_to_slice_mut_fwd u32 32 x
+
+(** [array::array_to_slice_mut_]: backward function 0 *)
+let array_to_slice_mut__back
+ (x : array u32 32) (ret : slice u32) : result (array u32 32) =
+ array_to_slice_mut_back u32 32 x ret
+
+(** [array::array_subslice_shared_]: forward function *)
+let array_subslice_shared__fwd
+ (x : array u32 32) (y : usize) (z : usize) : result (slice u32) =
+ array_subslice_shared u32 32 x (Mkrange y z)
+
+(** [array::array_subslice_mut_]: forward function *)
+let array_subslice_mut__fwd
+ (x : array u32 32) (y : usize) (z : usize) : result (slice u32) =
+ array_subslice_mut_fwd u32 32 x (Mkrange y z)
+
+(** [array::array_subslice_mut_]: backward function 0 *)
+let array_subslice_mut__back
+ (x : array u32 32) (y : usize) (z : usize) (ret : slice u32) :
+ result (array u32 32)
+ =
+ array_subslice_mut_back u32 32 x (Mkrange y z) ret
+
+(** [array::index_slice_0]: forward function *)
+let index_slice_0_fwd (t : Type0) (s : slice t) : result t =
+ slice_index_shared t s 0
+
+(** [array::index_array_0]: forward function *)
+let index_array_0_fwd (t : Type0) (s : array t 32) : result t =
+ array_index_shared t 32 s 0
+
+(** [array::index_index_array]: forward function *)
+let index_index_array_fwd
+ (s : array (array u32 32) 32) (i : usize) (j : usize) : result u32 =
+ let* a = array_index_shared (array u32 32) 32 s i in
+ array_index_shared u32 32 a j
+
+(** [array::update_update_array]: forward function *)
+let update_update_array_fwd
+ (s : array (array u32 32) 32) (i : usize) (j : usize) : result unit =
+ let* a = array_index_mut_fwd (array u32 32) 32 s i in
+ let* a0 = array_index_mut_back u32 32 a j 0 in
+ let* _ = array_index_mut_back (array u32 32) 32 s i a0 in
+ Return ()
+
+(** [array::array_local_deep_copy]: forward function *)
+let array_local_deep_copy_fwd (x : array u32 32) : result unit =
+ Return ()
+
+(** [array::f0]: forward function *)
+let f0_fwd : result unit =
+ let* s = array_to_slice_mut_fwd u32 2 (mk_array u32 2 [ 1; 2 ]) in
+ let* s0 = slice_index_mut_back u32 s 0 1 in
+ let* _ = array_to_slice_mut_back u32 2 (mk_array u32 2 [ 1; 2 ]) s0 in
+ Return ()
+
+(** [array::f1]: forward function *)
+let f1_fwd : result unit =
+ let* _ = array_index_mut_back u32 2 (mk_array u32 2 [ 1; 2 ]) 0 1 in
+ Return ()
+
+(** [array::sum]: loop 0: forward function *)
+let rec sum_loop_fwd
+ (s : slice u32) (sum : u32) (i : usize) :
+ Tot (result u32) (decreases (sum_loop_decreases s sum i))
+ =
+ let i0 = slice_len u32 s in
+ if i < i0
+ then
+ let* i1 = slice_index_shared u32 s i in
+ let* sum0 = u32_add sum i1 in
+ let* i2 = usize_add i 1 in
+ sum_loop_fwd s sum0 i2
+ else Return sum
+
+(** [array::sum]: forward function *)
+let sum_fwd (s : slice u32) : result u32 =
+ sum_loop_fwd s 0 0
+
+(** [array::sum2]: loop 0: forward function *)
+let rec sum2_loop_fwd
+ (s : slice u32) (s2 : slice u32) (sum : u32) (i : usize) :
+ Tot (result u32) (decreases (sum2_loop_decreases s s2 sum i))
+ =
+ let i0 = slice_len u32 s in
+ if i < i0
+ then
+ let* i1 = slice_index_shared u32 s i in
+ let* i2 = slice_index_shared u32 s2 i in
+ let* i3 = u32_add i1 i2 in
+ let* sum0 = u32_add sum i3 in
+ let* i4 = usize_add i 1 in
+ sum2_loop_fwd s s2 sum0 i4
+ else Return sum
+
+(** [array::sum2]: forward function *)
+let sum2_fwd (s : slice u32) (s2 : slice u32) : result u32 =
+ let i = slice_len u32 s in
+ let i0 = slice_len u32 s2 in
+ if not (i = i0) then Fail Failure else sum2_loop_fwd s s2 0 0
+
+(** [array::f2]: forward function *)
+let f2_fwd (i : u32) : result unit =
+ Return ()
+
+(** [array::f4]: forward function *)
+let f4_fwd (x : array u32 32) (y : usize) (z : usize) : result (slice u32) =
+ array_subslice_shared u32 32 x (Mkrange y z)
+
+(** [array::f3]: forward function *)
+let f3_fwd : result u32 =
+ let* i = array_index_shared u32 2 (mk_array u32 2 [ 1; 2 ]) 0 in
+ let* _ = f2_fwd i in
+ let* s = array_to_slice_shared u32 2 (mk_array u32 2 [ 1; 2 ]) in
+ let* s0 =
+ f4_fwd
+ (mk_array u32 32 [
+ 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0;
+ 0; 0; 0; 0; 0; 0; 0; 0
+ ]) 16 18 in
+ sum2_fwd s s0
+
diff --git a/tests/fstar/array/Array.Types.fst b/tests/fstar/array/Array.Types.fst
new file mode 100644
index 00000000..d20f1718
--- /dev/null
+++ b/tests/fstar/array/Array.Types.fst
@@ -0,0 +1,7 @@
+(** THIS FILE WAS AUTOMATICALLY GENERATED BY AENEAS *)
+(** [array]: type definitions *)
+module Array.Types
+open Primitives
+
+#set-options "--z3rlimit 50 --fuel 1 --ifuel 1"
+
diff --git a/tests/fstar/array/Makefile b/tests/fstar/array/Makefile
new file mode 100644
index 00000000..fa7d1f36
--- /dev/null
+++ b/tests/fstar/array/Makefile
@@ -0,0 +1,49 @@
+# This file was automatically generated - modify ../Makefile.template instead
+INCLUDE_DIRS = .
+
+FSTAR_INCLUDES = $(addprefix --include ,$(INCLUDE_DIRS))
+
+FSTAR_HINTS ?= --use_hints --use_hint_hashes --record_hints
+
+FSTAR_OPTIONS = $(FSTAR_HINTS) \
+ --cache_checked_modules $(FSTAR_INCLUDES) --cmi \
+ --warn_error '+241@247+285-274' \
+
+FSTAR_EXE ?= fstar.exe
+FSTAR_NO_FLAGS = $(FSTAR_EXE) --already_cached 'Prims FStar LowStar Steel' --odir obj --cache_dir obj
+
+FSTAR = $(FSTAR_NO_FLAGS) $(FSTAR_OPTIONS)
+
+# The F* roots are used to compute the dependency graph, and generate the .depend file
+FSTAR_ROOTS ?= $(wildcard *.fst *.fsti)
+
+# Build all the files
+all: $(addprefix obj/,$(addsuffix .checked,$(FSTAR_ROOTS)))
+
+# This is the right way to ensure the .depend file always gets re-built.
+ifeq (,$(filter %-in,$(MAKECMDGOALS)))
+ifndef NODEPEND
+ifndef MAKE_RESTARTS
+.depend: .FORCE
+ $(FSTAR_NO_FLAGS) --dep full $(notdir $(FSTAR_ROOTS)) > $@
+
+.PHONY: .FORCE
+.FORCE:
+endif
+endif
+
+include .depend
+endif
+
+# For the interactive mode
+%.fst-in %.fsti-in:
+ @echo $(FSTAR_OPTIONS)
+
+# Generate the .checked files in batch mode
+%.checked:
+ $(FSTAR) $(FSTAR_OPTIONS) $< && \
+ touch -c $@
+
+.PHONY: clean
+clean:
+ rm -f obj/*
diff --git a/tests/fstar/array/Primitives.fst b/tests/fstar/array/Primitives.fst
new file mode 100644
index 00000000..9db82069
--- /dev/null
+++ b/tests/fstar/array/Primitives.fst
@@ -0,0 +1,375 @@
+/// This file lists primitive and assumed functions and types
+module Primitives
+open FStar.Mul
+open FStar.List.Tot
+
+#set-options "--z3rlimit 15 --fuel 0 --ifuel 1"
+
+(*** Utilities *)
+val list_update (#a : Type0) (ls : list a) (i : nat{i < length ls}) (x : a) :
+ ls':list a{
+ length ls' = length ls /\
+ index ls' i == x
+ }
+#push-options "--fuel 1"
+let rec list_update #a ls i x =
+ match ls with
+ | x' :: ls -> if i = 0 then x :: ls else x' :: list_update ls (i-1) x
+#pop-options
+
+(*** Result *)
+type error : Type0 =
+| Failure
+| OutOfFuel
+
+type result (a : Type0) : Type0 =
+| Return : v:a -> result a
+| Fail : e:error -> result a
+
+// Monadic return operator
+unfold let return (#a : Type0) (x : a) : result a = Return x
+
+// Monadic bind operator.
+// Allows to use the notation:
+// ```
+// let* x = y in
+// ...
+// ```
+unfold let (let*) (#a #b : Type0) (m: result a)
+ (f: (x:a) -> Pure (result b) (requires (m == Return x)) (ensures fun _ -> True)) :
+ result b =
+ match m with
+ | Return x -> f x
+ | Fail e -> Fail e
+
+// Monadic assert(...)
+let massert (b:bool) : result unit = if b then Return () else Fail Failure
+
+// Normalize and unwrap a successful result (used for globals).
+let eval_global (#a : Type0) (x : result a{Return? (normalize_term x)}) : a = Return?.v x
+
+(*** Misc *)
+type char = FStar.Char.char
+type string = string
+
+let is_zero (n: nat) : bool = n = 0
+let decrease (n: nat{n > 0}) : nat = n - 1
+
+let mem_replace_fwd (a : Type0) (x : a) (y : a) : a = x
+let mem_replace_back (a : Type0) (x : a) (y : a) : a = y
+
+(*** Scalars *)
+/// Rem.: most of the following code was partially generated
+
+let isize_min : int = -9223372036854775808 // TODO: should be opaque
+let isize_max : int = 9223372036854775807 // TODO: should be opaque
+let i8_min : int = -128
+let i8_max : int = 127
+let i16_min : int = -32768
+let i16_max : int = 32767
+let i32_min : int = -2147483648
+let i32_max : int = 2147483647
+let i64_min : int = -9223372036854775808
+let i64_max : int = 9223372036854775807
+let i128_min : int = -170141183460469231731687303715884105728
+let i128_max : int = 170141183460469231731687303715884105727
+let usize_min : int = 0
+let usize_max : int = 4294967295 // TODO: should be opaque
+let u8_min : int = 0
+let u8_max : int = 255
+let u16_min : int = 0
+let u16_max : int = 65535
+let u32_min : int = 0
+let u32_max : int = 4294967295
+let u64_min : int = 0
+let u64_max : int = 18446744073709551615
+let u128_min : int = 0
+let u128_max : int = 340282366920938463463374607431768211455
+
+type scalar_ty =
+| Isize
+| I8
+| I16
+| I32
+| I64
+| I128
+| Usize
+| U8
+| U16
+| U32
+| U64
+| U128
+
+let scalar_min (ty : scalar_ty) : int =
+ match ty with
+ | Isize -> isize_min
+ | I8 -> i8_min
+ | I16 -> i16_min
+ | I32 -> i32_min
+ | I64 -> i64_min
+ | I128 -> i128_min
+ | Usize -> usize_min
+ | U8 -> u8_min
+ | U16 -> u16_min
+ | U32 -> u32_min
+ | U64 -> u64_min
+ | U128 -> u128_min
+
+let scalar_max (ty : scalar_ty) : int =
+ match ty with
+ | Isize -> isize_max
+ | I8 -> i8_max
+ | I16 -> i16_max
+ | I32 -> i32_max
+ | I64 -> i64_max
+ | I128 -> i128_max
+ | Usize -> usize_max
+ | U8 -> u8_max
+ | U16 -> u16_max
+ | U32 -> u32_max
+ | U64 -> u64_max
+ | U128 -> u128_max
+
+type scalar (ty : scalar_ty) : eqtype = x:int{scalar_min ty <= x && x <= scalar_max ty}
+
+let mk_scalar (ty : scalar_ty) (x : int) : result (scalar ty) =
+ if scalar_min ty <= x && scalar_max ty >= x then Return x else Fail Failure
+
+let scalar_neg (#ty : scalar_ty) (x : scalar ty) : result (scalar ty) = mk_scalar ty (-x)
+
+let scalar_div (#ty : scalar_ty) (x : scalar ty) (y : scalar ty) : result (scalar ty) =
+ if y <> 0 then mk_scalar ty (x / y) else Fail Failure
+
+/// The remainder operation
+let int_rem (x : int) (y : int{y <> 0}) : int =
+ if x >= 0 then (x % y) else -(x % y)
+
+(* Checking consistency with Rust *)
+let _ = assert_norm(int_rem 1 2 = 1)
+let _ = assert_norm(int_rem (-1) 2 = -1)
+let _ = assert_norm(int_rem 1 (-2) = 1)
+let _ = assert_norm(int_rem (-1) (-2) = -1)
+
+let scalar_rem (#ty : scalar_ty) (x : scalar ty) (y : scalar ty) : result (scalar ty) =
+ if y <> 0 then mk_scalar ty (int_rem x y) else Fail Failure
+
+let scalar_add (#ty : scalar_ty) (x : scalar ty) (y : scalar ty) : result (scalar ty) =
+ mk_scalar ty (x + y)
+
+let scalar_sub (#ty : scalar_ty) (x : scalar ty) (y : scalar ty) : result (scalar ty) =
+ mk_scalar ty (x - y)
+
+let scalar_mul (#ty : scalar_ty) (x : scalar ty) (y : scalar ty) : result (scalar ty) =
+ mk_scalar ty (x * y)
+
+(** Cast an integer from a [src_ty] to a [tgt_ty] *)
+// TODO: check the semantics of casts in Rust
+let scalar_cast (src_ty : scalar_ty) (tgt_ty : scalar_ty) (x : scalar src_ty) : result (scalar tgt_ty) =
+ mk_scalar tgt_ty x
+
+/// The scalar types
+type isize : eqtype = scalar Isize
+type i8 : eqtype = scalar I8
+type i16 : eqtype = scalar I16
+type i32 : eqtype = scalar I32
+type i64 : eqtype = scalar I64
+type i128 : eqtype = scalar I128
+type usize : eqtype = scalar Usize
+type u8 : eqtype = scalar U8
+type u16 : eqtype = scalar U16
+type u32 : eqtype = scalar U32
+type u64 : eqtype = scalar U64
+type u128 : eqtype = scalar U128
+
+/// Negation
+let isize_neg = scalar_neg #Isize
+let i8_neg = scalar_neg #I8
+let i16_neg = scalar_neg #I16
+let i32_neg = scalar_neg #I32
+let i64_neg = scalar_neg #I64
+let i128_neg = scalar_neg #I128
+
+/// Division
+let isize_div = scalar_div #Isize
+let i8_div = scalar_div #I8
+let i16_div = scalar_div #I16
+let i32_div = scalar_div #I32
+let i64_div = scalar_div #I64
+let i128_div = scalar_div #I128
+let usize_div = scalar_div #Usize
+let u8_div = scalar_div #U8
+let u16_div = scalar_div #U16
+let u32_div = scalar_div #U32
+let u64_div = scalar_div #U64
+let u128_div = scalar_div #U128
+
+/// Remainder
+let isize_rem = scalar_rem #Isize
+let i8_rem = scalar_rem #I8
+let i16_rem = scalar_rem #I16
+let i32_rem = scalar_rem #I32
+let i64_rem = scalar_rem #I64
+let i128_rem = scalar_rem #I128
+let usize_rem = scalar_rem #Usize
+let u8_rem = scalar_rem #U8
+let u16_rem = scalar_rem #U16
+let u32_rem = scalar_rem #U32
+let u64_rem = scalar_rem #U64
+let u128_rem = scalar_rem #U128
+
+/// Addition
+let isize_add = scalar_add #Isize
+let i8_add = scalar_add #I8
+let i16_add = scalar_add #I16
+let i32_add = scalar_add #I32
+let i64_add = scalar_add #I64
+let i128_add = scalar_add #I128
+let usize_add = scalar_add #Usize
+let u8_add = scalar_add #U8
+let u16_add = scalar_add #U16
+let u32_add = scalar_add #U32
+let u64_add = scalar_add #U64
+let u128_add = scalar_add #U128
+
+/// Subtraction
+let isize_sub = scalar_sub #Isize
+let i8_sub = scalar_sub #I8
+let i16_sub = scalar_sub #I16
+let i32_sub = scalar_sub #I32
+let i64_sub = scalar_sub #I64
+let i128_sub = scalar_sub #I128
+let usize_sub = scalar_sub #Usize
+let u8_sub = scalar_sub #U8
+let u16_sub = scalar_sub #U16
+let u32_sub = scalar_sub #U32
+let u64_sub = scalar_sub #U64
+let u128_sub = scalar_sub #U128
+
+/// Multiplication
+let isize_mul = scalar_mul #Isize
+let i8_mul = scalar_mul #I8
+let i16_mul = scalar_mul #I16
+let i32_mul = scalar_mul #I32
+let i64_mul = scalar_mul #I64
+let i128_mul = scalar_mul #I128
+let usize_mul = scalar_mul #Usize
+let u8_mul = scalar_mul #U8
+let u16_mul = scalar_mul #U16
+let u32_mul = scalar_mul #U32
+let u64_mul = scalar_mul #U64
+let u128_mul = scalar_mul #U128
+
+(*** Range *)
+type range (a : Type0) = {
+ start : a;
+ end_ : a;
+}
+
+(*** Array *)
+type array (a : Type0) (n : usize) = s:list a{length s = n}
+
+// We tried putting the normalize_term condition as a refinement on the list
+// but it didn't work. It works with the requires clause.
+let mk_array (a : Type0) (n : usize)
+ (l : list a) :
+ Pure (array a n)
+ (requires (normalize_term(FStar.List.Tot.length l) = n))
+ (ensures (fun _ -> True)) =
+ normalize_term_spec (FStar.List.Tot.length l);
+ l
+
+let array_index_shared (a : Type0) (n : usize) (x : array a n) (i : usize) : result a =
+ if i < length x then Return (index x i)
+ else Fail Failure
+
+let array_index_mut_fwd (a : Type0) (n : usize) (x : array a n) (i : usize) : result a =
+ if i < length x then Return (index x i)
+ else Fail Failure
+
+let array_index_mut_back (a : Type0) (n : usize) (x : array a n) (i : usize) (nx : a) : result (array a n) =
+ if i < length x then Return (list_update x i nx)
+ else Fail Failure
+
+(*** Slice *)
+type slice (a : Type0) = s:list a{length s <= usize_max}
+
+let slice_len (a : Type0) (s : slice a) : usize = length s
+
+let slice_index_shared (a : Type0) (x : slice a) (i : usize) : result a =
+ if i < length x then Return (index x i)
+ else Fail Failure
+
+let slice_index_mut_fwd (a : Type0) (x : slice a) (i : usize) : result a =
+ if i < length x then Return (index x i)
+ else Fail Failure
+
+let slice_index_mut_back (a : Type0) (x : slice a) (i : usize) (nx : a) : result (slice a) =
+ if i < length x then Return (list_update x i nx)
+ else Fail Failure
+
+(*** Subslices *)
+
+let array_to_slice_shared (a : Type0) (n : usize) (x : array a n) : result (slice a) = Return x
+let array_to_slice_mut_fwd (a : Type0) (n : usize) (x : array a n) : result (slice a) = Return x
+let array_to_slice_mut_back (a : Type0) (n : usize) (x : array a n) (s : slice a) : result (array a n) =
+ if length s = n then Return s
+ else Fail Failure
+
+// TODO: finish the definitions below (the standard library lacks [List.drop] and [List.take])
+let array_subslice_shared (a : Type0) (n : usize) (x : array a n) (r : range usize) : result (slice a) =
+ admit()
+
+let array_subslice_mut_fwd (a : Type0) (n : usize) (x : array a n) (r : range usize) : result (slice a) =
+ admit()
+
+let array_subslice_mut_back (a : Type0) (n : usize) (x : array a n) (r : range usize) (ns : slice a) : result (array a n) =
+ admit()
+
+let slice_subslice_shared (a : Type0) (x : slice a) (r : range usize) : result (slice a) =
+ admit()
+
+let slice_subslice_mut_fwd (a : Type0) (x : slice a) (r : range usize) : result (slice a) =
+ admit()
+
+let slice_subslice_mut_back (a : Type0) (x : slice a) (r : range usize) (ns : slice a) : result (slice a) =
+ admit()
+
+(*** Vector *)
+type vec (a : Type0) = v:list a{length v <= usize_max}
+
+let vec_new (a : Type0) : vec a = assert_norm(length #a [] == 0); []
+let vec_len (a : Type0) (v : vec a) : usize = length v
+
+// The **forward** function shouldn't be used
+let vec_push_fwd (a : Type0) (v : vec a) (x : a) : unit = ()
+let vec_push_back (a : Type0) (v : vec a) (x : a) :
+ Pure (result (vec a))
+ (requires True)
+ (ensures (fun res ->
+ match res with
+ | Fail e -> e == Failure
+ | Return v' -> length v' = length v + 1)) =
+ if length v < usize_max then begin
+ (**) assert_norm(length [x] == 1);
+ (**) append_length v [x];
+ (**) assert(length (append v [x]) = length v + 1);
+ Return (append v [x])
+ end
+ else Fail Failure
+
+// The **forward** function shouldn't be used
+let vec_insert_fwd (a : Type0) (v : vec a) (i : usize) (x : a) : result unit =
+ if i < length v then Return () else Fail Failure
+let vec_insert_back (a : Type0) (v : vec a) (i : usize) (x : a) : result (vec a) =
+ if i < length v then Return (list_update v i x) else Fail Failure
+
+// The **backward** function shouldn't be used
+let vec_index_fwd (a : Type0) (v : vec a) (i : usize) : result a =
+ if i < length v then Return (index v i) else Fail Failure
+let vec_index_back (a : Type0) (v : vec a) (i : usize) (x : a) : result unit =
+ if i < length v then Return () else Fail Failure
+
+let vec_index_mut_fwd (a : Type0) (v : vec a) (i : usize) : result a =
+ if i < length v then Return (index v i) else Fail Failure
+let vec_index_mut_back (a : Type0) (v : vec a) (i : usize) (nx : a) : result (vec a) =
+ if i < length v then Return (list_update v i nx) else Fail Failure