Diffstat (limited to 'Test/dafny3')
-rw-r--r--  Test/dafny3/CachedContainer.dfy.expect  |   1
-rw-r--r--  Test/dafny3/CalcExample.dfy             |   6
-rw-r--r--  Test/dafny3/Dijkstra.dfy                |   4
-rw-r--r--  Test/dafny3/Filter.dfy                  | 240
-rw-r--r--  Test/dafny3/Filter.dfy.expect           |   2
-rw-r--r--  Test/dafny3/GenericSort.dfy             |  60
-rw-r--r--  Test/dafny3/InductionVsCoinduction.dfy  |   2
-rw-r--r--  Test/dafny3/InfiniteTrees.dfy           |  44
-rw-r--r--  Test/dafny3/SimpleInduction.dfy         |  14
9 files changed, 189 insertions, 184 deletions
diff --git a/Test/dafny3/CachedContainer.dfy.expect b/Test/dafny3/CachedContainer.dfy.expect
index c6c90498..0185aacd 100644
--- a/Test/dafny3/CachedContainer.dfy.expect
+++ b/Test/dafny3/CachedContainer.dfy.expect
@@ -1,2 +1,3 @@
+CachedContainer.dfy(120,25): Warning: "import A as B" has been deprecated; in the new syntax, it is "import A:B"
Dafny program verifier finished with 47 verified, 0 errors
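(For orientation, not part of the commit: the deprecation warning added above concerns Dafny's module import syntax. A minimal sketch of the old and new forms, using the abstract import that appears in GenericSort.dfy later in this diff; any other module names would be hypothetical.)

    abstract module Sort {
      // old, deprecated form (triggers the warning above):
      //   import O as TotalOrder
      // new form:
      import O : TotalOrder
    }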
diff --git a/Test/dafny3/CalcExample.dfy b/Test/dafny3/CalcExample.dfy
index 2782d049..b9d3260b 100644
--- a/Test/dafny3/CalcExample.dfy
+++ b/Test/dafny3/CalcExample.dfy
@@ -3,14 +3,14 @@
function f(x: int, y: int): int
-ghost method Associativity(x: int, y: int, z: int)
+lemma Associativity(x: int, y: int, z: int)
ensures f(x, f(y, z)) == f(f(x, y), z);
-ghost method Monotonicity(y: int, z: int)
+lemma Monotonicity(y: int, z: int)
requires y <= z;
ensures forall x :: f(x, y) <= f(x, z);
-ghost method DiagonalIdentity(x: int)
+lemma DiagonalIdentity(x: int)
ensures f(x, x) == x;
method M(a: int, b: int, c: int, x: int)
diff --git a/Test/dafny3/Dijkstra.dfy b/Test/dafny3/Dijkstra.dfy
index 1fbd9b7c..56719180 100644
--- a/Test/dafny3/Dijkstra.dfy
+++ b/Test/dafny3/Dijkstra.dfy
@@ -3,7 +3,7 @@
// Example taken from:
// Edsger W. Dijkstra: Heuristics for a Calculational Proof. Inf. Process. Lett. (IPL) 53(3):141-143 (1995)
-// Transcribed into Dafny by Valentin Wüstholz and Nadia Polikarpova.
+// Transcribed into Dafny by Valentin Wüstholz and Nadia Polikarpova.
// f is an arbitrary function on the natural numbers
function f(n: nat) : nat
@@ -56,7 +56,7 @@ lemma lemma_ping(j: nat, n: nat)
}
}
-// The other directorion: f(n) <= n
+// The other direction: f(n) <= n
lemma lemma_pong(n: nat)
requires P()
ensures f(n) <= n
diff --git a/Test/dafny3/Filter.dfy b/Test/dafny3/Filter.dfy
index 24c8e94e..7473a580 100644
--- a/Test/dafny3/Filter.dfy
+++ b/Test/dafny3/Filter.dfy
@@ -1,4 +1,4 @@
-// RUN: %dafny /compile:0 /dprint:"%t.dprint" "%s" > "%t"
+// RUN: %dafny /compile:0 /dprint:"%t.dprint" /autoTriggers:0 "%s" > "%t"
// RUN: %diff "%s.expect" "%t"
codatatype Stream<T> = Cons(head: T, tail: Stream)
@@ -35,7 +35,7 @@ lemma Lemma_TailSubStreamK(s: Stream, u: Stream, k: nat) // this lemma could ha
{
if k != 0 {
Lemma_InTail(s.head, u);
- Lemma_TailSubStreamK(s.tail, u, k-1);
+ //Lemma_TailSubStreamK(s.tail, u, k-1);
}
}
lemma Lemma_InSubStream<T>(x: T, s: Stream<T>, u: Stream<T>)
@@ -53,104 +53,119 @@ lemma Lemma_InSubStream<T>(x: T, s: Stream<T>, u: Stream<T>)
}
}
-type PredicateHandle
-predicate P<T>(x: T, h: PredicateHandle)
+type Predicate<T> = T -> bool
-copredicate AllP(s: Stream, h: PredicateHandle)
+predicate Total<T,U>(f: T -> U)
+ reads f.reads
{
- P(s.head, h) && AllP(s.tail, h)
+ forall t :: f.reads(t) == {} && f.requires(t)
}
-lemma Lemma_InAllP<T>(x: T, s: Stream<T>, h: PredicateHandle)
- requires In(x, s) && AllP(s, h);
- ensures P(x, h);
+
+copredicate AllP(s: Stream, P: Predicate)
+ requires Total(P)
+{
+ P(s.head) && AllP(s.tail, P)
+}
+lemma Lemma_InAllP<T>(x: T, s: Stream<T>, P: Predicate)
+ requires Total(P)
+ requires In(x, s) && AllP(s, P)
+ ensures P(x)
{
var n :| 0 <= n && Tail(s, n).head == x;
var t := s;
while n != 0
- invariant 0 <= n;
- invariant Tail(t, n).head == x;
- invariant AllP(t, h);
+ invariant 0 <= n
+ invariant Tail(t, n).head == x
+ invariant AllP(t, P)
{
t, n := t.tail, n - 1;
}
}
-predicate IsAnother(s: Stream, h: PredicateHandle)
+predicate IsAnother(s: Stream, P: Predicate)
+ requires Total(P)
{
- exists n :: 0 <= n && P(Tail(s, n).head, h)
+ exists n :: 0 <= n && P(Tail(s, n).head)
}
-copredicate AlwaysAnother(s: Stream, h: PredicateHandle)
+copredicate AlwaysAnother(s: Stream, P: Predicate)
+ requires Total(P)
{
- IsAnother(s, h) && AlwaysAnother(s.tail, h)
+ IsAnother(s, P) && AlwaysAnother(s.tail, P)
}
-colemma Lemma_AllImpliesAlwaysAnother(s: Stream, h: PredicateHandle)
- requires AllP(s, h);
- ensures AlwaysAnother(s, h);
+colemma Lemma_AllImpliesAlwaysAnother(s: Stream, P: Predicate)
+ requires Total(P)
+ requires AllP(s, P)
+ ensures AlwaysAnother(s, P)
{
assert Tail(s, 0) == s;
}
-function Next(s: Stream, h: PredicateHandle): nat
- requires AlwaysAnother(s, h);
- ensures P(Tail(s, Next(s, h)).head, h);
- ensures forall i :: 0 <= i < Next(s, h) ==> !P(Tail(s, i).head, h);
+function Next(s: Stream, P: Predicate): nat
+ requires Total(P)
+ requires AlwaysAnother(s, P)
+ ensures P(Tail(s, Next(s, P)).head)
+ ensures forall i :: 0 <= i < Next(s, P) ==> !P(Tail(s, i).head)
{
- var n :| 0 <= n && P(Tail(s, n).head, h);
- NextMinimizer(s, h, n)
+ var n :| 0 <= n && P(Tail(s, n).head);
+ NextMinimizer(s, P, n)
}
// the following is an auxiliary function of the definition of Next
-function NextMinimizer(s: Stream, h: PredicateHandle, n: nat): nat
- requires P(Tail(s, n).head, h);
- ensures P(Tail(s, NextMinimizer(s, h, n)).head, h);
- ensures forall i :: 0 <= i < NextMinimizer(s, h, n) ==> !P(Tail(s, i).head, h);
+function NextMinimizer(s: Stream, P: Predicate, n: nat): nat
+ requires Total(P)
+ requires P(Tail(s, n).head)
+ ensures P(Tail(s, NextMinimizer(s, P, n)).head)
+ ensures forall i :: 0 <= i < NextMinimizer(s, P, n) ==> !P(Tail(s, i).head)
{
- if forall i :: 0 <= i < n ==> !P(Tail(s, i).head, h) then
+ if forall i :: 0 <= i < n ==> !P(Tail(s, i).head) then
n
else
- var k :| 0 <= k < n && P(Tail(s, k).head, h);
- NextMinimizer(s, h, k)
+ var k :| 0 <= k < n && P(Tail(s, k).head);
+ NextMinimizer(s, P, k)
}
-function Filter(s: Stream, h: PredicateHandle): Stream
- requires AlwaysAnother(s, h);
- decreases Next(s, h);
+function Filter(s: Stream, P: Predicate): Stream
+ requires Total(P)
+ requires AlwaysAnother(s, P)
+ decreases Next(s, P)
{
- if P(s.head, h) then
- Cons(s.head, Filter(s.tail, h))
+ if P(s.head) then
+ Cons(s.head, Filter(s.tail, P))
else
- Filter(s.tail, h)
+ Filter(s.tail, P)
}
// properties about Filter
-colemma Filter_AlwaysAnother(s: Stream, h: PredicateHandle)
- requires AlwaysAnother(s, h);
- ensures AllP(Filter(s, h), h);
- decreases Next(s, h);
+colemma Filter_AlwaysAnother(s: Stream, P: Predicate)
+ requires Total(P)
+ requires AlwaysAnother(s, P)
+ ensures AllP(Filter(s, P), P)
+ decreases Next(s, P)
{
- if P(s.head, h) {
- Filter_AlwaysAnother(s.tail, h);
+ if P(s.head) {
+ Filter_AlwaysAnother(s.tail, P);
} else {
- Filter_AlwaysAnother#[_k](s.tail, h);
+ Filter_AlwaysAnother#[_k](s.tail, P);
}
}
-colemma Filter_IsSubStream(s: Stream, h: PredicateHandle)
- requires AlwaysAnother(s, h);
- ensures IsSubStream(Filter(s, h), s);
- decreases Next(s, h);
+colemma Filter_IsSubStream(s: Stream, P: Predicate)
+ requires Total(P)
+ requires AlwaysAnother(s, P)
+ ensures IsSubStream(Filter(s, P), s)
+ decreases Next(s, P)
{
- if P(s.head, h) {
+ if P(s.head) {
// To prove IsSubStream#[_k](Filter(s, h), s), we prove the two conjuncts from the definition
calc {
true;
- == { Filter_IsSubStream(s.tail, h); } // induction hypothesis
- IsSubStream#[_k-1](Filter(s.tail, h), s.tail);
+ == { Filter_IsSubStream(s.tail, P); } // induction hypothesis
+ IsSubStream(Filter(s.tail, P), s.tail);
== // { assert Filter(s.tail, h) == Filter(s, h).tail; }
- IsSubStream#[_k-1](Filter(s, h).tail, s.tail);
- ==> { Lemma_TailSubStreamK(Filter(s, h).tail, s, _k-1); }
- IsSubStream#[_k-1](Filter(s, h).tail, s);
+ IsSubStream(Filter(s, P).tail, s.tail);
+ ==> { Lemma_TailSubStreamK(Filter(s, P).tail, s, _k-1); }
+ IsSubStream(Filter(s, P).tail, s);
}
calc {
- In(Filter(s, h).head, s);
- == { assert Filter(s, h) == Cons(s.head, Filter(s.tail, h)); }
+ In(Filter(s, P).head, s);
+ == { assert Filter(s, P) == Cons(s.head, Filter(s.tail, P)); }
In(s.head, s);
== { assert Tail(s, 0) == s;
assert exists n :: 0 <= n && Tail(s, n).head == s.head;
@@ -158,105 +173,114 @@ colemma Filter_IsSubStream(s: Stream, h: PredicateHandle)
true;
}
} else {
- Lemma_TailSubStreamK(Filter(s.tail, h), s, _k);
+ Lemma_TailSubStreamK(Filter(s.tail, P), s, _k);
}
}
// The following says nothing about the order of the elements in the stream
-lemma Theorem_Filter<T>(s: Stream<T>, h: PredicateHandle)
- requires AlwaysAnother(s, h);
- ensures forall x :: In(x, Filter(s, h)) <==> In(x, s) && P(x, h);
+lemma Theorem_Filter<T>(s: Stream<T>, P: Predicate)
+ requires Total(P)
+ requires AlwaysAnother(s, P)
+ ensures forall x :: In(x, Filter(s, P)) <==> In(x, s) && P(x)
{
forall x
- ensures In(x, Filter(s, h)) <==> In(x, s) && P(x, h);
+ ensures In(x, Filter(s, P)) <==> In(x, s) && P(x)
{
- if In(x, Filter(s, h)) {
- FS_Ping(s, h, x);
+ if In(x, Filter(s, P)) {
+ FS_Ping(s, P, x);
}
- if In(x, s) && P(x, h) {
+ if In(x, s) && P(x) {
var k :| 0 <= k && Tail(s, k).head == x;
- FS_Pong(s, h, x, k);
+ FS_Pong(s, P, x, k);
}
}
}
-lemma FS_Ping<T>(s: Stream<T>, h: PredicateHandle, x: T)
- requires AlwaysAnother(s, h) && In(x, Filter(s, h));
- ensures In(x, s) && P(x, h);
+lemma FS_Ping<T>(s: Stream<T>, P: Predicate, x: T)
+ requires Total(P)
+ requires AlwaysAnother(s, P) && In(x, Filter(s, P));
+ ensures In(x, s) && P(x);
{
- Filter_IsSubStream(s, h);
- Lemma_InSubStream(x, Filter(s, h), s);
+ Filter_IsSubStream(s, P);
+ Lemma_InSubStream(x, Filter(s, P), s);
- Filter_AlwaysAnother(s, h);
- assert AllP(Filter(s, h), h);
- Lemma_InAllP(x, Filter(s, h), h);
+ Filter_AlwaysAnother(s, P);
+ assert AllP(Filter(s, P), P);
+ Lemma_InAllP(x, Filter(s, P), P);
}
-lemma FS_Pong<T>(s: Stream<T>, h: PredicateHandle, x: T, k: nat)
- requires AlwaysAnother(s, h) && In(x, s) && P(x, h);
- requires Tail(s, k).head == x;
- ensures In(x, Filter(s, h));
- decreases k;
+lemma FS_Pong<T>(s: Stream<T>, P: Predicate, x: T, k: nat)
+ requires Total(P)
+ requires AlwaysAnother(s, P) && In(x, s) && P(x)
+ requires Tail(s, k).head == x
+ ensures In(x, Filter(s, P))
+ decreases k
{
- var fs := Filter(s, h);
+ var fs := Filter(s, P);
if s.head == x {
assert Tail(fs, 0) == fs;
- } else if P(s.head, h) {
- assert fs == Cons(s.head, Filter(s.tail, h)); // reminder of where we are
+ } else if P(s.head) {
+ assert fs == Cons(s.head, Filter(s.tail, P)); // reminder of where we are
calc {
true;
- == { FS_Pong(s.tail, h, x, k-1); }
- In(x, Filter(s.tail, h));
+ //== { FS_Pong(s.tail, h, x, k-1); }
+ In(x, Filter(s.tail, P));
==> { assert fs.head != x; Lemma_InTail(x, fs); }
In(x, fs);
}
} else {
- assert fs == Filter(s.tail, h); // reminder of where we are
- FS_Pong(s.tail, h, x, k-1);
+ //assert fs == Filter(s.tail, h); // reminder of where we are
+ //FS_Pong(s.tail, h, x, k-1);
}
}
// ----- orderings ------
-function Ord<T>(x: T, ord: PredicateHandle): int
+type Ord<T> = T -> int
-copredicate Increasing(s: Stream, ord: PredicateHandle)
+copredicate Increasing(s: Stream, ord: Ord)
+ requires Total(ord)
{
- Ord(s.head, ord) < Ord(s.tail.head, ord) && Increasing(s.tail, ord)
+ ord(s.head) < ord(s.tail.head) && Increasing(s.tail, ord)
}
-copredicate IncrFrom(s: Stream, low: int, ord: PredicateHandle)
+copredicate IncrFrom(s: Stream, low: int, ord: Ord)
+ requires Total(ord)
{
- low <= Ord(s.head, ord) && IncrFrom(s.tail, Ord(s.head, ord) + 1, ord)
+ low <= ord(s.head) && IncrFrom(s.tail, ord(s.head) + 1, ord)
}
-colemma Lemma_Incr0(s: Stream, low: int, ord: PredicateHandle)
- requires IncrFrom(s, low, ord);
- ensures Increasing(s, ord);
+colemma Lemma_Incr0(s: Stream, low: int, ord: Ord)
+ requires Total(ord)
+ requires IncrFrom(s, low, ord)
+ ensures Increasing(s, ord)
{
}
-colemma Lemma_Incr1(s: Stream, ord: PredicateHandle)
+colemma Lemma_Incr1(s: Stream, ord: Ord)
+ requires Total(ord)
requires Increasing(s, ord);
- ensures IncrFrom(s, Ord(s.head, ord), ord);
+ ensures IncrFrom(s, ord(s.head), ord);
{
Lemma_Incr1(s.tail, ord);
}
-lemma Theorem_FilterPreservesOrdering(s: Stream, h: PredicateHandle, ord: PredicateHandle)
- requires Increasing(s, ord) && AlwaysAnother(s, h);
- ensures Increasing(Filter(s, h), ord);
+lemma Theorem_FilterPreservesOrdering(s: Stream, P: Predicate, ord: Ord)
+ requires Total(P) && Total(ord)
+ requires Increasing(s, ord) && AlwaysAnother(s, P)
+ ensures Increasing(Filter(s, P), ord)
{
Lemma_Incr1(s, ord);
- Lemma_FilterPreservesIncrFrom(s, h, Ord(s.head, ord), ord);
- Lemma_Incr0(Filter(s, h), Ord(s.head, ord), ord);
+ Lemma_FilterPreservesIncrFrom(s, P, ord(s.head), ord);
+ Lemma_Incr0(Filter(s, P), ord(s.head), ord);
}
-colemma Lemma_FilterPreservesIncrFrom(s: Stream, h: PredicateHandle, low: int, ord: PredicateHandle)
- requires IncrFrom(s, low, ord) && AlwaysAnother(s, h) && low <= Ord(s.head, ord);
- ensures IncrFrom(Filter(s, h), low, ord);
- decreases Next(s, h);
+colemma Lemma_FilterPreservesIncrFrom(s: Stream, P: Predicate, low: int, ord: Ord)
+ requires Total(P) && Total(ord)
+ requires IncrFrom(s, low, ord) && AlwaysAnother(s, P) && low <= ord(s.head)
+ ensures IncrFrom(Filter(s, P), low, ord)
+ decreases Next(s, P)
{
- if P(s.head, h) {
- Lemma_FilterPreservesIncrFrom(s.tail, h, Ord(s.head, ord)+1, ord);
+ if P(s.head) {
+ Lemma_FilterPreservesIncrFrom(s.tail, P, ord(s.head)+1, ord);
} else {
- Lemma_FilterPreservesIncrFrom#[_k](s.tail, h, low, ord);
+ Lemma_FilterPreservesIncrFrom#[_k](s.tail, P, low, ord);
}
}
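(Side note, not part of the commit: the Filter.dfy rewrite above replaces the opaque PredicateHandle with Dafny's first-class function types, so predicates and orderings are passed as values constrained by Total. A minimal sketch of the same pattern with a hypothetical client function, CountIf; it is illustrative only and is not verified as part of this test.)

    type Predicate<T> = T -> bool

    predicate Total<T,U>(f: T -> U)
      reads f.reads
    {
      forall t :: f.reads(t) == {} && f.requires(t)
    }

    // Hypothetical client: count the elements of a sequence that satisfy a total predicate.
    function CountIf<T>(xs: seq<T>, P: Predicate<T>): nat
      requires Total(P)
      decreases |xs|
    {
      if xs == [] then 0
      else (if P(xs[0]) then 1 else 0) + CountIf(xs[1..], P)
    }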
diff --git a/Test/dafny3/Filter.dfy.expect b/Test/dafny3/Filter.dfy.expect
index 91aa9b47..6ba9b9bc 100644
--- a/Test/dafny3/Filter.dfy.expect
+++ b/Test/dafny3/Filter.dfy.expect
@@ -1,2 +1,2 @@
-Dafny program verifier finished with 43 verified, 0 errors
+Dafny program verifier finished with 42 verified, 0 errors
diff --git a/Test/dafny3/GenericSort.dfy b/Test/dafny3/GenericSort.dfy
index 7ea038be..7555817c 100644
--- a/Test/dafny3/GenericSort.dfy
+++ b/Test/dafny3/GenericSort.dfy
@@ -7,25 +7,25 @@ abstract module TotalOrder {
// Three properties of total orders. Here, they are given as unproved
// lemmas, that is, as axioms.
lemma Antisymmetry(a: T, b: T)
- requires Leq(a, b) && Leq(b, a);
- ensures a == b;
+ requires Leq(a, b) && Leq(b, a)
+ ensures a == b
lemma Transitivity(a: T, b: T, c: T)
- requires Leq(a, b) && Leq(b, c);
- ensures Leq(a, c);
+ requires Leq(a, b) && Leq(b, c)
+ ensures Leq(a, c)
lemma Totality(a: T, b: T)
- ensures Leq(a, b) || Leq(b, a);
+ ensures Leq(a, b) || Leq(b, a)
}
abstract module Sort {
- import O as TotalOrder // let O denote some module that has the members of TotalOrder
+ import O : TotalOrder // let O denote some module that has the members of TotalOrder
predicate Sorted(a: array<O.T>, low: int, high: int)
- requires a != null && 0 <= low <= high <= a.Length;
- reads a;
+ requires a != null && 0 <= low <= high <= a.Length
+ reads a
// The body of the predicate is hidden outside the module, but the postcondition is
// "exported" (that is, visible, known) outside the module. Here, we choose the
// export the following property:
- ensures Sorted(a, low, high) ==> forall i, j :: low <= i < j < high ==> O.Leq(a[i], a[j]);
+ ensures Sorted(a, low, high) ==> forall i, j :: low <= i < j < high ==> O.Leq(a[i], a[j])
{
forall i, j :: low <= i < j < high ==> O.Leq(a[i], a[j])
}
@@ -33,18 +33,18 @@ abstract module Sort {
// In the insertion sort routine below, it's more convenient to keep track of only that
// neighboring elements of the array are sorted...
predicate NeighborSorted(a: array<O.T>, low: int, high: int)
- requires a != null && 0 <= low <= high <= a.Length;
- reads a;
+ requires a != null && 0 <= low <= high <= a.Length
+ reads a
{
- forall i :: low < i < high ==> O.Leq(a[i-1], a[i])
+ forall i {:nowarn} :: low < i < high ==> O.Leq(a[i-1], a[i])
}
// ...but we show that property to imply all pairs to be sorted. The proof of this
// lemma uses the transitivity property.
lemma NeighborSorted_implies_Sorted(a: array<O.T>, low: int, high: int)
- requires a != null && 0 <= low <= high <= a.Length;
- requires NeighborSorted(a, low, high);
- ensures Sorted(a, low, high);
- decreases high - low;
+ requires a != null && 0 <= low <= high <= a.Length
+ requires NeighborSorted(a, low, high)
+ ensures Sorted(a, low, high)
+ decreases high - low
{
if low != high {
NeighborSorted_implies_Sorted(a, low+1, high);
@@ -57,25 +57,25 @@ abstract module Sort {
// Standard insertion sort method
method InsertionSort(a: array<O.T>)
- requires a != null;
- modifies a;
- ensures Sorted(a, 0, a.Length);
- ensures multiset(a[..]) == old(multiset(a[..]));
+ requires a != null
+ modifies a
+ ensures Sorted(a, 0, a.Length)
+ ensures multiset(a[..]) == old(multiset(a[..]))
{
if a.Length == 0 { return; }
var i := 1;
while i < a.Length
- invariant 0 < i <= a.Length;
- invariant NeighborSorted(a, 0, i);
- invariant multiset(a[..]) == old(multiset(a[..]));
+ invariant 0 < i <= a.Length
+ invariant NeighborSorted(a, 0, i)
+ invariant multiset(a[..]) == old(multiset(a[..]))
{
var j := i;
while 0 < j && !O.Leq(a[j-1], a[j])
- invariant 0 <= j <= i;
- invariant NeighborSorted(a, 0, j);
- invariant NeighborSorted(a, j, i+1);
- invariant 0 < j < i ==> O.Leq(a[j-1], a[j+1]);
- invariant multiset(a[..]) == old(multiset(a[..]));
+ invariant 0 <= j <= i
+ invariant NeighborSorted(a, 0, j)
+ invariant NeighborSorted(a, j, i+1)
+ invariant 0 < j < i ==> O.Leq(a[j-1], a[j+1])
+ invariant multiset(a[..]) == old(multiset(a[..]))
{
// The proof of correctness uses the totality property here.
// It implies that if two elements are not previously in
@@ -97,7 +97,7 @@ module IntOrder refines TotalOrder {
datatype T = Int(i: int)
// Define the ordering on these integers
predicate method Leq ...
- ensures Leq(a, b) ==> a.i <= b.i;
+ ensures Leq(a, b) ==> a.i <= b.i
{
a.i <= b.i
}
@@ -156,7 +156,7 @@ module intOrder refines TotalOrder {
type T = int
// Define the ordering on these integers
predicate method Leq ...
- ensures Leq(a, b) ==> a <= b;
+ ensures Leq(a, b) ==> a <= b
{
a <= b
}
diff --git a/Test/dafny3/InductionVsCoinduction.dfy b/Test/dafny3/InductionVsCoinduction.dfy
index 89fa6cc8..0074b742 100644
--- a/Test/dafny3/InductionVsCoinduction.dfy
+++ b/Test/dafny3/InductionVsCoinduction.dfy
@@ -80,7 +80,7 @@ lemma SAppendIsAssociative(a:Stream, b:Stream, c:Stream)
{
forall k:nat { SAppendIsAssociativeK(k, a, b, c); }
// assert for clarity only, postcondition follows directly from it
- assert (forall k:nat :: SAppend(SAppend(a, b), c) ==#[k] SAppend(a, SAppend(b, c)));
+ assert (forall k:nat {:autotriggers false} :: SAppend(SAppend(a, b), c) ==#[k] SAppend(a, SAppend(b, c))); //FIXME: Should Dafny generate a trigger here? If so then which one?
}
// Equivalent proof using the colemma syntax.
diff --git a/Test/dafny3/InfiniteTrees.dfy b/Test/dafny3/InfiniteTrees.dfy
index 516a9e4e..85f73bc3 100644
--- a/Test/dafny3/InfiniteTrees.dfy
+++ b/Test/dafny3/InfiniteTrees.dfy
@@ -286,7 +286,7 @@ lemma Theorem1_Lemma(t: Tree, n: nat, p: Stream<int>)
LowerThan(ch, n);
==> // def. LowerThan
LowerThan(ch.head.children, n-1);
- ==> { Theorem1_Lemma(ch.head, n-1, tail); }
+ ==> //{ Theorem1_Lemma(ch.head, n-1, tail); }
!IsNeverEndingStream(tail);
==> // def. IsNeverEndingStream
!IsNeverEndingStream(p);
@@ -374,30 +374,30 @@ lemma Proposition3b()
}
}
lemma Proposition3b_Lemma(t: Tree, h: nat, p: Stream<int>)
- requires LowerThan(t.children, h) && ValidPath(t, p);
- ensures !IsNeverEndingStream(p);
- decreases h;
+ requires LowerThan(t.children, h) && ValidPath(t, p)
+ ensures !IsNeverEndingStream(p)
+ decreases h
{
match p {
case Nil =>
case Cons(index, tail) =>
// From the definition of ValidPath(t, p), we get the following:
var ch := Tail(t.children, index);
- assert ch.Cons? && ValidPath(ch.head, tail);
+ // assert ch.Cons? && ValidPath(ch.head, tail);
// From the definition of LowerThan(t.children, h), we get the following:
match t.children {
case Nil =>
ValidPath_Lemma(p);
assert false; // absurd case
case Cons(_, _) =>
- assert 1 <= h;
+ // assert 1 <= h;
LowerThan_Lemma(t.children, index, h);
- assert LowerThan(ch, h);
+ // assert LowerThan(ch, h);
}
// Putting these together, by ch.Cons? and the definition of LowerThan(ch, h), we get:
- assert LowerThan(ch.head.children, h-1);
+ // assert LowerThan(ch.head.children, h-1);
// And now we can invoke the induction hypothesis:
- Proposition3b_Lemma(ch.head, h-1, tail);
+ // Proposition3b_Lemma(ch.head, h-1, tail);
}
}
@@ -627,30 +627,10 @@ colemma Path_Lemma2'(p: Stream<int>)
}
}
colemma Path_Lemma2''(p: Stream<int>, n: nat, tail: Stream<int>)
- requires IsNeverEndingStream(p) && p.tail == tail;
- ensures InfinitePath'(S2N'(n, tail));
+ requires IsNeverEndingStream(p) && p.tail == tail
+ ensures InfinitePath'(S2N'(n, tail))
{
- if n <= 0 {
- calc {
- InfinitePath'#[_k](S2N'(n, tail));
- // def. S2N'
- InfinitePath'#[_k](Zero(S2N(tail)));
- // def. InfinitePath'
- InfinitePath#[_k-1](S2N(tail));
- { Path_Lemma2'(tail); }
- true;
- }
- } else {
- calc {
- InfinitePath'#[_k](S2N'(n, tail));
- // def. S2N'
- InfinitePath'#[_k](Succ(S2N'(n-1, tail)));
- // def. InfinitePath'
- InfinitePath'#[_k-1](S2N'(n-1, tail));
- { Path_Lemma2''(p, n-1, tail); }
- true;
- }
- }
+ Path_Lemma2'(tail);
}
lemma Path_Lemma3(r: CoOption<Number>)
ensures InfinitePath(r) ==> IsNeverEndingStream(N2S(r));
diff --git a/Test/dafny3/SimpleInduction.dfy b/Test/dafny3/SimpleInduction.dfy
index 83ea6d14..8cf937e1 100644
--- a/Test/dafny3/SimpleInduction.dfy
+++ b/Test/dafny3/SimpleInduction.dfy
@@ -13,7 +13,7 @@ function Fib(n: nat): nat
decreases n;
{ if n < 2 then n else Fib(n-2) + Fib(n-1) }
-ghost method FibLemma(n: nat)
+lemma FibLemma(n: nat)
ensures Fib(n) % 2 == 0 <==> n % 3 == 0;
decreases n;
{
@@ -30,7 +30,7 @@ ghost method FibLemma(n: nat)
satisfying 0 <= k < n, and in the second example, to all non-negative n.
*/
-ghost method FibLemma_Alternative(n: nat)
+lemma FibLemma_Alternative(n: nat)
ensures Fib(n) % 2 == 0 <==> n % 3 == 0;
{
forall k | 0 <= k < n {
@@ -38,7 +38,7 @@ ghost method FibLemma_Alternative(n: nat)
}
}
-ghost method FibLemma_All()
+lemma FibLemma_All()
ensures forall n :: 0 <= n ==> (Fib(n) % 2 == 0 <==> n % 3 == 0);
{
forall n | 0 <= n {
@@ -48,8 +48,8 @@ ghost method FibLemma_All()
/*
A standard inductive definition of a generic List type and a function Append
- that concatenates two lists. The ghost method states the lemma that Append
- is associative, and its recursive body gives the inductive proof.
+ that concatenates two lists. The lemma states that Append is associative,
+ and its recursive body gives the inductive proof.
We omitted the explicit declaration and uses of the List type parameter in
the signature of the method, since in simple cases like this, Dafny is able
@@ -68,7 +68,7 @@ function Append<T>(xs: List<T>, ys: List<T>): List<T>
// The {:induction false} attribute disables automatic induction tactic,
// so we can make the proof explicit.
-ghost method {:induction false} AppendIsAssociative(xs: List, ys: List, zs: List)
+lemma {:induction false} AppendIsAssociative(xs: List, ys: List, zs: List)
ensures Append(Append(xs, ys), zs) == Append(xs, Append(ys, zs));
decreases xs;
{
@@ -81,7 +81,7 @@ ghost method {:induction false} AppendIsAssociative(xs: List, ys: List, zs: List
// Here the proof is fully automatic - the body of the method is empty,
// yet still verifies.
-ghost method AppendIsAssociative_Auto(xs: List, ys: List, zs: List)
+lemma AppendIsAssociative_Auto(xs: List, ys: List, zs: List)
ensures Append(Append(xs, ys), zs) == Append(xs, Append(ys, zs));
{
}