From bde073a24d6a30c9e6f970d1a49d8281b7f986a7 Mon Sep 17 00:00:00 2001 From: Rustan Leino Date: Fri, 11 Nov 2011 17:40:39 -0800 Subject: Dafny: implemented the wellformedness check that datatype destructors are only applied to values created by the corresponding constructor Dafny: implement ghost destructors properly --- Source/Dafny/DafnyAst.cs | 11 +++++++++++ Source/Dafny/Resolver.cs | 2 +- Source/Dafny/Translator.cs | 7 +++++++ Test/dafny0/Answer | 12 ++++++++++-- Test/dafny0/Datatypes.dfy | 11 +++++++++++ Test/dafny0/ResolutionErrors.dfy | 6 ++++-- 6 files changed, 44 insertions(+), 5 deletions(-) diff --git a/Source/Dafny/DafnyAst.cs b/Source/Dafny/DafnyAst.cs index 04b6b00e..e07c6f1d 100644 --- a/Source/Dafny/DafnyAst.cs +++ b/Source/Dafny/DafnyAst.cs @@ -867,6 +867,17 @@ namespace Microsoft.Dafny { } } + public class DatatypeDestructor : SpecialField + { + public readonly DatatypeCtor EnclosingCtor; + + public DatatypeDestructor(IToken tok, DatatypeCtor enclosingCtor, string name, string compiledName, string preString, string postString, bool isGhost, Type type, Attributes attributes) + : base(tok, name, compiledName, preString, postString, isGhost, false, type, attributes) + { + EnclosingCtor = enclosingCtor; + } + } + public class CouplingInvariant : MemberDecl { public readonly Expression Expr; public readonly List/*!*/ Toks; diff --git a/Source/Dafny/Resolver.cs b/Source/Dafny/Resolver.cs index df2d2b0b..b6bc4059 100644 --- a/Source/Dafny/Resolver.cs +++ b/Source/Dafny/Resolver.cs @@ -254,7 +254,7 @@ namespace Microsoft.Dafny { if (members.ContainsKey(formal.Name)) { Error(ctor, "Name of deconstructor is used by another member of the datatype: {0}", formal.Name); } else { - dtor = new SpecialField(formal.tok, formal.Name, "dtor_" + formal.Name, "", "", false, false, formal.Type, null); + dtor = new DatatypeDestructor(formal.tok, ctor, formal.Name, "dtor_" + formal.Name, "", "", formal.IsGhost, formal.Type, null); dtor.EnclosingClass = dt; // resolve here members.Add(formal.Name, dtor); } diff --git a/Source/Dafny/Translator.cs b/Source/Dafny/Translator.cs index 37f80bf1..028891b5 100644 --- a/Source/Dafny/Translator.cs +++ b/Source/Dafny/Translator.cs @@ -1650,6 +1650,9 @@ namespace Microsoft.Dafny { var t = IsTotal(e.Obj, etran); if (e.Obj.Type.IsRefType) { t = BplAnd(t, Bpl.Expr.Neq(etran.TrExpr(e.Obj), predef.Null)); + } else if (e.Field is DatatypeDestructor) { + var dtor = (DatatypeDestructor)e.Field; + t = BplAnd(t, FunctionCall(e.tok, dtor.EnclosingCtor.QueryField.FullName, Bpl.Type.Bool, etran.TrExpr(e.Obj))); } return t; } @@ -2110,6 +2113,10 @@ namespace Microsoft.Dafny { CheckWellformed(e.Obj, options, locals, builder, etran); if (e.Obj.Type.IsRefType) { CheckNonNull(expr.tok, e.Obj, builder, etran, options.AssertKv); + } else if (e.Field is DatatypeDestructor) { + var dtor = (DatatypeDestructor)e.Field; + builder.Add(Assert(expr.tok, FunctionCall(e.tok, dtor.EnclosingCtor.QueryField.FullName, Bpl.Type.Bool, etran.TrExpr(e.Obj)), + string.Format("destructor '{0}' can only be applied to datatype values constructed by '{1}'", dtor.Name, dtor.EnclosingCtor.Name))); } if (options.DoReadsChecks && e.Field.IsMutable) { builder.Add(Assert(expr.tok, Bpl.Expr.SelectTok(expr.tok, etran.TheFrame(expr.tok), etran.TrExpr(e.Obj), GetField(e)), "insufficient reads clause to read field", options.AssertKv)); diff --git a/Test/dafny0/Answer b/Test/dafny0/Answer index 3198df33..50c238fc 100644 --- a/Test/dafny0/Answer +++ b/Test/dafny0/Answer @@ -484,7 +484,8 @@ 
ResolutionErrors.dfy(290,4): Error: a constructor is only allowed to be called w ResolutionErrors.dfy(304,16): Error: arguments must have the same type (got int and DTD_List) ResolutionErrors.dfy(305,16): Error: arguments must have the same type (got DTD_List and int) ResolutionErrors.dfy(306,25): Error: arguments must have the same type (got bool and int) -44 resolution/type errors detected in ResolutionErrors.dfy +ResolutionErrors.dfy(309,18): Error: ghost fields are allowed only in specification contexts +45 resolution/type errors detected in ResolutionErrors.dfy -------------------- ParseErrors.dfy -------------------- ParseErrors.dfy(4,19): error: a chain cannot have more than one != operator @@ -1154,8 +1155,15 @@ Execution trace: (0,0): anon0 (0,0): anon4_Else (0,0): anon5_Then +Datatypes.dfy(198,13): Error: destructor 'Car' can only be applied to datatype values constructed by 'XCons' +Execution trace: + (0,0): anon0 +Datatypes.dfy(201,17): Error: destructor 'Car' can only be applied to datatype values constructed by 'XCons' +Execution trace: + (0,0): anon0 + (0,0): anon6_Then -Dafny program verifier finished with 29 verified, 3 errors +Dafny program verifier finished with 29 verified, 5 errors -------------------- TypeAntecedents.dfy -------------------- TypeAntecedents.dfy(32,13): Error: assertion violation diff --git a/Test/dafny0/Datatypes.dfy b/Test/dafny0/Datatypes.dfy index eb527e0d..aff12fda 100644 --- a/Test/dafny0/Datatypes.dfy +++ b/Test/dafny0/Datatypes.dfy @@ -194,6 +194,17 @@ method InjectivityTests(d: XList) } } +method MatchingDestructor(d: XList) returns (r: XList) + ensures r.Car == 5; // error: specification is not well-formed (since r might not be an XCons) +{ + if (*) { + var x0 := d.Car; // error: d might not be an XCons + } else if (d.XCons?) { + var x1 := d.Car; + } + r := XCons(5, XNil); +} + // ------------- method FwdBug(f: Fwd, initialized: bool) diff --git a/Test/dafny0/ResolutionErrors.dfy b/Test/dafny0/ResolutionErrors.dfy index 1b68ad91..e5e56d03 100644 --- a/Test/dafny0/ResolutionErrors.dfy +++ b/Test/dafny0/ResolutionErrors.dfy @@ -292,7 +292,7 @@ method ConstructorTests() // ------------------- datatype destructors --------------------------------------- -datatype DTD_List = DTD_Nil | DTD_Cons(Car: int, Cdr: DTD_List); +datatype DTD_List = DTD_Nil | DTD_Cons(Car: int, Cdr: DTD_List, ghost g: int); method DatatypeDestructors(d: DTD_List) { if { @@ -304,6 +304,8 @@ method DatatypeDestructors(d: DTD_List) { assert hd == d.Cdr; // type error assert tl == d.Car; // type error assert d.DTD_Cons? 
== d.Car; // type error - assert d == DTD_Cons(hd, tl); + assert d == DTD_Cons(hd, tl, 5); + ghost var g0 := d.g; // fine + var g1 := d.g; // error: cannot use ghost member in non-ghost code } } -- cgit v1.2.3 From 9985eb5e81b1c5c62aceb4edab5d07b1f4f46809 Mon Sep 17 00:00:00 2001 From: Rustan Leino Date: Mon, 14 Nov 2011 23:47:59 -0800 Subject: Dafny: added let expressions (syntax: "var x := E0; E1") Dafny: firmed up semantics of assert/assume expressions (the condition is now good for all program control paths that pass through the expression) Dafny: various implementation clean-ups --- Source/Dafny/Compiler.cs | 11 ++ Source/Dafny/Dafny.atg | 33 ++++-- Source/Dafny/DafnyAst.cs | 21 ++++ Source/Dafny/Parser.cs | 93 +++++++++++------ Source/Dafny/Printer.cs | 19 +++- Source/Dafny/Resolver.cs | 24 +++++ Source/Dafny/Scanner.cs | 102 +++++++++---------- Source/Dafny/Translator.cs | 247 ++++++++++++++++++++++++++++----------------- Test/dafny0/Answer | 27 ++++- Test/dafny0/LetExpr.dfy | 109 ++++++++++++++++++++ Test/dafny0/PredExpr.dfy | 30 +++++- Test/dafny0/runtest.bat | 2 +- 12 files changed, 524 insertions(+), 194 deletions(-) create mode 100644 Test/dafny0/LetExpr.dfy diff --git a/Source/Dafny/Compiler.cs b/Source/Dafny/Compiler.cs index de863e39..d0e0cb0e 100644 --- a/Source/Dafny/Compiler.cs +++ b/Source/Dafny/Compiler.cs @@ -1667,6 +1667,17 @@ namespace Microsoft.Dafny { wr.Write(")"); } + } else if (expr is LetExpr) { + var e = (LetExpr)expr; + // Since there are no OldExpr's in an expression to be compiled, we can just do a regular substitution + // without having to worry about old-capture. + var substMap = new Dictionary(); + Contract.Assert(e.Vars.Count == e.RHSs.Count); // checked by resolution + for (int i = 0; i < e.Vars.Count; i++) { + substMap.Add(e.Vars[i], e.RHSs[i]); + } + TrExpr(Translator.Substitute(e.Body, null, substMap)); + } else if (expr is QuantifierExpr) { var e = (QuantifierExpr)expr; Contract.Assert(e.Bounds != null); // for non-ghost quantifiers, the resolver would have insisted on finding bounds diff --git a/Source/Dafny/Dafny.atg b/Source/Dafny/Dafny.atg index 8a9108f0..2eaaca7f 100644 --- a/Source/Dafny/Dafny.atg +++ b/Source/Dafny/Dafny.atg @@ -716,7 +716,7 @@ Rhs VarDeclStatement<.out Statement/*!*/ s.> = (. IToken x = null, assignTok = null; bool isGhost = false; VarDecl/*!*/ d; - AssignmentRhs r; Expression lhs0; + AssignmentRhs r; IdentifierExpr lhs0; List lhss = new List(); List rhss = new List(); .) @@ -727,7 +727,10 @@ VarDeclStatement<.out Statement/*!*/ s.> { "," LocalIdentTypeOptional (. lhss.Add(d); .) } - [ ":=" (. assignTok = t; lhs0 = new IdentifierExpr(lhss[0].Tok, lhss[0].Name); .) + [ ":=" (. assignTok = t; + lhs0 = new IdentifierExpr(lhss[0].Tok, lhss[0].Name); + lhs0.Var = lhss[0]; lhs0.Type = lhss[0].OptionalType; // resolve here + .) Rhs (. rhss.Add(r); .) { "," Rhs (. rhss.Add(r); .) } @@ -1204,20 +1207,34 @@ EndlessExpression = (. IToken/*!*/ x; Expression e0, e1; e = dummyExpr; + BoundVar d; + List letVars; List letRHSs; .) - ( "if" (. x = t; .) + ( "if" (. x = t; .) Expression "then" Expression - "else" Expression (. e = new ITEExpr(x, e, e0, e1); .) + "else" Expression (. e = new ITEExpr(x, e, e0, e1); .) | MatchExpression | QuantifierGuts | ComprehensionExpr - | "assert" (. x = t; .) + | "assert" (. x = t; .) Expression ";" - Expression (. e = new AssertExpr(x, e0, e1); .) - | "assume" (. x = t; .) + Expression (. e = new AssertExpr(x, e0, e1); .) + | "assume" (. x = t; .) Expression ";" - Expression (. 
e = new AssumeExpr(x, e0, e1); .) + Expression (. e = new AssumeExpr(x, e0, e1); .) + | "var" (. x = t; + letVars = new List(); + letRHSs = new List(); .) + IdentTypeOptional (. letVars.Add(d); .) + { "," IdentTypeOptional (. letVars.Add(d); .) + } + ":=" + Expression (. letRHSs.Add(e); .) + { "," Expression (. letRHSs.Add(e); .) + } + ";" + Expression (. e = new LetExpr(x, letVars, letRHSs, e); .) ) . MatchExpression diff --git a/Source/Dafny/DafnyAst.cs b/Source/Dafny/DafnyAst.cs index e07c6f1d..8272736c 100644 --- a/Source/Dafny/DafnyAst.cs +++ b/Source/Dafny/DafnyAst.cs @@ -2593,6 +2593,27 @@ namespace Microsoft.Dafny { } } + public class LetExpr : Expression + { + public readonly List Vars; + public readonly List RHSs; + public readonly Expression Body; + public LetExpr(IToken tok, List vars, List rhss, Expression body) + : base(tok) { + Vars = vars; + RHSs = rhss; + Body = body; + } + public override IEnumerable SubExpressions { + get { + foreach (var rhs in RHSs) { + yield return rhs; + } + yield return Body; + } + } + } + /// /// A ComprehensionExpr has the form: /// BINDER x Attributes | Range(x) :: Term(x) diff --git a/Source/Dafny/Parser.cs b/Source/Dafny/Parser.cs index 0dc30e8a..583fcc61 100644 --- a/Source/Dafny/Parser.cs +++ b/Source/Dafny/Parser.cs @@ -23,7 +23,7 @@ public class Parser { const bool T = true; const bool x = false; const int minErrDist = 2; - + public Scanner/*!*/ scanner; public Errors/*!*/ errors; @@ -134,10 +134,10 @@ public static int Parse (string/*!*/ s, string/*!*/ filename, List= minErrDist) errors.SemErr(t, msg); errDist = 0; } - - public void SemErr(IToken/*!*/ tok, string/*!*/ msg) { - Contract.Requires(tok != null); - Contract.Requires(msg != null); + + public void SemErr(IToken/*!*/ tok, string/*!*/ msg) { + Contract.Requires(tok != null); + Contract.Requires(msg != null); errors.SemErr(tok, msg); } @@ -150,15 +150,15 @@ public static int Parse (string/*!*/ s, string/*!*/ filename, List theImports; @@ -1100,7 +1100,7 @@ List/*!*/ decreases) { void VarDeclStatement(out Statement/*!*/ s) { IToken x = null, assignTok = null; bool isGhost = false; VarDecl/*!*/ d; - AssignmentRhs r; Expression lhs0; + AssignmentRhs r; IdentifierExpr lhs0; List lhss = new List(); List rhss = new List(); @@ -1119,7 +1119,10 @@ List/*!*/ decreases) { } if (la.kind == 49) { Get(); - assignTok = t; lhs0 = new IdentifierExpr(lhss[0].Tok, lhss[0].Name); + assignTok = t; + lhs0 = new IdentifierExpr(lhss[0].Tok, lhss[0].Name); + lhs0.Var = lhss[0]; lhs0.Type = lhss[0].OptionalType; // resolve here + Rhs(out r, lhs0); rhss.Add(r); while (la.kind == 19) { @@ -1789,7 +1792,7 @@ List/*!*/ decreases) { e = new UnaryExpr(x, UnaryExpr.Opcode.Not, e); break; } - case 38: case 55: case 61: case 62: case 63: case 98: case 99: case 100: case 101: { + case 18: case 38: case 55: case 61: case 62: case 63: case 98: case 99: case 100: case 101: { EndlessExpression(out e); break; } @@ -1845,6 +1848,8 @@ List/*!*/ decreases) { IToken/*!*/ x; Expression e0, e1; e = dummyExpr; + BoundVar d; + List letVars; List letRHSs; switch (la.kind) { case 55: { @@ -1888,6 +1893,31 @@ List/*!*/ decreases) { e = new AssumeExpr(x, e0, e1); break; } + case 18: { + Get(); + x = t; + letVars = new List(); + letRHSs = new List(); + IdentTypeOptional(out d); + letVars.Add(d); + while (la.kind == 19) { + Get(); + IdentTypeOptional(out d); + letVars.Add(d); + } + Expect(49); + Expression(out e); + letRHSs.Add(e); + while (la.kind == 19) { + Get(); + Expression(out e); + letRHSs.Add(e); + } + Expect(17); + 
Expression(out e); + e = new LetExpr(x, letVars, letRHSs, e); + break; + } default: SynErr(133); break; } } @@ -2277,13 +2307,13 @@ List/*!*/ decreases) { public void Parse() { la = new Token(); - la.val = ""; + la.val = ""; Get(); Dafny(); - Expect(0); + Expect(0); } - + static readonly bool[,]/*!*/ set = { {T,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x}, {x,x,x,x, x,T,x,x, x,T,T,T, T,T,T,x, x,x,T,x, T,x,x,x, x,T,T,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,T,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x}, @@ -2291,17 +2321,17 @@ List/*!*/ decreases) { {x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,T,T, T,x,x,x, x,x,x,x, x,x,x,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x}, {x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, T,T,T,T, T,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x}, {x,T,x,T, x,x,x,x, x,x,x,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,T, T,T,T,T, T,T,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x}, - {x,T,T,x, x,x,x,T, x,x,x,x, x,x,x,x, T,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,T,x,x, x,x,T,T, x,x,x,x, x,x,x,x, x,x,x,T, x,x,x,T, x,x,x,x, x,T,T,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, T,x,x,x, x,T,x,x, T,T,T,T, T,T,T,T, x,x,T,T, T,T,x,x, x,x}, + {x,T,T,x, x,x,x,T, x,x,x,x, x,x,x,x, T,x,T,x, x,x,x,x, x,x,x,x, x,x,x,x, x,T,x,x, x,x,T,T, x,x,x,x, x,x,x,x, x,x,x,T, x,x,x,T, x,x,x,x, x,T,T,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, T,x,x,x, x,T,x,x, T,T,T,T, T,T,T,T, x,x,T,T, T,T,x,x, x,x}, {x,T,T,x, x,x,x,T, x,x,x,T, x,x,x,x, T,x,T,x, x,x,x,x, x,x,x,x, x,x,x,x, x,T,x,x, x,x,x,x, x,x,x,x, x,x,T,T, T,x,x,x, x,x,x,T, x,x,x,T, x,T,T,T, T,T,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,T,T,T, T,T,T,T, x,x,x,x, x,x,x,x, x,x}, - {x,T,T,x, x,x,x,T, x,x,x,x, x,x,x,x, T,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,T,x,x, x,x,T,T, x,x,x,x, T,x,x,x, x,x,x,T, x,x,x,T, x,x,x,x, x,T,T,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, T,x,x,x, x,T,x,x, T,T,T,T, T,T,T,T, x,x,T,T, T,T,x,x, x,x}, + {x,T,T,x, x,x,x,T, x,x,x,x, x,x,x,x, T,x,T,x, x,x,x,x, x,x,x,x, x,x,x,x, x,T,x,x, x,x,T,T, x,x,x,x, T,x,x,x, x,x,x,T, x,x,x,T, x,x,x,x, x,T,T,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, T,x,x,x, x,T,x,x, T,T,T,T, T,T,T,T, x,x,T,T, T,T,x,x, x,x}, {x,x,x,x, x,x,x,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, T,T,x,x, T,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, T,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x}, - {x,T,T,x, x,x,x,T, x,x,x,x, x,x,x,x, T,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,T,x,x, x,x,T,T, x,x,x,x, T,x,x,x, x,x,T,T, x,x,T,T, x,x,x,x, x,T,T,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, T,x,x,x, x,T,x,x, T,T,T,T, T,T,T,T, x,x,T,T, T,T,x,x, x,x}, + {x,T,T,x, x,x,x,T, x,x,x,x, x,x,x,x, T,x,T,x, x,x,x,x, x,x,x,x, x,x,x,x, x,T,x,x, x,x,T,T, x,x,x,x, T,x,x,x, x,x,T,T, x,x,T,T, x,x,x,x, x,T,T,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, T,x,x,x, x,T,x,x, T,T,T,T, T,T,T,T, x,x,T,T, T,T,x,x, x,x}, {x,x,T,x, x,x,x,x, x,x,x,x, x,x,x,x, T,x,x,x, 
x,x,x,x, x,x,x,x, x,x,x,x, x,T,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,T,T,T, T,T,T,T, x,x,x,x, x,x,x,x, x,x}, {x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, T,T,x,x, T,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, T,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x}, {x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,T,T, T,T,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x}, {x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,T, T,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,T,T, T,T,T,T, T,T,T,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,x}, {x,x,x,x, x,x,x,T, T,x,x,x, x,x,x,x, T,T,x,T, x,x,x,T, T,x,x,x, x,x,x,x, x,x,T,x, x,x,x,x, x,x,x,x, T,T,x,x, x,T,x,x, T,x,x,x, T,T,T,x, x,x,x,x, x,x,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, T,T,T,T, x,x,x,x, x,x,x,x, T,T,x,x, x,x,T,T, x,x}, - {x,T,T,x, T,x,x,T, x,x,x,x, x,x,x,x, T,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, x,T,x,x, x,x,T,T, x,x,x,x, x,x,x,x, x,x,x,T, x,x,x,T, x,x,x,x, x,T,T,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, T,x,x,x, x,T,x,x, T,T,T,T, T,T,T,T, x,x,T,T, T,T,x,x, x,x} + {x,T,T,x, T,x,x,T, x,x,x,x, x,x,x,x, T,x,T,x, x,x,x,x, x,x,x,x, x,x,x,x, x,T,x,x, x,x,T,T, x,x,x,x, x,x,x,x, x,x,x,T, x,x,x,T, x,x,x,x, x,T,T,T, x,x,x,x, x,x,x,x, x,x,x,x, x,x,x,x, T,x,x,x, x,T,x,x, T,T,T,T, T,T,T,T, x,x,T,T, T,T,x,x, x,x} }; } // end Parser @@ -2310,20 +2340,18 @@ List/*!*/ decreases) { public class Errors { public int count = 0; // number of errors detected public System.IO.TextWriter/*!*/ errorStream = Console.Out; // error messages go to this stream - public string errMsgFormat = "{0}({1},{2}): error: {3}"; // 0=filename, 1=line, 2=column, 3=text - public string warningMsgFormat = "{0}({1},{2}): warning: {3}"; // 0=filename, 1=line, 2=column, 3=text - + public string errMsgFormat = "{0}({1},{2}): error: {3}"; // 0=filename, 1=line, 2=column, 3=text + public string warningMsgFormat = "{0}({1},{2}): warning: {3}"; // 0=filename, 1=line, 2=column, 3=text + public void SynErr(string filename, int line, int col, int n) { - SynErr(filename, line, col, GetSyntaxErrorString(n)); - } - - public virtual void SynErr(string filename, int line, int col, string/*!*/ msg) { - Contract.Requires(msg != null); + SynErr(filename, line, col, GetSyntaxErrorString(n)); + } + public virtual void SynErr(string filename, int line, int col, string msg) { + Contract.Requires(msg != null); errorStream.WriteLine(errMsgFormat, filename, line, col, msg); count++; - } - - string GetSyntaxErrorString(int n) { + } + string GetSyntaxErrorString(int n) { string s; switch (n) { case 0: s = "EOF expected"; break; @@ -2473,7 +2501,7 @@ public class Errors { default: s = "error " + n; break; } - return s; + return s; } public void SemErr(IToken/*!*/ tok, string/*!*/ msg) { // semantic errors @@ -2481,9 +2509,8 @@ public class Errors { Contract.Requires(msg != null); SemErr(tok.filename, tok.line, tok.col, msg); } - public virtual void SemErr(string filename, int line, int col, string/*!*/ msg) { - Contract.Requires(msg != null); + Contract.Requires(msg != null); errorStream.WriteLine(errMsgFormat, filename, line, col, msg); count++; } diff --git a/Source/Dafny/Printer.cs b/Source/Dafny/Printer.cs index 10099bf4..5b9ba1d6 100644 --- 
a/Source/Dafny/Printer.cs +++ b/Source/Dafny/Printer.cs @@ -1014,6 +1014,23 @@ namespace Microsoft.Dafny { } if (parensNeeded) { wr.Write(")"); } + } else if (expr is LetExpr) { + var e = (LetExpr)expr; + bool parensNeeded = !isRightmost; + if (parensNeeded) { wr.Write("("); } + string sep = ""; + foreach (var v in e.Vars) { + wr.Write("{0}{1}", sep, v.Name); + PrintType(": ", v.Type); + sep = ", "; + } + wr.Write(" := "); + PrintExpressionList(e.RHSs); + wr.Write("; "); + PrintExpression(e.Body); + if (parensNeeded) { wr.Write(")"); } + + } else if (expr is QuantifierExpr) { QuantifierExpr e = (QuantifierExpr)expr; bool parensNeeded = !isRightmost; @@ -1106,8 +1123,8 @@ namespace Microsoft.Dafny { string sep = ""; foreach (BoundVar bv in boundVars) { wr.Write("{0}{1}", sep, bv.Name); - sep = ", "; PrintType(": ", bv.Type); + sep = ", "; } PrintAttributes(attrs); if (range != null) { diff --git a/Source/Dafny/Resolver.cs b/Source/Dafny/Resolver.cs index b6bc4059..81216f32 100644 --- a/Source/Dafny/Resolver.cs +++ b/Source/Dafny/Resolver.cs @@ -2751,6 +2751,30 @@ namespace Microsoft.Dafny { } e.ResolvedOp = ResolveOp(e.Op, e.E1.Type); + } else if (expr is LetExpr) { + var e = (LetExpr)expr; + foreach (var rhs in e.RHSs) { + ResolveExpression(rhs, twoState); + } + scope.PushMarker(); + if (e.Vars.Count != e.RHSs.Count) { + Error(expr, "let expression must have same number of bound variables (found {0}) as RHSs (found {1})", e.Vars.Count, e.RHSs.Count); + } + int i = 0; + foreach (var v in e.Vars) { + if (!scope.Push(v.Name, v)) { + Error(v, "Duplicate let-variable name: {0}", v.Name); + } + ResolveType(v.tok, v.Type); + if (i < e.RHSs.Count && !UnifyTypes(v.Type, e.RHSs[i].Type)) { + Error(e.RHSs[i].tok, "type of RHS ({0}) does not match type of bound variable ({1})", e.RHSs[i].Type, v.Type); + } + i++; + } + ResolveExpression(e.Body, twoState); + scope.PopMarker(); + expr.Type = e.Body.Type; + } else if (expr is QuantifierExpr) { QuantifierExpr e = (QuantifierExpr)expr; int prevErrorCount = ErrorCount; diff --git a/Source/Dafny/Scanner.cs b/Source/Dafny/Scanner.cs index 634abf45..1ab06974 100644 --- a/Source/Dafny/Scanner.cs +++ b/Source/Dafny/Scanner.cs @@ -19,7 +19,7 @@ public class Buffer { // a) whole stream in buffer // b) part of stream in buffer // 2) non seekable stream (network, console) - + public const int EOF = 65535 + 1; // char.MaxValue + 1; const int MIN_BUFFER_LENGTH = 1024; // 1KB const int MAX_BUFFER_LENGTH = MIN_BUFFER_LENGTH * 64; // 64KB @@ -31,17 +31,15 @@ public class Buffer { Stream/*!*/ stream; // input stream (seekable) bool isUserStream; // was the stream opened by the user? - [ContractInvariantMethod] - void ObjectInvariant(){ - Contract.Invariant(buf != null); - Contract.Invariant(stream != null); - } - -// [NotDelayed] - public Buffer (Stream/*!*/ s, bool isUserStream) : base() { +[ContractInvariantMethod] +void ObjectInvariant(){ + Contract.Invariant(buf != null); + Contract.Invariant(stream != null);} + [NotDelayed] + public Buffer (Stream/*!*/ s, bool isUserStream) :base() { Contract.Requires(s != null); stream = s; this.isUserStream = isUserStream; - + int fl, bl; if (s.CanSeek) { fl = (int) s.Length; @@ -53,12 +51,12 @@ public class Buffer { buf = new byte[(bl>0) ? 
bl : MIN_BUFFER_LENGTH]; fileLen = fl; bufLen = bl; - + if (fileLen > 0) Pos = 0; // setup buffer to position 0 (start) else bufPos = 0; // index 0 is already after the file, thus Pos = 0 is invalid if (bufLen == fileLen && s.CanSeek) Close(); } - + protected Buffer(Buffer/*!*/ b) { // called in UTF8Buffer constructor Contract.Requires(b != null); buf = b.buf; @@ -75,14 +73,14 @@ public class Buffer { } ~Buffer() { Close(); } - + protected void Close() { if (!isUserStream && stream != null) { stream.Close(); //stream = null; } } - + public virtual int Read () { if (bufPos < bufLen) { return buf[bufPos++]; @@ -102,7 +100,7 @@ public class Buffer { Pos = curPos; return ch; } - + public string/*!*/ GetString (int beg, int end) { Contract.Ensures(Contract.Result() != null); int len = 0; @@ -141,7 +139,7 @@ public class Buffer { } } } - + // Read the next chunk of bytes from the stream, increases the buffer // if needed and updates the fields fileLen and bufLen. // Returns the number of bytes read. @@ -215,20 +213,19 @@ public class Scanner { const int noSym = 104; - [ContractInvariantMethod] - void objectInvariant(){ - Contract.Invariant(buffer!=null); - Contract.Invariant(t != null); - Contract.Invariant(start != null); - Contract.Invariant(tokens != null); - Contract.Invariant(pt != null); - Contract.Invariant(tval != null); - Contract.Invariant(Filename != null); - Contract.Invariant(errorHandler != null); - } - +[ContractInvariantMethod] +void objectInvariant(){ + Contract.Invariant(buffer!=null); + Contract.Invariant(t != null); + Contract.Invariant(start != null); + Contract.Invariant(tokens != null); + Contract.Invariant(pt != null); + Contract.Invariant(tval != null); + Contract.Invariant(Filename != null); + Contract.Invariant(errorHandler != null); +} public Buffer/*!*/ buffer; // scanner buffer - + Token/*!*/ t; // current token int ch; // current input character int pos; // byte position of current character @@ -239,13 +236,13 @@ public class Scanner { Token/*!*/ tokens; // list of tokens already peeked (first token is a dummy) Token/*!*/ pt; // current peek token - + char[]/*!*/ tval = new char[128]; // text of current token int tlen; // length of current token - + private string/*!*/ Filename; private Errors/*!*/ errorHandler; - + static Scanner() { start = new Hashtable(128); for (int i = 39; i <= 39; ++i) start[i] = 1; @@ -293,9 +290,9 @@ public class Scanner { start[Buffer.EOF] = -1; } - -// [NotDelayed] - public Scanner (string/*!*/ fileName, Errors/*!*/ errorHandler) : base() { + + [NotDelayed] + public Scanner (string/*!*/ fileName, Errors/*!*/ errorHandler) :base(){ Contract.Requires(fileName != null); Contract.Requires(errorHandler != null); this.errorHandler = errorHandler; @@ -305,14 +302,15 @@ public class Scanner { Stream stream = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read); buffer = new Buffer(stream, false); Filename = fileName; + Init(); } catch (IOException) { throw new FatalError("Cannot open file " + fileName); } } - -// [NotDelayed] - public Scanner (Stream/*!*/ s, Errors/*!*/ errorHandler, string/*!*/ fileName) : base() { + + [NotDelayed] + public Scanner (Stream/*!*/ s, Errors/*!*/ errorHandler, string/*!*/ fileName) :base(){ Contract.Requires(s != null); Contract.Requires(errorHandler != null); Contract.Requires(fileName != null); @@ -321,9 +319,10 @@ public class Scanner { buffer = new Buffer(s, true); this.errorHandler = errorHandler; this.Filename = fileName; + Init(); } - + void Init() { pos = -1; line = 1; col = 0; oldEols = 
0; @@ -344,11 +343,11 @@ public class Scanner { Contract.Ensures(Contract.Result() != null); int p = buffer.Pos; int ch = buffer.Read(); - // replace isolated '\r' by '\n' in order to make + // replace isolated '\r' by '\n' in order to make // eol handling uniform across Windows, Unix and Mac if (ch == '\r' && buffer.Peek() != '\n') ch = EOL; while (ch != EOL && ch != Buffer.EOF){ - ch = buffer.Read(); + ch = buffer.Read(); // replace isolated '\r' by '\n' in order to make // eol handling uniform across Windows, Unix and Mac if (ch == '\r' && buffer.Peek() != '\n') ch = EOL; @@ -359,7 +358,7 @@ public class Scanner { } void NextCh() { - if (oldEols > 0) { ch = EOL; oldEols--; } + if (oldEols > 0) { ch = EOL; oldEols--; } else { // pos = buffer.Pos; // ch = buffer.Read(); col++; @@ -367,9 +366,9 @@ public class Scanner { // // eol handling uniform across Windows, Unix and Mac // if (ch == '\r' && buffer.Peek() != '\n') ch = EOL; // if (ch == EOL) { line++; col = 0; } - + while (true) { - pos = buffer.Pos; + pos = buffer.Pos; ch = buffer.Read(); col++; // replace isolated '\r' by '\n' in order to make // eol handling uniform across Windows, Unix and Mac @@ -419,7 +418,7 @@ public class Scanner { return; } - + } } @@ -557,13 +556,10 @@ public class Scanner { t.pos = pos; t.col = col; t.line = line; t.filename = this.Filename; int state; - if (start.ContainsKey(ch)) { - Contract.Assert(start[ch] != null); - state = (int) start[ch]; - } + if (start.ContainsKey(ch)) { state = (int) cce.NonNull( start[ch]); } else { state = 0; } tlen = 0; AddCh(); - + switch (state) { case -1: { t.kind = eofSym; break; } // NextCh already done case 0: { @@ -763,14 +759,14 @@ public class Scanner { t.val = new String(tval, 0, tlen); return t; } - + private void SetScannerBehindT() { buffer.Pos = t.pos; NextCh(); line = t.line; col = t.col; for (int i = 0; i < tlen; i++) NextCh(); } - + // get the next token (possibly a token already seen during peeking) public Token/*!*/ Scan () { Contract.Ensures(Contract.Result() != null); @@ -791,7 +787,7 @@ public class Scanner { } pt = pt.next; } while (pt.kind > maxT); // skip pragmas - + return pt; } diff --git a/Source/Dafny/Translator.cs b/Source/Dafny/Translator.cs index 028891b5..f3373736 100644 --- a/Source/Dafny/Translator.cs +++ b/Source/Dafny/Translator.cs @@ -1738,7 +1738,7 @@ namespace Microsoft.Dafny { return r; } else if (expr is OldExpr) { OldExpr e = (OldExpr)expr; - return new Bpl.OldExpr(expr.tok, IsTotal(e.E, etran)); + return IsTotal(e.E, etran.Old); } else if (expr is MultiSetFormingExpr) { MultiSetFormingExpr e = (MultiSetFormingExpr)expr; return IsTotal(e.E, etran); @@ -1780,6 +1780,14 @@ namespace Microsoft.Dafny { } Bpl.Expr r = BplAnd(t0, t1); return z == null ? 
r : BplAnd(r, z); + } else if (expr is LetExpr) { + var e = (LetExpr)expr; + Bpl.Expr total = Bpl.Expr.True; + foreach (var rhs in e.RHSs) { + total = BplAnd(total, IsTotal(rhs, etran)); + } + return BplAnd(total, IsTotal(etran.GetSubstitutedBody(e), etran)); + } else if (expr is ComprehensionExpr) { var e = (ComprehensionExpr)expr; var total = IsTotal(e.Term, etran); @@ -1797,7 +1805,7 @@ namespace Microsoft.Dafny { Bpl.Expr gTotal = IsTotal(e.Guard, etran); Bpl.Expr g = etran.TrExpr(e.Guard); Bpl.Expr bTotal = IsTotal(e.Body, etran); - if (e is AssertExpr) { + if (e is AssertExpr || CommandLineOptions.Clo.DisallowSoundnessCheating) { return BplAnd(gTotal, BplAnd(g, bTotal)); } else { return BplAnd(gTotal, Bpl.Expr.Imp(g, bTotal)); @@ -1896,7 +1904,7 @@ namespace Microsoft.Dafny { return CanCallAssumption(dtv.Arguments, etran); } else if (expr is OldExpr) { OldExpr e = (OldExpr)expr; - return new Bpl.OldExpr(expr.tok, CanCallAssumption(e.E, etran)); + return CanCallAssumption(e.E, etran.Old); } else if (expr is MultiSetFormingExpr) { MultiSetFormingExpr e = (MultiSetFormingExpr)expr; return CanCallAssumption(e.E, etran); @@ -1926,6 +1934,14 @@ namespace Microsoft.Dafny { break; } return BplAnd(t0, t1); + } else if (expr is LetExpr) { + var e = (LetExpr)expr; + Bpl.Expr canCall = Bpl.Expr.True; + foreach (var rhs in e.RHSs) { + canCall = BplAnd(canCall, CanCallAssumption(rhs, etran)); + } + return BplAnd(canCall, CanCallAssumption(etran.GetSubstitutedBody(e), etran)); + } else if (expr is ComprehensionExpr) { var e = (ComprehensionExpr)expr; var total = CanCallAssumption(e.Term, etran); @@ -1941,11 +1957,11 @@ namespace Microsoft.Dafny { } else if (expr is PredicateExpr) { var e = (PredicateExpr)expr; Bpl.Expr gCanCall = CanCallAssumption(e.Guard, etran); - Bpl.Expr g = etran.TrExpr(e.Guard); Bpl.Expr bCanCall = CanCallAssumption(e.Body, etran); - if (e is AssertExpr) { - return BplAnd(gCanCall, BplAnd(g, bCanCall)); + if (e is AssertExpr || CommandLineOptions.Clo.DisallowSoundnessCheating) { + return BplAnd(gCanCall, bCanCall); } else { + Bpl.Expr g = etran.TrExpr(e.Guard); return BplAnd(gCanCall, Bpl.Expr.Imp(g, bCanCall)); } } else if (expr is ITEExpr) { @@ -2328,6 +2344,13 @@ namespace Microsoft.Dafny { break; } + } else if (expr is LetExpr) { + var e = (LetExpr)expr; + foreach (var rhs in e.RHSs) { + CheckWellformed(rhs, options, locals, builder, etran); + } + CheckWellformedWithResult(etran.GetSubstitutedBody(e), options, result, resultType, locals, builder, etran); + } else if (expr is ComprehensionExpr) { var e = (ComprehensionExpr)expr; var substMap = SetupBoundVarsAsLocals(e.BoundVars, builder, locals, etran); @@ -2346,7 +2369,7 @@ namespace Microsoft.Dafny { } else if (expr is PredicateExpr) { var e = (PredicateExpr)expr; CheckWellformed(e.Guard, options, locals, builder, etran); - if (e is AssertExpr) { + if (e is AssertExpr || CommandLineOptions.Clo.DisallowSoundnessCheating) { bool splitHappened; var ss = TrSplitExpr(e.Guard, etran, out splitHappened); if (!splitHappened) { @@ -4146,7 +4169,7 @@ namespace Microsoft.Dafny { Contract.Requires(locals != null); Contract.Requires(etran != null); - Expression receiver = bReceiver == null ? dafnyReceiver : new BoogieWrapper(bReceiver); + Expression receiver = bReceiver == null ? 
dafnyReceiver : new BoogieWrapper(bReceiver, dafnyReceiver.Type); Bpl.ExprSeq ins = new Bpl.ExprSeq(); if (!method.IsStatic) { if (bReceiver == null) { @@ -5002,11 +5025,13 @@ namespace Microsoft.Dafny { internal class BoogieWrapper : Expression { public readonly Bpl.Expr Expr; - public BoogieWrapper(Bpl.Expr expr) + public BoogieWrapper(Bpl.Expr expr, Type dafnyType) : base(expr.tok) { Contract.Requires(expr != null); + Contract.Requires(dafnyType != null); Expr = expr; + Type = dafnyType; // resolve immediately } } @@ -5016,7 +5041,7 @@ namespace Microsoft.Dafny { public readonly PredefinedDecls predef; public readonly Translator translator; public readonly string This; - public readonly string modifiesFrame = "$_Frame"; // the name of the context's frame variable. + public readonly string modifiesFrame; // the name of the context's frame variable. readonly Function applyLimited_CurrentFunction; public readonly int layerOffset = 0; public int Statistics_CustomLayerFunctionCount = 0; @@ -5027,66 +5052,60 @@ namespace Microsoft.Dafny { Contract.Invariant(predef != null); Contract.Invariant(translator != null); Contract.Invariant(This != null); + Contract.Invariant(modifiesFrame != null); Contract.Invariant(layerOffset == 0 || layerOffset == 1); Contract.Invariant(0 <= Statistics_CustomLayerFunctionCount); } - public ExpressionTranslator(Translator translator, PredefinedDecls predef, IToken heapToken) { + /// + /// This is the most general constructor. It is private and takes all the parameters. Whenever + /// one ExpressionTranslator is constructed from another, unchanged parameters are just copied in. + /// + ExpressionTranslator(Translator translator, PredefinedDecls predef, Bpl.Expr heap, string thisVar, + Function applyLimited_CurrentFunction, int layerOffset, string modifiesFrame) { + Contract.Requires(translator != null); Contract.Requires(predef != null); - Contract.Requires(heapToken != null); + Contract.Requires(heap != null); + Contract.Requires(thisVar != null); + Contract.Invariant(layerOffset == 0 || layerOffset == 1); + Contract.Invariant(modifiesFrame != null); + this.translator = translator; this.predef = predef; - this.HeapExpr = new Bpl.IdentifierExpr(heapToken, predef.HeapVarName, predef.HeapType); - this.This = "this"; + this.HeapExpr = heap; + this.This = thisVar; + this.applyLimited_CurrentFunction = applyLimited_CurrentFunction; + this.layerOffset = layerOffset; + this.modifiesFrame = modifiesFrame; } - public ExpressionTranslator(Translator translator, PredefinedDecls predef, Bpl.Expr heap) { + public ExpressionTranslator(Translator translator, PredefinedDecls predef, IToken heapToken) + : this(translator, predef, new Bpl.IdentifierExpr(heapToken, predef.HeapVarName, predef.HeapType)) { Contract.Requires(translator != null); Contract.Requires(predef != null); - Contract.Requires(heap != null); - this.translator = translator; - this.predef = predef; - this.HeapExpr = heap; - this.This = "this"; + Contract.Requires(heapToken != null); } - public ExpressionTranslator(Translator translator, PredefinedDecls predef, Bpl.Expr heap, string thisVar) { + public ExpressionTranslator(Translator translator, PredefinedDecls predef, Bpl.Expr heap) + : this(translator, predef, heap, "this") { Contract.Requires(translator != null); Contract.Requires(predef != null); Contract.Requires(heap != null); - Contract.Requires(thisVar != null); - this.translator = translator; - this.predef = predef; - this.HeapExpr = heap; - this.This = thisVar; } - ExpressionTranslator(Translator 
translator, PredefinedDecls predef, Bpl.Expr heap, Function applyLimited_CurrentFunction, int layerOffset) - { - Contract.Requires(translator != null); - Contract.Requires(predef != null); - Contract.Requires(heap != null); - this.translator = translator; - this.predef = predef; - this.HeapExpr = heap; - this.applyLimited_CurrentFunction = applyLimited_CurrentFunction; - this.This = "this"; - this.layerOffset = layerOffset; + public ExpressionTranslator(Translator translator, PredefinedDecls predef, Bpl.Expr heap, string thisVar) + : this(translator, predef, heap, thisVar, null, 0, "$_Frame") { + Contract.Requires(translator != null); + Contract.Requires(predef != null); + Contract.Requires(heap != null); + Contract.Requires(thisVar != null); } public ExpressionTranslator(ExpressionTranslator etran, string modifiesFrame) - { - Contract.Requires(etran != null); - Contract.Requires(modifiesFrame != null); - this.translator = etran.translator; - this.HeapExpr = etran.HeapExpr; - this.predef = etran.predef; - this.This = etran.This; - this.applyLimited_CurrentFunction = etran.applyLimited_CurrentFunction; - this.layerOffset = etran.layerOffset; - this.oldEtran = etran.oldEtran; - this.modifiesFrame = modifiesFrame; + : this(etran.translator, etran.predef, etran.HeapExpr, etran.This, etran.applyLimited_CurrentFunction, etran.layerOffset, modifiesFrame) { + Contract.Requires(etran != null); + Contract.Requires(modifiesFrame != null); } ExpressionTranslator oldEtran; @@ -5095,7 +5114,7 @@ namespace Microsoft.Dafny { Contract.Ensures(Contract.Result() != null); if (oldEtran == null) { - oldEtran = new ExpressionTranslator(translator, predef, new Bpl.OldExpr(HeapExpr.tok, HeapExpr), applyLimited_CurrentFunction, layerOffset); + oldEtran = new ExpressionTranslator(translator, predef, new Bpl.OldExpr(HeapExpr.tok, HeapExpr), This, applyLimited_CurrentFunction, layerOffset, modifiesFrame); } return oldEtran; } @@ -5111,7 +5130,7 @@ namespace Microsoft.Dafny { Contract.Requires(applyLimited_CurrentFunction != null); Contract.Ensures(Contract.Result() != null); - return new ExpressionTranslator(translator, predef, HeapExpr, applyLimited_CurrentFunction, layerOffset); + return new ExpressionTranslator(translator, predef, HeapExpr, This, applyLimited_CurrentFunction, layerOffset, modifiesFrame); } public ExpressionTranslator LayerOffset(int offset) { @@ -5119,7 +5138,7 @@ namespace Microsoft.Dafny { Contract.Requires(layerOffset + offset <= 1); Contract.Ensures(Contract.Result() != null); - return new ExpressionTranslator(translator, predef, HeapExpr, applyLimited_CurrentFunction, layerOffset + offset); + return new ExpressionTranslator(translator, predef, HeapExpr, This, applyLimited_CurrentFunction, layerOffset + offset, modifiesFrame); } public Bpl.IdentifierExpr TheFrame(IToken tok) @@ -5131,7 +5150,7 @@ namespace Microsoft.Dafny { Bpl.TypeVariable alpha = new Bpl.TypeVariable(tok, "beta"); Bpl.Type fieldAlpha = predef.FieldName(tok, alpha); Bpl.Type ty = new Bpl.MapType(tok, new Bpl.TypeVariableSeq(alpha), new Bpl.TypeSeq(predef.RefType, fieldAlpha), Bpl.Type.Bool); - return new Bpl.IdentifierExpr(tok, this.modifiesFrame ?? 
"$_Frame", ty); + return new Bpl.IdentifierExpr(tok, this.modifiesFrame, ty); } public Bpl.IdentifierExpr Tick() { @@ -5141,24 +5160,33 @@ namespace Microsoft.Dafny { return new Bpl.IdentifierExpr(Token.NoToken, "$Tick", predef.TickType); } - public Bpl.IdentifierExpr ModuleContextHeight() - { - Contract.Ensures(Contract.Result().Type != null); + public Bpl.IdentifierExpr ModuleContextHeight() { + Contract.Ensures(Contract.Result().Type != null); return new Bpl.IdentifierExpr(Token.NoToken, "$ModuleContextHeight", Bpl.Type.Int); } - public Bpl.IdentifierExpr FunctionContextHeight() - { - Contract.Ensures(Contract.Result().Type != null); + public Bpl.IdentifierExpr FunctionContextHeight() { + Contract.Ensures(Contract.Result().Type != null); return new Bpl.IdentifierExpr(Token.NoToken, "$FunctionContextHeight", Bpl.Type.Int); } - public Bpl.IdentifierExpr InMethodContext() - { - Contract.Ensures(Contract.Result().Type != null); + public Bpl.IdentifierExpr InMethodContext() { + Contract.Ensures(Contract.Result().Type != null); return new Bpl.IdentifierExpr(Token.NoToken, "$InMethodContext", Bpl.Type.Bool); } + public Expression GetSubstitutedBody(LetExpr e) { + Contract.Requires(e != null); + var substMap = new Dictionary(); + Contract.Assert(e.Vars.Count == e.RHSs.Count); // checked by resolution + for (int i = 0; i < e.Vars.Count; i++) { + Expression rhs = e.RHSs[i]; + substMap.Add(e.Vars[i], new BoogieWrapper(TrExpr(rhs), rhs.Type)); + } + return Translator.Substitute(e.Body, null, substMap); + } + + /// /// Translates Dafny expression "expr" into a Boogie expression. If the type of "expr" can be a boolean, then the /// token (source location) of the resulting expression is filled in (it wouldn't hurt if the token were always @@ -5329,7 +5357,7 @@ namespace Microsoft.Dafny { } else if (expr is OldExpr) { OldExpr e = (OldExpr)expr; - return new Bpl.OldExpr(expr.tok, TrExpr(e.E)); + return Old.TrExpr(e.E); } else if (expr is MultiSetFormingExpr) { MultiSetFormingExpr e = (MultiSetFormingExpr)expr; @@ -5344,7 +5372,6 @@ namespace Microsoft.Dafny { } else if (expr is FreshExpr) { FreshExpr e = (FreshExpr)expr; - Bpl.Expr oldHeap = new Bpl.OldExpr(expr.tok, HeapExpr); if (e.E.Type is SetType) { // generate: (forall $o: ref :: $o != null && X[Box($o)] ==> !old($Heap)[$o,alloc]) // TODO: trigger? 
@@ -5352,7 +5379,7 @@ namespace Microsoft.Dafny { Bpl.Expr o = new Bpl.IdentifierExpr(expr.tok, oVar); Bpl.Expr oNotNull = Bpl.Expr.Neq(o, predef.Null); Bpl.Expr oInSet = TrInSet(expr.tok, o, e.E, ((SetType)e.E.Type).Arg); - Bpl.Expr oIsFresh = Bpl.Expr.Not(IsAlloced(expr.tok, o, oldHeap)); + Bpl.Expr oIsFresh = Bpl.Expr.Not(Old.IsAlloced(expr.tok, o)); Bpl.Expr body = Bpl.Expr.Imp(Bpl.Expr.And(oNotNull, oInSet), oIsFresh); return new Bpl.ForallExpr(expr.tok, new Bpl.VariableSeq(oVar), body); } else if (e.E.Type is SeqType) { @@ -5362,14 +5389,14 @@ namespace Microsoft.Dafny { Bpl.Expr i = new Bpl.IdentifierExpr(expr.tok, iVar); Bpl.Expr iBounds = translator.InSeqRange(expr.tok, i, TrExpr(e.E), true, null, false); Bpl.Expr XsubI = translator.FunctionCall(expr.tok, BuiltinFunction.SeqIndex, predef.RefType, TrExpr(e.E), i); - Bpl.Expr oIsFresh = Bpl.Expr.Not(IsAlloced(expr.tok, XsubI, oldHeap)); + Bpl.Expr oIsFresh = Bpl.Expr.Not(Old.IsAlloced(expr.tok, XsubI)); Bpl.Expr xsubiNotNull = Bpl.Expr.Neq(translator.FunctionCall(expr.tok, BuiltinFunction.Unbox, predef.RefType, XsubI), predef.Null); Bpl.Expr body = Bpl.Expr.Imp(Bpl.Expr.And(iBounds, xsubiNotNull), oIsFresh); return new Bpl.ForallExpr(expr.tok, new Bpl.VariableSeq(iVar), body); } else { // generate: x == null || !old($Heap)[x] Bpl.Expr oNull = Bpl.Expr.Eq(TrExpr(e.E), predef.Null); - Bpl.Expr oIsFresh = Bpl.Expr.Not(IsAlloced(expr.tok, TrExpr(e.E), oldHeap)); + Bpl.Expr oIsFresh = Bpl.Expr.Not(Old.IsAlloced(expr.tok, TrExpr(e.E))); return Bpl.Expr.Binary(expr.tok, BinaryOperator.Opcode.Or, oNull, oIsFresh); } @@ -5546,6 +5573,10 @@ namespace Microsoft.Dafny { } return Bpl.Expr.Binary(expr.tok, bOpcode, e0, e1); + } else if (expr is LetExpr) { + var e = (LetExpr)expr; + return TrExpr(GetSubstitutedBody(e)); + } else if (expr is QuantifierExpr) { QuantifierExpr e = (QuantifierExpr)expr; Bpl.VariableSeq bvars = new Bpl.VariableSeq(); @@ -5930,16 +5961,7 @@ namespace Microsoft.Dafny { Contract.Requires(e != null); Contract.Ensures(Contract.Result() != null); - return IsAlloced(tok, e, HeapExpr); - } - - Bpl.Expr IsAlloced(IToken tok, Bpl.Expr e, Bpl.Expr heap) { - Contract.Requires(tok != null); - Contract.Requires(e != null); - Contract.Requires(heap != null); - Contract.Ensures(Contract.Result() != null); - - return ReadHeap(tok, heap, e, predef.Alloc(tok)); + return ReadHeap(tok, HeapExpr, e, predef.Alloc(tok)); } public Bpl.Expr GoodRef(IToken tok, Bpl.Expr e, Type type) { @@ -6401,9 +6423,9 @@ namespace Microsoft.Dafny { var e = (ConcreteSyntaxExpression)expr; return TrSplitExpr(e.ResolvedExpression, splits, position, expandFunctions, etran); - } else if (expr is PredicateExpr) { - var e = (PredicateExpr)expr; - return TrSplitExpr(e.Body, splits, position, expandFunctions, etran); + } else if (expr is LetExpr) { + var e = (LetExpr)expr; + return TrSplitExpr(etran.GetSubstitutedBody(e), splits, position, expandFunctions, etran); } else if (expr is UnaryExpr) { var e = (UnaryExpr)expr; @@ -6474,15 +6496,33 @@ namespace Microsoft.Dafny { return true; } - } else if (expr is OldExpr) { - var e = (OldExpr)expr; - var ss = new List(); - if (TrSplitExpr(e.E, ss, position, expandFunctions, etran)) { + } else if (expr is PredicateExpr) { + var e = (PredicateExpr)expr; + // For a predicate expression in split position, the predicate can be used as an assumption. Unfortunately, + // this assumption is not generated in non-split positions (because I don't know how.) + // So, treat "assert/assume P; E" like "P ==> E". 
+ if (position) { + var guard = etran.TrExpr(e.Guard); + var ss = new List(); + TrSplitExpr(e.Body, ss, position, expandFunctions, etran); + foreach (var s in ss) { + // as the source location in the following implication, use that of the translated "s" + splits.Add(new SplitExprInfo(s.IsFree, Bpl.Expr.Binary(s.E.tok, BinaryOperator.Opcode.Imp, guard, s.E))); + } + } else { + var ss = new List(); + TrSplitExpr(e.Guard, ss, !position, expandFunctions, etran); + var rhs = etran.TrExpr(e.Body); foreach (var s in ss) { - splits.Add(new SplitExprInfo(s.IsFree, new Bpl.OldExpr(s.E.tok, s.E))); + // as the source location in the following implication, use that of the translated "s" + splits.Add(new SplitExprInfo(s.IsFree, Bpl.Expr.Binary(s.E.tok, BinaryOperator.Opcode.Imp, s.E, rhs))); } - return true; } + return true; + + } else if (expr is OldExpr) { + var e = (OldExpr)expr; + return TrSplitExpr(e.E, splits, position, expandFunctions, etran.Old); } else if (expandFunctions && position && expr is FunctionCallExpr) { var fexp = (FunctionCallExpr)expr; @@ -6845,9 +6885,6 @@ namespace Microsoft.Dafny { var e = (DatatypeValue)expr; var q = n.Type.IsDatatype ? exprIsProminent : subExprIsProminent; // prominent status continues, if we're looking for a variable whose type is a datatype return e.Arguments.Exists(exp => VarOccursInArgumentToRecursiveFunction(exp, n, q)); - } else if (expr is OldExpr) { - var e = (OldExpr)expr; - return VarOccursInArgumentToRecursiveFunction(e.E, n, exprIsProminent); // prominent status continues } else if (expr is UnaryExpr) { var e = (UnaryExpr)expr; // both Not and SeqLength preserve prominence @@ -6965,7 +7002,7 @@ namespace Microsoft.Dafny { } } - static Expression Substitute(Expression expr, Expression receiverReplacement, Dictionary/*!*/ substMap) { + public static Expression Substitute(Expression expr, Expression receiverReplacement, Dictionary/*!*/ substMap) { Contract.Requires(expr != null); Contract.Requires(cce.NonNullDictionaryAndValues(substMap)); Contract.Ensures(Contract.Result() != null); @@ -7051,7 +7088,11 @@ namespace Microsoft.Dafny { } else if (expr is OldExpr) { OldExpr e = (OldExpr)expr; - Expression se = Substitute(e.E, receiverReplacement, substMap); // TODO: whoa, won't this do improper variable capture? + // Note, it is up to the caller to avoid variable capture. In most cases, this is not a + // problem, since variables have unique declarations. However, it is an issue if the substitution + // takes place inside an OldExpr. In those cases (see LetExpr), the caller can use a + // BoogieWrapper before calling Substitute. + Expression se = Substitute(e.E, receiverReplacement, substMap); if (se != e.E) { newExpr = new OldExpr(expr.tok, se); } @@ -7083,6 +7124,22 @@ namespace Microsoft.Dafny { newExpr = newBin; } + } else if (expr is LetExpr) { + var e = (LetExpr)expr; + var rhss = new List(); + bool anythingChanged = false; + foreach (var rhs in e.RHSs) { + var r = Substitute(rhs, receiverReplacement, substMap); + if (r != rhs) { + anythingChanged = true; + } + rhss.Add(r); + } + var body = Substitute(e.Body, receiverReplacement, substMap); + if (anythingChanged || body != e.Body) { + newExpr = new LetExpr(e.tok, e.Vars, rhss, body); + } + } else if (expr is ComprehensionExpr) { var e = (ComprehensionExpr)expr; Expression newRange = e.Range == null ? 
null : Substitute(e.Range, receiverReplacement, substMap); @@ -7109,10 +7166,12 @@ namespace Microsoft.Dafny { var e = (PredicateExpr)expr; Expression g = Substitute(e.Guard, receiverReplacement, substMap); Expression b = Substitute(e.Body, receiverReplacement, substMap); - if (expr is AssertExpr) { - newExpr = new AssertExpr(e.tok, g, b); - } else { - newExpr = new AssumeExpr(e.tok, g, b); + if (g != e.Guard || b != e.Body) { + if (expr is AssertExpr) { + newExpr = new AssertExpr(e.tok, g, b); + } else { + newExpr = new AssumeExpr(e.tok, g, b); + } } } else if (expr is ITEExpr) { diff --git a/Test/dafny0/Answer b/Test/dafny0/Answer index 50c238fc..15cec24f 100644 --- a/Test/dafny0/Answer +++ b/Test/dafny0/Answer @@ -1321,12 +1321,33 @@ CallStmtTests.dfy(15,8): Error: actual out-parameter 0 is required to be a ghost Dafny program verifier finished with 22 verified, 0 errors -------------------- PredExpr.dfy -------------------- -PredExpr.dfy(23,15): Error: value assigned to a nat must be non-negative +PredExpr.dfy(4,12): Error: condition in assert expression might not hold +Execution trace: + (0,0): anon3_Else +PredExpr.dfy(36,15): Error: value assigned to a nat must be non-negative Execution trace: (0,0): anon6_Else (0,0): anon7_Else -PredExpr.dfy(36,17): Error: condition in assert expression might not hold +PredExpr.dfy(49,17): Error: condition in assert expression might not hold +Execution trace: + (0,0): anon0 +PredExpr.dfy(74,14): Error: assertion violation +Execution trace: + (0,0): anon0 + (0,0): anon8_Else + (0,0): anon3 + PredExpr.dfy(73,20): anon10_Else + (0,0): anon6 + +Dafny program verifier finished with 11 verified, 4 errors + +-------------------- LetExpr.dfy -------------------- +LetExpr.dfy(5,12): Error: assertion violation Execution trace: (0,0): anon0 +LetExpr.dfy(104,21): Error: assertion violation +Execution trace: + (0,0): anon0 + (0,0): anon11_Then -Dafny program verifier finished with 9 verified, 2 errors +Dafny program verifier finished with 13 verified, 2 errors diff --git a/Test/dafny0/LetExpr.dfy b/Test/dafny0/LetExpr.dfy new file mode 100644 index 00000000..703813d2 --- /dev/null +++ b/Test/dafny0/LetExpr.dfy @@ -0,0 +1,109 @@ +method M0(n: int) + requires var f := 100; n < f; +{ + assert n < 200; + assert 0 <= n; // error +} + +method M1() +{ + assert var f := 54; var g := f + 1; g == 55; +} + +method M2() +{ + assert var f := 54; var f := f + 1; f == 55; +} + +function method Fib(n: nat): nat +{ + if n < 2 then n else Fib(n-1) + Fib(n-2) +} + +method M3(a: array) returns (r: int) + requires a != null && forall i :: 0 <= i < a.Length ==> a[i] == 6; + ensures (r + var t := r; t*2) == 3*r; +{ + assert Fib(2) + Fib(4) == Fib(0) + Fib(1) + Fib(2) + Fib(3); + + { + var x,y := Fib(8), Fib(11); + assume x == 21; + assert Fib(7) == 3 ==> Fib(9) == 24; + assume Fib(1000) == 1000; + assume Fib(9) - Fib(8) == 13; + assert Fib(9) <= Fib(10); + assert y == 89; + } + + assert Fib(1000) == 1000; // does it still know this? + + parallel (i | 0 <= i < a.Length) ensures true; { + var j := i+1; + assert j < a.Length ==> a[i] == a[j]; + } +} + +// M4 is pretty much the same as M3, except with things rolled into expressions. 
+method M4(a: array) returns (r: int) + requires a != null && forall i :: 0 <= i < a.Length ==> a[i] == 6; + ensures (r + var t := r; t*2) == 3*r; +{ + assert Fib(2) + Fib(4) == Fib(0) + Fib(1) + Fib(2) + Fib(3); + assert + var x,y := Fib(8), Fib(11); + assume x == 21; + assert Fib(7) == 3 ==> Fib(9) == 24; + assume Fib(1000) == 1000; + assume Fib(9) - Fib(8) == 13; + assert Fib(9) <= Fib(10); + y == 89; + assert Fib(1000) == 1000; // still known, because the assume was on the path here + assert forall i :: 0 <= i < a.Length ==> var j := i+1; j < a.Length ==> a[i] == a[j]; +} + +var index: int; +method P(a: array) returns (b: bool, ii: int) + requires a != null && exists k :: 0 <= k < a.Length && a[k] == 19; + modifies this, a; + ensures ii == index; + // The following uses a variable with a non-old definition inside an old expression: + ensures 0 <= index < a.Length && old(a[ii]) == 19; + ensures 0 <= index < a.Length && var newIndex := index; old(a[newIndex]) == 19; + // The following places both the variable and the body inside an old: + ensures b ==> old(var oldIndex := index; 0 <= oldIndex < a.Length && a[oldIndex] == 17); + // Here, the definition of the variable is old, and it's used both inside and + // inside an old expression: + ensures var oi := old(index); oi == index ==> a[oi] == 21 && old(a[oi]) == 19; +{ + b := 0 <= index < a.Length && a[index] == 17; + var i, j := 0, -1; + while (i < a.Length) + invariant 0 <= i <= a.Length; + invariant forall k :: 0 <= k < i ==> a[k] == 21; + invariant forall k :: i <= k < a.Length ==> a[k] == old(a[k]); + invariant (0 <= j < i && old(a[j]) == 19) || + (j == -1 && exists k :: i <= k < a.Length && a[k] == 19); + { + if (a[i] == 19) { j := i; } + i, a[i] := i + 1, 21; + } + index := j; + ii := index; +} + +method PMain(a: array) + requires a != null && exists k :: 0 <= k < a.Length && a[k] == 19; + modifies this, a; +{ + var s := a[..]; + var b17, ii := P(a); + assert s == old(a[..]); + assert s[index] == 19; + if (*) { + assert a[index] == 19; // error (a can have changed in P) + } else { + assert b17 ==> 0 <= old(index) < a.Length && old(a[index]) == 17; + assert index == old(index) ==> a[index] == 21 && old(a[index]) == 19; + } +} diff --git a/Test/dafny0/PredExpr.dfy b/Test/dafny0/PredExpr.dfy index 740c2308..96561ebd 100644 --- a/Test/dafny0/PredExpr.dfy +++ b/Test/dafny0/PredExpr.dfy @@ -1,3 +1,16 @@ +function SimpleAssert(n: int): int + ensures n < 100; +{ + assert n == 58; // error: assert violation + n // but postcondition is fine +} + +function SimpleAssume(n: int): int + ensures n < 100; +{ + assume n == 58; n // all is fine +} + function Subonacci(n: nat): nat { if 2 <= n then @@ -18,7 +31,7 @@ function F(n: int): nat function G(n: int, b: bool): nat { if b then - Subonacci(assume 0 <= n; n) + Subonacci(assume 0 <= n; n) + n // the last n is also affected by the assume else Subonacci(n) // error: n may not be a nat } @@ -49,3 +62,18 @@ function method FuncMeth(): int { // PredicateExpr is not included in compilation 15 } + +method M5(a: int, b: int) +{ + var k := if a < 0 then + assume b < 0; 5 + else + 5; + if (*) { + assert a == -7 ==> b < 0; + assert b < 0; // error: this condition does not hold on every path here + } else { + assert assume a == -7; b < 0; // note, this is different than the ==> above + assert b < 0; // this condition does hold on all paths to here + } +} diff --git a/Test/dafny0/runtest.bat b/Test/dafny0/runtest.bat index 32a60340..18c23e55 100644 --- a/Test/dafny0/runtest.bat +++ b/Test/dafny0/runtest.bat @@ 
-20,7 +20,7 @@ for %%f in (TypeTests.dfy NatTypes.dfy SmallTests.dfy Definedness.dfy TypeParameters.dfy Datatypes.dfy TypeAntecedents.dfy SplitExpr.dfy Refinement.dfy RefinementErrors.dfy LoopModifies.dfy ReturnErrors.dfy ReturnTests.dfy ChainingDisjointTests.dfy - CallStmtTests.dfy MultiSets.dfy PredExpr.dfy) do ( + CallStmtTests.dfy MultiSets.dfy PredExpr.dfy LetExpr.dfy) do ( echo. echo -------------------- %%f -------------------- %DAFNY_EXE% /compile:0 /dprint:out.dfy.tmp %* %%f -- cgit v1.2.3 From 9672dace9e0ecb4fd72f937f9305526669d67475 Mon Sep 17 00:00:00 2001 From: qadeer Date: Tue, 15 Nov 2011 10:18:59 -0800 Subject: changed inlining code so that candidate preconditions and postconditions are ignored --- Source/Core/Inline.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Source/Core/Inline.cs b/Source/Core/Inline.cs index a202bcfe..adc7e2f0 100644 --- a/Source/Core/Inline.cs +++ b/Source/Core/Inline.cs @@ -421,7 +421,7 @@ namespace Microsoft.Boogie { // inject non-free requires for (int i = 0; i < proc.Requires.Length; i++) { Requires/*!*/ req = cce.NonNull(proc.Requires[i]); - if (!req.Free) { + if (!req.Free && !QKeyValue.FindBoolAttribute(req.Attributes, "candidate")) { Requires/*!*/ reqCopy = (Requires/*!*/)cce.NonNull(req.Clone()); reqCopy.Condition = codeCopier.CopyExpr(req.Condition); AssertCmd/*!*/ a = new AssertRequiresCmd(callCmd, reqCopy); @@ -486,7 +486,7 @@ namespace Microsoft.Boogie { // inject non-free ensures for (int i = 0; i < proc.Ensures.Length; i++) { Ensures/*!*/ ens = cce.NonNull(proc.Ensures[i]); - if (!ens.Free) { + if (!ens.Free && !QKeyValue.FindBoolAttribute(ens.Attributes, "candidate")) { Ensures/*!*/ ensCopy = (Ensures/*!*/)cce.NonNull(ens.Clone()); ensCopy.Condition = codeCopier.CopyExpr(ens.Condition); AssertCmd/*!*/ a = new AssertEnsuresCmd(ensCopy); -- cgit v1.2.3 From 94fcbefe78e8f7f866e0e6f743fa4b1ab258b296 Mon Sep 17 00:00:00 2001 From: Rustan Leino Date: Tue, 15 Nov 2011 13:03:52 -0800 Subject: Added Dafny solutions to the VSTTE 2012 program verification competition --- Test/alltests.txt | 1 + Test/vstte2012/Answer | 20 ++ Test/vstte2012/BreadthFirstSearch.dfy | 276 ++++++++++++++++++++ Test/vstte2012/Combinators.dfy | 459 ++++++++++++++++++++++++++++++++++ Test/vstte2012/RingBuffer.dfy | 93 +++++++ Test/vstte2012/Tree.dfy | 172 +++++++++++++ Test/vstte2012/Two-Way-Sort.dfy | 58 +++++ Test/vstte2012/runtest.bat | 24 ++ 8 files changed, 1103 insertions(+) create mode 100644 Test/vstte2012/Answer create mode 100644 Test/vstte2012/BreadthFirstSearch.dfy create mode 100644 Test/vstte2012/Combinators.dfy create mode 100644 Test/vstte2012/RingBuffer.dfy create mode 100644 Test/vstte2012/Tree.dfy create mode 100644 Test/vstte2012/Two-Way-Sort.dfy create mode 100644 Test/vstte2012/runtest.bat diff --git a/Test/alltests.txt b/Test/alltests.txt index e50d0a61..baffc2a1 100644 --- a/Test/alltests.txt +++ b/Test/alltests.txt @@ -27,6 +27,7 @@ dafny2 Use More Dafny examples havoc0 Use HAVOC-generated bpl files VSI-Benchmarks Use Solutions to Verified Software Initiative verification challenges vacid0 Use Dafny attempts to VACID Edition 0 benchmarks +vstte2012 Use Dafny solutions for the VSTTE 2012 program verification competition livevars Use STORM benchmarks for testing correctness of live variable analysis lazyinline Postponed Lazy inlining benchmarks stratifiedinline Use Stratified inlining benchmarks diff --git a/Test/vstte2012/Answer b/Test/vstte2012/Answer new file mode 100644 index 00000000..15a95de1 --- /dev/null +++ 
b/Test/vstte2012/Answer @@ -0,0 +1,20 @@ + +-------------------- Two-Way-Sort.dfy -------------------- + +Dafny program verifier finished with 4 verified, 0 errors + +-------------------- Combinators.dfy -------------------- + +Dafny program verifier finished with 25 verified, 0 errors + +-------------------- RingBuffer.dfy -------------------- + +Dafny program verifier finished with 13 verified, 0 errors + +-------------------- Tree.dfy -------------------- + +Dafny program verifier finished with 15 verified, 0 errors + +-------------------- BreadthFirstSearch.dfy -------------------- + +Dafny program verifier finished with 24 verified, 0 errors diff --git a/Test/vstte2012/BreadthFirstSearch.dfy b/Test/vstte2012/BreadthFirstSearch.dfy new file mode 100644 index 00000000..5153b053 --- /dev/null +++ b/Test/vstte2012/BreadthFirstSearch.dfy @@ -0,0 +1,276 @@ +class BreadthFirstSearch +{ + // The following function is left uninterpreted (for the purpose of the + // verification problem, it can be thought of as a parameter to the class) + function method Succ(x: Vertex): set + + // This is the definition of what it means to be a path "p" from vertex + // "source" to vertex "dest" + function IsPath(source: Vertex, dest: Vertex, p: seq): bool + { + if source == dest then + p == [] + else + p != [] && dest in Succ(p[|p|-1]) && IsPath(source, p[|p|-1], p[..|p|-1]) + } + + function IsClosed(S: set): bool // says that S is closed under Succ + { + forall v :: v in S ==> Succ(v) <= S + } + + // This is the main method to be verified. Note, instead of using a + // postcondition that talks about that there exists a path (such that ...), + // this method returns, as a ghost out-parameter, that existential + // witness. The method could equally well have been written using an + // existential quantifier and no ghost out-parameter. + method BFS(source: Vertex, dest: Vertex, ghost AllVertices: set) + returns (d: int, ghost path: seq) + // source and dest are among AllVertices + requires source in AllVertices && dest in AllVertices; + // AllVertices is closed under Succ + requires IsClosed(AllVertices); + // This method has two basic outcomes, as indicated by the sign of "d". + // More precisely, "d" is non-negative iff "source" can reach "dest". + // The following postcondition says that under the "0 <= d" outcome, + // "path" denotes a path of length "d" from "source" to "dest": + ensures 0 <= d ==> IsPath(source, dest, path) && |path| == d; + // Moreover, that path is as short as any path from "source" to "dest": + ensures 0 <= d ==> forall p :: IsPath(source, dest, p) ==> |path| <= |p|; + // Finally, under the outcome "d < 0", there is no path from "source" to "dest": + ensures d < 0 ==> !exists p :: IsPath(source, dest, p); + { + var V, C, N := {source}, {source}, {}; + ghost var Processed, paths := {}, Maplet({source}, source, [], Empty); + // V - all encountered vertices + // Processed - vertices reachable from "source" is at most "d" steps + // C - unprocessed vertices reachable from "source" in "d" steps + // (but no less) + // N - vertices encountered and reachable from "source" in "d+1" steps + // (but no less) + d := 0; + var dd := Zero; + while (C != {}) + // V, Processed, C, N are all subsets of AllVertices: + invariant V <= AllVertices && Processed <= AllVertices && C <= AllVertices && N <= AllVertices; + // source is encountered: + invariant source in V; + // V is the disjoint union of Processed, C, and N: + invariant V == Processed + C + N; + invariant Processed !! C !! 
N; // Processed, C, and N are mutually disjoint
+      // "paths" records a path for every encountered vertex
+      invariant ValidMap(source, paths);
+      invariant V == Domain(paths);
+      // shortest paths for vertices in C have length d, and for vertices in N
+      // have length d+1
+      invariant forall x :: x in C ==> |Find(source, x, paths)| == d;
+      invariant forall x :: x in N ==> |Find(source, x, paths)| == d + 1;
+      // "dd" is just an inductive-looking way of writing "d":
+      invariant Value(dd) == d;
+      // "dest" is not reachable for any smaller value of "d":
+      invariant dest in R(source, dd, AllVertices) ==> dest in C;
+      invariant d != 0 ==> dest !in R(source, dd.predecessor, AllVertices);
+      // together, Processed and C are all the vertices reachable in at most d steps:
+      invariant Processed + C == R(source, dd, AllVertices);
+      // N are the successors of Processed that are not reachable within d steps:
+      invariant N == Successors(Processed, AllVertices) - R(source, dd, AllVertices);
+      // if we have exhausted C, then we have also exhausted N:
+      invariant C == {} ==> N == {};
+      // variant:
+      decreases AllVertices - Processed;
+    {
+      // remove a vertex "v" from "C"
+      var v := choose C;
+      C, Processed := C - {v}, Processed + {v};
+      ghost var pathToV := Find(source, v, paths);
+
+      if (v == dest) {
+        parallel (p | IsPath(source, dest, p))
+          ensures |pathToV| <= |p|;
+        {
+          Lemma_IsPath_R(source, dest, p, AllVertices);
+          if (|p| < |pathToV|) {
+            // show that this branch is impossible
+            ToNat_Value_Bijection(|p|);
+            RMonotonicity(source, ToNat(|p|), dd.predecessor, AllVertices);
+          }
+        }
+        return d, pathToV;
+      }
+
+      // process newly encountered successors
+      var newlyEncountered := set w | w in Succ(v) && w !in V;
+      V, N := V + newlyEncountered, N + newlyEncountered;
+      paths := UpdatePaths(newlyEncountered, source, paths, v, pathToV);
+
+      if (C == {}) {
+        C, N, d, dd := N, {}, d+1, Suc(dd);
+      }
+    }
+
+    // show that "dest" is not in any reachability set, no matter
+    // how many successors one follows
+    parallel (nn)
+      ensures dest !in R(source, nn, AllVertices);
+    {
+      if (Value(nn) < Value(dd)) {
+        RMonotonicity(source, nn, dd, AllVertices);
+      } else {
+        IsReachFixpoint(source, dd, nn, AllVertices);
+      }
+    }
+
+    // Now, show what the above means in terms of IsPath. More
+    // precisely, show that there is no path "p" from "source" to "dest".
+ parallel (p | IsPath(source, dest, p)) + // this and the previous two lines will establish the + // absurdity of a "p" satisfying IsPath(source, dest, p) + ensures false; + { + Lemma_IsPath_R(source, dest, p, AllVertices); + // a consequence of Lemma_IsPath_R is: + // dest in R(source, ToNat(|p|), AllVertices) + // but that contradicts the conclusion of the preceding parallel statement + } + + d := -1; // indicate "no path" + } + + // property of IsPath + + ghost method Lemma_IsPath_Closure(source: Vertex, dest: Vertex, + p: seq, AllVertices: set) + requires IsPath(source, dest, p) && source in AllVertices && IsClosed(AllVertices); + ensures dest in AllVertices && forall v :: v in p ==> v in AllVertices; + { + if (p != []) { + var last := |p| - 1; + Lemma_IsPath_Closure(source, p[last], p[..last], AllVertices); + } + } + + // operations on Nat + + function Value(nn: Nat): nat + ensures ToNat(Value(nn)) == nn; + { + match nn + case Zero => 0 + case Suc(mm) => Value(mm) + 1 + } + + function ToNat(n: nat): Nat + { + if n == 0 then Zero else Suc(ToNat(n - 1)) + } + + ghost method ToNat_Value_Bijection(n: nat) + ensures Value(ToNat(n)) == n; + { + // Dafny automatically figures out the inductive proof of the postcondition + } + + // Reachability + + function R(source: Vertex, nn: Nat, AllVertices: set): set + { + match nn + case Zero => {source} + case Suc(mm) => R(source, mm, AllVertices) + + Successors(R(source, mm, AllVertices), AllVertices) + } + + function Successors(S: set, AllVertices: set): set + { + set w | w in AllVertices && exists x :: x in S && w in Succ(x) + } + + ghost method RMonotonicity(source: Vertex, mm: Nat, nn: Nat, AllVertices: set) + requires Value(mm) <= Value(nn); + ensures R(source, mm, AllVertices) <= R(source, nn, AllVertices); + decreases Value(nn) - Value(mm); + { + if (Value(mm) < Value(nn)) { + RMonotonicity(source, Suc(mm), nn, AllVertices); + } + } + + ghost method IsReachFixpoint(source: Vertex, mm: Nat, nn: Nat, AllVertices: set) + requires R(source, mm, AllVertices) == R(source, Suc(mm), AllVertices); + requires Value(mm) <= Value(nn); + ensures R(source, mm, AllVertices) == R(source, nn, AllVertices); + decreases Value(nn) - Value(mm); + { + if (Value(mm) < Value(nn)) { + IsReachFixpoint(source, Suc(mm), nn, AllVertices); + } + } + + ghost method Lemma_IsPath_R(source: Vertex, x: Vertex, + p: seq, AllVertices: set) + requires IsPath(source, x, p) && source in AllVertices && IsClosed(AllVertices); + ensures x in R(source, ToNat(|p|), AllVertices); + { + if (p != []) { + Lemma_IsPath_Closure(source, x, p, AllVertices); + var last := |p| - 1; + Lemma_IsPath_R(source, p[last], p[..last], AllVertices); + } + } + + // operations on Map's + + function Domain(m: Map): set + { +// if m.Maplet? then m.dom else {} +// if m == Empty then {} else assert m.Maplet?; m.dom + match m + case Empty => {} + case Maplet(dom, t, s, nxt) => dom + } + + // ValidMap encodes the consistency of maps (think, invariant). + // An explanation of this idiom is explained in the README file. 
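+  // As a concrete illustration of this idiom (a small sanity-check sketch only,
+  // relying on ValidMap and Find as defined just below and assuming the verifier
+  // unfolds these definitions on such a tiny value): a one-entry map built the
+  // same way BFS seeds "paths" has domain {source}, satisfies ValidMap, and maps
+  // "source" to the empty path.
+  ghost method Example_SingletonPathMap(source: Vertex)
+  {
+    var m := Maplet({source}, source, [], Empty);
+    assert IsPath(source, source, []);
+    assert Domain(m) == {source};
+    assert ValidMap(source, m);
+    assert Find(source, source, m) == [];
+  }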
+ function ValidMap(source: Vertex, m: Map): bool + { + match m + case Empty => true + case Maplet(dom, v, path, next) => + v in dom && dom == Domain(next) + {v} && + IsPath(source, v, path) && + ValidMap(source, next) + } + + function Find(source: Vertex, x: Vertex, m: Map): seq + requires ValidMap(source, m) && x in Domain(m); + ensures IsPath(source, x, Find(source, x, m)); + { + match m + case Maplet(dom, v, path, next) => + if x == v then path else Find(source, x, next) + } + + ghost method UpdatePaths(vSuccs: set, source: Vertex, + paths: Map, v: Vertex, pathToV: seq) + returns (newPaths: Map) + requires ValidMap(source, paths); + requires vSuccs !! Domain(paths); + requires forall succ :: succ in vSuccs ==> IsPath(source, succ, pathToV + [v]); + ensures ValidMap(source, newPaths) && Domain(newPaths) == Domain(paths) + vSuccs; + ensures forall x :: x in Domain(paths) ==> + Find(source, x, paths) == Find(source, x, newPaths); + ensures forall x :: x in vSuccs ==> Find(source, x, newPaths) == pathToV + [v]; + { + if (vSuccs == {}) { + newPaths := paths; + } else { + var succ := choose vSuccs; + newPaths := Maplet(Domain(paths) + {succ}, succ, pathToV + [v], paths); + newPaths := UpdatePaths(vSuccs - {succ}, source, newPaths, v, pathToV); + } + } +} + +datatype Map = Empty | Maplet(dom: set, T, seq, next: Map); + +datatype Nat = Zero | Suc(predecessor: Nat); diff --git a/Test/vstte2012/Combinators.dfy b/Test/vstte2012/Combinators.dfy new file mode 100644 index 00000000..46daf48d --- /dev/null +++ b/Test/vstte2012/Combinators.dfy @@ -0,0 +1,459 @@ +// Problem 2 concerns an interpreter for the language of S and K combinators. + +// ----------------------------------------------------------------------------- +// Definitions + +// First, we define the language of combinator terms. "Apply(x, y)" is what +// the problem description writes as "(x y)". In the following Dafny +// definition, "car" and "cdr" are declared to be destructors for terms +// constructed by Apply. + +datatype Term = S | K | Apply(car: Term, cdr: Term); + +// The problem defines values to be a subset of the terms. More precisely, +// a Value is a Term that fits the following grammar: +// Value = K | S | (K Value) | (S Value) | ((S Value) Value) +// The following predicate says whether or not a given term is a value. + +function method IsValue(t: Term): bool + ensures IsValue(t) && t.Apply? ==> IsValue(t.car) && IsValue(t.cdr); +{ + match t + case K => true + case S => true + case Apply(a, b) => + match a + case K => + assert IsValue(a); + IsValue(b) + case S => + assert IsValue(a); + IsValue(b) + case Apply(x, y) => + assert x==S && IsValue(y) && IsValue(b) ==> IsValue(a); + x==S && IsValue(y) && IsValue(b) +} + +// A context is essentially a term with one missing subterm, a "hole". It +// is defined as follows: + +datatype Context = Hole | C_term(Context, Term) | value_C(Term/*Value*/, Context); + +// The problem seems to suggest that the value_C form requires a value and +// a context. To formalize that notion, we define a predicate that checks this +// condition. + +function IsContext(C: Context): bool +{ + match C + case Hole => true // [] + case C_term(D, t) => IsContext(D) // (D t) + case value_C(v, D) => IsValue(v) && IsContext(D) // (v D) +} + +// The EvalExpr function replace the hole in a context with a given term. 
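+// As a small concrete sketch of hole-filling (illustrative only; it uses the
+// EvalExpr function defined just below and assumes the verifier unfolds these
+// definitions on such small terms): plugging the term K into the context
+// (S []), written value_C(S, Hole), yields the term (S K).
+ghost method Example_EvalExpr()
+{
+  assert IsContext(value_C(S, Hole));
+  assert EvalExpr(value_C(S, Hole), K) == Apply(S, K);
+}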
+
+function EvalExpr(C: Context, t: Term): Term
+  requires IsContext(C);
+{
+  match C
+  case Hole => t
+  case C_term(D, u) => Apply(EvalExpr(D, t), u)
+  case value_C(v, D) => Apply(v, EvalExpr(D, t))
+}
+
+// A term can be reduced. This reduction operation is defined via
+// a single-step reduction operation. In the problem, the single-step
+// reduction has the form:
+//   C[t] --> C[r]
+// We formalize the single-step reduction by a function Step, which
+// performs the reduction if it applies or just returns the given term
+// if it does not. We will say that "Step applies" to refer to the first
+// case, which is a slight abuse of language, since the function "Step"
+// is total.
+//
+// Since the context C is the same on both sides in the single-step
+// reduction above, we don't pass it to function Step. Rather, Step
+// just takes a term "t" and returns the following:
+//   match t
+//   case ((K v1) v2) => v1
+//   case (((S v1) v2) v3) => ((v1 v3) (v2 v3))
+//   else t
+// As you can see below, it takes more lines than shown here to express
+// this matching in Dafny, but this is all that Step does.
+//
+// Again, note that Step returns the given term if neither of the two
+// cases in the problem statement applies.
+//
+// One more thing: Step has a postcondition (and the body of Step
+// contains three asserts that act as lemmas in proving the postcondition).
+// The postcondition has been included for the benefit of Verification
+// Task 2, and we describe the functions used in the Step postcondition
+// only much later in this file. For now, the postcondition can be
+// ignored, since it is, after all, just a consequence of the body
+// of Step.
+
+function method Step(t: Term): Term
+  ensures !ContainsS(t) ==>
+          !ContainsS(Step(t)) &&
+          (Step(t) == t || TermSize(Step(t)) < TermSize(t));
+{
+  match t
+  case S => t
+  case K => t
+  case Apply(x, y) =>
+    match x
+    case S => t
+    case K => t
+    case Apply(m, n) =>
+      if m == K && IsValue(n) && IsValue(y) then
+        // this is the case t == Apply(Apply(K, n), y)
+        assert !ContainsS(t) ==> !ContainsS(x);
+        assert TermSize(n) < TermSize(Apply(m, n));
+        n
+      else if m.Apply? && m.car == S && IsValue(m.cdr) && IsValue(n) && IsValue(y) then
+        // t == Apply(Apply(Apply(S, m.cdr), n), y)
+        assert ContainsS(m) && ContainsS(t);
+        Apply(Apply(m.cdr, y), Apply(n, y))
+      else
+        t
+}
+
+// The single-step reduction operation may be applied to any subexpression
+// of a term that could be considered a hole. Function FindAndStep
+// searches for a (context, term) pair C[u] that denotes a given term "t"
+// such that Step applies to "u". If found, the function returns
+// C[Step(u)], which will necessarily be different from "t". If no such
+// C[u] pair exists, this function returns the given "t".
+//
+// Note, FindAndStep only applies one Step. We will get to repeated
+// applications of steps in the "reduction" method below.
+//
+// For all definitions above, it was necessary to check (manually) that
+// they correspond to the definitions intended in the problem statement.
+// That is, the definitions above are all part of the specification.
+// For function FindAndStep, the definition given does not require
+// such scrutiny. Rather, we will soon state a theorem that states
+// the properties of what FindAndStep returns.
+//
+// Like Step, FindAndStep has a postcondition, and it is also included to
+// support Verification Task 2.
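+// As a concrete sanity check of the two reduction rules implemented by Step
+// above (an illustrative sketch only; it assumes the verifier unfolds Step
+// and IsValue on these small literal terms):
+ghost method Example_Step()
+{
+  // K-rule: ((K K) S) steps to K
+  assert Step(Apply(Apply(K, K), S)) == K;
+  // S-rule: (((S K) K) K) steps to ((K K) (K K))
+  assert Step(Apply(Apply(Apply(S, K), K), K)) == Apply(Apply(K, K), Apply(K, K));
+  // neither rule applies to (K K), so Step leaves it unchanged
+  assert Step(Apply(K, K)) == Apply(K, K);
+}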
+ +function method FindAndStep(t: Term): Term + ensures !ContainsS(t) ==> + !ContainsS(FindAndStep(t)) && + (FindAndStep(t) == t || TermSize(FindAndStep(t)) < TermSize(t)); +{ + if Step(t) != t then + Step(t) + else if !t.Apply? then + t + else if FindAndStep(t.car) != t.car then + Apply(FindAndStep(t.car), t.cdr) + else if IsValue(t.car) && FindAndStep(t.cdr) != t.cdr then + Apply(t.car, FindAndStep(t.cdr)) + else + t +} + +// One part of the correctness of FindAndStep (and, indeed, of method +// "reduction" below) is that a term can be terminal, meaning that there is +// no way to apply Step to any part of it. + +function IsTerminal(t: Term): bool +{ + !(exists C,u :: IsContext(C) && t == EvalExpr(C,u) && Step(u) != u) +} + +// The following theorem states the correctness of the FindAndStep function: + +ghost method Theorem_FindAndStep(t: Term) + // If FindAndStep returns the term it started from, then there is no + // way to take a step. More precisely, there is no C[u] == t for which the + // Step applies to "u". + ensures FindAndStep(t) == t ==> IsTerminal(t); + // If FindAndStep returns a term that's different from what it started with, + // then it chose some C[u] == t for which the Step applies to "u", and then + // it returned C[Step(u)]. + ensures FindAndStep(t) != t ==> + exists C,u :: IsContext(C) && t == EvalExpr(C,u) && Step(u) != u && + FindAndStep(t) == EvalExpr(C, Step(u)); +{ + // The theorem follows from the following lemma, which itself is proved by + // induction. + var r, C, u := Lemma_FindAndStep(t); +} + +// This is the lemma that proves the theorem above. Whereas the theorem talks +// existentially about C and u, the lemma constructs C and u and returns them, +// which is useful in the proof by induction. The computation inside the +// lemma mimicks that done by function FindAndStep; indeed, the lemma +// computes the value of FindAndStep(t) as it goes along and it returns +// that value. + +ghost method Lemma_FindAndStep(t: Term) returns (r: Term, C: Context, u: Term) + ensures r == FindAndStep(t); + ensures r == t ==> IsTerminal(t); + ensures r != t ==> + IsContext(C) && t == EvalExpr(C,u) && Step(u) != u && + r == EvalExpr(C, Step(u)); +{ + Lemma_ContextPossibilities(t); + if (Step(t) != t) { + // t == Hole[t] and Step applies t. So, return Hole[Step(t)] + return Step(t), Hole, t; + } else if (!t.Apply?) { + r := t; + } else { + r, C, u := Lemma_FindAndStep(t.car); // (*) + if (r != t.car) { + // t has the form (a b) where a==t.car and b==t.cdr, and a==C[u] for some + // context C and some u to which the Step applies. t can therefore be + // denoted by (C[u] b) == (C b)[u] and the Step applies to u. So, return + // (C b)[Step(u)] == (C[Step(u)] b). Note that FindAndStep(a) + // gives C[Step(u)]. + return Apply(r, t.cdr), C_term(C, t.cdr), u; + } else if (IsValue(t.car)) { + r, C, u := Lemma_FindAndStep(t.cdr); + assert IsTerminal(t.car); // make sure this is still remembered from (*) + + if (r != t.cdr) { + // t has the form (a b) where a==t.car and b==t.cdr and "a" is a Value, + // and b==C[u] for some context C and some u to which the Step applies. + // t can therefore be denoted by (a C[u]) == (C a)[u] and the Step + // applies to u. So, return (C a)[Step(u)] == (a C[Step(u)]). Note + // that FindAndStep(b) gives C[Step(u)]. 
+ return Apply(t.car, r), value_C(t.car, C), u; + } else { + parallel (C,u | IsContext(C) && t == EvalExpr(C,u)) + ensures Step(u) == u; + { + // The following assert and the first assert of each "case" are + // consequences of the Lemma_ContextPossibilities that was invoked + // above. + assert t.Apply? && IsValue(t.car); + match (C) { + case Hole => + assert t == u; + case C_term(D, bt) => + assert bt == t.cdr && t.car == EvalExpr(D, u); + case value_C(at, D) => + assert at == t.car && t.cdr == EvalExpr(D, u); + } + } + r := t; + } + } else { + r := t; + } + } +} + +// The proof of the lemma above used one more lemma, namely one that enumerates +// lays out the options for how to represent a term as a C[u] pair. + +ghost method Lemma_ContextPossibilities(t: Term) + ensures forall C,u :: IsContext(C) && t == EvalExpr(C, u) ==> + (C == Hole && t == u) || + (t.Apply? && exists D :: C == C_term(D, t.cdr) && t.car == EvalExpr(D, u)) || + (t.Apply? && IsValue(t.car) && + exists D :: C == value_C(t.car, D) && t.cdr == EvalExpr(D, u)); +{ + // Dafny's induction tactic rocks +} + +// We now define a way to record a sequence of reduction steps. +// IsTrace(trace, t, r) returns true iff "trace" gives witness to a +// sequence of terms from "t" to "r", each term reducing to its +// successor in the trace. + +datatype Trace = EmptyTrace | ReductionStep(Trace, Term); + +function IsTrace(trace: Trace, t: Term, r: Term): bool +{ + match trace + case EmptyTrace => + t == r + case ReductionStep(tr, u) => + IsTrace(tr, t, u) && FindAndStep(u) == r +} + +// Finally, we are ready to give the requested routine "reduction", which +// keeps applying FindAndStep until quiescence, that is, until Step +// no longer applies. +// +// As required by Verification Task 1, the "reduction" method has two +// postconditions. One says that the term returned, "r", was obtained +// from the original term, "t", by a sequence of reduction steps. The +// other says that "r" cannot be reduced any further. +// +// Unlike the other competition problems, this one requested code +// (for "reduction") that may not terminate. In order to allow reasoning +// about algorithms that may never terminate, Dafny has a special loop +// statement (a "while" loop with a declaration "decreases *") that +// thus comes in handy for "reduction". (Dafny never allows recursion +// to be non-terminating, only these special loops.) Note that use +// of the special loop statement does not have any effect on the +// specifications of the enclosing method (but this may change in a +// future version of Dafny). + +method reduction(t: Term) returns (r: Term) + // The result was obtained by a sequence of reductions: + ensures exists trace :: IsTrace(trace, t, r); + // The result "r" cannot be reduced any further: + ensures IsTerminal(r); +{ + r := t; + ghost var trace := EmptyTrace; + while (true) + invariant IsTrace(trace, t, r); + decreases *; // allow this statement to loop forever + { + var u := FindAndStep(r); + if (u == r) { + // we have found a fixpoint + Theorem_FindAndStep(r); + return; + } + r, trace := u, ReductionStep(trace, r); + } +} + +// ----------------------------------------------------------------------------- +// Verification Task 2 +// +// This part of the problem asks us to consider the reduction of terms that +// do not contain S. 
The following function formalizes what it means for a term +// to contain S: + +function method ContainsS(t: Term): bool +{ + match t + case S => true + case K => false + case Apply(x, y) => ContainsS(x) || ContainsS(y) +} + +// The verification task itself is to prove that "reduction" terminates on any +// term that does not contain S. To prove this, we need to supply a loop variant +// for the loop in "reduction". However, Dafny does not allow one loop to be +// proved to terminate in some cases and allowed not to terminate in other cases. +// There, we meet Verification Task 2 by manually copying the body of "reduction" +// into a new method (called VerificationTask2) and proving that this new method +// terminates. Of course, Dafny does not check that we copy the body correctly, +// so that needs to be checked by a human. +// +// In method VerificationTask2, we added not just the precondition given in the +// Verification Task and a loop variant, but we also added two loop invariants +// and one more postcondition. One of the loop invariants keep track of that +// there are no S's. The other loop invariant and the postcondition are for +// the benefit of Verification Task 3, as we explain later. + +method VerificationTask2(t: Term) returns (r: Term) + requires !ContainsS(t); // a sufficient condition for termination + // The result was obtained by a sequence of reductions: + ensures exists trace :: IsTrace(trace, t, r); + // The result "r" cannot be reduced any further: + ensures IsTerminal(r); + // Later in this file, we define a function TerminatingReduction, and the + // following postcondition says that TerminatingReduction computes the same + // term as this method does. + ensures r == TerminatingReduction(t); +{ + r := t; + ghost var trace := EmptyTrace; + while (true) + invariant IsTrace(trace, t, r) && !ContainsS(r); + invariant TerminatingReduction(t) == TerminatingReduction(r); + decreases TermSize(r); + { + var u := FindAndStep(r); + if (u == r) { + // we have found a fixpoint + Theorem_FindAndStep(r); + return; + } + r, trace := u, ReductionStep(trace, r); + } +} + +// What now follows is the definition TermSize, which is used in the +// loop variant. When a Step is applied to a term without S, TermSize +// is reduced, which is stated as a postcondition of both Step and +// FindAndStep. That postcondition of FindAndStep is used in the +// proof of termination of method VerificationTask2. + +// The loop variant is simply the count of nodes in the term: + +function TermSize(t: Term): nat +{ + match t + case S => 1 + case K => 1 + case Apply(x, y) => 1 + TermSize(x) + TermSize(y) +} + +// We have already given two methods for computing a reduction: +// method "reduction", which may or may not terminate, and method +// "VerificationTask2", whose precondition is strong enough to let +// us prove that the method will terminate. The correspondence +// between the two methods is checked by hand, seeing that +// VerificationTask2 includes the postconditions of "reduction" and +// seeing that the code is the same. +// +// We now define a third way of computing reductions, this time +// using a function (not a method). To prove that this function +// computes the same thing as method VerificationTask2, we had +// added a postcondition to VerificationTask2 above. This function +// is introduced for the benefit of stating and verifying Verification +// Task 3. 
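+// As a small concrete sketch of the fixpoint iteration performed by
+// TerminatingReduction (defined just below; illustrative only, and it assumes
+// the verifier unfolds these definitions on small terms): the S-free term
+// ((K K) K) reduces in one FindAndStep step to K, which is already a fixpoint.
+ghost method Example_TerminatingReduction()
+{
+  assert !ContainsS(Apply(Apply(K, K), K));
+  assert FindAndStep(Apply(Apply(K, K), K)) == K;
+  assert FindAndStep(K) == K;
+  assert TerminatingReduction(Apply(Apply(K, K), K)) == K;
+}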
+ +function TerminatingReduction(t: Term): Term + requires !ContainsS(t); // a sufficient condition for termination + decreases TermSize(t); +{ + if FindAndStep(t) == t then + t // we have reached a fixpoint + else + TerminatingReduction(FindAndStep(t)) +} + +// ----------------------------------------------------------------------------- +// Verification Task 3 + +// Here is the function "ks" from Verification Task 3. It produces a particular +// family of terms that contain only Apply and K. Hence, we can establish, as a +// postcondition of the function, that ks(n) does not contain S. + +function method ks(n: nat): Term + ensures !ContainsS(ks(n)); +{ + if n == 0 then K else Apply(ks(n-1), K) +} + +// Verification Task 3 is now established by the following theorem. It says +// that reducing ks(n) results in either K and (K K), depending on the parity +// of n. The theorem uses function TerminatingReduction to speak of the +// reduction--remember that (by the last postcondition of method +// VerificationTask2) it computes the same thing as method VerificationTask2 +// does. + +ghost method VerificationTask3() + ensures forall n: nat :: + TerminatingReduction(ks(n)) == if n % 2 == 0 then K else Apply(K, K); +{ + parallel (n: nat) { + VT3(n); + } +} + +ghost method VT3(n: nat) + ensures TerminatingReduction(ks(n)) == if n % 2 == 0 then K else Apply(K, K); +{ + // Dafny's (way cool) induction tactic kicks in and proves the following + // assertion automatically: + assert forall p :: 2 <= p ==> FindAndStep(ks(p)) == ks(p-2); + // And then Dafny's (cool beyond words) induction tactic for ghost methods kicks + // in to prove the postcondition. (If this got you curious, scope out Leino's + // VMCAI 2012 paper "Automating Induction with an SMT Solver".) +} diff --git a/Test/vstte2012/RingBuffer.dfy b/Test/vstte2012/RingBuffer.dfy new file mode 100644 index 00000000..5262e462 --- /dev/null +++ b/Test/vstte2012/RingBuffer.dfy @@ -0,0 +1,93 @@ +class RingBuffer +{ + // public fields that are used to define the abstraction: + ghost var Contents: seq; // the contents of the ring buffer + ghost var N: nat; // the capacity of the ring buffer + ghost var Repr: set; // the set of objects used in the implementation + + // Valid encodes the consistency of RingBuffer objects (think, invariant). + // An explanation of this idiom is explained in the README file. + function Valid(): bool + reads this, Repr; + { + this in Repr && null !in Repr && + data != null && data in Repr && + data.Length == N && + (N == 0 ==> len == first == 0 && Contents == []) && + (N != 0 ==> len <= N && first < N) && + Contents == if first + len <= N then data[first..first+len] + else data[first..] 
+ data[..first+len-N] + } + + // private implementation: + var data: array; + var first: nat; + var len: nat; + + constructor Create(n: nat) + modifies this; + ensures Valid() && fresh(Repr - {this}); + ensures Contents == [] && N == n; + { + Repr := {this}; + data := new T[n]; + Repr := Repr + {data}; + first, len := 0, 0; + Contents, N := [], n; + } + + method Clear() + requires Valid(); + modifies Repr; + ensures Valid() && fresh(Repr - old(Repr)); + ensures Contents == [] && N == old(N); + { + len := 0; + Contents := []; + } + + method Head() returns (x: T) + requires Valid(); + requires Contents != []; + ensures x == Contents[0]; + { + x := data[first]; + } + + method Push(x: T) + requires Valid(); + requires |Contents| != N; + modifies Repr; + ensures Valid() && fresh(Repr - old(Repr)); + ensures Contents == old(Contents) + [x] && N == old(N); + { + var nextEmpty := if first + len < data.Length + then first + len else first + len - data.Length; + data[nextEmpty] := x; + len := len + 1; + Contents := Contents + [x]; + } + + method Pop() returns (x: T) + requires Valid(); + requires Contents != []; + modifies Repr; + ensures Valid() && fresh(Repr - old(Repr)); + ensures x == old(Contents)[0] && Contents == old(Contents)[1..] && N == old(N); + { + x := data[first]; + first, len := if first + 1 == data.Length then 0 else first + 1, len - 1; + Contents := Contents[1..]; + } +} + +method TestHarness(x: int, y: int, z: int) +{ + var b := new RingBuffer.Create(2); + b.Push(x); + b.Push(y); + var h := b.Pop(); assert h == x; + b.Push(z); + h := b.Pop(); assert h == y; + h := b.Pop(); assert h == z; +} diff --git a/Test/vstte2012/Tree.dfy b/Test/vstte2012/Tree.dfy new file mode 100644 index 00000000..47ed19a4 --- /dev/null +++ b/Test/vstte2012/Tree.dfy @@ -0,0 +1,172 @@ +// The tree datatype +datatype Tree = Leaf | Node(Tree, Tree); + + +// This datatype is used for the result of the build functions. +// These functions either fail or yield a tree. Since we use +// a side-effect free implementation of the helper +// build_rec, it also has to yield the rest of the sequence, +// which still needs to be processed. For function build, +// this part is not used. +datatype Result = Fail | Res(t: Tree, sOut: seq); + + +// Function toList converts a tree to a sequence. +// We use Dafny's built-in value type sequence rather than +// an imperative implementation to simplify verification. +// The argument d is added to each element of the sequence; +// it is analogous to the argument d in build_rec. +// The postconditions state properties that are needed +// in the completeness proof. +function toList(d: int, t: Tree): seq + ensures toList(d, t) != [] && toList(d, t)[0] >= d; + ensures (toList(d, t)[0] == d) == (t == Leaf); + decreases t; +{ + match t + case Leaf => [d] + case Node(l, r) => toList(d+1, l) + toList(d+1, r) +} + + +// Function build_rec is a side-effect free implementation +// of the code given in the problem statement. +// The first postcondition is needed to show that the +// termination measure indeed decreases. +// The second postcondition specifies the soundness +// property; converting the resulting tree back into a +// sequence yields exactly that portion of the input +// sequence that has been consumed. +function method build_rec(d: int, s: seq): Result + ensures build_rec(d, s).Res? ==> + |build_rec(d, s).sOut| < |s| && + build_rec(d, s).sOut == s[|s|-|build_rec(d, s).sOut|..]; + ensures build_rec(d, s).Res? 
==> + toList(d,build_rec(d, s).t) == s[..|s|-|build_rec(d, s).sOut|]; + decreases |s|, (if s==[] then 0 else s[0]-d); +{ + if s==[] || s[0] < d then + Fail + else if s[0] == d then + Res(Leaf, s[1..]) + else + var left := build_rec(d+1, s); + if left.Fail? then Fail else + var right := build_rec(d+1, left.sOut); + if right.Fail? then Fail else + Res(Node(left.t, right.t), right.sOut) +} + + +// Function build is a side-effect free implementation +// of the code given in the problem statement. +// The postcondition specifies the soundness property; +// converting the resulting tree back into a +// sequence yields exactly the input sequence. +// Completeness is proved as a lemma, see below. +function method build(s: seq): Result + ensures build(s).Res? ==> toList(0,build(s).t) == s; +{ + var r := build_rec(0, s); + if r.Res? && r.sOut == [] then r else Fail +} + + +// This ghost methods encodes the main lemma for the +// completeness theorem. If a sequence s starts with a +// valid encoding of a tree t then build_rec yields a +// result (i.e., does not fail) and the rest of the sequence. +// The body of the method proves the lemma by structural +// induction on t. Dafny proves termination (using the +// height of the term t as termination measure), which +// ensures that the induction hypothesis is applied +// correctly (encoded by calls to this ghost method). +ghost method lemma0(t: Tree, d: int, s: seq) + ensures build_rec(d, toList(d, t) + s).Res? && + build_rec(d, toList(d, t) + s).sOut == s; +{ + match(t) { + case Leaf => + assert toList(d, t) == [d]; + case Node(l, r) => + assert toList(d, t) + s == toList(d+1, l) + (toList(d+1, r) + s); + + lemma0(l, d+1, toList(d+1, r) + s); // apply the induction hypothesis + lemma0(r, d+1, s); // apply the induction hypothesis + } +} + + +// This ghost method encodes a lemma that states the +// completeness property. It is proved by applying the +// main lemma (lemma0). In this lemma, the bound variables +// of the completeness theorem are passed as arguments; +// the following two ghost methods replace these arguments +// by quantified variables. +ghost method lemma1(t: Tree, s:seq) + requires s == toList(0, t) + []; + ensures build(s).Res?; +{ + lemma0(t, 0, []); +} + + +// This ghost method encodes a lemma that introduces the +// existential quantifier in the completeness property. +ghost method lemma2(s: seq) + ensures (exists t: Tree :: toList(0,t) == s) ==> build(s).Res?; +{ + parallel(t | toList(0,t) == s) { + lemma1(t, s); + } +} + + +// This ghost method encodes the completeness theorem. +// For each sequence for which there is a corresponding +// tree, function build yields a result different from Fail. +// The body of the method converts the argument of lemma2 +// into a universally quantified variable. +ghost method completeness() + ensures forall s: seq :: ((exists t: Tree :: toList(0,t) == s) ==> build(s).Res?); +{ + parallel(s) { + lemma2(s); + } +} + + +// This method encodes the first test harness +// given in the problem statement. The local +// assertions are required by the verifier to +// unfold the necessary definitions. +method harness0() + ensures build([1,3,3,2]).Res? 
&& + build([1,3,3,2]).t == Node(Leaf, Node(Node(Leaf, Leaf), Leaf)); +{ + assert build_rec(2, [2]) == + Res(Leaf, []); + assert build_rec(2, [3,3,2]) == + Res(Node(Leaf, Leaf), [2]); + assert build_rec(1, [3,3,2]) == + Res(Node(Node(Leaf, Leaf), Leaf), []); + assert build_rec(1, [1,3,3,2]) == + Res(Leaf, [3,3,2]); + assert build_rec(0, [1,3,3,2]) == + Res( + Node(build_rec(1, [1,3,3,2]).t, + build_rec(1, [3,3,2]).t), + []); +} + + +// This method encodes the second test harness +// given in the problem statement. The local +// assertions are required by the verifier to +// unfold the necessary definitions. +method harness1() + ensures build([1,3,2,2]).Fail?; +{ + assert build_rec(3, [2,2]).Fail?; + assert build_rec(1, [3,2,2]).Fail?; +} diff --git a/Test/vstte2012/Two-Way-Sort.dfy b/Test/vstte2012/Two-Way-Sort.dfy new file mode 100644 index 00000000..49ff29b4 --- /dev/null +++ b/Test/vstte2012/Two-Way-Sort.dfy @@ -0,0 +1,58 @@ +// This method is a slight generalization of the +// code provided in the problem statement since it +// is generic in the type of the array elements. +method swap(a: array, i: int, j: int) + requires a != null; + requires 0 <= i < j < a.Length; + modifies a; + ensures a[i] == old(a[j]); + ensures a[j] == old(a[i]); + ensures forall m :: 0 <= m < a.Length && m != i && m != j ==> a[m] == old(a[m]); + ensures multiset(a[..]) == old(multiset(a[..])); +{ + var t := a[i]; + a[i] := a[j]; + a[j] := t; +} + +// This method is a direct translation of the pseudo +// code given in the problem statement. +// The first postcondition expresses that the resulting +// array is sorted, that is, all occurrences of "false" +// come before all occurrences of "true". +// The second postcondition expresses that the post-state +// array is a permutation of the pre-state array. To express +// this, we use Dafny's built-in multisets. The built-in +// function "multiset" takes an array and yields the +// multiset of the array elements. +// Note that Dafny guesses a suitable ranking function +// for the termination proof of the while loop. +// We use the loop guard from the given pseudo-code. However, +// the program also verifies with the stronger guard "i < j" +// (without changing any of the other specifications or +// annotations). +method two_way_sort(a: array) + requires a != null; + modifies a; + ensures forall m,n :: 0 <= m < n < a.Length ==> (!a[m] || a[n]); + ensures multiset(a[..]) == old(multiset(a[..])); +{ + var i := 0; + var j := a.Length - 1; + while (i <= j) + invariant 0 <= i <= j + 1 <= a.Length; + invariant forall m :: 0 <= m < i ==> !a[m]; + invariant forall n :: j < n < a.Length ==> a[n]; + invariant multiset(a[..]) == old(multiset(a[..])); + { + if (!a[i]) { + i := i+1; + } else if (a[j]) { + j := j-1; + } else { + swap(a, i, j); + i := i+1; + j := j-1; + } + } +} diff --git a/Test/vstte2012/runtest.bat b/Test/vstte2012/runtest.bat new file mode 100644 index 00000000..07b5859e --- /dev/null +++ b/Test/vstte2012/runtest.bat @@ -0,0 +1,24 @@ +@echo off +setlocal + +set BOOGIEDIR=..\..\Binaries +set DAFNY_EXE=%BOOGIEDIR%\Dafny.exe +set CSC=c:/Windows/Microsoft.NET/Framework/v4.0.30319/csc.exe + +for %%f in ( + Two-Way-Sort.dfy + Combinators.dfy + RingBuffer.dfy + Tree.dfy + BreadthFirstSearch.dfy + ) do ( + echo. 
+ echo -------------------- %%f -------------------- + + REM The following line will just run the verifier + IF "%COMPILEDAFNY%"=="" %DAFNY_EXE% /compile:0 /dprint:out.dfy.tmp %* %%f + + REM Alternatively, the following lines also produce C# code and compile it + IF NOT "%COMPILEDAFNY%"=="" %DAFNY_EXE% %* %%f + IF NOT "%COMPILEDAFNY%"=="" %CSC% /nologo /debug /t:library /out:out.dll /r:System.Numerics.dll out.cs +) -- cgit v1.2.3 From 73b9d30f3b18425e52ef80453977d8608a1db09b Mon Sep 17 00:00:00 2001 From: Rustan Leino Date: Tue, 15 Nov 2011 18:48:02 -0800 Subject: Boogie (and Dafny, with effects also on SscBoogie): I refactored CommandLineOptions to separate the options that belong to these 3 tools. --- Source/BoogieDriver/BoogieDriver.cs | 9 +- Source/Core/CommandLineOptions.cs | 2720 ++++++++++++++--------------------- Source/Dafny/DafnyMain.cs | 4 +- Source/Dafny/DafnyOptions.cs | 132 ++ Source/Dafny/DafnyPipeline.csproj | 1 + Source/Dafny/Translator.cs | 36 +- Source/DafnyDriver/DafnyDriver.cs | 56 +- Source/Provers/Z3api/ProverLayer.cs | 1 - Source/VCGeneration/VC.cs | 7 +- 9 files changed, 1251 insertions(+), 1715 deletions(-) create mode 100644 Source/Dafny/DafnyOptions.cs diff --git a/Source/BoogieDriver/BoogieDriver.cs b/Source/BoogieDriver/BoogieDriver.cs index 83e6009d..028ff1dd 100644 --- a/Source/BoogieDriver/BoogieDriver.cs +++ b/Source/BoogieDriver/BoogieDriver.cs @@ -35,8 +35,10 @@ namespace Microsoft.Boogie { public static void Main(string[] args) { Contract.Requires(cce.NonNullElements(args)); + CommandLineOptions.Install(new CommandLineOptions()); + CommandLineOptions.Clo.RunningBoogieFromCommandLine = true; - if (CommandLineOptions.Clo.Parse(args) != 1) { + if (!CommandLineOptions.Clo.Parse(args)) { goto END; } if (CommandLineOptions.Clo.Files.Count == 0) { @@ -77,7 +79,6 @@ namespace Microsoft.Boogie { goto END; } } - CommandLineOptions.Clo.RunningBoogieOnSsc = false; ProcessFiles(CommandLineOptions.Clo.Files); END: @@ -223,9 +224,9 @@ namespace Microsoft.Boogie { Contract.Requires(0 <= errors && 0 <= inconclusives && 0 <= timeOuts && 0 <= outOfMemories); Console.WriteLine(); if (CommandLineOptions.Clo.vcVariety == CommandLineOptions.VCVariety.Doomed) { - Console.Write("{0} finished with {1} credible, {2} doomed{3}", CommandLineOptions.Clo.ToolName, verified, errors, errors == 1 ? "" : "s"); + Console.Write("{0} finished with {1} credible, {2} doomed{3}", CommandLineOptions.Clo.DescriptiveToolName, verified, errors, errors == 1 ? "" : "s"); } else { - Console.Write("{0} finished with {1} verified, {2} error{3}", CommandLineOptions.Clo.ToolName, verified, errors, errors == 1 ? "" : "s"); + Console.Write("{0} finished with {1} verified, {2} error{3}", CommandLineOptions.Clo.DescriptiveToolName, verified, errors, errors == 1 ? "" : "s"); } if (inconclusives != 0) { Console.Write(", {0} inconclusive{1}", inconclusives, inconclusives == 1 ? 
"" : "s"); diff --git a/Source/Core/CommandLineOptions.cs b/Source/Core/CommandLineOptions.cs index 7d3fce84..d2bdee62 100644 --- a/Source/Core/CommandLineOptions.cs +++ b/Source/Core/CommandLineOptions.cs @@ -12,103 +12,353 @@ using System.Diagnostics; using System.Diagnostics.Contracts; namespace Microsoft.Boogie { - public class CommandLineOptions { + public class CommandLineOptionEngine + { + public readonly string ToolName; + public readonly string DescriptiveToolName; + [ContractInvariantMethod] + void ObjectInvariant() { + Contract.Invariant(ToolName != null); + Contract.Invariant(DescriptiveToolName != null); + Contract.Invariant(Environment != null); + Contract.Invariant(cce.NonNullElements(Files)); + } + + public string/*!*/ Environment = ""; + public List/*!*/ Files = new List(); + public bool HelpRequested = false; + public bool AttrHelpRequested = false; + + public CommandLineOptionEngine(string toolName, string descriptiveName) { + Contract.Requires(toolName != null); + Contract.Requires(descriptiveName != null); + ToolName = toolName; + DescriptiveToolName = descriptiveName; + } + public static string/*!*/ VersionNumber { get { Contract.Ensures(Contract.Result() != null); return cce.NonNull(cce.NonNull(System.Diagnostics.FileVersionInfo.GetVersionInfo(System.Reflection.Assembly.GetExecutingAssembly().Location)).FileVersion); } } - public const string ToolNameBoogie = "Boogie program verifier"; - public const string ToolNameSpecSharp = "Spec# program verifier"; - public const string ToolNameDafny = "Dafny program verifier"; public static string/*!*/ VersionSuffix { get { Contract.Ensures(Contract.Result() != null); return " version " + VersionNumber + ", Copyright (c) 2003-2011, Microsoft."; } } - public string/*!*/ InputFileExtension { - set { - Contract.Requires(value != null); - //modifies _toolname, _version; - switch (value) { - case ".bpl": - _toolname = ToolNameBoogie; - break; - case ".dfy": - _toolname = ToolNameDafny; - break; - default: - _toolname = ToolNameSpecSharp; - break; + public string/*!*/ Version { + get { + Contract.Ensures(Contract.Result() != null); + return DescriptiveToolName + VersionSuffix; + } + } + + public string/*!*/ FileTimestamp = cce.NonNull(DateTime.Now.ToString("o")).Replace(':', '.'); + public void ExpandFilename(ref string pattern, string logPrefix, string fileTimestamp) { + if (pattern != null) { + pattern = pattern.Replace("@PREFIX@", logPrefix).Replace("@TIME@", fileTimestamp); + string fn = Files.Count == 0 ? "" : Files[Files.Count - 1]; + fn = fn.Replace('/', '-').Replace('\\', '-'); + pattern = pattern.Replace("@FILE@", fn); + } + } + + /// + /// Process the option and modify "ps" accordingly. + /// Return true if the option is one that is recognized. 
+ /// + protected virtual bool ParseOption(string name, CommandLineParseState ps) { + Contract.Requires(name != null); + Contract.Requires(ps != null); + + switch (name) { + case "help": + case "?": + if (ps.ConfirmArgumentCount(0)) { + HelpRequested = true; + } + return true; + case "attrHelp": + if (ps.ConfirmArgumentCount(0)) { + AttrHelpRequested = true; + } + return true; + default: + break; + } + return false; // unrecognized option + } + + protected class CommandLineParseState + { + public string s; + public bool hasColonArgument; + public readonly string[]/*!*/ args; + public int i; + public int nextIndex; + public bool EncounteredErrors; + public readonly string ToolName; + [ContractInvariantMethod] + void ObjectInvariant() { + Contract.Invariant(args != null); + Contract.Invariant(0 <= i && i <= args.Length); + Contract.Invariant(0 <= nextIndex && nextIndex <= args.Length); + } + + + public CommandLineParseState(string[] args, string toolName) { + Contract.Requires(args != null); + Contract.Requires(Contract.ForAll(0, args.Length, i => args[i] != null)); + Contract.Requires(toolName != null); + Contract.Ensures(this.args == args); + this.ToolName = toolName; + this.s = null; // set later by client + this.hasColonArgument = false; // set later by client + this.args = args; + this.i = 0; + this.nextIndex = 0; // set later by client + this.EncounteredErrors = false; + } + + public bool CheckBooleanFlag(string flagName, ref bool flag, bool valueWhenPresent) { + Contract.Requires(flagName != null); + //modifies nextIndex, encounteredErrors, Console.Error.*; + bool flagPresent = false; + + if ((s == "/" + flagName || s == "-" + flagName) && ConfirmArgumentCount(0)) { + flag = valueWhenPresent; + flagPresent = true; + } + return flagPresent; + } + + public bool CheckBooleanFlag(string flagName, ref bool flag) { + Contract.Requires(flagName != null); + //modifies nextIndex, encounteredErrors, Console.Error.*; + return CheckBooleanFlag(flagName, ref flag, true); + } + + /// + /// If there is one argument and it is a non-negative integer, then set "arg" to that number and return "true". + /// Otherwise, emit error message, leave "arg" unchanged, and return "false". + /// + public bool GetNumericArgument(ref int arg) { + //modifies nextIndex, encounteredErrors, Console.Error.*; + if (this.ConfirmArgumentCount(1)) { + try { + Contract.Assume(args[i] != null); + Contract.Assert(args[i] is string); // needed to prove args[i].IsPeerConsistent + int d = Convert.ToInt32(this.args[this.i]); + if (0 <= d) { + arg = d; + return true; + } + } catch (System.FormatException) { + } catch (System.OverflowException) { + } + } else { + return false; + } + Error("Invalid argument \"{0}\" to option {1}", args[this.i], this.s); + return false; + } + + /// + /// If there is one argument and it is a non-negative integer less than "limit", + /// then set "arg" to that number and return "true". + /// Otherwise, emit error message, leave "arg" unchanged, and return "false". + /// + public bool GetNumericArgument(ref int arg, int limit) { + Contract.Requires(this.i < args.Length); + //modifies nextIndex, encounteredErrors, Console.Error.*; + int a = arg; + if (!GetNumericArgument(ref a)) { + return false; + } else if (a < limit) { + arg = a; + return true; + } else { + Error("Invalid argument \"{0}\" to option {1}", args[this.i], this.s); + return false; + } + } + + /// + /// If there is one argument and it is a non-negative real, then set "arg" to that number and return "true". 
+ /// Otherwise, emit an error message, leave "arg" unchanged, and return "false". + /// + public bool GetNumericArgument(ref double arg) { + //modifies nextIndex, encounteredErrors, Console.Error.*; + if (this.ConfirmArgumentCount(1)) { + try { + Contract.Assume(args[i] != null); + Contract.Assert(args[i] is string); // needed to prove args[i].IsPeerConsistent + double d = Convert.ToDouble(this.args[this.i]); + if (0 <= d) { + arg = d; + return true; + } + } catch (System.FormatException) { + } catch (System.OverflowException) { + } + } else { + return false; + } + Error("Invalid argument \"{0}\" to option {1}", args[this.i], this.s); + return false; + } + + public bool ConfirmArgumentCount(int argCount) { + Contract.Requires(0 <= argCount); + //modifies nextIndex, encounteredErrors, Console.Error.*; + Contract.Ensures(Contract.Result() == (!(hasColonArgument && argCount != 1) && !(args.Length < i + argCount))); + if (hasColonArgument && argCount != 1) { + Error("\"{0}\" cannot take a colon argument", s); + nextIndex = args.Length; + return false; + } else if (args.Length < i + argCount) { + Error("\"{0}\" expects {1} argument{2}", s, argCount.ToString(), (string)(argCount == 1 ? "" : "s")); + nextIndex = args.Length; + return false; + } else { + nextIndex = i + argCount; + return true; + } + } + + public void Error(string message, params string[] args) { + Contract.Requires(args != null); + Contract.Requires(message != null); + //modifies encounteredErrors, Console.Error.*; + Console.Error.WriteLine("{0}: Error: {1}", ToolName, String.Format(message, args)); + EncounteredErrors = true; + } + } + + public virtual void Usage() { + Console.WriteLine("{0}: usage: {0} [ option ... ] [ filename ... ]", ToolName); + Console.WriteLine(@" where